2011-02-18 23:12:40 +03:00
|
|
|
import smtplib
|
|
|
|
import sys
|
|
|
|
import traceback
|
2011-02-09 11:10:55 +03:00
|
|
|
|
2016-02-19 21:05:49 +03:00
|
|
|
from django.utils.encoding import force_bytes
|
2013-06-20 01:18:22 +04:00
|
|
|
from email.Utils import formatdate
|
|
|
|
from email.mime.text import MIMEText
|
|
|
|
from time import time
|
|
|
|
from urlparse import parse_qsl
|
2011-02-25 04:44:51 +03:00
|
|
|
|
2016-02-19 20:59:45 +03:00
|
|
|
from services.utils import mypool, settings
|
2014-03-26 23:57:38 +04:00
|
|
|
|
2011-09-01 21:43:40 +04:00
|
|
|
# This has to be imported after the settings so statsd knows where to log to.
|
2012-09-13 00:41:55 +04:00
|
|
|
from django_statsd.clients import statsd
|
2011-02-09 11:10:55 +03:00
|
|
|
|
2013-06-20 01:18:22 +04:00
|
|
|
import commonware.log
|
|
|
|
import MySQLdb as mysql
|
|
|
|
import sqlalchemy.pool as pool
|
|
|
|
|
2011-02-09 11:10:55 +03:00
|
|
|
try:
|
|
|
|
from compare import version_int
|
|
|
|
except ImportError:
|
2015-12-17 09:59:16 +03:00
|
|
|
from olympia.versions.compare import version_int
|
2011-02-09 11:10:55 +03:00
|
|
|
|
2015-12-16 11:46:10 +03:00
|
|
|
from olympia.constants import applications, base
|
|
|
|
|
|
|
|
from utils import (
|
2016-09-29 21:37:44 +03:00
|
|
|
APP_GUIDS, get_mirror, log_configure, PLATFORMS)
|
2011-02-08 08:25:33 +03:00
|
|
|
|
2012-02-10 04:46:00 +04:00
|
|
|
# Go configure the log. (log_configure() comes from the local utils module;
# presumably it installs the logging handlers used by the loggers below --
# it must run before getLogger() is called.)
log_configure()
|
2011-02-08 08:25:33 +03:00
|
|
|
|
|
|
|
good_rdf = """<?xml version="1.0"?>
|
|
|
|
<RDF:RDF xmlns:RDF="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
|
|
|
xmlns:em="http://www.mozilla.org/2004/em-rdf#">
|
|
|
|
<RDF:Description about="urn:mozilla:%(type)s:%(guid)s">
|
|
|
|
<em:updates>
|
|
|
|
<RDF:Seq>
|
|
|
|
<RDF:li resource="urn:mozilla:%(type)s:%(guid)s:%(version)s"/>
|
|
|
|
</RDF:Seq>
|
|
|
|
</em:updates>
|
|
|
|
</RDF:Description>
|
|
|
|
|
|
|
|
<RDF:Description about="urn:mozilla:%(type)s:%(guid)s:%(version)s">
|
|
|
|
<em:version>%(version)s</em:version>
|
|
|
|
<em:targetApplication>
|
|
|
|
<RDF:Description>
|
|
|
|
<em:id>%(appguid)s</em:id>
|
|
|
|
<em:minVersion>%(min)s</em:minVersion>
|
|
|
|
<em:maxVersion>%(max)s</em:maxVersion>
|
|
|
|
<em:updateLink>%(url)s</em:updateLink>
|
|
|
|
%(if_update)s
|
|
|
|
%(if_hash)s
|
|
|
|
</RDF:Description>
|
|
|
|
</em:targetApplication>
|
|
|
|
</RDF:Description>
|
|
|
|
</RDF:RDF>"""
|
|
|
|
|
|
|
|
|
|
|
|
bad_rdf = """<?xml version="1.0"?>
|
|
|
|
<RDF:RDF xmlns:RDF="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
|
|
|
xmlns:em="http://www.mozilla.org/2004/em-rdf#">
|
|
|
|
</RDF:RDF>"""
|
|
|
|
|
|
|
|
|
2011-05-06 02:54:58 +04:00
|
|
|
no_updates_rdf = """<?xml version="1.0"?>
|
|
|
|
<RDF:RDF xmlns:RDF="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
|
|
|
xmlns:em="http://www.mozilla.org/2004/em-rdf#">
|
|
|
|
<RDF:Description about="urn:mozilla:%(type)s:%(guid)s">
|
|
|
|
<em:updates>
|
|
|
|
<RDF:Seq>
|
|
|
|
</RDF:Seq>
|
|
|
|
</em:updates>
|
|
|
|
</RDF:Description>
|
|
|
|
</RDF:RDF>"""
|
|
|
|
|
|
|
|
|
2011-02-22 21:32:22 +03:00
|
|
|
# Module loggers. NOTE(review): timing_log is not referenced anywhere in this
# file's visible code; error_log is used by log_exception() below.
timing_log = commonware.log.getLogger('z.timer')
error_log = commonware.log.getLogger('z.services')
|
2011-02-22 21:32:22 +03:00
|
|
|
|
|
|
|
|
2011-02-08 08:25:33 +03:00
|
|
|
class Update(object):
    """One add-on update-check request against the addons database.

    Lifecycle (driven by application() below): construct with the parsed
    query-string data, then call get_rdf(), which validates the request,
    looks up the best matching version/file and renders one of the three
    module-level RDF templates (good_rdf / no_updates_rdf / bad_rdf).
    """

    def __init__(self, data, compat_mode='strict'):
        """Store a copy of the request data.

        compat_mode is one of 'strict', 'normal' or 'ignore' and controls
        how app-version compatibility is enforced in get_update().
        """
        # conn/cursor are created lazily in is_valid(); unit tests may
        # inject their own cursor before calling it.
        self.conn, self.cursor = None, None
        self.data = data.copy()
        self.data['row'] = {}  # filled in by get_update() on success
        # NOTE(review): this attribute appears unused; the SQL uses
        # self.data['version_int'] instead -- confirm before removing.
        self.version_int = 0
        self.compat_mode = compat_mode

    def is_valid(self):
        """Validate the request and load the matching add-on row.

        Returns False when a required parameter is missing, the appID is
        unknown, or no active, listed, non-deleted add-on matches the
        requested GUID. On success, mutates self.data in place (add-on
        id/status/type/guid, numeric version_int of the app version, and
        the resolved appOS platform id) and returns True.
        """
        # If you're accessing this from unit tests, assign your own cursor
        # before calling is_valid().
        if not self.cursor:
            self.conn = mypool.connect()
            self.cursor = self.conn.cursor()

        data = self.data
        # Version can be blank.
        data['version'] = data.get('version', '')
        for field in ['reqVersion', 'id', 'appID', 'appVersion']:
            if field not in data:
                return False

        # Map the client-supplied application GUID to its numeric id.
        data['app_id'] = APP_GUIDS.get(data['appID'])
        if not data['app_id']:
            return False

        sql = """SELECT id, status, addontype_id, guid FROM addons
                 WHERE guid = %(guid)s AND
                       inactive = 0 AND
                       status != %(STATUS_DELETED)s AND
                       is_listed != 0
                 LIMIT 1;"""
        self.cursor.execute(sql, {'guid': self.data['id'],
                                  'STATUS_DELETED': base.STATUS_DELETED})
        result = self.cursor.fetchone()
        if result is None:
            return False

        data['id'], data['addon_status'], data['type'], data['guid'] = result
        data['version_int'] = version_int(data['appVersion'])

        # Resolve the client's OS string to a platform id via substring
        # match; anything unrecognised becomes None (no platform filter).
        if 'appOS' in data:
            for k, v in PLATFORMS.items():
                if k in data['appOS']:
                    data['appOS'] = v
                    break
            else:
                data['appOS'] = None

        return True

    def get_update(self):
        """Find the best update candidate for this request.

        Builds one large SQL query (parameterised with %(name)s markers
        resolved against self.data) joining versions/files/appversions,
        filtered by add-on, application, platform, file status and the
        requested compat_mode, and takes the newest matching version.
        On a hit, stores the row (plus computed 'url' and 'appguid') in
        self.data['row'] and returns True; otherwise returns False.

        Must be called after is_valid() has populated self.data.
        """
        data = self.data

        # Status constants the SQL below compares against.
        # NOTE(review): STATUS_DISABLED is bound here but never referenced
        # in the SQL -- possibly left over from an earlier query shape.
        data['STATUS_PUBLIC'] = base.STATUS_PUBLIC
        data['STATUS_BETA'] = base.STATUS_BETA
        data['STATUS_DISABLED'] = base.STATUS_DISABLED

        # platform_id = 1 presumably means "all platforms"; the client's
        # specific platform is OR'd in below when known -- TODO confirm
        # against the files table constants.
        sql = ["""
            SELECT
                addons.guid as guid, addons.addontype_id as type,
                addons.inactive as disabled_by_user, appmin.version as min,
                appmax.version as max, files.id as file_id,
                files.status as file_status, files.hash,
                files.filename, versions.id as version_id,
                files.datestatuschanged as datestatuschanged,
                files.strict_compatibility as strict_compat,
                versions.releasenotes, versions.version as version
            FROM versions
            INNER JOIN addons
                ON addons.id = versions.addon_id AND addons.id = %(id)s
            INNER JOIN applications_versions
                ON applications_versions.version_id = versions.id
            INNER JOIN appversions appmin
                ON appmin.id = applications_versions.min
                AND appmin.application_id = %(app_id)s
            INNER JOIN appversions appmax
                ON appmax.id = applications_versions.max
                AND appmax.application_id = %(app_id)s
            INNER JOIN files
                ON files.version_id = versions.id AND (files.platform_id = 1
        """]
        if data.get('appOS'):
            sql.append(' OR files.platform_id = %(appOS)s')

        sql.append("""
            )
            -- Find a reference to the user's current version, if it exists.
            -- These should never be inner joins. We need results even if we
            -- can't find the current version.
            LEFT JOIN versions curver
                ON curver.addon_id = addons.id AND curver.version = %(version)s
            LEFT JOIN files curfile
                ON curfile.version_id = curver.id
            WHERE
                versions.deleted = 0 AND
                -- Note that the WHEN clauses here will evaluate to the same
                -- thing for each row we examine. The JOINs above narrow the
                -- rows matched by the WHERE clause to versions of a specific
                -- add-on, and the ORDER BY and LIMIT 1 clauses below make it
                -- unlikely that we'll be examining a large number of rows,
                -- so this is fairly cheap.
                CASE
                WHEN curfile.status = %(STATUS_BETA)s
                THEN
                    -- User's current version is a known beta version.
                    --
                    -- Serve only beta updates. Serving a full version here
                    -- will forever kick users out of the beta update channel.
                    --
                    -- If the add-on does not have full review, serve no
                    -- updates.

                    addons.status = %(STATUS_PUBLIC)s AND
                    files.status = %(STATUS_BETA)s

                ELSE
                    -- Anything else, including:
                    --
                    -- * Add-on has full review
                    -- * User's current version has full review, regardless
                    --   of add-on status
                    --
                    -- Serve only full-reviewed updates.
                    files.status = %(STATUS_PUBLIC)s
                END
            """)

        # The candidate version must support at least the client's app
        # version on the low end, regardless of compat mode.
        sql.append('AND appmin.version_int <= %(version_int)s ')

        if self.compat_mode == 'ignore':
            pass  # no further SQL modification required.

        elif self.compat_mode == 'normal':
            # When file has strict_compatibility enabled, or file has binary
            # components, default to compatible is disabled.
            sql.append("""AND
                CASE WHEN files.strict_compatibility = 1 OR
                          files.binary_components = 1
                THEN appmax.version_int >= %(version_int)s ELSE 1 END
            """)
            # Filter out versions that don't have the minimum maxVersion
            # requirement to qualify for default-to-compatible.
            d2c_max = applications.D2C_MAX_VERSIONS.get(data['app_id'])
            if d2c_max:
                data['d2c_max_version'] = version_int(d2c_max)
                sql.append("AND appmax.version_int >= %(d2c_max_version)s ")

            # Filter out versions found in compat overrides
            sql.append("""AND
                NOT versions.id IN (
                    SELECT version_id FROM incompatible_versions
                    WHERE app_id=%(app_id)s AND
                        (min_app_version='0' AND
                             max_app_version_int >= %(version_int)s) OR
                        (min_app_version_int <= %(version_int)s AND
                             max_app_version='*') OR
                        (min_app_version_int <= %(version_int)s AND
                             max_app_version_int >= %(version_int)s)) """)

        else:  # Not defined or 'strict'.
            sql.append('AND appmax.version_int >= %(version_int)s ')

        # Special case for bug 1031516: pin the Firefox hotfix add-on to
        # specific hotfix versions depending on the client's app version.
        if data['guid'] == 'firefox-hotfix@mozilla.org':
            app_version = data['version_int']
            hotfix_version = data['version']
            if version_int('10') <= app_version <= version_int('16.0.1'):
                if hotfix_version < '20121019.01':
                    sql.append("AND versions.version = '20121019.01' ")
                elif hotfix_version < '20130826.01':
                    sql.append("AND versions.version = '20130826.01' ")
            elif version_int('16.0.2') <= app_version <= version_int('24.*'):
                if hotfix_version < '20130826.01':
                    sql.append("AND versions.version = '20130826.01' ")

        # Newest matching version wins.
        sql.append('ORDER BY versions.id DESC LIMIT 1;')

        self.cursor.execute(''.join(sql), data)
        result = self.cursor.fetchone()

        if result:
            # Zip the positional result into a dict keyed by the aliases
            # used in the SELECT list above (same order).
            row = dict(zip([
                'guid', 'type', 'disabled_by_user', 'min', 'max',
                'file_id', 'file_status', 'hash', 'filename', 'version_id',
                'datestatuschanged', 'strict_compat', 'releasenotes',
                'version'],
                list(result)))
            row['type'] = base.ADDON_SLUGS_UPDATE[row['type']]
            row['url'] = get_mirror(data['addon_status'],
                                    data['id'], row)
            row['appguid'] = applications.APPS_ALL[data['app_id']].guid
            data['row'] = row
            return True

        return False

    def get_bad_rdf(self):
        """Return the empty RDF served for invalid requests."""
        return bad_rdf

    def get_rdf(self):
        """Run the full check and return the RDF response body.

        Also closes the cursor and (if one was opened here) the pooled
        connection created by is_valid().
        """
        if self.is_valid():
            if self.get_update():
                rdf = self.get_good_rdf()
            else:
                rdf = self.get_no_updates_rdf()
        else:
            rdf = self.get_bad_rdf()
        self.cursor.close()
        if self.conn:
            self.conn.close()
        return rdf

    def get_no_updates_rdf(self):
        """Render no_updates_rdf for this add-on (type slug + guid)."""
        name = base.ADDON_SLUGS_UPDATE[self.data['type']]
        return no_updates_rdf % ({'guid': self.data['guid'], 'type': name})

    def get_good_rdf(self):
        """Render good_rdf from the row found by get_update().

        Fills the optional if_hash / if_update fragments: the update hash
        element when the file has one, and an updateInfoURL pointing at
        the release notes page when the version has release notes. The
        %%APP_LOCALE%% doubles the percent signs so the rendered RDF
        contains a literal %APP_LOCALE% placeholder (presumably replaced
        client-side).
        """
        data = self.data['row']
        data['if_hash'] = ''
        if data['hash']:
            data['if_hash'] = ('<em:updateHash>%s</em:updateHash>' %
                               data['hash'])

        data['if_update'] = ''
        if data['releasenotes']:
            data['if_update'] = ('<em:updateInfoURL>%s%s%s/%%APP_LOCALE%%/'
                                 '</em:updateInfoURL>' %
                                 (settings.SITE_URL, '/versions/updateInfo/',
                                  data['version_id']))

        return good_rdf % data

    def format_date(self, secs):
        """Return an HTTP date string `secs` seconds from now.

        formatdate() yields an RFC 2822 date ending in ' -0000'; the
        [:25] slice drops the zone and ' GMT' is appended instead.
        """
        return '%s GMT' % formatdate(time() + secs)[:25]

    def get_headers(self, length):
        """Response headers: XML content type, 1-hour public cache."""
        return [('Content-Type', 'text/xml'),
                ('Cache-Control', 'public, max-age=3600'),
                ('Last-Modified', self.format_date(0)),
                ('Expires', self.format_date(3600)),
                ('Content-Length', str(length))]
|
2011-02-09 11:10:55 +03:00
|
|
|
|
|
|
|
|
2011-02-18 23:12:40 +03:00
|
|
|
def mail_exception(data):
    """Email the current exception's traceback, plus the request data that
    triggered it, to the addresses in settings.ADMINS.

    Does nothing unless the real SMTP backend is configured, so dev setups
    with a console/dummy email backend stay quiet. Must be called from
    inside an ``except`` block (it reads sys.exc_info()).
    """
    if settings.EMAIL_BACKEND != 'django.core.mail.backends.smtp.EmailBackend':
        return

    msg = MIMEText('%s\n\n%s' % (
        '\n'.join(traceback.format_exception(*sys.exc_info())), data))
    msg['Subject'] = '[Update] ERROR at /services/update'
    # sendmail() needs a *list* of recipient addresses: passing it a single
    # comma-joined string makes smtplib treat the whole thing as one
    # (invalid) recipient. The comma-joined form is only for the header.
    recipients = [admin[1] for admin in settings.ADMINS]
    msg['To'] = ','.join(recipients)
    msg['From'] = settings.DEFAULT_FROM_EMAIL

    conn = smtplib.SMTP(getattr(settings, 'EMAIL_HOST', 'localhost'),
                        getattr(settings, 'EMAIL_PORT', '25'))
    try:
        conn.sendmail(settings.DEFAULT_FROM_EMAIL, recipients,
                      msg.as_string())
    finally:
        # Always release the SMTP connection, even if sendmail raises.
        conn.close()
|
|
|
|
|
|
|
|
|
2011-05-12 03:07:02 +04:00
|
|
|
def log_exception(data):
    """Log the current exception's type and value together with the query
    data that triggered it.

    Must be called from inside an ``except`` block (reads sys.exc_info()).
    """
    # Unpack only what we log. The original code bound the third element to
    # a local named `traceback`, shadowing the module-level import.
    exc_type, exc_value = sys.exc_info()[:2]
    error_log.error(u'Type: %s, %s. Query: %s' % (exc_type, exc_value, data))
|
|
|
|
|
|
|
|
|
2011-02-08 08:25:33 +03:00
|
|
|
def application(environ, start_response):
    """WSGI entry point for the update-check service.

    Parses the query string, runs the Update lookup and responds with an
    RDF document (always 200 OK on success). Any exception is logged with
    the query data and re-raised so the WSGI server returns a 500. The
    whole request is timed under the 'services.update' statsd key.
    """
    status = '200 OK'
    with statsd.timer('services.update'):
        data = dict(parse_qsl(environ['QUERY_STRING']))
        # compatMode selects how strictly app-version compatibility is
        # enforced ('strict', 'normal' or 'ignore'); default 'strict'.
        compat_mode = data.pop('compatMode', 'strict')
        try:
            update = Update(data, compat_mode)
            output = force_bytes(update.get_rdf())
            start_response(status, update.get_headers(len(output)))
        # NOTE(review): bare except also catches KeyboardInterrupt and
        # SystemExit; it does re-raise, but `except Exception:` would be
        # cleaner.
        except:
            # mail_exception(data) is intentionally disabled; failures are
            # logged instead of emailed.
            #mail_exception(data)
            log_exception(data)
            raise
    return [output]
|