This commit is contained in:
Clinton.Collins 2019-06-05 14:41:16 -07:00
Родитель 34c6fd61ea
Коммит e9096c8e47
8 изменённых файлов: 92 добавлений и 47 удалений

Просмотреть файл

@ -22,7 +22,6 @@ ADD docker_scripts/pip.conf /etc/pip.conf
ENV PYTHONPATH=/app:${PYTHONPATH}
ENV PATH=/app:${PATH}
ENV FLASK_APP=seattle_flu_incidence_mapper.app:app
WORKDIR /app
ENV MODEL_STORE=/model_store

Просмотреть файл

@ -1,10 +1,13 @@
#!/bin/bash
if [ "$DEBUG" = "1" ]; then
export FLASK_DEV=1
export FLASK_ENV=development
echo "Running Flask Debug Server"
export PYTHONPATH=/app
cd /app
python seattle_flu/app.py manage run --host=0.0.0.0
flask run --host=0.0.0.0
else
echo "Launching Nginx"
cd /app

Просмотреть файл

@ -2,6 +2,23 @@ Usage
=====
Production Environment File:
============================
#. The following command will produce a production environment file for docker-compose containing a random
password for postgres and a random secret for JWT
`export DB_PASS=$(</dev/urandom tr -dc 'A-Za-z0-9!"#%&'\''()*+,-./:;<=>?@[\]^_`{|}~' | head -c 32 ); printf "POSTGRES_PASS=%s\nJWT_SECRET=%s\nSQLALCHEMY_URI=postgres+psycopg2://seattle_flu:%s@db" $DB_PASS $(</dev/urandom tr -dc 'A-Za-z0-9!"#%&'\''()*+,-./:;<=>?@[\]^_`{|}~' | head -c 256 ) $DB_PASS > production.env`
# if you have a ramdisk, add that as the job_host_path
printf "MODEL_HOST_PATH=/model_store\nWORKER_JOB_HOST_PATH=/mnt/ramdisk\nMODEL_JOB_PATH=/jobs" >> production.env
#. Run `docker-compose -f docker-compose.production.yml run service flask db upgrade`
#. Create users. Change user1, user2, etc below to list of users. Be sure to save the output
```
for user in user1 user2
do
docker-compose -f docker-compose.production.yml run service flask generate-token $user
done
```
#.
Migrations
==========

Просмотреть файл

@ -33,7 +33,6 @@ connex_app = connexion.App("seattle_flu_incidence_mapper.config", specification_
# Get the underlying Flask app instance
app = connex_app.app
app.config['WORKER_IMAGE'] = os.environ.get('WORKER_IMAGE', 'idm-docker-production.packages.idmod.org/sfim-worker:latest')
app.config['MODEL_STORE'] = os.environ.get('MODEL_STORE', os.path.abspath(os.path.join(os.getcwd(), "../../test_model_store")))
app.config['MODEL_HOST_PATH'] = os.environ.get('MODEL_HOST_PATH', os.path.abspath(os.path.join(os.getcwd(), "../../test_model_store")))
@ -47,6 +46,7 @@ app.config['JWT_ALGORITHM'] = os.environ.get('JWT_ALGORITHM', 'HS256')
db = setup_db(basedir, app)
migrate = Migrate(app, db)
# DO NOT MOVE this line. The order matters here
# we need to init our db before loading our models
from seattle_flu_incidence_mapper.models import *

Просмотреть файл

@ -57,41 +57,49 @@ def create():
:return: 201 on success, 406 on pathogen_model exists
"""
rds_key = None
model_key = None
if 'rds' in request.files:
rds_key = hashlib.md5(request.files['rds'].read()).hexdigest()
if 'model' in request.files:
model_key = hashlib.md5(request.files['model'].read()).hexdigest()
model_id = get_model_id(request.form['query_str'])
#build our pathogenmodel object first
model = dict(id=model_id,
name=request.form['name'],
query_str=request.form['query_str'],
rds_key=rds_key,
model_type=request.form['model_type'],
model_key=model_key)
schema = GenericModelSchema()
new_model = schema.load(model, session=db.session).data
# Add the pathogen_model to the database
db.session.add(new_model)
# if the item comes in as a single, let's [
if type(request.form['name']) is str:
new_model, schema = insert_one_model(form=request.form, files=request.files)
else:
for i in range(len(request.form['name'])):
new_model, schema = insert_one_model(form={k: request.form[k][i] for k in request.keys()},
files={k: request.form[k][i] for k in request.files.keys()})
db.session.commit()
# save the files to our config directory
save_model_file(request.files['model'], f'{new_model.id}.csv')
if 'rds' in request.files:
save_model_file(request.files['model'], f'{rds_key}.RDS')
# Serialize and return the newly created pathogen_model in the response
data = schema.dump(new_model).data
return data, 201
def insert_one_model(form, files):
    """Create and stage a single pathogen model from one form/files mapping.

    This helper lets ``create()`` handle both single and batched uploads by
    passing per-item ``form``/``files`` mappings instead of reading the global
    Flask ``request`` directly.

    :param form: mapping with string values for ``name``, ``query_str`` and
                 ``model_type``.
    :param files: mapping of uploaded file objects; must contain ``model``
                  and may contain ``rds``.
    :return: tuple ``(new_model, schema)`` — the staged (not yet committed)
             model instance and the schema used to load it. The caller is
             responsible for ``db.session.commit()``.
    """
    rds_key = None
    model_key = None
    # Content-address the uploads with an MD5 of their bytes.
    # BUG FIX: check the `files` argument, not the global `request.files`,
    # otherwise batched uploads inspect the wrong mapping.
    if 'rds' in files:
        rds_key = hashlib.md5(files['rds'].read()).hexdigest()
    if 'model' in files:
        model_key = hashlib.md5(files['model'].read()).hexdigest()
    model_id = get_model_id(form['query_str'])
    # build our pathogenmodel object first
    model = dict(id=model_id,
                 name=form['name'],
                 query_str=form['query_str'],
                 rds_key=rds_key,
                 model_type=form['model_type'],
                 model_key=model_key)
    schema = GenericModelSchema()
    new_model = schema.load(model, session=db.session).data
    # Add the pathogen_model to the database (commit happens in the caller)
    db.session.add(new_model)
    # save the files to our config directory
    save_model_file(files['model'], f'{new_model.id}.csv')
    if 'rds' in files:
        # BUG FIX: persist the RDS upload itself — the original wrote the
        # model file a second time under the .RDS name.
        save_model_file(files['rds'], f'{rds_key}.RDS')
    return new_model, schema
def update(pathogen_model_id, pathogen_model):
"""
This function updates an existing pathogen_model in the pathogen_models structure

Просмотреть файл

@ -117,16 +117,24 @@ paths:
- query_str
properties:
name:
type: string
type: array
items:
type: string
description: Name of pathogen model
query_str:
type: string
type: array
items:
type: string
description: Query of pathogen model
model_type:
type: string
type: array
items:
type: string
description: Type of model
model:
type: string
type: array
items:
type: string
description: Model File.
format: binary
required: true

Просмотреть файл

@ -23,7 +23,7 @@ class TestUploadModel(BaseApiTest):
"5MSwiZXhwIjoxNTkxMTQ0MzkxLCJzdWIiOiJ0ZXN0cyJ9.OL0NyX3iPm2zByCudPBB7yu0v1CO2wblsgW4BcgX7UY")
test_data = {
"name": 'test_model',
"query_str": 'test',
"query_str": '{"observed":"x", "model_type": "smooth"}',
"model": read_file_and_format_for_upload('{}.csv'.format(test_filehash)),
"model_type": "smooth",
"modelRDS": read_file_and_format_for_upload('{}.csv'.format(test_filehash)),

Просмотреть файл

@ -8,9 +8,12 @@ logger = getLogger()
def upload_model(model, api_url, models_path, api_key, continue_on_error=False):
headers = {'X-Auth': api_key}
headers = dict(Authorization=f'Bearer {api_key}')
model_path = os.path.join(models_path, f"{model['filename']}.csv")
rds_path = os.path.join(models_path, os.path.basename(model['rds']))
if len(model['rds']) > 0:
rds_path = os.path.join(models_path, os.path.basename(model['rds']))
else:
rds_path = None
model_data = {
"id": model['filename'],
"name": model['name'],
@ -19,17 +22,24 @@ def upload_model(model, api_url, models_path, api_key, continue_on_error=False):
"created": model['created'],
}
files = {
'model': (os.path.basename(model_path), open(model_path, 'rb'), 'text/plain'),
'rds': (os.path.basename(rds_path), open(rds_path, 'rb'), 'application/octet-stream')
}
try:
files = {
'model': (os.path.basename(model_path), open(model_path, 'rb'), 'text/plain')
}
if rds_path and os.path.exists(rds_path):
files['rds'] = (os.path.basename(rds_path), open(rds_path, 'rb'), 'application/octet-stream')
r = requests.post(api_url, data=model_data, headers=headers, files=files)
if r.status_code != 201 and not continue_on_error:
logger.error(f"Failed to upload: {model['filename']}")
raise Exception(f"upload failed of {model['filename']}: {r.status_code}: {r.content}")
else:
logger.warning(f"Failed to upload: {model['filename']}: {r.status_code}: {r.content}")
r = requests.post(api_url, data=model_data, headers=headers, files=files)
if r.status_code != 201 and not continue_on_error:
logger.error(f"Failed to upload: {model['filename']}")
raise Exception(f"upload failed of {model['filename']}: {r.status_code}: {r.content}")
elif r.status_code != 201:
logger.warning(f"Failed to upload: {model['filename']}: {r.status_code}: {r.content}")
except FileNotFoundError as e:
if not continue_on_error:
raise e
else:
logger.warning(f"Could not find file from modelDB.tsv for {model['name']}")
def get_models(filename):