зеркало из https://github.com/mozilla/sugardough.git
Replace fabricate with a script that auto-regenerates test project.
This commit is contained in:
Родитель
26fd93d821
Коммит
8bc84925a2
59
README.md
59
README.md
|
@ -26,12 +26,14 @@ Create a sugardough project
|
|||
|
||||
1. Get cookiecutter:
|
||||
|
||||
$ pip install cookiecutter
|
||||
|
||||
```sh
|
||||
$ pip install cookiecutter
|
||||
```
|
||||
2. Run cookiecutter with sugardough template
|
||||
|
||||
$ cookiecutter https://github.com/mozilla/sugardough
|
||||
|
||||
```sh
|
||||
$ cookiecutter https://github.com/mozilla/sugardough
|
||||
```
|
||||
3. Done!
|
||||
|
||||
|
||||
|
@ -40,18 +42,47 @@ Cooking sugardough
|
|||
|
||||
To contribute to sugardough development:
|
||||
|
||||
1. Clone this repository
|
||||
2. Make your changes in [sugardough](https://github.com/mozilla/sugardough/tree/master/sugardough) directory
|
||||
3. Update [cookiecutter.json](https://github.com/mozilla/sugardough/blob/master/cookiecutter.json) with new variables if needed.
|
||||
4. Delete existing template directory:
|
||||
`$ rm -rf "{{ cookiecutter.project_name }}"`
|
||||
5. Run `./bin/fabricate-cookiecutter.py`
|
||||
6. Git commit changes. Note both "sugardough" and "{{ cookiecutter.project_name }}" directories must be committed.
|
||||
7. Pull request!
|
||||
1. Clone this repository
|
||||
2. Create a [virtualenv](https://virtualenv.pypa.io/en/latest/).
|
||||
3. Install development requirements using pip:
|
||||
|
||||
Alternativelly you can use the pre-commit git hook to do steps 4 and 5 automagically every time you commit. Just link `./git-hooks/post-commit` to `./.git/hooks/post-commit`:
|
||||
```sh
|
||||
$ pip install -r requirements.txt
|
||||
```
|
||||
4. Update the [template directory], and [cookiecutter.json] as well with new
|
||||
variables if needed.
|
||||
5. Run the regeneration script that auto-creates a test site:
|
||||
|
||||
`$ ln -s ../../git-hooks/post-commit .git/hooks/post-commit`
|
||||
```sh
|
||||
$ ./bin/regenerate.py
|
||||
```
|
||||
6. Launch the test site to see your changes:
|
||||
|
||||
```sh
|
||||
$ cd test_project/sugardough
|
||||
$ fig up
|
||||
```
|
||||
6. Git commit changes.
|
||||
7. Pull request!
|
||||
|
||||
[template directory]: https://github.com/mozilla/sugardough/tree/master/%7B%7B%20cookiecutter.project_name%20%7D%7D
|
||||
[cookiecutter.json]: https://github.com/mozilla/sugardough/blob/master/cookiecutter.json
|
||||
|
||||
The `regenerate.py` command can also watch for changes and auto-regenerate the
|
||||
test project:
|
||||
|
||||
```sh
|
||||
$ ./bin/regenerate.py --watch
|
||||
```
|
||||
|
||||
If you want the test project to use a different value for a variable than the
|
||||
default defined in `cookiecutter.json`, add the value to your
|
||||
`~/.cookiecutterrc` file:
|
||||
|
||||
```
|
||||
default_context:
|
||||
project_name: "Foo Bar"
|
||||
```
|
||||
|
||||
|
||||
Opinions
|
||||
|
|
|
@ -1,50 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
import fileinput
|
||||
import shutil
|
||||
import os
|
||||
import json
|
||||
import subprocess
|
||||
|
||||
BASEDIR = os.path.dirname(os.path.dirname(__file__))
|
||||
DOUGHDIR = os.path.join(BASEDIR, 'sugardough')
|
||||
DOUGHDIR_TEMP = os.path.join(BASEDIR, 'sugardough-temp')
|
||||
|
||||
|
||||
def global_replace(FROM, TO, dry_run=False):
|
||||
for dirpath, dirnames, filenames in os.walk(DOUGHDIR_TEMP, topdown=False):
|
||||
for filename in filenames:
|
||||
full_path = os.path.join(dirpath, filename)
|
||||
new_filename = filename.replace(FROM, TO)
|
||||
new_full_path = os.path.join(dirpath, new_filename)
|
||||
shutil.move(full_path, new_full_path)
|
||||
|
||||
for dirname in dirnames:
|
||||
full_path = os.path.join(dirpath, dirname)
|
||||
new_dirname = dirname.replace(FROM, TO)
|
||||
new_full_path = os.path.join(dirpath, new_dirname)
|
||||
shutil.move(full_path, new_full_path)
|
||||
|
||||
files = []
|
||||
for dirpath, dirnames, filenames in os.walk(DOUGHDIR_TEMP, topdown=False):
|
||||
for filename in filenames:
|
||||
files.append(os.path.join(dirpath, filename))
|
||||
|
||||
fi = fileinput.input(files, inplace=1)
|
||||
for lines in fi:
|
||||
print lines.replace(FROM, TO),
|
||||
|
||||
subprocess.call(
|
||||
'git archive --format=tar --prefix=sugardough-temp/ HEAD:sugardough | tar xf -',
|
||||
shell=True
|
||||
)
|
||||
|
||||
with open(os.path.join(BASEDIR, 'cookiecutter.json')) as fp:
|
||||
cookiecutter = json.load(fp)
|
||||
|
||||
for key, value in sorted(cookiecutter.items(), key=lambda x: len(x[1]), reverse=True):
|
||||
global_replace(value, '{{ cookiecutter.%s }}' % key)
|
||||
|
||||
new_dirname = os.path.basename(DOUGHDIR_TEMP).replace('-temp', '')
|
||||
new_dirname = new_dirname.replace('sugardough', '{{ cookiecutter.project_name }}')
|
||||
new_full_path = os.path.join(BASEDIR, new_dirname)
|
||||
shutil.move(DOUGHDIR_TEMP, new_full_path)
|
|
@ -0,0 +1,90 @@
|
|||
#!/usr/bin/env python
|
||||
"""Script for regenerating a test project from the sugardough template.
|
||||
|
||||
Usage:
|
||||
regenerate.py [--watch]
|
||||
regenerate.py (-h | --help)
|
||||
|
||||
Options:
|
||||
-h --help Show this screen.
|
||||
--watch Watch template directory for changes and regenerate
|
||||
automatically.
|
||||
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
import time
|
||||
|
||||
from cookiecutter.main import cookiecutter
|
||||
from docopt import docopt
|
||||
from watchdog.events import PatternMatchingEventHandler
|
||||
from watchdog.observers import Observer
|
||||
|
||||
|
||||
BASEDIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
|
||||
TESTDIR = os.path.join(BASEDIR, 'test_project')
|
||||
DOUGHDIR = os.path.join(TESTDIR, 'sugardough')
|
||||
TEMPLATEDIR = os.path.join(BASEDIR, '{{ cookiecutter.project_name }}')
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def working_directory(path):
|
||||
"""
|
||||
A context manager which changes the working directory to the given
|
||||
path, and then changes it back to its previous value on exit.
|
||||
|
||||
Original by Greg Warner:
|
||||
http://code.activestate.com/recipes/576620-changedirectory-context-manager/#c3
|
||||
"""
|
||||
prev_cwd = os.getcwd()
|
||||
os.chdir(path)
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
os.chdir(prev_cwd)
|
||||
|
||||
|
||||
def regenerate():
|
||||
with working_directory(TESTDIR):
|
||||
print('Regenerating test directory...', end='')
|
||||
cookiecutter(BASEDIR, no_input=True)
|
||||
print('Done')
|
||||
|
||||
|
||||
class RegenerateSugardoughHandler(PatternMatchingEventHandler):
|
||||
"""
|
||||
Regenerate the sugardough test project whenever anything changes.
|
||||
"""
|
||||
def on_any_event(self, event):
|
||||
regenerate()
|
||||
|
||||
|
||||
def main(watch):
|
||||
# Regenerate at least once.
|
||||
regenerate()
|
||||
|
||||
if watch:
|
||||
observer = Observer()
|
||||
|
||||
# Observe both the template directory and cookiecutter.json.
|
||||
observer.schedule(RegenerateSugardoughHandler(), TEMPLATEDIR, recursive=True)
|
||||
cookiecutter_json_handler = RegenerateSugardoughHandler(patterns=[
|
||||
os.path.join(BASEDIR, 'cookiecutter.json')
|
||||
])
|
||||
observer.schedule(cookiecutter_json_handler, BASEDIR)
|
||||
|
||||
print('Watching for changes...')
|
||||
observer.start()
|
||||
try:
|
||||
while True:
|
||||
time.sleep(1)
|
||||
except KeyboardInterrupt:
|
||||
observer.stop()
|
||||
observer.join()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
arguments = docopt(__doc__)
|
||||
main(watch=arguments['--watch'])
|
|
@ -2,7 +2,6 @@
|
|||
|
||||
from uuid import uuid4
|
||||
|
||||
print('Generating development .env file')
|
||||
|
||||
variables = {
|
||||
'DEBUG': 'True',
|
||||
|
|
|
@ -0,0 +1,4 @@
|
|||
cookiecutter
|
||||
watchdog
|
||||
docopt
|
||||
tox
|
|
@ -1,5 +0,0 @@
|
|||
[run]
|
||||
source = sugardough
|
||||
|
||||
[report]
|
||||
omit = */migrations/*
|
|
@ -1 +0,0 @@
|
|||
.git
|
|
@ -1,7 +0,0 @@
|
|||
.env
|
||||
*.pyc
|
||||
.DS_Store
|
||||
docs/_build
|
||||
.tox/
|
||||
MANIFEST
|
||||
.coverage
|
|
@ -1,17 +0,0 @@
|
|||
language: python
|
||||
python:
|
||||
- "2.7"
|
||||
addons:
|
||||
postgresql: "9.3"
|
||||
before_script:
|
||||
- createdb sugardough_db
|
||||
install:
|
||||
- pip install tox coveralls
|
||||
env:
|
||||
- TOX_ENV=flake8
|
||||
- TOX_ENV=docs
|
||||
- TOX_ENV=tests
|
||||
script:
|
||||
- tox -e $TOX_ENV
|
||||
after_success:
|
||||
- coveralls
|
|
@ -1,20 +0,0 @@
|
|||
FROM debian:wheezy
|
||||
RUN apt-get update && \
|
||||
apt-get install -y --no-install-recommends build-essential python python-pip python-dev libpq-dev postgresql-client gettext
|
||||
|
||||
# Using PIL or Pillow? You probably want to uncomment next line
|
||||
# RUN apt-get update && apt-get install -y --no-install-recommends libjpeg8-dev
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# First copy requirements.txt and peep so we can take advantage of
|
||||
# docker caching.
|
||||
COPY requirements.txt /app/requirements.txt
|
||||
COPY ./bin/peep.py /app/bin/peep.py
|
||||
RUN ./bin/peep.py install -r requirements.txt
|
||||
|
||||
COPY . /app
|
||||
|
||||
EXPOSE 80
|
||||
|
||||
CMD ["./bin/run-docker.sh"]
|
|
@ -1,373 +0,0 @@
|
|||
Mozilla Public License Version 2.0
|
||||
==================================
|
||||
|
||||
1. Definitions
|
||||
--------------
|
||||
|
||||
1.1. "Contributor"
|
||||
means each individual or legal entity that creates, contributes to
|
||||
the creation of, or owns Covered Software.
|
||||
|
||||
1.2. "Contributor Version"
|
||||
means the combination of the Contributions of others (if any) used
|
||||
by a Contributor and that particular Contributor's Contribution.
|
||||
|
||||
1.3. "Contribution"
|
||||
means Covered Software of a particular Contributor.
|
||||
|
||||
1.4. "Covered Software"
|
||||
means Source Code Form to which the initial Contributor has attached
|
||||
the notice in Exhibit A, the Executable Form of such Source Code
|
||||
Form, and Modifications of such Source Code Form, in each case
|
||||
including portions thereof.
|
||||
|
||||
1.5. "Incompatible With Secondary Licenses"
|
||||
means
|
||||
|
||||
(a) that the initial Contributor has attached the notice described
|
||||
in Exhibit B to the Covered Software; or
|
||||
|
||||
(b) that the Covered Software was made available under the terms of
|
||||
version 1.1 or earlier of the License, but not also under the
|
||||
terms of a Secondary License.
|
||||
|
||||
1.6. "Executable Form"
|
||||
means any form of the work other than Source Code Form.
|
||||
|
||||
1.7. "Larger Work"
|
||||
means a work that combines Covered Software with other material, in
|
||||
a separate file or files, that is not Covered Software.
|
||||
|
||||
1.8. "License"
|
||||
means this document.
|
||||
|
||||
1.9. "Licensable"
|
||||
means having the right to grant, to the maximum extent possible,
|
||||
whether at the time of the initial grant or subsequently, any and
|
||||
all of the rights conveyed by this License.
|
||||
|
||||
1.10. "Modifications"
|
||||
means any of the following:
|
||||
|
||||
(a) any file in Source Code Form that results from an addition to,
|
||||
deletion from, or modification of the contents of Covered
|
||||
Software; or
|
||||
|
||||
(b) any new file in Source Code Form that contains any Covered
|
||||
Software.
|
||||
|
||||
1.11. "Patent Claims" of a Contributor
|
||||
means any patent claim(s), including without limitation, method,
|
||||
process, and apparatus claims, in any patent Licensable by such
|
||||
Contributor that would be infringed, but for the grant of the
|
||||
License, by the making, using, selling, offering for sale, having
|
||||
made, import, or transfer of either its Contributions or its
|
||||
Contributor Version.
|
||||
|
||||
1.12. "Secondary License"
|
||||
means either the GNU General Public License, Version 2.0, the GNU
|
||||
Lesser General Public License, Version 2.1, the GNU Affero General
|
||||
Public License, Version 3.0, or any later versions of those
|
||||
licenses.
|
||||
|
||||
1.13. "Source Code Form"
|
||||
means the form of the work preferred for making modifications.
|
||||
|
||||
1.14. "You" (or "Your")
|
||||
means an individual or a legal entity exercising rights under this
|
||||
License. For legal entities, "You" includes any entity that
|
||||
controls, is controlled by, or is under common control with You. For
|
||||
purposes of this definition, "control" means (a) the power, direct
|
||||
or indirect, to cause the direction or management of such entity,
|
||||
whether by contract or otherwise, or (b) ownership of more than
|
||||
fifty percent (50%) of the outstanding shares or beneficial
|
||||
ownership of such entity.
|
||||
|
||||
2. License Grants and Conditions
|
||||
--------------------------------
|
||||
|
||||
2.1. Grants
|
||||
|
||||
Each Contributor hereby grants You a world-wide, royalty-free,
|
||||
non-exclusive license:
|
||||
|
||||
(a) under intellectual property rights (other than patent or trademark)
|
||||
Licensable by such Contributor to use, reproduce, make available,
|
||||
modify, display, perform, distribute, and otherwise exploit its
|
||||
Contributions, either on an unmodified basis, with Modifications, or
|
||||
as part of a Larger Work; and
|
||||
|
||||
(b) under Patent Claims of such Contributor to make, use, sell, offer
|
||||
for sale, have made, import, and otherwise transfer either its
|
||||
Contributions or its Contributor Version.
|
||||
|
||||
2.2. Effective Date
|
||||
|
||||
The licenses granted in Section 2.1 with respect to any Contribution
|
||||
become effective for each Contribution on the date the Contributor first
|
||||
distributes such Contribution.
|
||||
|
||||
2.3. Limitations on Grant Scope
|
||||
|
||||
The licenses granted in this Section 2 are the only rights granted under
|
||||
this License. No additional rights or licenses will be implied from the
|
||||
distribution or licensing of Covered Software under this License.
|
||||
Notwithstanding Section 2.1(b) above, no patent license is granted by a
|
||||
Contributor:
|
||||
|
||||
(a) for any code that a Contributor has removed from Covered Software;
|
||||
or
|
||||
|
||||
(b) for infringements caused by: (i) Your and any other third party's
|
||||
modifications of Covered Software, or (ii) the combination of its
|
||||
Contributions with other software (except as part of its Contributor
|
||||
Version); or
|
||||
|
||||
(c) under Patent Claims infringed by Covered Software in the absence of
|
||||
its Contributions.
|
||||
|
||||
This License does not grant any rights in the trademarks, service marks,
|
||||
or logos of any Contributor (except as may be necessary to comply with
|
||||
the notice requirements in Section 3.4).
|
||||
|
||||
2.4. Subsequent Licenses
|
||||
|
||||
No Contributor makes additional grants as a result of Your choice to
|
||||
distribute the Covered Software under a subsequent version of this
|
||||
License (see Section 10.2) or under the terms of a Secondary License (if
|
||||
permitted under the terms of Section 3.3).
|
||||
|
||||
2.5. Representation
|
||||
|
||||
Each Contributor represents that the Contributor believes its
|
||||
Contributions are its original creation(s) or it has sufficient rights
|
||||
to grant the rights to its Contributions conveyed by this License.
|
||||
|
||||
2.6. Fair Use
|
||||
|
||||
This License is not intended to limit any rights You have under
|
||||
applicable copyright doctrines of fair use, fair dealing, or other
|
||||
equivalents.
|
||||
|
||||
2.7. Conditions
|
||||
|
||||
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
|
||||
in Section 2.1.
|
||||
|
||||
3. Responsibilities
|
||||
-------------------
|
||||
|
||||
3.1. Distribution of Source Form
|
||||
|
||||
All distribution of Covered Software in Source Code Form, including any
|
||||
Modifications that You create or to which You contribute, must be under
|
||||
the terms of this License. You must inform recipients that the Source
|
||||
Code Form of the Covered Software is governed by the terms of this
|
||||
License, and how they can obtain a copy of this License. You may not
|
||||
attempt to alter or restrict the recipients' rights in the Source Code
|
||||
Form.
|
||||
|
||||
3.2. Distribution of Executable Form
|
||||
|
||||
If You distribute Covered Software in Executable Form then:
|
||||
|
||||
(a) such Covered Software must also be made available in Source Code
|
||||
Form, as described in Section 3.1, and You must inform recipients of
|
||||
the Executable Form how they can obtain a copy of such Source Code
|
||||
Form by reasonable means in a timely manner, at a charge no more
|
||||
than the cost of distribution to the recipient; and
|
||||
|
||||
(b) You may distribute such Executable Form under the terms of this
|
||||
License, or sublicense it under different terms, provided that the
|
||||
license for the Executable Form does not attempt to limit or alter
|
||||
the recipients' rights in the Source Code Form under this License.
|
||||
|
||||
3.3. Distribution of a Larger Work
|
||||
|
||||
You may create and distribute a Larger Work under terms of Your choice,
|
||||
provided that You also comply with the requirements of this License for
|
||||
the Covered Software. If the Larger Work is a combination of Covered
|
||||
Software with a work governed by one or more Secondary Licenses, and the
|
||||
Covered Software is not Incompatible With Secondary Licenses, this
|
||||
License permits You to additionally distribute such Covered Software
|
||||
under the terms of such Secondary License(s), so that the recipient of
|
||||
the Larger Work may, at their option, further distribute the Covered
|
||||
Software under the terms of either this License or such Secondary
|
||||
License(s).
|
||||
|
||||
3.4. Notices
|
||||
|
||||
You may not remove or alter the substance of any license notices
|
||||
(including copyright notices, patent notices, disclaimers of warranty,
|
||||
or limitations of liability) contained within the Source Code Form of
|
||||
the Covered Software, except that You may alter any license notices to
|
||||
the extent required to remedy known factual inaccuracies.
|
||||
|
||||
3.5. Application of Additional Terms
|
||||
|
||||
You may choose to offer, and to charge a fee for, warranty, support,
|
||||
indemnity or liability obligations to one or more recipients of Covered
|
||||
Software. However, You may do so only on Your own behalf, and not on
|
||||
behalf of any Contributor. You must make it absolutely clear that any
|
||||
such warranty, support, indemnity, or liability obligation is offered by
|
||||
You alone, and You hereby agree to indemnify every Contributor for any
|
||||
liability incurred by such Contributor as a result of warranty, support,
|
||||
indemnity or liability terms You offer. You may include additional
|
||||
disclaimers of warranty and limitations of liability specific to any
|
||||
jurisdiction.
|
||||
|
||||
4. Inability to Comply Due to Statute or Regulation
|
||||
---------------------------------------------------
|
||||
|
||||
If it is impossible for You to comply with any of the terms of this
|
||||
License with respect to some or all of the Covered Software due to
|
||||
statute, judicial order, or regulation then You must: (a) comply with
|
||||
the terms of this License to the maximum extent possible; and (b)
|
||||
describe the limitations and the code they affect. Such description must
|
||||
be placed in a text file included with all distributions of the Covered
|
||||
Software under this License. Except to the extent prohibited by statute
|
||||
or regulation, such description must be sufficiently detailed for a
|
||||
recipient of ordinary skill to be able to understand it.
|
||||
|
||||
5. Termination
|
||||
--------------
|
||||
|
||||
5.1. The rights granted under this License will terminate automatically
|
||||
if You fail to comply with any of its terms. However, if You become
|
||||
compliant, then the rights granted under this License from a particular
|
||||
Contributor are reinstated (a) provisionally, unless and until such
|
||||
Contributor explicitly and finally terminates Your grants, and (b) on an
|
||||
ongoing basis, if such Contributor fails to notify You of the
|
||||
non-compliance by some reasonable means prior to 60 days after You have
|
||||
come back into compliance. Moreover, Your grants from a particular
|
||||
Contributor are reinstated on an ongoing basis if such Contributor
|
||||
notifies You of the non-compliance by some reasonable means, this is the
|
||||
first time You have received notice of non-compliance with this License
|
||||
from such Contributor, and You become compliant prior to 30 days after
|
||||
Your receipt of the notice.
|
||||
|
||||
5.2. If You initiate litigation against any entity by asserting a patent
|
||||
infringement claim (excluding declaratory judgment actions,
|
||||
counter-claims, and cross-claims) alleging that a Contributor Version
|
||||
directly or indirectly infringes any patent, then the rights granted to
|
||||
You by any and all Contributors for the Covered Software under Section
|
||||
2.1 of this License shall terminate.
|
||||
|
||||
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
|
||||
end user license agreements (excluding distributors and resellers) which
|
||||
have been validly granted by You or Your distributors under this License
|
||||
prior to termination shall survive termination.
|
||||
|
||||
************************************************************************
|
||||
* *
|
||||
* 6. Disclaimer of Warranty *
|
||||
* ------------------------- *
|
||||
* *
|
||||
* Covered Software is provided under this License on an "as is" *
|
||||
* basis, without warranty of any kind, either expressed, implied, or *
|
||||
* statutory, including, without limitation, warranties that the *
|
||||
* Covered Software is free of defects, merchantable, fit for a *
|
||||
* particular purpose or non-infringing. The entire risk as to the *
|
||||
* quality and performance of the Covered Software is with You. *
|
||||
* Should any Covered Software prove defective in any respect, You *
|
||||
* (not any Contributor) assume the cost of any necessary servicing, *
|
||||
* repair, or correction. This disclaimer of warranty constitutes an *
|
||||
* essential part of this License. No use of any Covered Software is *
|
||||
* authorized under this License except under this disclaimer. *
|
||||
* *
|
||||
************************************************************************
|
||||
|
||||
************************************************************************
|
||||
* *
|
||||
* 7. Limitation of Liability *
|
||||
* -------------------------- *
|
||||
* *
|
||||
* Under no circumstances and under no legal theory, whether tort *
|
||||
* (including negligence), contract, or otherwise, shall any *
|
||||
* Contributor, or anyone who distributes Covered Software as *
|
||||
* permitted above, be liable to You for any direct, indirect, *
|
||||
* special, incidental, or consequential damages of any character *
|
||||
* including, without limitation, damages for lost profits, loss of *
|
||||
* goodwill, work stoppage, computer failure or malfunction, or any *
|
||||
* and all other commercial damages or losses, even if such party *
|
||||
* shall have been informed of the possibility of such damages. This *
|
||||
* limitation of liability shall not apply to liability for death or *
|
||||
* personal injury resulting from such party's negligence to the *
|
||||
* extent applicable law prohibits such limitation. Some *
|
||||
* jurisdictions do not allow the exclusion or limitation of *
|
||||
* incidental or consequential damages, so this exclusion and *
|
||||
* limitation may not apply to You. *
|
||||
* *
|
||||
************************************************************************
|
||||
|
||||
8. Litigation
|
||||
-------------
|
||||
|
||||
Any litigation relating to this License may be brought only in the
|
||||
courts of a jurisdiction where the defendant maintains its principal
|
||||
place of business and such litigation shall be governed by laws of that
|
||||
jurisdiction, without reference to its conflict-of-law provisions.
|
||||
Nothing in this Section shall prevent a party's ability to bring
|
||||
cross-claims or counter-claims.
|
||||
|
||||
9. Miscellaneous
|
||||
----------------
|
||||
|
||||
This License represents the complete agreement concerning the subject
|
||||
matter hereof. If any provision of this License is held to be
|
||||
unenforceable, such provision shall be reformed only to the extent
|
||||
necessary to make it enforceable. Any law or regulation which provides
|
||||
that the language of a contract shall be construed against the drafter
|
||||
shall not be used to construe this License against a Contributor.
|
||||
|
||||
10. Versions of the License
|
||||
---------------------------
|
||||
|
||||
10.1. New Versions
|
||||
|
||||
Mozilla Foundation is the license steward. Except as provided in Section
|
||||
10.3, no one other than the license steward has the right to modify or
|
||||
publish new versions of this License. Each version will be given a
|
||||
distinguishing version number.
|
||||
|
||||
10.2. Effect of New Versions
|
||||
|
||||
You may distribute the Covered Software under the terms of the version
|
||||
of the License under which You originally received the Covered Software,
|
||||
or under the terms of any subsequent version published by the license
|
||||
steward.
|
||||
|
||||
10.3. Modified Versions
|
||||
|
||||
If you create software not governed by this License, and you want to
|
||||
create a new license for such software, you may create and use a
|
||||
modified version of this License if you rename the license and remove
|
||||
any references to the name of the license steward (except to note that
|
||||
such modified license differs from this License).
|
||||
|
||||
10.4. Distributing Source Code Form that is Incompatible With Secondary
|
||||
Licenses
|
||||
|
||||
If You choose to distribute Source Code Form that is Incompatible With
|
||||
Secondary Licenses under the terms of this version of the License, the
|
||||
notice described in Exhibit B of this License must be attached.
|
||||
|
||||
Exhibit A - Source Code Form License Notice
|
||||
-------------------------------------------
|
||||
|
||||
This Source Code Form is subject to the terms of the Mozilla Public
|
||||
License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
If it is not possible or desirable to put the notice in a particular
|
||||
file, then You may include the notice in a location (such as a LICENSE
|
||||
file in a relevant directory) where a recipient would be likely to look
|
||||
for such a notice.
|
||||
|
||||
You may add additional accurate notices of copyright ownership.
|
||||
|
||||
Exhibit B - "Incompatible With Secondary Licenses" Notice
|
||||
---------------------------------------------------------
|
||||
|
||||
This Source Code Form is "Incompatible With Secondary Licenses", as
|
||||
defined by the Mozilla Public License, v. 2.0.
|
|
@ -1,59 +0,0 @@
|
|||
sugardough
|
||||
==========
|
||||
|
||||
[![Build Status](https://img.shields.io/travis/mozilla/sugardough/master.svg)](https://travis-ci.org/mozilla/sugardough)
|
||||
|
||||
[![Coverage status](https://img.shields.io/coveralls/mozilla/sugardough/master.svg)](https://coveralls.io/r/mozilla/sugardough)
|
||||
|
||||
Run the tests
|
||||
-------------
|
||||
|
||||
There's a sample test in `sugardough/base/tests.py` for your convenience, that
|
||||
you can run using the following command:
|
||||
|
||||
python manage.py test
|
||||
|
||||
If you want to run the full suite, with flake8 and coverage, you may use
|
||||
[tox](https://testrun.org/tox/latest/). This will run the tests the same way
|
||||
they are run by [travis](https://travis-ci.org)):
|
||||
|
||||
pip install tox
|
||||
tox
|
||||
|
||||
The `.travis.yml` file will also run [coveralls](https://coveralls.io) by
|
||||
default.
|
||||
|
||||
If you want to benefit from Travis and Coveralls, you will need to activate
|
||||
them both for your project.
|
||||
|
||||
Oh, and you might want to change the "Build Status" and "Coverage Status" links
|
||||
at the top of this file to point to your own travis and coveralls accounts.
|
||||
|
||||
|
||||
Docker for development
|
||||
----------------------
|
||||
|
||||
0. Make sure you have [docker](https://docker.io) and [fig](https://pypi.python.org/pypi/fig)
|
||||
1. fig up
|
||||
|
||||
|
||||
Docker for deploying to production
|
||||
-----------------------------------
|
||||
|
||||
1. Add your project in [Docker Registry](https://registry.hub.docker.com/) as [Automated Build](http://docs.docker.com/docker-hub/builds/)
|
||||
2. Prepare a 'env' file with all the variables needed by dev, stage or production.
|
||||
3. Run the image:
|
||||
|
||||
docker run --env-file env -p 80:80 mozilla/sugardough
|
||||
|
||||
|
||||
NewRelic Monitoring
|
||||
-------------------
|
||||
|
||||
A newrelic.ini file is already included. To enable NewRelic monitoring
|
||||
add two enviroment variables:
|
||||
|
||||
- NEW_RELIC_LICENSE_KEY
|
||||
- NEW_RELIC_APP_NAME
|
||||
|
||||
See the [full list of supported environment variables](https://docs.newrelic.com/docs/agents/python-agent/installation-configuration/python-agent-configuration#environment-variables).
|
|
@ -1,842 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
"""peep ("prudently examine every package") verifies that packages conform to a
|
||||
trusted, locally stored hash and only then installs them::
|
||||
|
||||
peep install -r requirements.txt
|
||||
|
||||
This makes your deployments verifiably repeatable without having to maintain a
|
||||
local PyPI mirror or use a vendor lib. Just update the version numbers and
|
||||
hashes in requirements.txt, and you're all set.
|
||||
|
||||
"""
|
||||
from __future__ import print_function
|
||||
try:
|
||||
xrange = xrange
|
||||
except NameError:
|
||||
xrange = range
|
||||
from base64 import urlsafe_b64encode
|
||||
import cgi
|
||||
from collections import defaultdict
|
||||
from functools import wraps
|
||||
from hashlib import sha256
|
||||
from itertools import chain
|
||||
from linecache import getline
|
||||
import mimetypes
|
||||
from optparse import OptionParser
|
||||
from os import listdir
|
||||
from os.path import join, basename, splitext, isdir
|
||||
from pickle import dumps, loads
|
||||
import re
|
||||
import sys
|
||||
from shutil import rmtree, copy
|
||||
from sys import argv, exit
|
||||
from tempfile import mkdtemp
|
||||
import traceback
|
||||
try:
|
||||
from urllib2 import build_opener, HTTPHandler, HTTPSHandler, HTTPError
|
||||
except ImportError:
|
||||
from urllib.request import build_opener, HTTPHandler, HTTPSHandler
|
||||
from urllib.error import HTTPError
|
||||
try:
|
||||
from urlparse import urlparse
|
||||
except ImportError:
|
||||
from urllib.parse import urlparse # 3.4
|
||||
# TODO: Probably use six to make urllib stuff work across 2/3.
|
||||
|
||||
from pkg_resources import require, VersionConflict, DistributionNotFound
|
||||
|
||||
# We don't admit our dependency on pip in setup.py, lest a naive user simply
|
||||
# say `pip install peep.tar.gz` and thus pull down an untrusted copy of pip
|
||||
# from PyPI. Instead, we make sure it's installed and new enough here and spit
|
||||
# out an error message if not:
|
||||
def activate(specifier):
    """Make a compatible version of pip importable. Raise a RuntimeError if we
    couldn't.

    :arg specifier: A pkg_resources requirement string, e.g. ``'pip>=0.6.2'``

    """
    try:
        # Keep the activate() call inside the try: activating a distribution
        # can itself raise VersionConflict.
        for distribution in require(specifier):
            distribution.activate()
    except (VersionConflict, DistributionNotFound):
        raise RuntimeError('The installed version of pip is too old; peep '
                           'requires ' + specifier)
|
||||
|
||||
activate('pip>=0.6.2') # Before 0.6.2, the log module wasn't there, so some
|
||||
# of our monkeypatching fails. It probably wouldn't be
|
||||
# much work to support even earlier, though.
|
||||
|
||||
import pip
|
||||
from pip.commands.install import InstallCommand
|
||||
try:
|
||||
from pip.download import url_to_path # 1.5.6
|
||||
except ImportError:
|
||||
try:
|
||||
from pip.util import url_to_path # 0.7.0
|
||||
except ImportError:
|
||||
from pip.util import url_to_filename as url_to_path # 0.6.2
|
||||
from pip.index import PackageFinder, Link
|
||||
try:
|
||||
from pip.log import logger
|
||||
except ImportError:
|
||||
from pip import logger # 6.0
|
||||
from pip.req import parse_requirements
|
||||
|
||||
|
||||
# peep's own version, as a (major, minor, patch) tuple.
__version__ = 2, 1, 0


# Shell exit statuses returned by peep_hash()/peep_install():
ITS_FINE_ITS_FINE = 0
SOMETHING_WENT_WRONG = 1
# "Traditional" for command-line errors according to optparse docs:
COMMAND_LINE_ERROR = 2

# Archive suffixes that _version() knows how to strip off a downloaded
# filename.
ARCHIVE_EXTENSIONS = ('.tar.bz2', '.tar.gz', '.tgz', '.tar', '.zip')

# Unique sentinel so package_finder() can tell "option absent in this pip
# version" apart from a real option value of None.
MARKER = object()
|
||||
|
||||
|
||||
class PipException(Exception):
    """When I delegated to pip, it exited with an error."""

    def __init__(self, error_code):
        # Shell status pip exited with; main() propagates it to our caller.
        self.error_code = error_code
|
||||
|
||||
|
||||
class UnsupportedRequirementError(Exception):
    """An unsupported line was encountered in a requirements file.

    Raised for requirements peep can't hash: directories, non-file schemes,
    or requirements with no determinable download location.

    """
|
||||
|
||||
|
||||
class DownloadError(Exception):
    """Raised when fetching a requirement's archive over HTTP(S) fails."""

    def __init__(self, link, exc):
        # Keep just the text of the underlying error; that's all __str__
        # needs, and it avoids holding onto the original exception object.
        self.link, self.reason = link, str(exc)

    def __str__(self):
        return 'Downloading %s failed: %s' % (self.link, self.reason)
|
||||
|
||||
|
||||
def encoded_hash(sha):
    """Return a short, 7-bit-safe representation of a hash.

    If you pass a sha256, this results in the hash algorithm that the Wheel
    format (PEP 427) uses, except here it's intended to be run across the
    downloaded archive before unpacking.

    """
    # URL-safe base64, minus the '=' padding PEP 427 also drops.
    encoded = urlsafe_b64encode(sha.digest())
    return encoded.decode('ascii').rstrip('=')
|
||||
|
||||
|
||||
def run_pip(initial_args):
    """Delegate to pip the given args (starting with the subcommand), and raise
    ``PipException`` if something goes wrong."""
    exit_status = pip.main(initial_args)

    # pip's singleton "logger" accumulates consumers with every invocation,
    # because pip assumes a single command per interpreter lifetime. Clear
    # them so repeated run_pip() calls don't multiply output.
    logger.consumers = []

    if exit_status:
        raise PipException(exit_status)
|
||||
|
||||
|
||||
def hash_of_file(path):
    """Return the encoded sha256 hash of the file at ``path``."""
    sha = sha256()
    with open(path, 'rb') as archive:
        # Read in 1MB chunks so huge archives don't balloon memory use.
        for chunk in iter(lambda: archive.read(2 ** 20), b''):
            sha.update(chunk)
    return encoded_hash(sha)
|
||||
|
||||
|
||||
def is_git_sha(text):
    """Return whether this is probably a git sha"""
    # Only the full 40-char sha and the 7-char abbreviation qualify.
    if len(text) not in (7, 40):
        return False
    try:
        int(text, 16)
    except ValueError:
        return False
    return True
|
||||
|
||||
|
||||
def filename_from_url(url):
    """Return the last path segment of ``url``, fragment and query excluded."""
    return urlparse(url).path.rsplit('/', 1)[-1]
|
||||
|
||||
|
||||
def requirement_args(argv, want_paths=False, want_other=False):
    """Return an iterable of filtered arguments.

    :arg argv: Arguments, starting after the subcommand
    :arg want_paths: If True, the returned iterable includes the paths to any
        requirements files following a ``-r`` or ``--requirement`` option.
    :arg want_other: If True, the returned iterable includes the args that are
        not a requirement-file path or a ``-r`` or ``--requirement`` flag.

    """
    expecting_path = False
    for token in argv:
        # Tolerate odd input: a requirements file literally named "-r", or a
        # trailing "-r" with no path after it.
        if expecting_path:
            expecting_path = False
            if want_paths:
                yield token
        elif token in ('-r', '--requirement'):
            expecting_path = True
        elif want_other:
            yield token
|
||||
|
||||
|
||||
# Matches the "# sha256: <hash>" comment lines that precede a requirement in
# a requirements file. (Comments inside the verbose pattern are ignored by
# re.X.)
HASH_COMMENT_RE = re.compile(
    r"""
    \s*\#\s+                   # Lines that start with a '#'
    (?P<hash_type>sha256):\s+  # Hash type is hardcoded to be sha256 for now.
    (?P<hash>[^\s]+)           # Hashes can be anything except '#' or spaces.
    \s*                        # Suck up whitespace before the comment or
                               # just trailing whitespace if there is no
                               # comment. Also strip trailing newlines.
    (?:\#(?P<comment>.*))?     # Comments can be anything after a whitespace+#
    $""", re.X)                # and are optional.
|
||||
|
||||
|
||||
def peep_hash(argv):
    """Return the peep hash of one or more files, returning a shell status code
    or raising a PipException.

    :arg argv: The commandline args, starting after the subcommand

    """
    parser = OptionParser(
        usage='usage: %prog hash file [file ...]',
        description='Print a peep hash line for one or more files: for '
                    'example, "# sha256: '
                    'oz42dZy6Gowxw8AelDtO4gRgTW_xPdooH484k7I5EOY".')
    _, paths = parser.parse_args(args=argv)

    # No files given is a usage error:
    if not paths:
        parser.print_usage()
        return COMMAND_LINE_ERROR

    for path in paths:
        print('# sha256:', hash_of_file(path))
    return ITS_FINE_ITS_FINE
|
||||
|
||||
|
||||
class EmptyOptions(object):
    """Fake optparse options for compatibility with pip<1.2

    pip<1.2 had a bug in parse_requirements() in which the ``options`` kwarg
    was required. We work around that by passing it a mock object.

    """
    # Neutral values for the attributes pip's parse_requirements()
    # presumably reads off the options object — verify against the pip
    # versions we support:
    default_vcs = None
    skip_requirements_regex = None
    isolated_mode = False
|
||||
|
||||
|
||||
def memoize(func):
    """Memoize a method that should return the same result every time on a
    given instance.

    """
    @wraps(func)
    def wrapper(self):
        # Lazily attach a per-instance cache dict, keyed by method name so
        # several memoized methods can share it.
        try:
            cache = self._cache
        except AttributeError:
            cache = self._cache = {}
        key = func.__name__
        if key not in cache:
            cache[key] = func(self)
        return cache[key]
    return wrapper
|
||||
|
||||
|
||||
def package_finder(argv):
    """Return a PackageFinder respecting command-line options.

    :arg argv: Everything after the subcommand

    """
    # We instantiate an InstallCommand and then use some of its private
    # machinery--its arg parser--for our own purposes, like a virus. This
    # approach is portable across many pip versions, where more fine-grained
    # ones are not. Ignoring options that don't exist on the parser (for
    # instance, --use-wheel) gives us a straightforward method of backward
    # compatibility.
    try:
        command = InstallCommand()
    except TypeError:
        # This is likely pip 1.3.0's "__init__() takes exactly 2 arguments (1
        # given)" error. In that version, InstallCommand takes a top-level
        # parser passed in from outside.
        from pip.baseparser import create_main_parser
        command = InstallCommand(create_main_parser())
    # The downside is that it essentially ruins the InstallCommand class for
    # further use. Calling out to pip.main() within the same interpreter, for
    # example, would result in arguments parsed this time turning up there.
    # Thus, we deepcopy the arg parser so we don't trash its singletons. Of
    # course, deepcopy doesn't work on these objects, because they contain
    # uncopyable regex patterns, so we pickle and unpickle instead. Fun!
    options, _ = loads(dumps(command.parser)).parse_args(argv)

    # Carry over PackageFinder kwargs that have [about] the same names as
    # options attr names:
    possible_options = [
        'find_links', 'use_wheel', 'allow_external', 'allow_unverified',
        'allow_all_external', ('allow_all_prereleases', 'pre'),
        'process_dependency_links']
    kwargs = {}
    for option in possible_options:
        # A tuple means the PackageFinder kwarg and the options attr differ.
        kw, attr = option if isinstance(option, tuple) else (option, option)
        # MARKER (not None) distinguishes "option missing in this pip
        # version" from a real option value of None:
        value = getattr(options, attr, MARKER)
        if value is not MARKER:
            kwargs[kw] = value

    # Figure out index_urls:
    index_urls = [options.index_url] + options.extra_index_urls
    if options.no_index:
        index_urls = []
    index_urls += getattr(options, 'mirrors', [])

    # If pip is new enough to have a PipSession, initialize one, since
    # PackageFinder requires it:
    if hasattr(command, '_build_session'):
        kwargs['session'] = command._build_session(options)

    return PackageFinder(index_urls=index_urls, **kwargs)
|
||||
|
||||
|
||||
class DownloadedReq(object):
    """A wrapper around InstallRequirement which offers additional information
    based on downloading and examining a corresponding package archive

    These are conceptually immutable, so we can get away with memoizing
    expensive things.

    """
    def __init__(self, req, argv):
        """Download a requirement, compare its hashes, and return a subclass
        of DownloadedReq depending on its state.

        :arg req: The InstallRequirement I am based on
        :arg argv: The args, starting after the subcommand

        """
        self._req = req
        self._argv = argv

        # We use a separate temp dir for each requirement so requirements
        # (from different indices) that happen to have the same archive names
        # don't overwrite each other, leading to a security hole in which the
        # latter is a hash mismatch, the former has already passed the
        # comparison, and the latter gets installed.
        self._temp_path = mkdtemp(prefix='peep-')
        # Think of DownloadedReq as a one-shot state machine. It's an abstract
        # class that ratchets forward to being one of its own subclasses,
        # depending on its package status. Then it doesn't move again.
        self.__class__ = self._class()

    def dispose(self):
        """Delete temp files and dirs I've made. Render myself useless.

        Do not call further methods on me after calling dispose().

        """
        rmtree(self._temp_path)

    def _version(self):
        """Deduce the version number of the downloaded package from its filename."""
        # TODO: Can we delete this method and just print the line from the
        # reqs file verbatim instead?
        def version_of_archive(filename, package_name):
            # Since we know the project_name, we can strip that off the left, strip
            # any archive extensions off the right, and take the rest as the
            # version.
            for ext in ARCHIVE_EXTENSIONS:
                if filename.endswith(ext):
                    filename = filename[:-len(ext)]
                    break
            # Handle github sha tarball downloads.
            if is_git_sha(filename):
                filename = package_name + '-' + filename
            if not filename.lower().replace('_', '-').startswith(package_name.lower()):
                # TODO: Should we replace runs of [^a-zA-Z0-9.], not just _, with -?
                give_up(filename, package_name)
            return filename[len(package_name) + 1:]  # Strip off '-' before version.

        def version_of_wheel(filename, package_name):
            # For Wheel files (http://legacy.python.org/dev/peps/pep-0427/#file-
            # name-convention) we know the format bits are '-' separated.
            whl_package_name, version, _rest = filename.split('-', 2)
            # Do the alteration to package_name from PEP 427:
            our_package_name = re.sub(r'[^\w\d.]+', '_', package_name, re.UNICODE)
            if whl_package_name != our_package_name:
                give_up(filename, whl_package_name)
            return version

        def give_up(filename, package_name):
            # Shared failure path for both deducers.
            raise RuntimeError("The archive '%s' didn't start with the package name '%s', so I couldn't figure out the version number. My bad; improve me." %
                               (filename, package_name))

        get_version = (version_of_wheel
                       if self._downloaded_filename().endswith('.whl')
                       else version_of_archive)
        return get_version(self._downloaded_filename(), self._project_name())

    def _is_always_unsatisfied(self):
        """Returns whether this requirement is always unsatisfied

        This would happen in cases where we can't determine the version
        from the filename.

        """
        # If this is a github sha tarball, then it is always unsatisfied
        # because the url has a commit sha in it and not the version
        # number.
        url = self._req.url
        if url:
            filename = filename_from_url(url)
            if filename.endswith(ARCHIVE_EXTENSIONS):
                filename, ext = splitext(filename)
                if is_git_sha(filename):
                    return True
        return False

    def _path_and_line(self):
        """Return the path and line number of the file from which our
        InstallRequirement came.

        """
        # Parses pip's "comes_from" annotation, e.g. "-r reqs.txt (line 3)".
        path, line = (re.match(r'-r (.*) \(line (\d+)\)$',
                               self._req.comes_from).groups())
        return path, int(line)

    @memoize  # Avoid hitting the file[cache] over and over.
    def _expected_hashes(self):
        """Return a list of known-good hashes for this package."""

        def hashes_above(path, line_number):
            """Yield hashes from contiguous comment lines before line
            ``line_number``.

            """
            for line_number in xrange(line_number - 1, 0, -1):
                line = getline(path, line_number)
                match = HASH_COMMENT_RE.match(line)
                if match:
                    yield match.groupdict()['hash']
                elif not line.lstrip().startswith('#'):
                    # If we hit a non-comment line, abort
                    break

        hashes = list(hashes_above(*self._path_and_line()))
        hashes.reverse()  # because we read them backwards
        return hashes

    def _download(self, link):
        """Download a file, and return its name within my temp dir.

        This does no verification of HTTPS certs, but our checking hashes
        makes that largely unimportant. It would be nice to be able to use the
        requests lib, which can verify certs, but it is guaranteed to be
        available only in pip >= 1.5.

        This also drops support for proxies and basic auth, though those could
        be added back in.

        """
        # Based on pip 1.4.1's URLOpener but with cert verification removed
        def opener(is_https):
            if is_https:
                opener = build_opener(HTTPSHandler())
                # Strip out HTTPHandler to prevent MITM spoof:
                for handler in opener.handlers:
                    if isinstance(handler, HTTPHandler):
                        opener.handlers.remove(handler)
            else:
                opener = build_opener()
            return opener

        # Descended from unpack_http_url() in pip 1.4.1
        def best_filename(link, response):
            """Return the most informative possible filename for a download,
            ideally with a proper extension.

            """
            content_type = response.info().get('content-type', '')
            filename = link.filename  # fallback
            # Have a look at the Content-Disposition header for a better guess:
            content_disposition = response.info().get('content-disposition')
            if content_disposition:
                type, params = cgi.parse_header(content_disposition)
                # We use ``or`` here because we don't want to use an "empty" value
                # from the filename param:
                filename = params.get('filename') or filename
            ext = splitext(filename)[1]
            if not ext:
                # No extension in the name; try the Content-Type:
                ext = mimetypes.guess_extension(content_type)
                if ext:
                    filename += ext
            if not ext and link.url != response.geturl():
                # Still nothing; maybe a redirect target has one:
                ext = splitext(response.geturl())[1]
                if ext:
                    filename += ext
            return filename

        # Descended from _download_url() in pip 1.4.1
        def pipe_to_file(response, path):
            """Pull the data off an HTTP response, and shove it in a new file."""
            # TODO: Indicate progress.
            with open(path, 'wb') as file:
                while True:
                    chunk = response.read(4096)
                    if not chunk:
                        break
                    file.write(chunk)

        url = link.url.split('#', 1)[0]
        try:
            response = opener(urlparse(url).scheme != 'http').open(url)
        except (HTTPError, IOError) as exc:
            raise DownloadError(link, exc)
        filename = best_filename(link, response)
        pipe_to_file(response, join(self._temp_path, filename))
        return filename

    # Based on req_set.prepare_files() in pip bb2a8428d4aebc8d313d05d590f386fa3f0bbd0f
    @memoize  # Avoid re-downloading.
    def _downloaded_filename(self):
        """Download the package's archive if necessary, and return its
        filename.

        --no-deps is implied, as we have reimplemented the bits that would
        ordinarily do dependency resolution.

        """
        # Peep doesn't support requirements that don't come down as a single
        # file, because it can't hash them. Thus, it doesn't support editable
        # requirements, because pip itself doesn't support editable
        # requirements except for "local projects or a VCS url". Nor does it
        # support VCS requirements yet, because we haven't yet come up with a
        # portable, deterministic way to hash them. In summary, all we support
        # is == requirements and tarballs/zips/etc.

        # TODO: Stop on reqs that are editable or aren't ==.

        finder = package_finder(self._argv)

        # If the requirement isn't already specified as a URL, get a URL
        # from an index:
        link = (finder.find_requirement(self._req, upgrade=False)
                if self._req.url is None
                else Link(self._req.url))

        if link:
            lower_scheme = link.scheme.lower()  # pip lower()s it for some reason.
            if lower_scheme == 'http' or lower_scheme == 'https':
                file_path = self._download(link)
                return basename(file_path)
            elif lower_scheme == 'file':
                # The following is inspired by pip's unpack_file_url():
                link_path = url_to_path(link.url_without_fragment)
                if isdir(link_path):
                    raise UnsupportedRequirementError(
                        "%s: %s is a directory. So that it can compute "
                        "a hash, peep supports only filesystem paths which "
                        "point to files" %
                        (self._req, link.url_without_fragment))
                else:
                    copy(link_path, self._temp_path)
                    return basename(link_path)
            else:
                raise UnsupportedRequirementError(
                    "%s: The download link, %s, would not result in a file "
                    "that can be hashed. Peep supports only == requirements, "
                    "file:// URLs pointing to files (not folders), and "
                    "http:// and https:// URLs pointing to tarballs, zips, "
                    "etc." % (self._req, link.url))
        else:
            raise UnsupportedRequirementError(
                "%s: couldn't determine where to download this requirement from."
                % (self._req,))

    def install(self):
        """Install the package I represent, without dependencies.

        Obey typical pip-install options passed in on the command line.

        """
        other_args = list(requirement_args(self._argv, want_other=True))
        archive_path = join(self._temp_path, self._downloaded_filename())
        run_pip(['install'] + other_args + ['--no-deps', archive_path])

    @memoize
    def _actual_hash(self):
        """Download the package's archive if necessary, and return its hash."""
        return hash_of_file(join(self._temp_path, self._downloaded_filename()))

    def _project_name(self):
        """Return the inner Requirement's "unsafe name".

        Raise ValueError if there is no name.

        """
        name = getattr(self._req.req, 'project_name', '')
        if name:
            return name
        raise ValueError('Requirement has no project_name.')

    def _name(self):
        """Return the InstallRequirement's name."""
        return self._req.name

    def _url(self):
        """Return the InstallRequirement's URL, or None if it has none."""
        return self._req.url

    @memoize  # Avoid re-running expensive check_if_exists().
    def _is_satisfied(self):
        """Return whether the requirement is already installed (and not a
        sha-tarball req, which we can never prove satisfied)."""
        self._req.check_if_exists()
        return (self._req.satisfied_by and
                not self._is_always_unsatisfied())

    def _class(self):
        """Return the class I should be, spanning a continuum of goodness."""
        try:
            self._project_name()
        except ValueError:
            return MalformedReq
        if self._is_satisfied():
            return SatisfiedReq
        if not self._expected_hashes():
            return MissingReq
        if self._actual_hash() not in self._expected_hashes():
            return MismatchedReq
        return InstallableReq

    @classmethod
    def foot(cls):
        """Return the text to be printed once, after all of the errors from
        classes of my type are printed.

        """
        return ''
|
||||
|
||||
|
||||
class MalformedReq(DownloadedReq):
    """A requirement whose package name could not be determined"""

    @classmethod
    def head(cls):
        # Printed once before the first MalformedReq error.
        return 'The following requirements could not be processed:\n'

    def error(self):
        # One line per bad requirement, suggesting the #egg= fix.
        return '* Unable to determine package name from URL %s; add #egg=' % self._url()
|
||||
|
||||
|
||||
class MissingReq(DownloadedReq):
    """A requirement for which no hashes were specified in the requirements file"""

    @classmethod
    def head(cls):
        return ('The following packages had no hashes specified in the requirements file, which\n'
                'leaves them open to tampering. Vet these packages to your satisfaction, then\n'
                'add these "sha256" lines like so:\n\n')

    def error(self):
        # Emit a ready-to-paste requirements line preceded by its hash
        # comment.
        if self._url():
            line = self._url()
            # Add an #egg= hint when the project name isn't already in the
            # URL's filename:
            if self._name() not in filename_from_url(self._url()):
                line = '%s#egg=%s' % (line, self._name())
        else:
            line = '%s==%s' % (self._name(), self._version())
        return '# sha256: %s\n%s\n' % (self._actual_hash(), line)
|
||||
|
||||
|
||||
class MismatchedReq(DownloadedReq):
    """A requirement for which the downloaded file didn't match any of my hashes."""
    @classmethod
    def head(cls):
        return ("THE FOLLOWING PACKAGES DIDN'T MATCH THE HASHES SPECIFIED IN THE REQUIREMENTS\n"
                "FILE. If you have updated the package versions, update the hashes. If not,\n"
                "freak out, because someone has tampered with the packages.\n\n")

    def error(self):
        # NOTE(review): the rendering of this file may have collapsed leading
        # spaces inside these literals; the ``len(preamble) - 4`` below
        # suggests the preamble literal originally began with four spaces.
        # Verify against upstream peep before reformatting.
        preamble = ' %s: expected%s' % (
            self._project_name(),
            ' one of' if len(self._expected_hashes()) > 1 else '')
        # Align continuation hashes and the "got" line under the first hash:
        return '%s %s\n%s got %s' % (
            preamble,
            ('\n' + ' ' * (len(preamble) + 1)).join(self._expected_hashes()),
            ' ' * (len(preamble) - 4),
            self._actual_hash())

    @classmethod
    def foot(cls):
        # Blank line after the mismatch report.
        return '\n'
|
||||
|
||||
|
||||
class SatisfiedReq(DownloadedReq):
    """A requirement which turned out to be already installed"""

    @classmethod
    def head(cls):
        return ("These packages were already installed, so we didn't need to download or build\n"
                "them again. If you installed them with peep in the first place, you should be\n"
                "safe. If not, uninstall them, then re-attempt your install with peep.\n")

    def error(self):
        # Despite the name, for SatisfiedReq this is informational, not an
        # error; peep_install prints it after a successful run.
        return ' %s' % (self._req,)
|
||||
|
||||
|
||||
class InstallableReq(DownloadedReq):
    """A requirement whose hash matched and can be safely installed"""
    # No head()/error() overrides: this state never produces output;
    # peep_install just calls install() on it.
|
||||
|
||||
|
||||
# DownloadedReq subclasses that indicate an error that should keep us from
# going forward with installation, in the order in which their errors should
# be reported:
# (SatisfiedReq and InstallableReq are intentionally absent — they aren't
# errors.)
ERROR_CLASSES = [MismatchedReq, MissingReq, MalformedReq]
|
||||
|
||||
|
||||
def bucket(things, key):
    """Return a map of key -> list of things."""
    grouped = defaultdict(list)
    for item in things:
        grouped[key(item)].append(item)
    return grouped
|
||||
|
||||
|
||||
def first_every_last(iterable, first, every, last):
    """Execute something before the first item of iter, something else for each
    item, and a third thing after the last.

    If there are no items in the iterable, don't execute anything.

    :arg first: Called with the first item, before ``every`` sees it
    :arg every: Called with each item in turn
    :arg last: Called with the final item, after the loop finishes

    """
    # Bug fix: ``did_first`` was never set to True, so ``first`` fired on
    # every item and ``last`` (formerly inside the loop, guarded by the
    # always-False flag) never fired at all.
    did_first = False
    for item in iterable:
        if not did_first:
            did_first = True
            first(item)
        every(item)
    if did_first:
        # ``item`` still holds the final element of the iterable here.
        last(item)
|
||||
|
||||
|
||||
def downloaded_reqs_from_path(path, argv):
    """Return a list of DownloadedReqs representing the requirements parsed
    out of a given requirements file.

    :arg path: The path to the requirements file
    :arg argv: The commandline args, starting after the subcommand

    """
    try:
        return [DownloadedReq(req, argv) for req in
                parse_requirements(path, options=EmptyOptions())]
    except TypeError:
        # session is a required kwarg as of pip 6.0 and will raise
        # a TypeError if missing. It needs to be a PipSession instance,
        # but in older versions we can't import it from pip.download
        # (nor do we need it at all) so we only import it in this except block
        from pip.download import PipSession
        return [DownloadedReq(req, argv) for req in
                parse_requirements(path, options=EmptyOptions(),
                                   session=PipSession())]
|
||||
|
||||
|
||||
def peep_install(argv):
    """Perform the ``peep install`` subcommand, returning a shell status code
    or raising a PipException.

    :arg argv: The commandline args, starting after the subcommand

    """
    # Buffer all of our messages and emit them in one burst in the
    # ``finally`` below, so they aren't interleaved with pip's own output.
    # (Previously a debugging leftover, ``out = print``, bypassed the buffer:
    # messages got doubled newlines—the strings already embed '\n'—and the
    # final ``print(''.join(output))`` emitted a stray blank line.)
    output = []
    out = output.append
    reqs = []
    try:
        req_paths = list(requirement_args(argv, want_paths=True))
        if not req_paths:
            out("You have to specify one or more requirements files with the -r option, because\n"
                "otherwise there's nowhere for peep to look up the hashes.\n")
            return COMMAND_LINE_ERROR

        # We're a "peep install" command, and we have some requirement paths.
        reqs = list(chain.from_iterable(
            downloaded_reqs_from_path(path, argv)
            for path in req_paths))
        # Group the reqs by their state (the DownloadedReq subclass each
        # ratcheted into):
        buckets = bucket(reqs, lambda r: r.__class__)

        # Skip a line after pip's "Cleaning up..." so the important stuff
        # stands out:
        if any(buckets[b] for b in ERROR_CLASSES):
            out('\n')

        printers = (lambda r: out(r.head()),
                    lambda r: out(r.error() + '\n'),
                    lambda r: out(r.foot()))
        for c in ERROR_CLASSES:
            first_every_last(buckets[c], *printers)

        if any(buckets[b] for b in ERROR_CLASSES):
            out('-------------------------------\n'
                'Not proceeding to installation.\n')
            return SOMETHING_WENT_WRONG
        else:
            for req in buckets[InstallableReq]:
                req.install()

            first_every_last(buckets[SatisfiedReq], *printers)

            return ITS_FINE_ITS_FINE
    except (UnsupportedRequirementError, DownloadError) as exc:
        out(str(exc))
        return SOMETHING_WENT_WRONG
    finally:
        # Always clean up each req's temp dir, then flush our buffered
        # messages, whichever path we took out of the try.
        for req in reqs:
            req.dispose()
        print(''.join(output))
|
||||
|
||||
|
||||
def main():
    """Be the top-level entrypoint. Return a shell status code."""
    # Subcommands peep implements itself; anything else falls through to pip.
    commands = {'hash': peep_hash,
                'install': peep_install}
    try:
        if len(argv) >= 2 and argv[1] in commands:
            return commands[argv[1]](argv[2:])
        else:
            # Fall through to top-level pip main() for everything else:
            return pip.main()
    except PipException as exc:
        # Propagate pip's exit status to our caller.
        return exc.error_code
|
||||
|
||||
|
||||
def exception_handler(exc_type, exc_value, exc_tb):
    """Print a bug-report request along with the version and traceback
    particulars a maintainer would need to debug an unexpected crash.

    Takes the standard ``sys.exc_info()`` triple.

    """
    print('Oh no! Peep had a problem while trying to do stuff. Please write up a bug report')
    print('with the specifics so we can fix it:')
    print()
    print('https://github.com/erikrose/peep/issues/new')
    print()
    print('Here are some particulars you can copy and paste into the bug report:')
    print()
    print('---')
    print('peep:', repr(__version__))
    print('python:', repr(sys.version))
    print('pip:', repr(pip.__version__))
    print('Command line: ', repr(sys.argv))
    print(
        ''.join(traceback.format_exception(exc_type, exc_value, exc_tb)))
    print('---')
|
||||
|
||||
|
||||
if __name__ == '__main__':
    try:
        exit(main())
    except Exception as exc:
        # Unexpected crash: show the bug-report banner rather than a bare
        # traceback, and exit nonzero.
        exception_handler(*sys.exc_info())
        exit(1)
|
|
@ -1,5 +0,0 @@
|
|||
#!/bin/bash

# Shared pre-launch setup for the run scripts: gather static assets and
# apply the database schema before the server starts.
./manage.py collectstatic --noinput -c
./manage.py syncdb --noinput
|
||||
|
|
@ -1,4 +0,0 @@
|
|||
#!/bin/sh

# Run the shared setup (collectstatic/syncdb), then serve the Django app
# with gunicorn on all interfaces, port 80, logging to stdout.
./bin/run-common.sh
gunicorn sugardough.wsgi:application -b 0.0.0.0:80 -w 2 --log-file -
|
|
@ -1,4 +0,0 @@
|
|||
#!/bin/sh

# Run the shared setup (collectstatic/syncdb), then start Django's
# development server bound to all interfaces on port 8000.
./bin/run-common.sh
./manage.py runserver 0.0.0.0:8000
|
|
@ -1,177 +0,0 @@
|
|||
# Makefile for Sphinx documentation
|
||||
#
|
||||
|
||||
# You can set these variables from the command line.
|
||||
SPHINXOPTS =
|
||||
SPHINXBUILD = sphinx-build
|
||||
PAPER =
|
||||
BUILDDIR = _build
|
||||
|
||||
# User-friendly check for sphinx-build
|
||||
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
|
||||
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
|
||||
endif
|
||||
|
||||
# Internal variables.
|
||||
PAPEROPT_a4 = -D latex_paper_size=a4
|
||||
PAPEROPT_letter = -D latex_paper_size=letter
|
||||
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||
# the i18n builder cannot share the environment and doctrees with the others
|
||||
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||
|
||||
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
|
||||
|
||||
help:
|
||||
@echo "Please use \`make <target>' where <target> is one of"
|
||||
@echo " html to make standalone HTML files"
|
||||
@echo " dirhtml to make HTML files named index.html in directories"
|
||||
@echo " singlehtml to make a single large HTML file"
|
||||
@echo " pickle to make pickle files"
|
||||
@echo " json to make JSON files"
|
||||
@echo " htmlhelp to make HTML files and a HTML help project"
|
||||
@echo " qthelp to make HTML files and a qthelp project"
|
||||
@echo " devhelp to make HTML files and a Devhelp project"
|
||||
@echo " epub to make an epub"
|
||||
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
|
||||
@echo " latexpdf to make LaTeX files and run them through pdflatex"
|
||||
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
|
||||
@echo " text to make text files"
|
||||
@echo " man to make manual pages"
|
||||
@echo " texinfo to make Texinfo files"
|
||||
@echo " info to make Texinfo files and run them through makeinfo"
|
||||
@echo " gettext to make PO message catalogs"
|
||||
@echo " changes to make an overview of all changed/added/deprecated items"
|
||||
@echo " xml to make Docutils-native XML files"
|
||||
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
|
||||
@echo " linkcheck to check all external links for integrity"
|
||||
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
|
||||
|
||||
clean:
|
||||
rm -rf $(BUILDDIR)/*
|
||||
|
||||
html:
|
||||
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
||||
|
||||
dirhtml:
|
||||
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
|
||||
|
||||
singlehtml:
|
||||
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
|
||||
|
||||
pickle:
|
||||
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
|
||||
@echo
|
||||
@echo "Build finished; now you can process the pickle files."
|
||||
|
||||
json:
|
||||
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
|
||||
@echo
|
||||
@echo "Build finished; now you can process the JSON files."
|
||||
|
||||
htmlhelp:
|
||||
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run HTML Help Workshop with the" \
|
||||
".hhp project file in $(BUILDDIR)/htmlhelp."
|
||||
|
||||
qthelp:
|
||||
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
|
||||
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
|
||||
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/sugardough.qhcp"
|
||||
@echo "To view the help file:"
|
||||
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/sugardough.qhc"
|
||||
|
||||
devhelp:
|
||||
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
|
||||
@echo
|
||||
@echo "Build finished."
|
||||
@echo "To view the help file:"
|
||||
@echo "# mkdir -p $$HOME/.local/share/devhelp/sugardough"
|
||||
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/sugardough"
|
||||
@echo "# devhelp"
|
||||
|
||||
epub:
|
||||
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
|
||||
@echo
|
||||
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
|
||||
|
||||
latex:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo
|
||||
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
|
||||
@echo "Run \`make' in that directory to run these through (pdf)latex" \
|
||||
"(use \`make latexpdf' here to do that automatically)."
|
||||
|
||||
latexpdf:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo "Running LaTeX files through pdflatex..."
|
||||
$(MAKE) -C $(BUILDDIR)/latex all-pdf
|
||||
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||
|
||||
latexpdfja:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo "Running LaTeX files through platex and dvipdfmx..."
|
||||
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
|
||||
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||
|
||||
text:
|
||||
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
|
||||
@echo
|
||||
@echo "Build finished. The text files are in $(BUILDDIR)/text."
|
||||
|
||||
man:
|
||||
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
|
||||
@echo
|
||||
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
|
||||
|
||||
texinfo:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo
|
||||
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
|
||||
@echo "Run \`make' in that directory to run these through makeinfo" \
|
||||
"(use \`make info' here to do that automatically)."
|
||||
|
||||
info:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo "Running Texinfo files through makeinfo..."
|
||||
make -C $(BUILDDIR)/texinfo info
|
||||
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
|
||||
|
||||
gettext:
|
||||
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
|
||||
@echo
|
||||
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
|
||||
|
||||
changes:
|
||||
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
|
||||
@echo
|
||||
@echo "The overview file is in $(BUILDDIR)/changes."
|
||||
|
||||
linkcheck:
|
||||
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
|
||||
@echo
|
||||
@echo "Link check complete; look for any errors in the above output " \
|
||||
"or in $(BUILDDIR)/linkcheck/output.txt."
|
||||
|
||||
doctest:
|
||||
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
|
||||
@echo "Testing of doctests in the sources finished, look at the " \
|
||||
"results in $(BUILDDIR)/doctest/output.txt."
|
||||
|
||||
xml:
|
||||
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
|
||||
@echo
|
||||
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
|
||||
|
||||
pseudoxml:
|
||||
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
|
||||
@echo
|
||||
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
|
|
@ -1,265 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# sugardough documentation build configuration file, created by
|
||||
# sphinx-quickstart on Thu Nov 6 19:39:08 2014.
|
||||
#
|
||||
# This file is execfile()d with the current directory set to its
|
||||
# containing dir.
|
||||
#
|
||||
# Note that not all possible configuration values are present in this
|
||||
# autogenerated file.
|
||||
#
|
||||
# All configuration values have a default; values that are commented out
|
||||
# serve to show the default.
|
||||
|
||||
import sys
|
||||
import os
|
||||
|
||||
on_rtd = os.environ.get('READTHEDOCS') == 'True'
|
||||
if not on_rtd:
|
||||
import sphinx_rtd_theme
|
||||
html_theme = 'sphinx_rtd_theme'
|
||||
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
|
||||
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
#sys.path.insert(0, os.path.abspath('.'))
|
||||
|
||||
# -- General configuration ------------------------------------------------
|
||||
|
||||
# If your documentation needs a minimal Sphinx version, state it here.
|
||||
#needs_sphinx = '1.0'
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
extensions = []
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
templates_path = ['_templates']
|
||||
|
||||
# The suffix of source filenames.
|
||||
source_suffix = '.rst'
|
||||
|
||||
# The encoding of source files.
|
||||
#source_encoding = 'utf-8-sig'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u'sugardough'
|
||||
copyright = u'2014, Chef'
|
||||
|
||||
# The version info for the project you're documenting, acts as replacement for
|
||||
# |version| and |release|, also used in various other places throughout the
|
||||
# built documents.
|
||||
#
|
||||
# The short X.Y version.
|
||||
version = '0.1'
|
||||
# The full version, including alpha/beta/rc tags.
|
||||
release = '0.1'
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
#language = None
|
||||
|
||||
# There are two options for replacing |today|: either, you set today to some
|
||||
# non-false value, then it is used:
|
||||
#today = ''
|
||||
# Else, today_fmt is used as the format for a strftime call.
|
||||
#today_fmt = '%B %d, %Y'
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
exclude_patterns = ['_build']
|
||||
|
||||
# The reST default role (used for this markup: `text`) to use for all
|
||||
# documents.
|
||||
#default_role = None
|
||||
|
||||
# If true, '()' will be appended to :func: etc. cross-reference text.
|
||||
#add_function_parentheses = True
|
||||
|
||||
# If true, the current module name will be prepended to all description
|
||||
# unit titles (such as .. function::).
|
||||
#add_module_names = True
|
||||
|
||||
# If true, sectionauthor and moduleauthor directives will be shown in the
|
||||
# output. They are ignored by default.
|
||||
#show_authors = False
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
# A list of ignored prefixes for module index sorting.
|
||||
#modindex_common_prefix = []
|
||||
|
||||
# If true, keep warnings as "system message" paragraphs in the built documents.
|
||||
#keep_warnings = False
|
||||
|
||||
|
||||
# -- Options for HTML output ----------------------------------------------
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
html_theme = 'sphinx_rtd_theme'
|
||||
|
||||
# Theme options are theme-specific and customize the look and feel of a theme
|
||||
# further. For a list of options available for each theme, see the
|
||||
# documentation.
|
||||
#html_theme_options = {}
|
||||
|
||||
# Add any paths that contain custom themes here, relative to this directory.
|
||||
#html_theme_path = []
|
||||
|
||||
# The name for this set of Sphinx documents. If None, it defaults to
|
||||
# "<project> v<release> documentation".
|
||||
#html_title = None
|
||||
|
||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||
#html_short_title = None
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top
|
||||
# of the sidebar.
|
||||
#html_logo = None
|
||||
|
||||
# The name of an image file (within the static path) to use as favicon of the
|
||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||
# pixels large.
|
||||
#html_favicon = None
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
|
||||
# Add any extra paths that contain custom files (such as robots.txt or
|
||||
# .htaccess) here, relative to this directory. These files are copied
|
||||
# directly to the root of the documentation.
|
||||
#html_extra_path = []
|
||||
|
||||
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
|
||||
# using the given strftime format.
|
||||
#html_last_updated_fmt = '%b %d, %Y'
|
||||
|
||||
# If true, SmartyPants will be used to convert quotes and dashes to
|
||||
# typographically correct entities.
|
||||
#html_use_smartypants = True
|
||||
|
||||
# Custom sidebar templates, maps document names to template names.
|
||||
#html_sidebars = {}
|
||||
|
||||
# Additional templates that should be rendered to pages, maps page names to
|
||||
# template names.
|
||||
#html_additional_pages = {}
|
||||
|
||||
# If false, no module index is generated.
|
||||
#html_domain_indices = True
|
||||
|
||||
# If false, no index is generated.
|
||||
#html_use_index = True
|
||||
|
||||
# If true, the index is split into individual pages for each letter.
|
||||
#html_split_index = False
|
||||
|
||||
# If true, links to the reST sources are added to the pages.
|
||||
#html_show_sourcelink = True
|
||||
|
||||
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||
#html_show_sphinx = True
|
||||
|
||||
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||
#html_show_copyright = True
|
||||
|
||||
# If true, an OpenSearch description file will be output, and all pages will
|
||||
# contain a <link> tag referring to it. The value of this option must be the
|
||||
# base URL from which the finished HTML is served.
|
||||
#html_use_opensearch = ''
|
||||
|
||||
# This is the file name suffix for HTML files (e.g. ".xhtml").
|
||||
#html_file_suffix = None
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'sugardoughdoc'
|
||||
|
||||
|
||||
# -- Options for LaTeX output ---------------------------------------------
|
||||
|
||||
latex_elements = {
|
||||
# The paper size ('letterpaper' or 'a4paper').
|
||||
#'papersize': 'letterpaper',
|
||||
|
||||
# The font size ('10pt', '11pt' or '12pt').
|
||||
#'pointsize': '10pt',
|
||||
|
||||
# Additional stuff for the LaTeX preamble.
|
||||
#'preamble': '',
|
||||
}
|
||||
|
||||
# Grouping the document tree into LaTeX files. List of tuples
|
||||
# (source start file, target name, title,
|
||||
# author, documentclass [howto, manual, or own class]).
|
||||
latex_documents = [
|
||||
('index', 'sugardough.tex', u'sugardough Documentation',
|
||||
u'Chef', 'manual'),
|
||||
]
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top of
|
||||
# the title page.
|
||||
#latex_logo = None
|
||||
|
||||
# For "manual" documents, if this is true, then toplevel headings are parts,
|
||||
# not chapters.
|
||||
#latex_use_parts = False
|
||||
|
||||
# If true, show page references after internal links.
|
||||
#latex_show_pagerefs = False
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#latex_show_urls = False
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#latex_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#latex_domain_indices = True
|
||||
|
||||
|
||||
# -- Options for manual page output ---------------------------------------
|
||||
|
||||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [
|
||||
('index', 'sugardough', u'sugardough Documentation',
|
||||
[u'Chef'], 1)
|
||||
]
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
#man_show_urls = False
|
||||
|
||||
|
||||
# -- Options for Texinfo output -------------------------------------------
|
||||
|
||||
# Grouping the document tree into Texinfo files. List of tuples
|
||||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
('index', 'sugardough', u'sugardough Documentation',
|
||||
u'Chef', 'sugardough', 'One line description of project.',
|
||||
'Miscellaneous'),
|
||||
]
|
||||
|
||||
# Documents to append as an appendix to all manuals.
|
||||
#texinfo_appendices = []
|
||||
|
||||
# If false, no module index is generated.
|
||||
#texinfo_domain_indices = True
|
||||
|
||||
# How to display URL addresses: 'footnote', 'no', or 'inline'.
|
||||
#texinfo_show_urls = 'footnote'
|
||||
|
||||
# If true, do not generate a @detailmenu in the "Top" node's menu.
|
||||
#texinfo_no_detailmenu = False
|
|
@ -1,22 +0,0 @@
|
|||
.. sugardough documentation master file, created by
|
||||
sphinx-quickstart on Thu Nov 6 19:39:08 2014.
|
||||
You can adapt this file completely to your liking, but it should at least
|
||||
contain the root `toctree` directive.
|
||||
|
||||
Welcome to sugardough's documentation!
|
||||
======================================
|
||||
|
||||
Contents:
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
|
||||
|
||||
Indices and tables
|
||||
==================
|
||||
|
||||
* :ref:`genindex`
|
||||
* :ref:`modindex`
|
||||
* :ref:`search`
|
||||
|
|
@ -1,242 +0,0 @@
|
|||
@ECHO OFF
|
||||
|
||||
REM Command file for Sphinx documentation
|
||||
|
||||
if "%SPHINXBUILD%" == "" (
|
||||
set SPHINXBUILD=sphinx-build
|
||||
)
|
||||
set BUILDDIR=_build
|
||||
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
|
||||
set I18NSPHINXOPTS=%SPHINXOPTS% .
|
||||
if NOT "%PAPER%" == "" (
|
||||
set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
|
||||
set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
|
||||
)
|
||||
|
||||
if "%1" == "" goto help
|
||||
|
||||
if "%1" == "help" (
|
||||
:help
|
||||
echo.Please use `make ^<target^>` where ^<target^> is one of
|
||||
echo. html to make standalone HTML files
|
||||
echo. dirhtml to make HTML files named index.html in directories
|
||||
echo. singlehtml to make a single large HTML file
|
||||
echo. pickle to make pickle files
|
||||
echo. json to make JSON files
|
||||
echo. htmlhelp to make HTML files and a HTML help project
|
||||
echo. qthelp to make HTML files and a qthelp project
|
||||
echo. devhelp to make HTML files and a Devhelp project
|
||||
echo. epub to make an epub
|
||||
echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
|
||||
echo. text to make text files
|
||||
echo. man to make manual pages
|
||||
echo. texinfo to make Texinfo files
|
||||
echo. gettext to make PO message catalogs
|
||||
echo. changes to make an overview over all changed/added/deprecated items
|
||||
echo. xml to make Docutils-native XML files
|
||||
echo. pseudoxml to make pseudoxml-XML files for display purposes
|
||||
echo. linkcheck to check all external links for integrity
|
||||
echo. doctest to run all doctests embedded in the documentation if enabled
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "clean" (
|
||||
for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
|
||||
del /q /s %BUILDDIR%\*
|
||||
goto end
|
||||
)
|
||||
|
||||
|
||||
%SPHINXBUILD% 2> nul
|
||||
if errorlevel 9009 (
|
||||
echo.
|
||||
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
|
||||
echo.installed, then set the SPHINXBUILD environment variable to point
|
||||
echo.to the full path of the 'sphinx-build' executable. Alternatively you
|
||||
echo.may add the Sphinx directory to PATH.
|
||||
echo.
|
||||
echo.If you don't have Sphinx installed, grab it from
|
||||
echo.http://sphinx-doc.org/
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
if "%1" == "html" (
|
||||
%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The HTML pages are in %BUILDDIR%/html.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "dirhtml" (
|
||||
%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "singlehtml" (
|
||||
%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "pickle" (
|
||||
%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can process the pickle files.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "json" (
|
||||
%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can process the JSON files.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "htmlhelp" (
|
||||
%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can run HTML Help Workshop with the ^
|
||||
.hhp project file in %BUILDDIR%/htmlhelp.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "qthelp" (
|
||||
%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can run "qcollectiongenerator" with the ^
|
||||
.qhcp project file in %BUILDDIR%/qthelp, like this:
|
||||
echo.^> qcollectiongenerator %BUILDDIR%\qthelp\sugardough.qhcp
|
||||
echo.To view the help file:
|
||||
echo.^> assistant -collectionFile %BUILDDIR%\qthelp\sugardough.ghc
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "devhelp" (
|
||||
%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "epub" (
|
||||
%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The epub file is in %BUILDDIR%/epub.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "latex" (
|
||||
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "latexpdf" (
|
||||
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
|
||||
cd %BUILDDIR%/latex
|
||||
make all-pdf
|
||||
cd %BUILDDIR%/..
|
||||
echo.
|
||||
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "latexpdfja" (
|
||||
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
|
||||
cd %BUILDDIR%/latex
|
||||
make all-pdf-ja
|
||||
cd %BUILDDIR%/..
|
||||
echo.
|
||||
echo.Build finished; the PDF files are in %BUILDDIR%/latex.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "text" (
|
||||
%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The text files are in %BUILDDIR%/text.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "man" (
|
||||
%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The manual pages are in %BUILDDIR%/man.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "texinfo" (
|
||||
%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "gettext" (
|
||||
%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "changes" (
|
||||
%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.The overview file is in %BUILDDIR%/changes.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "linkcheck" (
|
||||
%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Link check complete; look for any errors in the above output ^
|
||||
or in %BUILDDIR%/linkcheck/output.txt.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "doctest" (
|
||||
%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Testing of doctests in the sources finished, look at the ^
|
||||
results in %BUILDDIR%/doctest/output.txt.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "xml" (
|
||||
%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The XML files are in %BUILDDIR%/xml.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "pseudoxml" (
|
||||
%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
|
||||
goto end
|
||||
)
|
||||
|
||||
:end
|
|
@ -1,18 +0,0 @@
|
|||
db:
|
||||
image: postgres:9.3
|
||||
web:
|
||||
build: .
|
||||
ports:
|
||||
- "8000:8000"
|
||||
volumes:
|
||||
- .:/app
|
||||
links:
|
||||
- db
|
||||
environment:
|
||||
- PYTHONDONTWRITEBYTECODE=1
|
||||
- DATABASE_URL=postgres://postgres@db/postgres
|
||||
- DEBUG=True
|
||||
- ALLOWED_HOSTS=localhost,127.0.0.1,
|
||||
- SECRET_KEY=59114b6a-2858-4caf-8878-482a24ee9542
|
||||
command:
|
||||
./bin/run-fig.sh
|
|
@ -1,10 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
import os
|
||||
import sys
|
||||
|
||||
if __name__ == "__main__":
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sugardough.settings")
|
||||
|
||||
from django.core.management import execute_from_command_line
|
||||
|
||||
execute_from_command_line(sys.argv)
|
|
@ -1,210 +0,0 @@
|
|||
# ---------------------------------------------------------------------------
|
||||
|
||||
#
|
||||
# This file configures the New Relic Python Agent.
|
||||
#
|
||||
# The path to the configuration file should be supplied to the function
|
||||
# newrelic.agent.initialize() when the agent is being initialized.
|
||||
#
|
||||
# The configuration file follows a structure similar to what you would
|
||||
# find for Microsoft Windows INI files. For further information on the
|
||||
# configuration file format see the Python ConfigParser documentation at:
|
||||
#
|
||||
# http://docs.python.org/library/configparser.html
|
||||
#
|
||||
# For further discussion on the behaviour of the Python agent that can
|
||||
# be configured via this configuration file see:
|
||||
#
|
||||
# http://newrelic.com/docs/python/python-agent-configuration
|
||||
#
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Here are the settings that are common to all environments.
|
||||
|
||||
[newrelic]
|
||||
|
||||
# You must specify the license key associated with your New
|
||||
# Relic account. This key binds the Python Agent's data to your
|
||||
# account in the New Relic service.
|
||||
# license_key = use NEW_RELIC_LICENSE_KEY environment variable
|
||||
|
||||
# The appplication name. Set this to be the name of your
|
||||
# application as you would like it to show up in New Relic UI.
|
||||
# The UI will then auto-map instances of your application into a
|
||||
# entry on your home dashboard page.
|
||||
# app_name = use NEW_RELIC_APP_NAME environment variable
|
||||
|
||||
# When "true", the agent collects performance data about your
|
||||
# application and reports this data to the New Relic UI at
|
||||
# newrelic.com. This global switch is normally overridden for
|
||||
# each environment below.
|
||||
monitor_mode = true
|
||||
|
||||
# Sets the name of a file to log agent messages to. Useful for
|
||||
# debugging any issues with the agent. This is not set by
|
||||
# default as it is not known in advance what user your web
|
||||
# application processes will run as and where they have
|
||||
# permission to write to. Whatever you set this to you must
|
||||
# ensure that the permissions for the containing directory and
|
||||
# the file itself are correct, and that the user that your web
|
||||
# application runs as can write to the file. If not able to
|
||||
# write out a log file, it is also possible to say "stderr" and
|
||||
# output to standard error output. This would normally result in
|
||||
# output appearing in your web server log.
|
||||
#log_file = /tmp/newrelic-python-agent.log
|
||||
|
||||
# Sets the level of detail of messages sent to the log file, if
|
||||
# a log file location has been provided. Possible values, in
|
||||
# increasing order of detail, are: "critical", "error", "warning",
|
||||
# "info" and "debug". When reporting any agent issues to New
|
||||
# Relic technical support, the most useful setting for the
|
||||
# support engineers is "debug". However, this can generate a lot
|
||||
# of information very quickly, so it is best not to keep the
|
||||
# agent at this level for longer than it takes to reproduce the
|
||||
# problem you are experiencing.
|
||||
log_level = info
|
||||
|
||||
# The Python Agent communicates with the New Relic service using
|
||||
# SSL by default. Note that this does result in an increase in
|
||||
# CPU overhead, over and above what would occur for a non SSL
|
||||
# connection, to perform the encryption involved in the SSL
|
||||
# communication. This work is though done in a distinct thread
|
||||
# to those handling your web requests, so it should not impact
|
||||
# response times. You can if you wish revert to using a non SSL
|
||||
# connection, but this will result in information being sent
|
||||
# over a plain socket connection and will not be as secure.
|
||||
ssl = true
|
||||
|
||||
# High Security Mode enforces certain security settings, and
|
||||
# prevents them from being overridden, so that no sensitive data
|
||||
# is sent to New Relic. Enabling High Security Mode means that
|
||||
# SSL is turned on, request parameters are not collected, and SQL
|
||||
# can not be sent to New Relic in its raw form. To activate High
|
||||
# Security Mode, it must be set to 'true' in this local .ini
|
||||
# configuration file AND be set to 'true' in the server-side
|
||||
# configuration in the New Relic user interface. For details, see
|
||||
# https://docs.newrelic.com/docs/subscriptions/high-security
|
||||
high_security = true
|
||||
|
||||
# The Python Agent will attempt to connect directly to the New
|
||||
# Relic service. If there is an intermediate firewall between
|
||||
# your host and the New Relic service that requires you to use a
|
||||
# HTTP proxy, then you should set both the "proxy_host" and
|
||||
# "proxy_port" settings to the required values for the HTTP
|
||||
# proxy. The "proxy_user" and "proxy_pass" settings should
|
||||
# additionally be set if proxy authentication is implemented by
|
||||
# the HTTP proxy. The "proxy_scheme" setting dictates what
|
||||
# protocol scheme is used in talking to the HTTP protocol. This
|
||||
# would normally always be set as "http" which will result in the
|
||||
# agent then using a SSL tunnel through the HTTP proxy for end to
|
||||
# end encryption.
|
||||
# proxy_scheme = http
|
||||
# proxy_host = hostname
|
||||
# proxy_port = 8080
|
||||
# proxy_user =
|
||||
# proxy_pass =
|
||||
|
||||
# Tells the transaction tracer and error collector (when
|
||||
# enabled) whether or not to capture the query string for the
|
||||
# URL and send it as the request parameters for display in the
|
||||
# UI. When "true", it is still possible to exclude specific
|
||||
# values from being captured using the "ignored_params" setting.
|
||||
capture_params = false
|
||||
|
||||
# Space separated list of variables that should be removed from
|
||||
# the query string captured for display as the request
|
||||
# parameters in the UI.
|
||||
ignored_params =
|
||||
|
||||
# The transaction tracer captures deep information about slow
|
||||
# transactions and sends this to the UI on a periodic basis. The
|
||||
# transaction tracer is enabled by default. Set this to "false"
|
||||
# to turn it off.
|
||||
transaction_tracer.enabled = true
|
||||
|
||||
# Threshold in seconds for when to collect a transaction trace.
|
||||
# When the response time of a controller action exceeds this
|
||||
# threshold, a transaction trace will be recorded and sent to
|
||||
# the UI. Valid values are any positive float value, or (default)
|
||||
# "apdex_f", which will use the threshold for a dissatisfying
|
||||
# Apdex controller action - four times the Apdex T value.
|
||||
transaction_tracer.transaction_threshold = apdex_f
|
||||
|
||||
# When the transaction tracer is on, SQL statements can
|
||||
# optionally be recorded. The recorder has three modes, "off"
|
||||
# which sends no SQL, "raw" which sends the SQL statement in its
|
||||
# original form, and "obfuscated", which strips out numeric and
|
||||
# string literals.
|
||||
transaction_tracer.record_sql = obfuscated
|
||||
|
||||
# Threshold in seconds for when to collect stack trace for a SQL
|
||||
# call. In other words, when SQL statements exceed this
|
||||
# threshold, then capture and send to the UI the current stack
|
||||
# trace. This is helpful for pinpointing where long SQL calls
|
||||
# originate from in an application.
|
||||
transaction_tracer.stack_trace_threshold = 0.5
|
||||
|
||||
# Determines whether the agent will capture query plans for slow
|
||||
# SQL queries. Only supported in MySQL and PostgreSQL. Set this
|
||||
# to "false" to turn it off.
|
||||
transaction_tracer.explain_enabled = true
|
||||
|
||||
# Threshold for query execution time below which query plans
|
||||
# will not not be captured. Relevant only when "explain_enabled"
|
||||
# is true.
|
||||
transaction_tracer.explain_threshold = 0.5
|
||||
|
||||
# Space separated list of function or method names in form
|
||||
# 'module:function' or 'module:class.function' for which
|
||||
# additional function timing instrumentation will be added.
|
||||
transaction_tracer.function_trace =
|
||||
|
||||
# The error collector captures information about uncaught
|
||||
# exceptions or logged exceptions and sends them to UI for
|
||||
# viewing. The error collector is enabled by default. Set this
|
||||
# to "false" to turn it off.
|
||||
error_collector.enabled = true
|
||||
|
||||
# To stop specific errors from reporting to the UI, set this to
|
||||
# a space separated list of the Python exception type names to
|
||||
# ignore. The exception name should be of the form 'module:class'.
|
||||
error_collector.ignore_errors =
|
||||
|
||||
# Browser monitoring is the Real User Monitoring feature of the UI.
|
||||
# For those Python web frameworks that are supported, this
|
||||
# setting enables the auto-insertion of the browser monitoring
|
||||
# JavaScript fragments.
|
||||
browser_monitoring.auto_instrument = true
|
||||
|
||||
# A thread profiling session can be scheduled via the UI when
|
||||
# this option is enabled. The thread profiler will periodically
|
||||
# capture a snapshot of the call stack for each active thread in
|
||||
# the application to construct a statistically representative
|
||||
# call tree.
|
||||
thread_profiler.enabled = true
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
#
|
||||
# The application environments. These are specific settings which
|
||||
# override the common environment settings. The settings related to a
|
||||
# specific environment will be used when the environment argument to the
|
||||
# newrelic.agent.initialize() function has been defined to be either
|
||||
# "development", "test", "staging" or "production".
|
||||
#
|
||||
|
||||
[newrelic:development]
|
||||
monitor_mode = false
|
||||
|
||||
[newrelic:test]
|
||||
monitor_mode = false
|
||||
|
||||
[newrelic:staging]
|
||||
app_name = Python Application (Staging)
|
||||
monitor_mode = true
|
||||
|
||||
[newrelic:production]
|
||||
monitor_mode = true
|
||||
|
||||
# ---------------------------------------------------------------------------
|
|
@ -1,96 +0,0 @@
|
|||
# Adding peep so we can track it on requires.io. For packages
|
||||
# installation we use peep.py shipped in ./bin
|
||||
# sha256: D3VN0_wgThq7_XnEfyVZXFqU0kfOWuUrsmmZWqLnmTM
|
||||
peep==2.1
|
||||
|
||||
# sha256: QW014kelmSc9QP0Bp8pIJmbr2UZ7lYUsqUpV3bzxDuA
|
||||
# sha256: 49SRYStWkFJZgEtdIpJhJr7ZoMKoMXSFgrhQUAkgs3A
|
||||
# sha256: lZZFTHQINrALhLsuwySDga96cQXWoY_i6BO9GwW6MbE
|
||||
peepin==0.7
|
||||
|
||||
# sha256: cu3Ue1WudI0p8acdXKS4bnhcn7l0QHzyQrMWjm8bF34
|
||||
# sha256: 8ib7iqQ4RWlo1AP2c53hzy2tEo24b2buK0Hf6-NkXFs
|
||||
Django==1.7.3
|
||||
|
||||
# sha256: 8uJz7TSsu1YJYtXPEpF5NtjfAil98JvTCJuFRtRYQTg
|
||||
# sha256: ygF2j97N4TQwHzFwdDIm9g7f9cOTXxJDc3jr2RFQY1M
|
||||
dj-database-url==0.3.0
|
||||
|
||||
# sha256: JLo_Moq6CNjVBbwqj2ipTr4kqF6Eg2QLZnIhvA79Ox4
|
||||
psycopg2==2.5.4
|
||||
|
||||
# sha256: ufv3R_qOcR0zDy9Da5suz17tnrZ6Y3yoGrzRHCq-3sg
|
||||
python-decouple==2.3
|
||||
|
||||
# sha256: LiSsXQBNtXFJdqBKwOgMbfbkfpjDVMssDYL4h51Pj9s
|
||||
Jinja2==2.7.3
|
||||
|
||||
# sha256: o71QUd0X6dUOip8tNiUI6C1wYwEP3AbV4HzjYI54D-Y
|
||||
jingo==0.7
|
||||
|
||||
# sha256: pOwa_1m5WhS0XrLiN2GgF56YMZ2lp-t2tW6ozce4ccM
|
||||
MarkupSafe==0.23
|
||||
|
||||
# sha256: gnFVEftiRvrUumbYEuuTQWroNxtGT6iL84Z8nBd9qhQ
|
||||
# sha256: _7QRdX3jdNBcdZySmQjSOXxm47z2813uQabvnrhRSXs
|
||||
gunicorn==19.1.1
|
||||
# sha256: Pk2AGZlglZuCiVGqJMv6o2zr7RSdzXKMEehVeYsV-HA
|
||||
# sha256: 2slBnbPs4nu1PHQzJD_CLtQs9o3p1sQSk5Dh2a7-YxA
|
||||
whitenoise==1.0.6
|
||||
|
||||
# sha256: 0yV2uq13_dnmhZvpfCtqPBem9IQ2Q4nbcx8Gl7ehlmE
|
||||
django-csp==2.0.3
|
||||
# sha256: DJMq9feHqTyMvpNbZgequ5JcxijWsldRgTNES0FRpgA
|
||||
# sha256: hLcOgcH8zdVDirgjgIguLtEarEjtZdRIL60feZC6Q1I
|
||||
# sha256: e0ZyF7n0tLNXjqgMGH1KZFhBN-s5pP0J06k5uyWiRzE
|
||||
# sha256: _iM9iNuVAbn4doWmVuCAQioE3fsR8AInX4KYbXFwSfg
|
||||
# sha256: VDjXSekjyRR0H-KkELUoq-JwUwAPv4eLxDdCjQiuCrE
|
||||
# sha256: murD-YhlyCltxRNxMdRH4v7_DSytAlhIj1dm_aE3TQ8
|
||||
# sha256: plnqNp9L4hoxtIrrZ-xi92JwzFP0CPNXiIO_9BoCGWY
|
||||
# sha256: K7Ah5rGyKc2zSLN-1NBkPBEVrEOdaC_KF0p4WItYoEI
|
||||
# sha256: muSNOY60kfi7ERHlNT-an22BHjjCpp1Zujj5e1TZC0w
|
||||
# sha256: OH-Wj955OxQoZYApFlYYOfVZHYtLFMlBEl6w_Kfk5Y0
|
||||
# sha256: qUE4tjiQdJH0c8h16MlSA6agLv71K2VivjAuQ1AW9PM
|
||||
# sha256: TH1S2kEIdel3YFwZTJP7KaLZIfmSNLUBM8Zip9Qmj9Y
|
||||
pytz==2014.10
|
||||
# sha256: eLAxFrrdDbsu1N7clqSj1CE4uUu4n8Lrmffzvf0Zn1Y
|
||||
# sha256: zIrr3sWl-5iZEvFX93s8IaXi8tpiOvkKe0drEGqDSr8
|
||||
# sha256: drxjpOLV5aDfd8p9GPD1bixGz7YrcRA7qSqSx5-rHgM
|
||||
nose==1.3.4
|
||||
# sha256: NmfSakH-wwNkoO9yWAgyylMogC1VP21ucq9awhyzY2U
|
||||
django-nose==1.3
|
||||
# sha256: J7kqegDxYuebWJb-nliXGbmF6c_fRFgmzBiFYVnObB4
|
||||
newrelic==2.40.0.34
|
||||
|
||||
# sha256: yZzJcW1mVdnIvLHndjK4YVvwq9KC16vZ9cIUjK1_xmk
|
||||
# sha256: XuGkPM0HFtYGFSHuxpN8mD76AneTAT5XJxLE2lXHyD4
|
||||
flake8==2.3.0
|
||||
|
||||
# sha256: q409VanEtW-Xy2D21rk92UlUwWEU8K6HLCR6zoI_Fds
|
||||
# sha256: jy0vup7228b_a2mm76_Yg1voKHm4rnmkM2HH1k42Pew
|
||||
# sha256: PphGaqL-VFQLy6mqbgGjn0ARDWdmjClzQMS5UUt8xJw
|
||||
# sha256: MmP8nqIHC-HP-ERXhtbzGLdnaMAGg4n7OFOw0us_THI
|
||||
ipython==2.3.1
|
||||
|
||||
# sha256: DUoz845megyA7BpaDkaP5Y5c3GHLj0B3sQJLhf57cRc
|
||||
ipdb==0.8
|
||||
|
||||
# sha256: 0UXkOs2Dxbos77J2EH6Wb-YzXbAmWnoLSxCirbhM0m0
|
||||
# sha256: uwkN7xH7pRFnpYjYyklp9Jo8Q3rYrPlg39OcSwzxYAk
|
||||
# sha256: lJM7ZOL-CAfaBhLFdKAhwNrCjHvTxKI3I65aOeqPPQQ
|
||||
Sphinx==1.2.3
|
||||
|
||||
# sha256: PYBK7mdyHzjoQO-jo65SiYMo5kcJJZNB8qiQHVvwHNQ
|
||||
# sha256: Dyn1RPbQN5ifoMdymp6rfk2OpQ1vDvNzY_RydWwe3KY
|
||||
sphinx-rtd-theme==0.1.6
|
||||
# sha256: 9gl24LXtjMs_y-WRWCssnN7AGAg3YvXUk7q9GG29W4U
|
||||
# sha256: Klo6r_GcU0ANRc7QHOvPE6VIJW2l3NGcGF5ZdG7f7Q0
|
||||
sphinx-autobuild==0.4.0
|
||||
|
||||
# sha256: x9txeBCraWX2bIzwOYqYydjfmC2jm0zX8WKRHriVlvo
|
||||
docutils==0.12
|
||||
|
||||
# sha256: XgOeHUDSMpge1YkUttGsLkU6foPd6iLvnz7q3QHeRcs
|
||||
# sha256: ZoyOZluTX2I4WcEyE-G8mzP9Gf7sTbfC-OhVwcjFWdc
|
||||
# sha256: fydQ9H4cA_ffSGbTdhm4B0_8HTIYI7taNRMC_k3SC2c
|
||||
Pygments==2.0.1
|
|
@ -1,5 +0,0 @@
|
|||
[flake8]
|
||||
ignore=E121,E123,E124,E125,E126,E128,E129
|
||||
max-line-length=100
|
||||
exclude=migrations
|
||||
|
|
@ -1,10 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
from distutils.core import setup
|
||||
|
||||
setup(name='sugardough',
|
||||
version='0.1dev',
|
||||
description='This is https://github.com/mozilla/sugardough',
|
||||
author='Chef',
|
||||
author_email='',
|
||||
url='https://github.com/mozilla/sugardough')
|
|
@ -1,5 +0,0 @@
|
|||
from django.contrib.staticfiles.templatetags.staticfiles import static
|
||||
from jingo import register
|
||||
|
||||
|
||||
static = register.function(static)
|
|
@ -1,9 +0,0 @@
|
|||
from nose.tools import eq_
|
||||
|
||||
from django.test import TestCase
|
||||
|
||||
|
||||
class SampleTest(TestCase):
|
||||
|
||||
def test_adder(self):
|
||||
eq_(1 + 1, 2)
|
|
@ -1,10 +0,0 @@
|
|||
import sys
|
||||
|
||||
import decouple
|
||||
|
||||
|
||||
try:
|
||||
from .base import * # noqa
|
||||
except decouple.UndefinedValueError as exp:
|
||||
print('ERROR:', exp.message)
|
||||
sys.exit(1)
|
|
@ -1,139 +0,0 @@
|
|||
"""
|
||||
Django settings for sugardough project.
|
||||
|
||||
For more information on this file, see
|
||||
https://docs.djangoproject.com/en/1.7/topics/settings/
|
||||
|
||||
For the full list of settings and their values, see
|
||||
https://docs.djangoproject.com/en/1.7/ref/settings/
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
import dj_database_url
|
||||
from decouple import Csv, config
|
||||
|
||||
|
||||
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
|
||||
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
|
||||
|
||||
# Quick-start development settings - unsuitable for production
|
||||
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
|
||||
|
||||
# SECURITY WARNING: keep the secret key used in production secret!
|
||||
SECRET_KEY = config('SECRET_KEY')
|
||||
|
||||
# SECURITY WARNING: don't run with debug turned on in production!
|
||||
DEBUG = config('DEBUG', cast=bool)
|
||||
|
||||
TEMPLATE_DEBUG = config('DEBUG', default=DEBUG, cast=bool)
|
||||
|
||||
ALLOWED_HOSTS = config('ALLOWED_HOSTS', cast=Csv())
|
||||
|
||||
|
||||
# Application definition
|
||||
|
||||
INSTALLED_APPS = [
|
||||
# Project specific apps
|
||||
'sugardough.base',
|
||||
|
||||
# Third party apps
|
||||
'django_nose',
|
||||
|
||||
# Django apps
|
||||
'django.contrib.admin',
|
||||
'django.contrib.auth',
|
||||
'django.contrib.contenttypes',
|
||||
'django.contrib.sessions',
|
||||
'django.contrib.messages',
|
||||
'django.contrib.staticfiles',
|
||||
]
|
||||
|
||||
for app in config('EXTRA_APPS', default='', cast=Csv()):
|
||||
INSTALLED_APPS.append(app)
|
||||
|
||||
|
||||
MIDDLEWARE_CLASSES = (
|
||||
'django.contrib.sessions.middleware.SessionMiddleware',
|
||||
'django.middleware.common.CommonMiddleware',
|
||||
'django.middleware.csrf.CsrfViewMiddleware',
|
||||
'django.contrib.auth.middleware.AuthenticationMiddleware',
|
||||
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
|
||||
'django.contrib.messages.middleware.MessageMiddleware',
|
||||
'django.middleware.clickjacking.XFrameOptionsMiddleware',
|
||||
'csp.middleware.CSPMiddleware',
|
||||
)
|
||||
|
||||
ROOT_URLCONF = 'sugardough.urls'
|
||||
|
||||
WSGI_APPLICATION = 'sugardough.wsgi.application'
|
||||
|
||||
|
||||
# Database
|
||||
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
|
||||
|
||||
DATABASES = {
|
||||
'default': config(
|
||||
'DATABASE_URL',
|
||||
cast=dj_database_url.parse
|
||||
)
|
||||
}
|
||||
|
||||
# Internationalization
|
||||
# https://docs.djangoproject.com/en/1.7/topics/i18n/
|
||||
|
||||
LANGUAGE_CODE = config('LANGUAGE_CODE', default='en-us')
|
||||
|
||||
TIME_ZONE = config('TIME_ZONE', default='UTC')
|
||||
|
||||
USE_I18N = config('USE_I18N', default=True, cast=bool)
|
||||
|
||||
USE_L10N = config('USE_L10N', default=True, cast=bool)
|
||||
|
||||
USE_TZ = config('USE_TZ', default=True, cast=bool)
|
||||
|
||||
STATIC_ROOT = config('STATIC_ROOT', default=os.path.join(BASE_DIR, 'static'))
|
||||
STATIC_URL = config('STATIC_URL', '/static/')
|
||||
|
||||
MEDIA_ROOT = config('MEDIA_ROOT', default=os.path.join(BASE_DIR, 'media'))
|
||||
MEDIA_URL = config('MEDIA_URL', '/media/')
|
||||
|
||||
SESSION_COOKIE_SECURE = config('SESSION_COOKIE_SECURE', default=not DEBUG, cast=bool)
|
||||
|
||||
TEMPLATE_LOADERS = (
|
||||
'jingo.Loader',
|
||||
'django.template.loaders.filesystem.Loader',
|
||||
'django.template.loaders.app_directories.Loader',
|
||||
)
|
||||
|
||||
# Django-CSP
|
||||
CSP_DEFAULT_SRC = (
|
||||
"'self'",
|
||||
)
|
||||
CSP_FONT_SRC = (
|
||||
"'self'",
|
||||
'http://*.mozilla.net',
|
||||
'https://*.mozilla.net'
|
||||
)
|
||||
CSP_IMG_SRC = (
|
||||
"'self'",
|
||||
'http://*.mozilla.net',
|
||||
'https://*.mozilla.net',
|
||||
)
|
||||
CSP_SCRIPT_SRC = (
|
||||
"'self'",
|
||||
'http://www.mozilla.org',
|
||||
'https://www.mozilla.org',
|
||||
'http://*.mozilla.net',
|
||||
'https://*.mozilla.net',
|
||||
)
|
||||
CSP_STYLE_SRC = (
|
||||
"'self'",
|
||||
"'unsafe-inline'",
|
||||
'http://www.mozilla.org',
|
||||
'https://www.mozilla.org',
|
||||
'http://*.mozilla.net',
|
||||
'https://*.mozilla.net',
|
||||
)
|
||||
|
||||
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
|
|
@ -1,10 +0,0 @@
|
|||
from django.conf.urls import patterns, include, url
|
||||
from django.contrib import admin
|
||||
|
||||
urlpatterns = patterns('',
|
||||
# Examples:
|
||||
# url(r'^$', 'sugardough.base.views.home', name='home'),
|
||||
# url(r'^blog/', include('blog.urls')),
|
||||
|
||||
url(r'^admin/', include(admin.site.urls)),
|
||||
)
|
|
@ -1,32 +0,0 @@
|
|||
"""
|
||||
WSGI config for sugardough project.
|
||||
|
||||
It exposes the WSGI callable as a module-level variable named ``application``.
|
||||
|
||||
For more information on this file, see
|
||||
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
|
||||
"""
|
||||
|
||||
import os
|
||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sugardough.settings')
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.wsgi import get_wsgi_application
|
||||
|
||||
import newrelic
|
||||
from decouple import config
|
||||
from whitenoise.django import DjangoWhiteNoise
|
||||
|
||||
application = get_wsgi_application()
|
||||
application = DjangoWhiteNoise(application)
|
||||
|
||||
# Add media files
|
||||
if settings.MEDIA_ROOT and settings.MEDIA_URL:
|
||||
application.add_files(settings.MEDIA_ROOT, prefix=settings.MEDIA_URL)
|
||||
|
||||
# Add NewRelic
|
||||
newrelic_ini = config('NEW_RELIC_CONFIG_FILE', default='newrelic.ini')
|
||||
newrelic_license_key = config('NEW_RELIC_LICENSE_KEY', default=None)
|
||||
if newrelic_ini and newrelic_license_key:
|
||||
newrelic.agent.initialize(newrelic_ini)
|
||||
application = newrelic.agent.wsgi_application()(application)
|
|
@ -1,29 +0,0 @@
|
|||
[tox]
|
||||
envlist = tests, flake8, docs
|
||||
|
||||
[testenv]
|
||||
basepython = python2.7
|
||||
setenv =
|
||||
DEBUG=False
|
||||
SECRET_KEY='FOO'
|
||||
ALLOWED_HOSTS=localhost
|
||||
DATABASE_URL=postgres://localhost/sugardough_db
|
||||
|
||||
[testenv:tests]
|
||||
deps =
|
||||
coverage==3.7.1
|
||||
commands =
|
||||
{toxinidir}/bin/peep.py install -r requirements.txt
|
||||
coverage run manage.py test
|
||||
|
||||
[testenv:flake8]
|
||||
deps = flake8
|
||||
commands = flake8 sugardough
|
||||
|
||||
[testenv:docs]
|
||||
whitelist_externals = make
|
||||
deps =
|
||||
sphinx
|
||||
sphinx-rtd-theme
|
||||
sphinx-autobuild
|
||||
commands = make -C docs html
|
|
@ -0,0 +1,4 @@
|
|||
# Ignore everything in this directory
|
||||
*
|
||||
# Except this file
|
||||
!.gitignore
|
|
@ -6,9 +6,8 @@ It exposes the WSGI callable as a module-level variable named ``application``.
|
|||
For more information on this file, see
|
||||
https://docs.djangoproject.com/en/1.7/howto/deployment/wsgi/
|
||||
"""
|
||||
|
||||
import os
|
||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', '{{ cookiecutter.project_name }}.settings')
|
||||
os.environ.setdefault('DJANGO_SETTINGS_MODULE', '{{ cookiecutter.project_name }}.settings') # NOQA
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.wsgi import get_wsgi_application
|
||||
|
@ -17,6 +16,7 @@ import newrelic
|
|||
from decouple import config
|
||||
from whitenoise.django import DjangoWhiteNoise
|
||||
|
||||
|
||||
application = get_wsgi_application()
|
||||
application = DjangoWhiteNoise(application)
|
||||
|
||||
|
|
Загрузка…
Ссылка в новой задаче