Mirror of https://github.com/microsoft/docker.git

builder: parser and beginnings of the evaluator

Docker-DCO-1.1-Signed-off-by: Erik Hollensbe <github@hollensbe.org> (github: erikh)

Parent: 334dca15b0
Commit: 22c46af4b3
@ -0,0 +1,2 @@
main
gopath
@ -0,0 +1,2 @@
builder
Dockerfile
@ -0,0 +1,31 @@
package main

import (
	"os"

	"github.com/erikh/buildfile/evaluator"
)

func main() {
	if len(os.Args) < 2 {
		os.Stderr.WriteString("Please supply filename(s) to evaluate")
		os.Exit(1)
	}

	for _, fn := range os.Args[1:] {
		f, err := os.Open(fn)
		if err != nil {
			panic(err)
		}

		opts := &evaluator.BuildOpts{}

		bf, err := opts.NewBuildFile(f)
		if err != nil {
			panic(err)
		}
		if err := bf.Run(); err != nil {
			panic(err)
		}
	}
}
@ -0,0 +1,45 @@
package evaluator

import (
	"fmt"
	"strings"
)

func env(b *buildFile, args ...string) error {
	if len(args) != 2 {
		return fmt.Errorf("ENV accepts two arguments")
	}

	// the duplication here is intended to ease the replaceEnv() call's env
	// handling. This routine gets much shorter with the denormalization here.
	key := args[0]
	b.env[key] = args[1]
	b.config.Env = append(b.config.Env, strings.Join([]string{key, b.env[key]}, "="))

	return b.commit("", b.config.Cmd, fmt.Sprintf("ENV %s=%s", key, b.env[key]))
}

func maintainer(b *buildFile, args ...string) error {
	if len(args) != 1 {
		return fmt.Errorf("MAINTAINER requires exactly one argument")
	}

	b.maintainer = args[0]
	return b.commit("", b.config.Cmd, fmt.Sprintf("MAINTAINER %s", b.maintainer))
}

func add(b *buildFile, args ...string) error {
	if len(args) != 2 {
		return fmt.Errorf("ADD requires two arguments")
	}

	return b.runContextCommand(args, true, true, "ADD")
}

func dispatchCopy(b *buildFile, args ...string) error {
	if len(args) != 2 {
		return fmt.Errorf("COPY requires two arguments")
	}

	return b.runContextCommand(args, false, false, "COPY")
}
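The handlers above all share the func(*buildFile, ...string) error shape that the evaluate table in the next file keys on, and the commented-out table entries hint at the instructions still to be ported. As a rough illustration of how another instruction would slot into the same file, here is a hypothetical workdir dispatcher; it is not part of this commit, and it assumes runconfig.Config exposes a WorkingDir field:

// Hypothetical sketch only: a WORKDIR handler in the same shape as the
// dispatchers above, to be paired with a "workdir": workdir entry in
// evaluateTable. Assumes b.config (runconfig.Config) has a WorkingDir field.
func workdir(b *buildFile, args ...string) error {
	if len(args) != 1 {
		return fmt.Errorf("WORKDIR requires exactly one argument")
	}

	// record the working directory on the image config being built up
	b.config.WorkingDir = args[0]

	return b.commit("", b.config.Cmd, fmt.Sprintf("WORKDIR %s", args[0]))
}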
@ -0,0 +1,118 @@
package evaluator

import (
	"fmt"
	"io"
	"strings"

	"github.com/erikh/buildfile/parser"

	"github.com/docker/docker/daemon"
	"github.com/docker/docker/engine"
	"github.com/docker/docker/nat"
	"github.com/docker/docker/registry"
	"github.com/docker/docker/runconfig"
	"github.com/docker/docker/utils"
)

var (
	evaluateTable = map[string]func(*buildFile, ...string) error{
		"env":        env,
		"maintainer": maintainer,
		"add":        add,
		"copy":       dispatchCopy, // copy() is a go builtin
		//"onbuild":        parseMaybeJSON,
		//"workdir":        parseString,
		//"docker-version": parseString,
		//"run":            parseMaybeJSON,
		//"cmd":            parseMaybeJSON,
		//"entrypoint":     parseMaybeJSON,
		//"expose":         parseMaybeJSON,
		//"volume":         parseMaybeJSON,
	}
)

type buildFile struct {
	dockerfile *parser.Node
	env        envMap
	image      string
	config     *runconfig.Config
	options    *BuildOpts
	maintainer string
}

type BuildOpts struct {
	Daemon          *daemon.Daemon
	Engine          *engine.Engine
	OutStream       io.Writer
	ErrStream       io.Writer
	Verbose         bool
	UtilizeCache    bool
	Remove          bool
	ForceRm         bool
	OutOld          io.Writer
	StreamFormatter *utils.StreamFormatter
	Auth            *registry.AuthConfig
	AuthConfigFile  *registry.ConfigFile
}

func (opts *BuildOpts) NewBuildFile(file io.ReadWriteCloser) (*buildFile, error) {
	ast, err := parser.Parse(file)
	if err != nil {
		return nil, err
	}

	return &buildFile{
		dockerfile: ast,
		env:        envMap{},
		config:     initRunConfig(),
		options:    opts,
	}, nil
}

func (b *buildFile) Run() error {
	node := b.dockerfile

	for i, n := range node.Children {
		if err := b.dispatch(i, n); err != nil {
			return err
		}
	}

	return nil
}

func initRunConfig() *runconfig.Config {
	return &runconfig.Config{
		PortSpecs: []string{},
		// FIXME(erikh) this should be a type that lives in runconfig
		ExposedPorts: map[nat.Port]struct{}{},
		Env:          []string{},
		Cmd:          []string{},

		// FIXME(erikh) this should also be a type in runconfig
		Volumes:    map[string]struct{}{},
		Entrypoint: []string{},
		OnBuild:    []string{},
	}
}

func (b *buildFile) dispatch(stepN int, ast *parser.Node) error {
	cmd := ast.Value
	strs := []string{}
	for ast.Next != nil {
		ast = ast.Next
		strs = append(strs, replaceEnv(b, stripQuotes(ast.Value)))
	}

	fmt.Fprintf(b.options.OutStream, "Step %d : %s %s\n", stepN, cmd, strings.Join(strs, " "))

	// XXX yes, we skip any cmds that are not valid; the parser should have
	// picked these out already.
	if f, ok := evaluateTable[cmd]; ok {
		return f(b, strs...)
	}

	return nil
}
@ -0,0 +1,247 @@
package evaluator

func (b *buildFile) addContext(context io.Reader) (string, error) {
	tmpdirPath, err := ioutil.TempDir("", "docker-build")
	if err != nil {
		return "", err
	}

	decompressedStream, err := archive.DecompressStream(context)
	if err != nil {
		return "", err
	}

	b.context = &tarsum.TarSum{Reader: decompressedStream, DisableCompression: true}
	if err := archive.Untar(b.context, tmpdirPath, nil); err != nil {
		return "", err
	}

	b.contextPath = tmpdirPath
	return tmpdirPath, nil
}

func (b *buildFile) commit(id string, autoCmd []string, comment string) error {
	if b.image == "" {
		return fmt.Errorf("Please provide a source image with `from` prior to commit")
	}
	b.config.Image = b.image
	if id == "" {
		cmd := b.config.Cmd
		b.config.Cmd = []string{"/bin/sh", "-c", "#(nop) " + comment}
		defer func(cmd []string) { b.config.Cmd = cmd }(cmd)

		hit, err := b.probeCache()
		if err != nil {
			return err
		}
		if hit {
			return nil
		}

		container, warnings, err := b.daemon.Create(b.config, "")
		if err != nil {
			return err
		}
		for _, warning := range warnings {
			fmt.Fprintf(b.outStream, " ---> [Warning] %s\n", warning)
		}
		b.tmpContainers[container.ID] = struct{}{}
		fmt.Fprintf(b.outStream, " ---> Running in %s\n", utils.TruncateID(container.ID))
		id = container.ID

		if err := container.Mount(); err != nil {
			return err
		}
		defer container.Unmount()
	}
	container := b.daemon.Get(id)
	if container == nil {
		return fmt.Errorf("An error occurred while creating the container")
	}

	// Note: Actually copy the struct
	autoConfig := *b.config
	autoConfig.Cmd = autoCmd
	// Commit the container
	image, err := b.daemon.Commit(container, "", "", "", b.maintainer, true, &autoConfig)
	if err != nil {
		return err
	}
	b.tmpImages[image.ID] = struct{}{}
	b.image = image.ID
	return nil
}

func (b *buildFile) runContextCommand(args []string, allowRemote bool, allowDecompression bool, cmdName string) error {
	if b.context == nil {
		return fmt.Errorf("No context given. Impossible to use %s", cmdName)
	}
	if len(args) != 2 {
		return fmt.Errorf("Invalid %s format", cmdName)
	}

	orig, err := b.ReplaceEnvMatches(strings.Trim(args[0], " \t"))
	if err != nil {
		return err
	}

	dest, err := b.ReplaceEnvMatches(strings.Trim(args[1], " \t"))
	if err != nil {
		return err
	}

	cmd := b.config.Cmd
	b.config.Cmd = []string{"/bin/sh", "-c", fmt.Sprintf("#(nop) %s %s in %s", cmdName, orig, dest)}
	defer func(cmd []string) { b.config.Cmd = cmd }(cmd)
	b.config.Image = b.image

	var (
		origPath   = orig
		destPath   = dest
		remoteHash string
		isRemote   bool
		decompress = true
	)

	isRemote = utils.IsURL(orig)
	if isRemote && !allowRemote {
		return fmt.Errorf("Source can't be a URL for %s", cmdName)
	} else if utils.IsURL(orig) {
		// Initiate the download
		resp, err := utils.Download(orig)
		if err != nil {
			return err
		}

		// Create a tmp dir
		tmpDirName, err := ioutil.TempDir(b.contextPath, "docker-remote")
		if err != nil {
			return err
		}

		// Create a tmp file within our tmp dir
		tmpFileName := path.Join(tmpDirName, "tmp")
		tmpFile, err := os.OpenFile(tmpFileName, os.O_RDWR|os.O_CREATE|os.O_EXCL, 0600)
		if err != nil {
			return err
		}
		defer os.RemoveAll(tmpDirName)

		// Download and dump result to tmp file
		if _, err := io.Copy(tmpFile, resp.Body); err != nil {
			tmpFile.Close()
			return err
		}
		tmpFile.Close()

		// Remove the mtime of the newly created tmp file
		if err := system.UtimesNano(tmpFileName, make([]syscall.Timespec, 2)); err != nil {
			return err
		}

		origPath = path.Join(filepath.Base(tmpDirName), filepath.Base(tmpFileName))

		// Process the checksum
		r, err := archive.Tar(tmpFileName, archive.Uncompressed)
		if err != nil {
			return err
		}
		tarSum := &tarsum.TarSum{Reader: r, DisableCompression: true}
		if _, err := io.Copy(ioutil.Discard, tarSum); err != nil {
			return err
		}
		remoteHash = tarSum.Sum(nil)
		r.Close()

		// If the destination is a directory, figure out the filename.
		if strings.HasSuffix(dest, "/") {
			u, err := url.Parse(orig)
			if err != nil {
				return err
			}
			path := u.Path
			if strings.HasSuffix(path, "/") {
				path = path[:len(path)-1]
			}
			parts := strings.Split(path, "/")
			filename := parts[len(parts)-1]
			if filename == "" {
				return fmt.Errorf("cannot determine filename from url: %s", u)
			}
			destPath = dest + filename
		}
	}

	if err := b.checkPathForAddition(origPath); err != nil {
		return err
	}

	// Hash path and check the cache
	if b.utilizeCache {
		var (
			hash string
			sums = b.context.GetSums()
		)

		if remoteHash != "" {
			hash = remoteHash
		} else if fi, err := os.Stat(path.Join(b.contextPath, origPath)); err != nil {
			return err
		} else if fi.IsDir() {
			var subfiles []string
			for file, sum := range sums {
				absFile := path.Join(b.contextPath, file)
				absOrigPath := path.Join(b.contextPath, origPath)
				if strings.HasPrefix(absFile, absOrigPath) {
					subfiles = append(subfiles, sum)
				}
			}
			sort.Strings(subfiles)
			hasher := sha256.New()
			hasher.Write([]byte(strings.Join(subfiles, ",")))
			hash = "dir:" + hex.EncodeToString(hasher.Sum(nil))
		} else {
			if origPath[0] == '/' && len(origPath) > 1 {
				origPath = origPath[1:]
			}
			origPath = strings.TrimPrefix(origPath, "./")
			if h, ok := sums[origPath]; ok {
				hash = "file:" + h
			}
		}
		b.config.Cmd = []string{"/bin/sh", "-c", fmt.Sprintf("#(nop) %s %s in %s", cmdName, hash, dest)}
		hit, err := b.probeCache()
		if err != nil {
			return err
		}
		// If we do not have a hash, never use the cache
		if hit && hash != "" {
			return nil
		}
	}

	// Create the container
	container, _, err := b.daemon.Create(b.config, "")
	if err != nil {
		return err
	}
	b.tmpContainers[container.ID] = struct{}{}

	if err := container.Mount(); err != nil {
		return err
	}
	defer container.Unmount()

	if !allowDecompression || isRemote {
		decompress = false
	}
	if err := b.addContext(container, origPath, destPath, decompress); err != nil {
		return err
	}

	if err := b.commit(container.ID, cmd, fmt.Sprintf("%s %s in %s", cmdName, orig, dest)); err != nil {
		return err
	}
	return nil
}
@ -0,0 +1,33 @@
package evaluator

import (
	"regexp"
	"strings"
)

var (
	TOKEN_ESCAPED_QUOTE     = regexp.MustCompile(`\\"`)
	TOKEN_ESCAPED_ESCAPE    = regexp.MustCompile(`\\\\`)
	TOKEN_ENV_INTERPOLATION = regexp.MustCompile("(\\\\\\\\+|[^\\\\]|\\b|\\A)\\$({?)([[:alnum:]_]+)(}?)")
)

func stripQuotes(str string) string {
	str = str[1 : len(str)-1]
	str = TOKEN_ESCAPED_QUOTE.ReplaceAllString(str, `"`)
	return TOKEN_ESCAPED_ESCAPE.ReplaceAllString(str, `\`)
}

func replaceEnv(b *buildFile, str string) string {
	for _, match := range TOKEN_ENV_INTERPOLATION.FindAllString(str, -1) {
		match = match[strings.Index(match, "$"):]
		matchKey := strings.Trim(match, "${}")

		for envKey, envValue := range b.env {
			if matchKey == envKey {
				str = strings.Replace(str, match, envValue, -1)
			}
		}
	}

	return str
}
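A quick way to see the interplay between the ENV denormalization in the dispatchers and this interpolation helper is an in-package test along these lines. It is only a sketch: the test name is made up, and it assumes envMap (not shown in this diff) is a plain map[string]string.

package evaluator

import "testing"

// Sketch: exercises replaceEnv against a buildFile whose env map was populated
// the way the env() dispatcher does it. Assumes envMap is map[string]string.
func TestReplaceEnvSketch(t *testing.T) {
	b := &buildFile{env: envMap{"GOPATH": "/go"}}

	in := `go get foo && mv ${GOPATH}/bin/foo /usr/local/bin/foo`
	want := `go get foo && mv /go/bin/foo /usr/local/bin/foo`

	if got := replaceEnv(b, in); got != want {
		t.Fatalf("expected %q, got %q", want, got)
	}
}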
@ -0,0 +1,2 @@
dumper
Dockerfile
@ -0,0 +1,32 @@
package main

import (
	"fmt"
	"os"

	"github.com/erikh/buildfile/parser"
)

func main() {
	var f *os.File
	var err error

	if len(os.Args) < 2 {
		fmt.Println("please supply filename(s)")
		os.Exit(1)
	}

	for _, fn := range os.Args[1:] {
		f, err = os.Open(fn)
		if err != nil {
			panic(err)
		}

		ast, err := parser.Parse(f)
		if err != nil {
			panic(err)
		} else {
			fmt.Print(ast.Dump())
		}
	}
}
@ -0,0 +1,99 @@
package parser

import (
	"encoding/json"
	"strconv"
	"strings"
)

// ignore the current argument. This will still leave a command parsed, but
// will not incorporate the arguments into the ast.
func parseIgnore(rest string) (*Node, error) {
	return blankNode(), nil
}

func parseSubCommand(rest string) (*Node, error) {
	_, child, err := parseLine(rest)
	if err != nil {
		return nil, err
	}

	return &Node{Children: []*Node{child}}, nil
}

// parse environment-like statements. Note that this does *not* handle
// variable interpolation, which will be handled in the evaluator.
func parseEnv(rest string) (*Node, error) {
	node := blankNode()
	rootnode := node
	strs := TOKEN_WHITESPACE.Split(rest, 2)
	node.Value = QuoteString(strs[0])
	node.Next = blankNode()
	node.Next.Value = QuoteString(strs[1])

	return rootnode, nil
}

// parses a whitespace-delimited set of arguments. The result is effectively a
// linked list of string arguments.
func parseStringsWhitespaceDelimited(rest string) (*Node, error) {
	node := blankNode()
	rootnode := node
	for _, str := range TOKEN_WHITESPACE.Split(rest, -1) { // use regexp
		node.Value = QuoteString(str)
		node.Next = blankNode()
		node = node.Next
	}

	return rootnode, nil
}

// parseString just wraps the string in quotes and returns a working node.
func parseString(rest string) (*Node, error) {
	return &Node{QuoteString(rest), nil, nil}, nil
}

// parseJSON converts JSON arrays to an AST.
func parseJSON(rest string) (*Node, error) {
	var (
		myJson   []interface{}
		next     = blankNode()
		orignext = next
	)

	if err := json.Unmarshal([]byte(rest), &myJson); err != nil {
		return nil, err
	}

	for _, str := range myJson {
		switch str.(type) {
		case float64:
			str = strconv.FormatFloat(str.(float64), 'G', -1, 64)
		}
		next.Value = QuoteString(str.(string))
		next.Next = blankNode()
		next = next.Next
	}

	return orignext, nil
}

// parseMaybeJSON determines if the argument appears to be a JSON array. If
// so, passes to parseJSON; if not, quotes the result and returns a single
// node.
func parseMaybeJSON(rest string) (*Node, error) {
	rest = strings.TrimSpace(rest)

	if strings.HasPrefix(rest, "[") {
		node, err := parseJSON(rest)
		if err == nil {
			return node, nil
		}
	}

	node := blankNode()
	node.Value = QuoteString(rest)
	return node, nil
}
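For the JSON-versus-shell split in parseMaybeJSON, a small in-package test sketch (hypothetical name, not part of this commit) shows the two node shapes it produces:

package parser

import "testing"

// Sketch: JSON arrays become a chain of quoted argument nodes, anything else
// becomes a single quoted node holding the whole line.
func TestParseMaybeJSONSketch(t *testing.T) {
	// JSON form: each element becomes its own node in the chain.
	n, err := parseMaybeJSON(`["echo", "hi"]`)
	if err != nil {
		t.Fatal(err)
	}
	if n.Value != `"echo"` || n.Next == nil || n.Next.Value != `"hi"` {
		t.Fatalf("unexpected chain starting at %q", n.Value)
	}

	// Shell form: the whole string is kept as one quoted node.
	n, err = parseMaybeJSON(`echo hi`)
	if err != nil {
		t.Fatal(err)
	}
	if n.Value != `"echo hi"` {
		t.Fatalf("unexpected value: %q", n.Value)
	}
}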
@ -0,0 +1,147 @@
// This package implements a parser and parse tree dumper for Dockerfiles.
package parser

import (
	"bufio"
	"io"
	"regexp"
	"strings"
)

// Node is the building block of the AST this package will create.
//
// Nodes are structured to have a value, next, and child, the latter two of
// which are Nodes themselves.
//
// This terminology is unfortunately rather confusing, so here's a diagram.
// Anything after the ; is a comment.
//
// (
//   (run "foo")       ; value run, and next is a value foo.
//   (run "1" "2" "3")
//   (something (really cool))
// )
//
// Will give you something like this:
//
// &Node{
//   Value: "",
//   Child: &Node{Value: "run", Next: &Node{Value: "foo"}, Child: nil},
//   Next:  &Node{Value: "", Child: &Node{Value: "run", Next: &Node{Value: `"1"`....
//
// ... and so on.
//
// The short and fast rule is that anything that starts with ( is a child of
// something. Anything which follows a previous statement is a next of
// something.
type Node struct {
	Value    string  // actual content
	Next     *Node   // the next item in the current sexp
	Children []*Node // the children of this sexp
}

var (
	dispatch                map[string]func(string) (*Node, error)
	TOKEN_WHITESPACE        = regexp.MustCompile(`\s+`)
	TOKEN_LINE_CONTINUATION = regexp.MustCompile(`\\$`)
	TOKEN_COMMENT           = regexp.MustCompile(`^#.*$`)
)

func init() {
	// Dispatch Table. see line_parsers.go for the parse functions.
	// The command is parsed and mapped to the line parser. The line parser
	// receives the arguments but not the command, and returns an AST after
	// reformulating the arguments according to the rules in the parser
	// functions. Errors are propagated up by Parse() and the resulting AST can
	// be incorporated directly into the existing AST as a next.
	dispatch = map[string]func(string) (*Node, error){
		"user":           parseString,
		"onbuild":        parseSubCommand,
		"workdir":        parseString,
		"env":            parseEnv,
		"maintainer":     parseString,
		"docker-version": parseString,
		"from":           parseString,
		"add":            parseStringsWhitespaceDelimited,
		"copy":           parseStringsWhitespaceDelimited,
		"run":            parseMaybeJSON,
		"cmd":            parseMaybeJSON,
		"entrypoint":     parseMaybeJSON,
		"expose":         parseStringsWhitespaceDelimited,
		"volume":         parseMaybeJSON,
	}
}

// empty node. Useful for managing structure.
func blankNode() *Node {
	return &Node{"", nil, []*Node{}}
}

func parseLine(line string) (string, *Node, error) {
	if line = stripComments(line); line == "" {
		return "", nil, nil
	}

	if TOKEN_LINE_CONTINUATION.MatchString(line) {
		line = TOKEN_LINE_CONTINUATION.ReplaceAllString(line, "")
		return line, nil, nil
	}

	cmd, args := splitCommand(line)

	node := blankNode()
	node.Value = cmd

	sexp, err := fullDispatch(cmd, args)
	if err != nil {
		return "", nil, err
	}

	node.Next = sexp

	return "", node, nil
}

// The main parse routine. Handles an io.Reader and returns the root of the
// AST.
func Parse(rwc io.Reader) (*Node, error) {
	var child *Node
	var line string
	var err error
	root := blankNode()
	scanner := bufio.NewScanner(rwc)

	for scanner.Scan() {
		line, child, err = parseLine(strings.TrimSpace(scanner.Text()))
		if err != nil {
			return nil, err
		}

		if line != "" && child == nil {
			for {
				if !scanner.Scan() {
					break
				}

				newline := strings.TrimSpace(scanner.Text())

				if newline == "" {
					continue
				}

				line, child, err = parseLine(line + newline)
				if err != nil {
					return nil, err
				}

				if child != nil {
					break
				}
			}
		}

		if child != nil {
			root.Children = append(root.Children, child)
		}
	}

	return root, nil
}
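Tying the pieces of the parser together, a minimal standalone sketch of calling Parse and Dump might look as follows. It assumes the github.com/erikh/buildfile/parser import path used elsewhere in this commit, and the output shown in the comment follows the same sexp format as the testfiles/result fixtures below.

package main

import (
	"fmt"
	"strings"

	"github.com/erikh/buildfile/parser"
)

func main() {
	dockerfile := `FROM ubuntu:14.04
# comment lines and trailing backslashes are folded away
RUN apt-get update && \
    apt-get install -y curl
CMD ["curl", "--version"]
`

	ast, err := parser.Parse(strings.NewReader(dockerfile))
	if err != nil {
		panic(err)
	}

	// Each instruction becomes one child of the root node; Dump() renders
	// the children as one sexp per line, e.g.:
	//   (from "ubuntu:14.04")
	//   (run "apt-get update && apt-get install -y curl")
	//   (cmd "curl" "--version")
	fmt.Println(ast.Dump())
}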
@ -0,0 +1,56 @@
package parser

import (
	"io/ioutil"
	"os"
	"path/filepath"
	"testing"
)

const testDir = "testfiles"

func TestTestData(t *testing.T) {
	f, err := os.Open(testDir)
	if err != nil {
		t.Fatal(err)
	}

	defer f.Close()

	dirs, err := f.Readdir(0)
	if err != nil {
		t.Fatal(err)
	}

	for _, dir := range dirs {
		dockerfile := filepath.Join(testDir, dir.Name(), "Dockerfile")
		resultfile := filepath.Join(testDir, dir.Name(), "result")

		df, err := os.Open(dockerfile)
		if err != nil {
			t.Fatalf("Dockerfile missing for %s: %s", dir.Name(), err.Error())
		}

		rf, err := os.Open(resultfile)
		if err != nil {
			t.Fatalf("Result file missing for %s: %s", dir.Name(), err.Error())
		}

		ast, err := Parse(df)
		if err != nil {
			t.Fatalf("Error parsing %s's dockerfile: %s", dir.Name(), err.Error())
		}

		content, err := ioutil.ReadAll(rf)
		if err != nil {
			t.Fatalf("Error reading %s's result file: %s", dir.Name(), err.Error())
		}

		if ast.Dump() != string(content) {
			t.Fatalf("%s: AST dump of dockerfile does not match result", dir.Name())
		}

		df.Close()
		rf.Close()
	}
}
@ -0,0 +1,25 @@
|
|||
FROM brimstone/ubuntu:14.04
|
||||
|
||||
MAINTAINER brimstone@the.narro.ws
|
||||
|
||||
# TORUN -v /var/run/docker.sock:/var/run/docker.sock
|
||||
|
||||
ENV GOPATH /go
|
||||
|
||||
# Set our command
|
||||
ENTRYPOINT ["/usr/local/bin/consuldock"]
|
||||
|
||||
# Install the packages we need, clean up after them and us
|
||||
RUN apt-get update \
|
||||
&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean \
|
||||
&& apt-get install -y --no-install-recommends git golang ca-certificates \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists \
|
||||
|
||||
&& go get -v github.com/brimstone/consuldock \
|
||||
&& mv $GOPATH/bin/consuldock /usr/local/bin/consuldock \
|
||||
|
||||
&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty \
|
||||
&& apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') \
|
||||
&& rm /tmp/dpkg.* \
|
||||
&& rm -rf $GOPATH
|
|
@ -0,0 +1,5 @@
|
|||
(from "brimstone/ubuntu:14.04")
|
||||
(maintainer "brimstone@the.narro.ws")
|
||||
(env "GOPATH" "/go")
|
||||
(entrypoint "/usr/local/bin/consuldock")
|
||||
(run "apt-get update && dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean && apt-get install -y --no-install-recommends git golang ca-certificates && apt-get clean && rm -rf /var/lib/apt/lists && go get -v github.com/brimstone/consuldock && mv $GOPATH/bin/consuldock /usr/local/bin/consuldock && dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty && apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') && rm /tmp/dpkg.* && rm -rf $GOPATH")
|
|
@ -0,0 +1,52 @@
|
|||
FROM brimstone/ubuntu:14.04
|
||||
|
||||
CMD []
|
||||
|
||||
ENTRYPOINT ["/usr/bin/consul", "agent", "-server", "-data-dir=/consul", "-client=0.0.0.0", "-ui-dir=/webui"]
|
||||
|
||||
EXPOSE 8500 8600 8400 8301 8302
|
||||
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y unzip wget \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists
|
||||
|
||||
RUN cd /tmp \
|
||||
&& wget https://dl.bintray.com/mitchellh/consul/0.3.1_web_ui.zip \
|
||||
-O web_ui.zip \
|
||||
&& unzip web_ui.zip \
|
||||
&& mv dist /webui \
|
||||
&& rm web_ui.zip
|
||||
|
||||
RUN apt-get update \
|
||||
&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean \
|
||||
&& apt-get install -y --no-install-recommends unzip wget \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists \
|
||||
|
||||
&& cd /tmp \
|
||||
&& wget https://dl.bintray.com/mitchellh/consul/0.3.1_web_ui.zip \
|
||||
-O web_ui.zip \
|
||||
&& unzip web_ui.zip \
|
||||
&& mv dist /webui \
|
||||
&& rm web_ui.zip \
|
||||
|
||||
&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty \
|
||||
&& apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') \
|
||||
&& rm /tmp/dpkg.*
|
||||
|
||||
ENV GOPATH /go
|
||||
|
||||
RUN apt-get update \
|
||||
&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean \
|
||||
&& apt-get install -y --no-install-recommends git golang ca-certificates build-essential \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists \
|
||||
|
||||
&& go get -v github.com/hashicorp/consul \
|
||||
&& mv $GOPATH/bin/consul /usr/bin/consul \
|
||||
|
||||
&& dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty \
|
||||
&& apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') \
|
||||
&& rm /tmp/dpkg.* \
|
||||
&& rm -rf $GOPATH
|
|
@ -0,0 +1,9 @@
|
|||
(from "brimstone/ubuntu:14.04")
|
||||
(cmd)
|
||||
(entrypoint "/usr/bin/consul" "agent" "-server" "-data-dir=/consul" "-client=0.0.0.0" "-ui-dir=/webui")
|
||||
(expose "8500" "8600" "8400" "8301" "8302")
|
||||
(run "apt-get update && apt-get install -y unzip wget && apt-get clean && rm -rf /var/lib/apt/lists")
|
||||
(run "cd /tmp && wget https://dl.bintray.com/mitchellh/consul/0.3.1_web_ui.zip -O web_ui.zip && unzip web_ui.zip && mv dist /webui && rm web_ui.zip")
|
||||
(run "apt-get update && dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean && apt-get install -y --no-install-recommends unzip wget && apt-get clean && rm -rf /var/lib/apt/lists && cd /tmp && wget https://dl.bintray.com/mitchellh/consul/0.3.1_web_ui.zip -O web_ui.zip && unzip web_ui.zip && mv dist /webui && rm web_ui.zip && dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty && apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') && rm /tmp/dpkg.*")
|
||||
(env "GOPATH" "/go")
|
||||
(run "apt-get update && dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.clean && apt-get install -y --no-install-recommends git golang ca-certificates build-essential && apt-get clean && rm -rf /var/lib/apt/lists && go get -v github.com/hashicorp/consul && mv $GOPATH/bin/consul /usr/bin/consul && dpkg -l | awk '/^ii/ {print $2}' > /tmp/dpkg.dirty && apt-get remove --purge -y $(diff /tmp/dpkg.clean /tmp/dpkg.dirty | awk '/^>/ {print $2}') && rm /tmp/dpkg.* && rm -rf $GOPATH")
|
|
@ -0,0 +1,54 @@
|
|||
FROM cpuguy83/ubuntu
|
||||
ENV NAGIOS_HOME /opt/nagios
|
||||
ENV NAGIOS_USER nagios
|
||||
ENV NAGIOS_GROUP nagios
|
||||
ENV NAGIOS_CMDUSER nagios
|
||||
ENV NAGIOS_CMDGROUP nagios
|
||||
ENV NAGIOSADMIN_USER nagiosadmin
|
||||
ENV NAGIOSADMIN_PASS nagios
|
||||
ENV APACHE_RUN_USER nagios
|
||||
ENV APACHE_RUN_GROUP nagios
|
||||
ENV NAGIOS_TIMEZONE UTC
|
||||
|
||||
RUN sed -i 's/universe/universe multiverse/' /etc/apt/sources.list
|
||||
RUN apt-get update && apt-get install -y iputils-ping netcat build-essential snmp snmpd snmp-mibs-downloader php5-cli apache2 libapache2-mod-php5 runit bc postfix bsd-mailx
|
||||
RUN ( egrep -i "^${NAGIOS_GROUP}" /etc/group || groupadd $NAGIOS_GROUP ) && ( egrep -i "^${NAGIOS_CMDGROUP}" /etc/group || groupadd $NAGIOS_CMDGROUP )
|
||||
RUN ( id -u $NAGIOS_USER || useradd --system $NAGIOS_USER -g $NAGIOS_GROUP -d $NAGIOS_HOME ) && ( id -u $NAGIOS_CMDUSER || useradd --system -d $NAGIOS_HOME -g $NAGIOS_CMDGROUP $NAGIOS_CMDUSER )
|
||||
|
||||
ADD http://downloads.sourceforge.net/project/nagios/nagios-3.x/nagios-3.5.1/nagios-3.5.1.tar.gz?r=http%3A%2F%2Fwww.nagios.org%2Fdownload%2Fcore%2Fthanks%2F%3Ft%3D1398863696&ts=1398863718&use_mirror=superb-dca3 /tmp/nagios.tar.gz
|
||||
RUN cd /tmp && tar -zxvf nagios.tar.gz && cd nagios && ./configure --prefix=${NAGIOS_HOME} --exec-prefix=${NAGIOS_HOME} --enable-event-broker --with-nagios-command-user=${NAGIOS_CMDUSER} --with-command-group=${NAGIOS_CMDGROUP} --with-nagios-user=${NAGIOS_USER} --with-nagios-group=${NAGIOS_GROUP} && make all && make install && make install-config && make install-commandmode && cp sample-config/httpd.conf /etc/apache2/conf.d/nagios.conf
|
||||
ADD http://www.nagios-plugins.org/download/nagios-plugins-1.5.tar.gz /tmp/
|
||||
RUN cd /tmp && tar -zxvf nagios-plugins-1.5.tar.gz && cd nagios-plugins-1.5 && ./configure --prefix=${NAGIOS_HOME} && make && make install
|
||||
|
||||
RUN sed -i.bak 's/.*\=www\-data//g' /etc/apache2/envvars
|
||||
RUN export DOC_ROOT="DocumentRoot $(echo $NAGIOS_HOME/share)"; sed -i "s,DocumentRoot.*,$DOC_ROOT," /etc/apache2/sites-enabled/000-default
|
||||
|
||||
RUN ln -s ${NAGIOS_HOME}/bin/nagios /usr/local/bin/nagios && mkdir -p /usr/share/snmp/mibs && chmod 0755 /usr/share/snmp/mibs && touch /usr/share/snmp/mibs/.foo
|
||||
|
||||
RUN echo "use_timezone=$NAGIOS_TIMEZONE" >> ${NAGIOS_HOME}/etc/nagios.cfg && echo "SetEnv TZ \"${NAGIOS_TIMEZONE}\"" >> /etc/apache2/conf.d/nagios.conf
|
||||
|
||||
RUN mkdir -p ${NAGIOS_HOME}/etc/conf.d && mkdir -p ${NAGIOS_HOME}/etc/monitor && ln -s /usr/share/snmp/mibs ${NAGIOS_HOME}/libexec/mibs
|
||||
RUN echo "cfg_dir=${NAGIOS_HOME}/etc/conf.d" >> ${NAGIOS_HOME}/etc/nagios.cfg
|
||||
RUN echo "cfg_dir=${NAGIOS_HOME}/etc/monitor" >> ${NAGIOS_HOME}/etc/nagios.cfg
|
||||
RUN download-mibs && echo "mibs +ALL" > /etc/snmp/snmp.conf
|
||||
|
||||
RUN sed -i 's,/bin/mail,/usr/bin/mail,' /opt/nagios/etc/objects/commands.cfg && \
|
||||
sed -i 's,/usr/usr,/usr,' /opt/nagios/etc/objects/commands.cfg
|
||||
RUN cp /etc/services /var/spool/postfix/etc/
|
||||
|
||||
RUN mkdir -p /etc/sv/nagios && mkdir -p /etc/sv/apache && rm -rf /etc/sv/getty-5 && mkdir -p /etc/sv/postfix
|
||||
ADD nagios.init /etc/sv/nagios/run
|
||||
ADD apache.init /etc/sv/apache/run
|
||||
ADD postfix.init /etc/sv/postfix/run
|
||||
ADD postfix.stop /etc/sv/postfix/finish
|
||||
|
||||
ADD start.sh /usr/local/bin/start_nagios
|
||||
|
||||
ENV APACHE_LOCK_DIR /var/run
|
||||
ENV APACHE_LOG_DIR /var/log/apache2
|
||||
|
||||
EXPOSE 80
|
||||
|
||||
VOLUME ["/opt/nagios/var", "/opt/nagios/etc", "/opt/nagios/libexec", "/var/log/apache2", "/usr/share/snmp/mibs"]
|
||||
|
||||
CMD ["/usr/local/bin/start_nagios"]
|
|
@ -0,0 +1,40 @@
|
|||
(from "cpuguy83/ubuntu")
|
||||
(env "NAGIOS_HOME" "/opt/nagios")
|
||||
(env "NAGIOS_USER" "nagios")
|
||||
(env "NAGIOS_GROUP" "nagios")
|
||||
(env "NAGIOS_CMDUSER" "nagios")
|
||||
(env "NAGIOS_CMDGROUP" "nagios")
|
||||
(env "NAGIOSADMIN_USER" "nagiosadmin")
|
||||
(env "NAGIOSADMIN_PASS" "nagios")
|
||||
(env "APACHE_RUN_USER" "nagios")
|
||||
(env "APACHE_RUN_GROUP" "nagios")
|
||||
(env "NAGIOS_TIMEZONE" "UTC")
|
||||
(run "sed -i 's/universe/universe multiverse/' /etc/apt/sources.list")
|
||||
(run "apt-get update && apt-get install -y iputils-ping netcat build-essential snmp snmpd snmp-mibs-downloader php5-cli apache2 libapache2-mod-php5 runit bc postfix bsd-mailx")
|
||||
(run "( egrep -i \"^${NAGIOS_GROUP}\" /etc/group || groupadd $NAGIOS_GROUP ) && ( egrep -i \"^${NAGIOS_CMDGROUP}\" /etc/group || groupadd $NAGIOS_CMDGROUP )")
|
||||
(run "( id -u $NAGIOS_USER || useradd --system $NAGIOS_USER -g $NAGIOS_GROUP -d $NAGIOS_HOME ) && ( id -u $NAGIOS_CMDUSER || useradd --system -d $NAGIOS_HOME -g $NAGIOS_CMDGROUP $NAGIOS_CMDUSER )")
|
||||
(add "http://downloads.sourceforge.net/project/nagios/nagios-3.x/nagios-3.5.1/nagios-3.5.1.tar.gz?r=http%3A%2F%2Fwww.nagios.org%2Fdownload%2Fcore%2Fthanks%2F%3Ft%3D1398863696&ts=1398863718&use_mirror=superb-dca3" "/tmp/nagios.tar.gz")
|
||||
(run "cd /tmp && tar -zxvf nagios.tar.gz && cd nagios && ./configure --prefix=${NAGIOS_HOME} --exec-prefix=${NAGIOS_HOME} --enable-event-broker --with-nagios-command-user=${NAGIOS_CMDUSER} --with-command-group=${NAGIOS_CMDGROUP} --with-nagios-user=${NAGIOS_USER} --with-nagios-group=${NAGIOS_GROUP} && make all && make install && make install-config && make install-commandmode && cp sample-config/httpd.conf /etc/apache2/conf.d/nagios.conf")
|
||||
(add "http://www.nagios-plugins.org/download/nagios-plugins-1.5.tar.gz" "/tmp/")
|
||||
(run "cd /tmp && tar -zxvf nagios-plugins-1.5.tar.gz && cd nagios-plugins-1.5 && ./configure --prefix=${NAGIOS_HOME} && make && make install")
|
||||
(run "sed -i.bak 's/.*\\=www\\-data//g' /etc/apache2/envvars")
|
||||
(run "export DOC_ROOT=\"DocumentRoot $(echo $NAGIOS_HOME/share)\"; sed -i \"s,DocumentRoot.*,$DOC_ROOT,\" /etc/apache2/sites-enabled/000-default")
|
||||
(run "ln -s ${NAGIOS_HOME}/bin/nagios /usr/local/bin/nagios && mkdir -p /usr/share/snmp/mibs && chmod 0755 /usr/share/snmp/mibs && touch /usr/share/snmp/mibs/.foo")
|
||||
(run "echo \"use_timezone=$NAGIOS_TIMEZONE\" >> ${NAGIOS_HOME}/etc/nagios.cfg && echo \"SetEnv TZ \\\"${NAGIOS_TIMEZONE}\\\"\" >> /etc/apache2/conf.d/nagios.conf")
|
||||
(run "mkdir -p ${NAGIOS_HOME}/etc/conf.d && mkdir -p ${NAGIOS_HOME}/etc/monitor && ln -s /usr/share/snmp/mibs ${NAGIOS_HOME}/libexec/mibs")
|
||||
(run "echo \"cfg_dir=${NAGIOS_HOME}/etc/conf.d\" >> ${NAGIOS_HOME}/etc/nagios.cfg")
|
||||
(run "echo \"cfg_dir=${NAGIOS_HOME}/etc/monitor\" >> ${NAGIOS_HOME}/etc/nagios.cfg")
|
||||
(run "download-mibs && echo \"mibs +ALL\" > /etc/snmp/snmp.conf")
|
||||
(run "sed -i 's,/bin/mail,/usr/bin/mail,' /opt/nagios/etc/objects/commands.cfg && sed -i 's,/usr/usr,/usr,' /opt/nagios/etc/objects/commands.cfg")
|
||||
(run "cp /etc/services /var/spool/postfix/etc/")
|
||||
(run "mkdir -p /etc/sv/nagios && mkdir -p /etc/sv/apache && rm -rf /etc/sv/getty-5 && mkdir -p /etc/sv/postfix")
|
||||
(add "nagios.init" "/etc/sv/nagios/run")
|
||||
(add "apache.init" "/etc/sv/apache/run")
|
||||
(add "postfix.init" "/etc/sv/postfix/run")
|
||||
(add "postfix.stop" "/etc/sv/postfix/finish")
|
||||
(add "start.sh" "/usr/local/bin/start_nagios")
|
||||
(env "APACHE_LOCK_DIR" "/var/run")
|
||||
(env "APACHE_LOG_DIR" "/var/log/apache2")
|
||||
(expose "80")
|
||||
(volume "/opt/nagios/var" "/opt/nagios/etc" "/opt/nagios/libexec" "/var/log/apache2" "/usr/share/snmp/mibs")
|
||||
(cmd "/usr/local/bin/start_nagios")
|
|
@ -0,0 +1,105 @@
|
|||
# This file describes the standard way to build Docker, using docker
|
||||
#
|
||||
# Usage:
|
||||
#
|
||||
# # Assemble the full dev environment. This is slow the first time.
|
||||
# docker build -t docker .
|
||||
#
|
||||
# # Mount your source in an interactive container for quick testing:
|
||||
# docker run -v `pwd`:/go/src/github.com/docker/docker --privileged -i -t docker bash
|
||||
#
|
||||
# # Run the test suite:
|
||||
# docker run --privileged docker hack/make.sh test
|
||||
#
|
||||
# # Publish a release:
|
||||
# docker run --privileged \
|
||||
# -e AWS_S3_BUCKET=baz \
|
||||
# -e AWS_ACCESS_KEY=foo \
|
||||
# -e AWS_SECRET_KEY=bar \
|
||||
# -e GPG_PASSPHRASE=gloubiboulga \
|
||||
# docker hack/release.sh
|
||||
#
|
||||
# Note: Apparmor used to mess with privileged mode, but this is no longer
|
||||
# the case. Therefore, you don't have to disable it anymore.
|
||||
#
|
||||
|
||||
docker-version 0.6.1
|
||||
FROM ubuntu:14.04
|
||||
MAINTAINER Tianon Gravi <admwiggin@gmail.com> (@tianon)
|
||||
|
||||
# Packaged dependencies
|
||||
RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -yq \
|
||||
apt-utils \
|
||||
aufs-tools \
|
||||
automake \
|
||||
btrfs-tools \
|
||||
build-essential \
|
||||
curl \
|
||||
dpkg-sig \
|
||||
git \
|
||||
iptables \
|
||||
libapparmor-dev \
|
||||
libcap-dev \
|
||||
libsqlite3-dev \
|
||||
lxc=1.0* \
|
||||
mercurial \
|
||||
pandoc \
|
||||
parallel \
|
||||
reprepro \
|
||||
ruby1.9.1 \
|
||||
ruby1.9.1-dev \
|
||||
s3cmd=1.1.0* \
|
||||
--no-install-recommends
|
||||
|
||||
# Get lvm2 source for compiling statically
|
||||
RUN git clone --no-checkout https://git.fedorahosted.org/git/lvm2.git /usr/local/lvm2 && cd /usr/local/lvm2 && git checkout -q v2_02_103
|
||||
# see https://git.fedorahosted.org/cgit/lvm2.git/refs/tags for release tags
|
||||
# note: we don't use "git clone -b" above because it then spews big nasty warnings about 'detached HEAD' state that we can't silence as easily as we can silence them using "git checkout" directly
|
||||
|
||||
# Compile and install lvm2
|
||||
RUN cd /usr/local/lvm2 && ./configure --enable-static_link && make device-mapper && make install_device-mapper
|
||||
# see https://git.fedorahosted.org/cgit/lvm2.git/tree/INSTALL
|
||||
|
||||
# Install Go
|
||||
RUN curl -sSL https://golang.org/dl/go1.3.src.tar.gz | tar -v -C /usr/local -xz
|
||||
ENV PATH /usr/local/go/bin:$PATH
|
||||
ENV GOPATH /go:/go/src/github.com/docker/docker/vendor
|
||||
RUN cd /usr/local/go/src && ./make.bash --no-clean 2>&1
|
||||
|
||||
# Compile Go for cross compilation
|
||||
ENV DOCKER_CROSSPLATFORMS \
|
||||
linux/386 linux/arm \
|
||||
darwin/amd64 darwin/386 \
|
||||
freebsd/amd64 freebsd/386 freebsd/arm
|
||||
# (set an explicit GOARM of 5 for maximum compatibility)
|
||||
ENV GOARM 5
|
||||
RUN cd /usr/local/go/src && bash -xc 'for platform in $DOCKER_CROSSPLATFORMS; do GOOS=${platform%/*} GOARCH=${platform##*/} ./make.bash --no-clean 2>&1; done'
|
||||
|
||||
# Grab Go's cover tool for dead-simple code coverage testing
|
||||
RUN go get code.google.com/p/go.tools/cmd/cover
|
||||
|
||||
# TODO replace FPM with some very minimal debhelper stuff
|
||||
RUN gem install --no-rdoc --no-ri fpm --version 1.0.2
|
||||
|
||||
# Get the "busybox" image source so we can build locally instead of pulling
|
||||
RUN git clone -b buildroot-2014.02 https://github.com/jpetazzo/docker-busybox.git /docker-busybox
|
||||
|
||||
# Setup s3cmd config
|
||||
RUN /bin/echo -e '[default]\naccess_key=$AWS_ACCESS_KEY\nsecret_key=$AWS_SECRET_KEY' > /.s3cfg
|
||||
|
||||
# Set user.email so crosbymichael's in-container merge commits go smoothly
|
||||
RUN git config --global user.email 'docker-dummy@example.com'
|
||||
|
||||
# Add an unprivileged user to be used for tests which need it
|
||||
RUN groupadd -r docker
|
||||
RUN useradd --create-home --gid docker unprivilegeduser
|
||||
|
||||
VOLUME /var/lib/docker
|
||||
WORKDIR /go/src/github.com/docker/docker
|
||||
ENV DOCKER_BUILDTAGS apparmor selinux
|
||||
|
||||
# Wrap all commands in the "docker-in-docker" script to allow nested containers
|
||||
ENTRYPOINT ["hack/dind"]
|
||||
|
||||
# Upload docker source
|
||||
COPY . /go/src/github.com/docker/docker
|
|
@ -0,0 +1,25 @@
|
|||
(docker-version "0.6.1")
|
||||
(from "ubuntu:14.04")
|
||||
(maintainer "Tianon Gravi <admwiggin@gmail.com> (@tianon)")
|
||||
(run "apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -yq apt-utils aufs-tools automake btrfs-tools build-essential curl dpkg-sig git iptables libapparmor-dev libcap-dev libsqlite3-dev lxc=1.0* mercurial pandoc parallel reprepro ruby1.9.1 ruby1.9.1-dev s3cmd=1.1.0* --no-install-recommends")
|
||||
(run "git clone --no-checkout https://git.fedorahosted.org/git/lvm2.git /usr/local/lvm2 && cd /usr/local/lvm2 && git checkout -q v2_02_103")
|
||||
(run "cd /usr/local/lvm2 && ./configure --enable-static_link && make device-mapper && make install_device-mapper")
|
||||
(run "curl -sSL https://golang.org/dl/go1.3.src.tar.gz | tar -v -C /usr/local -xz")
|
||||
(env "PATH" "/usr/local/go/bin:$PATH")
|
||||
(env "GOPATH" "/go:/go/src/github.com/docker/docker/vendor")
|
||||
(run "cd /usr/local/go/src && ./make.bash --no-clean 2>&1")
|
||||
(env "DOCKER_CROSSPLATFORMS" "linux/386 linux/arm darwin/amd64 darwin/386 freebsd/amd64 freebsd/386 freebsd/arm")
|
||||
(env "GOARM" "5")
|
||||
(run "cd /usr/local/go/src && bash -xc 'for platform in $DOCKER_CROSSPLATFORMS; do GOOS=${platform%/*} GOARCH=${platform##*/} ./make.bash --no-clean 2>&1; done'")
|
||||
(run "go get code.google.com/p/go.tools/cmd/cover")
|
||||
(run "gem install --no-rdoc --no-ri fpm --version 1.0.2")
|
||||
(run "git clone -b buildroot-2014.02 https://github.com/jpetazzo/docker-busybox.git /docker-busybox")
|
||||
(run "/bin/echo -e '[default]\\naccess_key=$AWS_ACCESS_KEY\\nsecret_key=$AWS_SECRET_KEY' > /.s3cfg")
|
||||
(run "git config --global user.email 'docker-dummy@example.com'")
|
||||
(run "groupadd -r docker")
|
||||
(run "useradd --create-home --gid docker unprivilegeduser")
|
||||
(volume "/var/lib/docker")
|
||||
(workdir "/go/src/github.com/docker/docker")
|
||||
(env "DOCKER_BUILDTAGS" "apparmor selinux")
|
||||
(entrypoint "hack/dind")
|
||||
(copy "." "/go/src/github.com/docker/docker")
|
|
@ -0,0 +1,8 @@
|
|||
FROM ubuntu:14.04
|
||||
MAINTAINER Erik \\Hollensbe <erik@hollensbe.org>\"
|
||||
|
||||
RUN apt-get \update && \
|
||||
apt-get \"install znc -y
|
||||
ADD \conf\\" /.znc
|
||||
|
||||
CMD [ "\/usr\\\"/bin/znc", "-f", "-r" ]
|
|
@ -0,0 +1,5 @@
|
|||
(from "ubuntu:14.04")
|
||||
(maintainer "Erik \\\\Hollensbe <erik@hollensbe.org>\\\"")
|
||||
(run "apt-get \\update && apt-get \\\"install znc -y")
|
||||
(add "\\conf\\\\\"" "/.znc")
|
||||
(cmd "/usr\\\"/bin/znc" "-f" "-r")
|
|
@ -0,0 +1,15 @@
|
|||
FROM ubuntu:14.04
|
||||
|
||||
RUN apt-get update && apt-get install wget -y
|
||||
RUN wget http://s3.amazonaws.com/influxdb/influxdb_latest_amd64.deb
|
||||
RUN dpkg -i influxdb_latest_amd64.deb
|
||||
RUN rm -r /opt/influxdb/shared
|
||||
|
||||
VOLUME /opt/influxdb/shared
|
||||
|
||||
CMD /usr/bin/influxdb --pidfile /var/run/influxdb.pid -config /opt/influxdb/shared/config.toml
|
||||
|
||||
EXPOSE 8083
|
||||
EXPOSE 8086
|
||||
EXPOSE 8090
|
||||
EXPOSE 8099
|
|
@ -0,0 +1,11 @@
|
|||
(from "ubuntu:14.04")
|
||||
(run "apt-get update && apt-get install wget -y")
|
||||
(run "wget http://s3.amazonaws.com/influxdb/influxdb_latest_amd64.deb")
|
||||
(run "dpkg -i influxdb_latest_amd64.deb")
|
||||
(run "rm -r /opt/influxdb/shared")
|
||||
(volume "/opt/influxdb/shared")
|
||||
(cmd "/usr/bin/influxdb --pidfile /var/run/influxdb.pid -config /opt/influxdb/shared/config.toml")
|
||||
(expose "8083")
|
||||
(expose "8086")
|
||||
(expose "8090")
|
||||
(expose "8099")
|
|
@ -0,0 +1,7 @@
|
|||
FROM ubuntu:14.04
|
||||
MAINTAINER James Turnbull "james@example.com"
|
||||
ENV REFRESHED_AT 2014-06-01
|
||||
RUN apt-get update
|
||||
RUN apt-get -y install redis-server redis-tools
|
||||
EXPOSE 6379
|
||||
ENTRYPOINT [ "/usr/bin/redis-server" ]
|
|
@ -0,0 +1,7 @@
|
|||
(from "ubuntu:14.04")
|
||||
(maintainer "James Turnbull \"james@example.com\"")
|
||||
(env "REFRESHED_AT" "2014-06-01")
|
||||
(run "apt-get update")
|
||||
(run "apt-get -y install redis-server redis-tools")
|
||||
(expose "6379")
|
||||
(entrypoint "/usr/bin/redis-server")
|
|
@ -0,0 +1,48 @@
|
|||
FROM busybox:buildroot-2014.02
|
||||
|
||||
MAINTAINER docker <docker@docker.io>
|
||||
|
||||
ONBUILD RUN ["echo", "test"]
|
||||
ONBUILD RUN echo test
|
||||
ONBUILD COPY . /
|
||||
|
||||
|
||||
# RUN Commands \
|
||||
# linebreak in comment \
|
||||
RUN ["ls", "-la"]
|
||||
RUN ["echo", "'1234'"]
|
||||
RUN echo "1234"
|
||||
RUN echo 1234
|
||||
RUN echo '1234' && \
|
||||
echo "456" && \
|
||||
echo 789
|
||||
RUN sh -c 'echo root:testpass \
|
||||
> /tmp/passwd'
|
||||
RUN mkdir -p /test /test2 /test3/test
|
||||
|
||||
# ENV \
|
||||
ENV SCUBA 1 DUBA 3
|
||||
ENV SCUBA "1 DUBA 3"
|
||||
|
||||
# CMD \
|
||||
CMD ["echo", "test"]
|
||||
CMD echo test
|
||||
CMD echo "test"
|
||||
CMD echo 'test'
|
||||
CMD echo 'test' | wc -
|
||||
|
||||
#EXPOSE\
|
||||
EXPOSE 3000
|
||||
EXPOSE 9000 5000 6000
|
||||
|
||||
USER docker
|
||||
USER docker:root
|
||||
|
||||
VOLUME ["/test"]
|
||||
VOLUME ["/test", "/test2"]
|
||||
VOLUME /test3
|
||||
|
||||
WORKDIR /test
|
||||
|
||||
ADD . /
|
||||
COPY . copy
|
|
@ -0,0 +1,29 @@
|
|||
(from "busybox:buildroot-2014.02")
|
||||
(maintainer "docker <docker@docker.io>")
|
||||
(onbuild (run "echo" "test"))
|
||||
(onbuild (run "echo test"))
|
||||
(onbuild (copy "." "/"))
|
||||
(run "ls" "-la")
|
||||
(run "echo" "'1234'")
|
||||
(run "echo \"1234\"")
|
||||
(run "echo 1234")
|
||||
(run "echo '1234' && echo \"456\" && echo 789")
|
||||
(run "sh -c 'echo root:testpass > /tmp/passwd'")
|
||||
(run "mkdir -p /test /test2 /test3/test")
|
||||
(env "SCUBA" "1 DUBA 3")
|
||||
(env "SCUBA" "\"1 DUBA 3\"")
|
||||
(cmd "echo" "test")
|
||||
(cmd "echo test")
|
||||
(cmd "echo \"test\"")
|
||||
(cmd "echo 'test'")
|
||||
(cmd "echo 'test' | wc -")
|
||||
(expose "3000")
|
||||
(expose "9000" "5000" "6000")
|
||||
(user "docker")
|
||||
(user "docker:root")
|
||||
(volume "/test")
|
||||
(volume "/test" "/test2")
|
||||
(volume "/test3")
|
||||
(workdir "/test")
|
||||
(add "." "/")
|
||||
(copy "." "copy")
|
|
@ -0,0 +1,16 @@
|
|||
FROM ubuntu:14.04
|
||||
|
||||
RUN apt-get update -qy && apt-get install mutt offlineimap vim-nox abook elinks curl tmux cron zsh -y
|
||||
ADD .muttrc /
|
||||
ADD .offlineimaprc /
|
||||
ADD .tmux.conf /
|
||||
ADD mutt /.mutt
|
||||
ADD vim /.vim
|
||||
ADD vimrc /.vimrc
|
||||
ADD crontab /etc/crontab
|
||||
RUN chmod 644 /etc/crontab
|
||||
RUN mkdir /Mail
|
||||
RUN mkdir /.offlineimap
|
||||
RUN echo "export TERM=screen-256color" >/.zshenv
|
||||
|
||||
CMD setsid cron; tmux -2
|
|
@ -0,0 +1,14 @@
|
|||
(from "ubuntu:14.04")
|
||||
(run "apt-get update -qy && apt-get install mutt offlineimap vim-nox abook elinks curl tmux cron zsh -y")
|
||||
(add ".muttrc" "/")
|
||||
(add ".offlineimaprc" "/")
|
||||
(add ".tmux.conf" "/")
|
||||
(add "mutt" "/.mutt")
|
||||
(add "vim" "/.vim")
|
||||
(add "vimrc" "/.vimrc")
|
||||
(add "crontab" "/etc/crontab")
|
||||
(run "chmod 644 /etc/crontab")
|
||||
(run "mkdir /Mail")
|
||||
(run "mkdir /.offlineimap")
|
||||
(run "echo \"export TERM=screen-256color\" >/.zshenv")
|
||||
(cmd "setsid cron; tmux -2")
|
|
@ -0,0 +1,7 @@
|
|||
FROM ubuntu:14.04
|
||||
|
||||
RUN apt-get update && apt-get install libcap2-bin mumble-server -y
|
||||
|
||||
ADD ./mumble-server.ini /etc/mumble-server.ini
|
||||
|
||||
CMD /usr/sbin/murmurd
|
|
@ -0,0 +1,4 @@
|
|||
(from "ubuntu:14.04")
|
||||
(run "apt-get update && apt-get install libcap2-bin mumble-server -y")
|
||||
(add "./mumble-server.ini" "/etc/mumble-server.ini")
|
||||
(cmd "/usr/sbin/murmurd")
|
|
@ -0,0 +1,14 @@
|
|||
FROM ubuntu:14.04
|
||||
MAINTAINER Erik Hollensbe <erik@hollensbe.org>
|
||||
|
||||
RUN apt-get update && apt-get install nginx-full -y
|
||||
RUN rm -rf /etc/nginx
|
||||
ADD etc /etc/nginx
|
||||
RUN chown -R root:root /etc/nginx
|
||||
RUN /usr/sbin/nginx -qt
|
||||
RUN mkdir /www
|
||||
|
||||
CMD ["/usr/sbin/nginx"]
|
||||
|
||||
VOLUME /www
|
||||
EXPOSE 80
|
|
@ -0,0 +1,11 @@
|
|||
(from "ubuntu:14.04")
|
||||
(maintainer "Erik Hollensbe <erik@hollensbe.org>")
|
||||
(run "apt-get update && apt-get install nginx-full -y")
|
||||
(run "rm -rf /etc/nginx")
|
||||
(add "etc" "/etc/nginx")
|
||||
(run "chown -R root:root /etc/nginx")
|
||||
(run "/usr/sbin/nginx -qt")
|
||||
(run "mkdir /www")
|
||||
(cmd "/usr/sbin/nginx")
|
||||
(volume "/www")
|
||||
(expose "80")
|
|
@ -0,0 +1,23 @@
|
|||
FROM ubuntu:12.04
|
||||
|
||||
EXPOSE 27015
|
||||
EXPOSE 27005
|
||||
EXPOSE 26901
|
||||
EXPOSE 27020
|
||||
|
||||
RUN apt-get update && apt-get install libc6-dev-i386 curl unzip -y
|
||||
RUN mkdir -p /steam
|
||||
RUN curl http://media.steampowered.com/client/steamcmd_linux.tar.gz | tar vxz -C /steam
|
||||
ADD ./script /steam/script
|
||||
RUN /steam/steamcmd.sh +runscript /steam/script
|
||||
RUN curl http://mirror.pointysoftware.net/alliedmodders/mmsource-1.10.0-linux.tar.gz | tar vxz -C /steam/tf2/tf
|
||||
RUN curl http://mirror.pointysoftware.net/alliedmodders/sourcemod-1.5.3-linux.tar.gz | tar vxz -C /steam/tf2/tf
|
||||
ADD ./server.cfg /steam/tf2/tf/cfg/server.cfg
|
||||
ADD ./ctf_2fort.cfg /steam/tf2/tf/cfg/ctf_2fort.cfg
|
||||
ADD ./sourcemod.cfg /steam/tf2/tf/cfg/sourcemod/sourcemod.cfg
|
||||
RUN rm -r /steam/tf2/tf/addons/sourcemod/configs
|
||||
ADD ./configs /steam/tf2/tf/addons/sourcemod/configs
|
||||
RUN mkdir -p /steam/tf2/tf/addons/sourcemod/translations/en
|
||||
RUN cp /steam/tf2/tf/addons/sourcemod/translations/*.txt /steam/tf2/tf/addons/sourcemod/translations/en
|
||||
|
||||
CMD cd /steam/tf2 && ./srcds_run -port 27015 +ip 0.0.0.0 +map ctf_2fort -autoupdate -steam_dir /steam -steamcmd_script /steam/script +tf_bot_quota 12 +tf_bot_quota_mode fill
|
|
@ -0,0 +1,20 @@
|
|||
(from "ubuntu:12.04")
|
||||
(expose "27015")
|
||||
(expose "27005")
|
||||
(expose "26901")
|
||||
(expose "27020")
|
||||
(run "apt-get update && apt-get install libc6-dev-i386 curl unzip -y")
|
||||
(run "mkdir -p /steam")
|
||||
(run "curl http://media.steampowered.com/client/steamcmd_linux.tar.gz | tar vxz -C /steam")
|
||||
(add "./script" "/steam/script")
|
||||
(run "/steam/steamcmd.sh +runscript /steam/script")
|
||||
(run "curl http://mirror.pointysoftware.net/alliedmodders/mmsource-1.10.0-linux.tar.gz | tar vxz -C /steam/tf2/tf")
|
||||
(run "curl http://mirror.pointysoftware.net/alliedmodders/sourcemod-1.5.3-linux.tar.gz | tar vxz -C /steam/tf2/tf")
|
||||
(add "./server.cfg" "/steam/tf2/tf/cfg/server.cfg")
|
||||
(add "./ctf_2fort.cfg" "/steam/tf2/tf/cfg/ctf_2fort.cfg")
|
||||
(add "./sourcemod.cfg" "/steam/tf2/tf/cfg/sourcemod/sourcemod.cfg")
|
||||
(run "rm -r /steam/tf2/tf/addons/sourcemod/configs")
|
||||
(add "./configs" "/steam/tf2/tf/addons/sourcemod/configs")
|
||||
(run "mkdir -p /steam/tf2/tf/addons/sourcemod/translations/en")
|
||||
(run "cp /steam/tf2/tf/addons/sourcemod/translations/*.txt /steam/tf2/tf/addons/sourcemod/translations/en")
|
||||
(cmd "cd /steam/tf2 && ./srcds_run -port 27015 +ip 0.0.0.0 +map ctf_2fort -autoupdate -steam_dir /steam -steamcmd_script /steam/script +tf_bot_quota 12 +tf_bot_quota_mode fill")
|
|
@ -0,0 +1,9 @@
|
|||
FROM ubuntu:14.04
|
||||
|
||||
RUN apt-get update -qy && apt-get install tmux zsh weechat-curses -y
|
||||
|
||||
ADD .weechat /.weechat
|
||||
ADD .tmux.conf /
|
||||
RUN echo "export TERM=screen-256color" >/.zshenv
|
||||
|
||||
CMD zsh -c weechat
|
|
@ -0,0 +1,6 @@
|
|||
(from "ubuntu:14.04")
|
||||
(run "apt-get update -qy && apt-get install tmux zsh weechat-curses -y")
|
||||
(add ".weechat" "/.weechat")
|
||||
(add ".tmux.conf" "/")
|
||||
(run "echo \"export TERM=screen-256color\" >/.zshenv")
|
||||
(cmd "zsh -c weechat")
|
|
@ -0,0 +1,7 @@
|
|||
FROM ubuntu:14.04
|
||||
MAINTAINER Erik Hollensbe <erik@hollensbe.org>
|
||||
|
||||
RUN apt-get update && apt-get install znc -y
|
||||
ADD conf /.znc
|
||||
|
||||
CMD [ "/usr/bin/znc", "-f", "-r" ]
|
|
@ -0,0 +1,5 @@
|
|||
(from "ubuntu:14.04")
|
||||
(maintainer "Erik Hollensbe <erik@hollensbe.org>")
|
||||
(run "apt-get update && apt-get install znc -y")
|
||||
(add "conf" "/.znc")
|
||||
(cmd "/usr/bin/znc" "-f" "-r")
|
|
@ -0,0 +1,86 @@
package parser

import (
	"fmt"
	"strings"
)

// QuoteString walks characters (after trimming), escapes any quotes and
// escapes, then wraps the whole thing in quotes. Very useful for generating
// argument output in nodes.
func QuoteString(str string) string {
	result := ""
	chars := strings.Split(strings.TrimSpace(str), "")

	for _, char := range chars {
		switch char {
		case `"`:
			result += `\"`
		case `\`:
			result += `\\`
		default:
			result += char
		}
	}

	return `"` + result + `"`
}

// dumps the AST defined by `node` as a list of sexps. Returns a string
// suitable for printing.
func (node *Node) Dump() string {
	str := ""
	str += node.Value

	for _, n := range node.Children {
		str += "(" + n.Dump() + ")\n"
	}

	if node.Next != nil {
		for n := node.Next; n != nil; n = n.Next {
			if len(n.Children) > 0 {
				str += " " + n.Dump()
			} else {
				str += " " + n.Value
			}
		}
	}

	return strings.TrimSpace(str)
}

// performs the dispatch based on the two primal strings, cmd and args. Please
// look at the dispatch table in parser.go to see how these dispatchers work.
func fullDispatch(cmd, args string) (*Node, error) {
	if _, ok := dispatch[cmd]; !ok {
		return nil, fmt.Errorf("'%s' is not a valid dockerfile command", cmd)
	}

	sexp, err := dispatch[cmd](args)
	if err != nil {
		return nil, err
	}

	return sexp, nil
}

// splitCommand takes a single line of text and parses out the cmd and args,
// which are used for dispatching to more exact parsing functions.
func splitCommand(line string) (string, string) {
	cmdline := TOKEN_WHITESPACE.Split(line, 2)
	cmd := strings.ToLower(cmdline[0])
	// the cmd should never have whitespace, but it's possible for the args to
	// have trailing whitespace.
	return cmd, strings.TrimSpace(cmdline[1])
}

// covers comments and empty lines. Lines should be trimmed before passing to
// this function.
func stripComments(line string) string {
	// string is already trimmed at this point
	if TOKEN_COMMENT.MatchString(line) {
		return TOKEN_COMMENT.ReplaceAllString(line, "")
	}

	return line
}
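A small sketch of QuoteString's escaping behaviour, again assuming the github.com/erikh/buildfile/parser import path:

package main

import (
	"fmt"

	"github.com/erikh/buildfile/parser"
)

func main() {
	// Trims the input, escapes embedded quotes and backslashes, and wraps
	// the result in double quotes.
	fmt.Println(parser.QuoteString("  echo \"hi\" \\o/ "))
	// Output: "echo \"hi\" \\o/"
}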