Merge pull request #160 from therealkbhat/file-decompress

[medium] Decompress files before content inspect, resolves #108
Julien Vehent [:ulfr] 2016-01-14 14:08:10 -05:00
Parents 26c94d0548 f722f992ae
Commit bde41c6d4a
5 changed files with 295 additions and 57 deletions

View file

@ -263,7 +263,8 @@ label (key) and search parameters (value). A search label is a string betw
],
"options": {
"matchall": true,
"maxdepth": 3
"maxdepth": 3,
"decompress": true
}
}
}
@ -341,7 +342,10 @@ single file search, you should probably consider breaking it down into smaller
searches, or running the search locally instead of through MIG.
* **returnsha256** instructs the agent to return the SHA256 hash for any
matched files. The client will display the hash with the file information
in the result. As an example, this option can be used to do basic file
integrity monitoring across actions.
* **decompress** tells the agent to decompress gzipped files prior to
inspecting content or calculating hashes. Note that if the decompress flag
is set for one search, all searches will involve a test for file
decompression.

2   Search algorithm

FM traverses a directory tree starting from a root path until no searches
remain active. FM traverses a given path only once, regardless of the number of
searches that are being performed. When FM enters a directory, it activates
searches that apply to the directory, and deactivates the ones that don't.
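
Putting the new option in context, a complete search using decompression could look like the sketch below. This is illustrative and not part of the commit: the label, path and regex are placeholders, and the field names follow the parameter structure shown in this documentation.

    {
        "searches": {
            "gzipped-logs": {
                "paths": [ "/var/log" ],
                "contents": [ "segfault" ],
                "options": {
                    "matchall": true,
                    "maxdepth": 3,
                    "decompress": true
                }
            }
        }
    }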

View file

@ -55,7 +55,8 @@ A search must have at least one search path.
],
"options": {
"matchall": true,
"maxdepth": 3
"maxdepth": 3,
"decompress": true
}
}
}
@ -214,6 +215,11 @@ Several options can be applied to a search:
in the result. As an example, this option can be used to do basic file
integrity monitoring across actions.
* **decompress** tells the agent to decompress gzipped files prior to
inspecting content or calculating hashes. Note that if the decompress flag
is set for one search, all searches will involve a test for file
decompression.
Search algorithm
----------------

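The same option is exposed as a command-line flag by the client-side changes later in this commit. A hypothetical set of module flags combining it with other documented options (the path and regex are placeholders; flag names other than -maxdepth, -returnsha256 and -decompress are inferred from the parameter names and may differ):

    -path /var/log -content "segfault" -maxdepth 3 -returnsha256 -decompress
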
View file

@ -7,7 +7,7 @@
/* The file module provides functions to scan a file system. It can look into files
using regexes. It can search files by name. It can match hashes in md5, sha1,
sha256, sha384, sha512, sha3_224, sha3_256, sha3_384 and sha3_512.
The filesystem can be searches using pattern, as described in the Parameters
The filesystem can be searched using patterns, as described in the Parameters
documentation at http://mig.mozilla.org/doc/module_file.html .
*/
package file /* import "mig.ninja/mig/modules/file" */
@ -33,9 +33,12 @@ import (
"golang.org/x/crypto/sha3"
"mig.ninja/mig/modules"
"compress/gzip"
)
var debug bool = false
var tryDecompress bool = false
func debugprint(format string, a ...interface{}) {
if debug {
@ -98,6 +101,7 @@ type options struct {
MatchLimit float64 `json:"matchlimit"`
Debug string `json:"debug,omitempty"`
ReturnSHA256 bool `json:"returnsha256,omitempty"`
Decompress bool `json:"decompress,omitempty"`
}
type checkType uint64
@ -514,6 +518,11 @@ func (r *run) ValidateParameters() (err error) {
return
}
}
if s.Options.Decompress {
tryDecompress = true
} else {
tryDecompress = false
}
}
return
}
@ -934,6 +943,57 @@ func followSymLink(link string) (mode os.FileMode, path string, err error) {
return
}
type fileEntry struct {
filename string
fd *os.File
compRdr io.Reader
}
func (f *fileEntry) Close() {
f.fd.Close()
}
// getReader returns an appropriate reader for the file being checked.
// This is done by checking whether the file is compressed or not
func (f *fileEntry) getReader() io.Reader {
var (
err error
)
defer func() {
if e := recover(); e != nil {
err = fmt.Errorf("getReader() -> %v", err)
}
}()
f.fd, err = os.Open(f.filename)
if err != nil {
stats.Openfailed++
panic(err)
}
if tryDecompress != true {
return f.fd
}
magic := make([]byte, 2)
n, err := f.fd.Read(magic)
if err != nil {
panic(err)
}
if n != 2 {
return f.fd
}
_, err = f.fd.Seek(0, 0)
if err != nil {
panic(err)
}
if magic[0] == 0x1f && magic[1] == 0x8b {
f.compRdr, err = gzip.NewReader(f.fd)
if err != nil {
panic(err)
}
return f.compRdr
}
return f.fd
}
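The detection above relies on the gzip magic number: a gzip stream starts with the bytes 0x1f 0x8b. For reference, here is a standalone sketch of the same idea outside the module's fileEntry type, with the panic/recover error handling replaced by plain error returns; it is an illustration, not code from the commit.

    package main

    import (
        "compress/gzip"
        "fmt"
        "io"
        "os"
    )

    // openMaybeGzip opens path and returns a reader over the decompressed
    // content when the file starts with the gzip magic bytes 0x1f 0x8b,
    // or over the raw bytes otherwise. The *os.File is returned so the
    // caller can close it when done.
    func openMaybeGzip(path string) (io.Reader, *os.File, error) {
        fd, err := os.Open(path)
        if err != nil {
            return nil, nil, err
        }
        magic := make([]byte, 2)
        n, err := fd.Read(magic)
        if err != nil && err != io.EOF {
            fd.Close()
            return nil, nil, err
        }
        // rewind so whichever reader is returned sees the file from the start
        if _, err := fd.Seek(0, 0); err != nil {
            fd.Close()
            return nil, nil, err
        }
        if n == 2 && magic[0] == 0x1f && magic[1] == 0x8b {
            gz, err := gzip.NewReader(fd)
            if err != nil {
                fd.Close()
                return nil, nil, err
            }
            return gz, fd, nil
        }
        return fd, fd, nil
    }

    func main() {
        r, fd, err := openMaybeGzip("/etc/hostname")
        if err != nil {
            fmt.Println(err)
            return
        }
        defer fd.Close()
        io.Copy(os.Stdout, r)
    }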
// evaluateFile takes a single file and applies searches to it
func (r *run) evaluateFile(file string) (err error) {
var activeSearches []string
@ -990,16 +1050,17 @@ func (r *run) evaluateFile(file string) (err error) {
// Second pass: Enter all content & hash checks across all searches.
// Only perform the searches that are active.
// Optimize to only read a file once per check type
r.checkContent(file)
r.checkHash(file, checkMD5)
r.checkHash(file, checkSHA1)
r.checkHash(file, checkSHA256)
r.checkHash(file, checkSHA384)
r.checkHash(file, checkSHA512)
r.checkHash(file, checkSHA3_224)
r.checkHash(file, checkSHA3_256)
r.checkHash(file, checkSHA3_384)
r.checkHash(file, checkSHA3_512)
f := fileEntry{filename: file}
r.checkContent(f)
r.checkHash(f, checkMD5)
r.checkHash(f, checkSHA1)
r.checkHash(f, checkSHA256)
r.checkHash(f, checkSHA384)
r.checkHash(f, checkSHA512)
r.checkHash(f, checkSHA3_224)
r.checkHash(f, checkSHA3_256)
r.checkHash(f, checkSHA3_384)
r.checkHash(f, checkSHA3_512)
return
}
@ -1151,10 +1212,9 @@ func (s search) checkMtime(file string, fi os.FileInfo) (matchedall bool) {
return
}
func (r *run) checkContent(file string) {
func (r *run) checkContent(f fileEntry) {
var (
err error
fd *os.File
)
defer func() {
if e := recover(); e != nil {
@ -1184,14 +1244,10 @@ func (r *run) checkContent(file string) {
if !continuereadingfile {
return
}
fd, err = os.Open(file)
if err != nil {
stats.Openfailed++
panic(err)
}
defer fd.Close()
// iterate over the file content
scanner := bufio.NewScanner(fd)
reader := f.getReader()
defer f.Close()
scanner := bufio.NewScanner(reader)
for scanner.Scan() {
if err := scanner.Err(); err != nil {
panic(err)
@ -1246,7 +1302,7 @@ func (r *run) checkContent(file string) {
debugprint("checkContent: [pass] must match any line and current line matched. regex='%s', line='%s'\n",
c.value, scanner.Text())
if c.wantThis(true) {
c.storeMatch(file)
c.storeMatch(f.filename)
}
}
} else {
@ -1255,7 +1311,7 @@ func (r *run) checkContent(file string) {
c.value, scanner.Text())
macroalstatus[label] = false
if c.wantThis(true) {
c.storeMatch(file)
c.storeMatch(f.filename)
}
} else {
debugprint("checkContent: [fail] must not match at least one line but current line matched. regex='%s', line='%s'\n",
@ -1280,7 +1336,7 @@ func (r *run) checkContent(file string) {
c.value, scanner.Text())
macroalstatus[label] = false
if c.wantThis(false) {
c.storeMatch(file)
c.storeMatch(f.filename)
}
} else {
debugprint("checkContent: [fail] much match any line and current line didn't match. regex='%s', line='%s'\n",
@ -1309,7 +1365,7 @@ func (r *run) checkContent(file string) {
// 1. if MACROAL is set and the search succeeded, store the file in each content check
if search.Options.Macroal && macroalstatus[label] {
debugprint("checkContent: macroal is set and search label '%s' passed on file '%s'\n",
label, file)
label, f.filename)
// we match all content regexes on all lines of the file,
// as requested via the Macroal flag
// now store the filename in all checks
@ -1318,7 +1374,7 @@ func (r *run) checkContent(file string) {
continue
}
if c.wantThis(checksstatus[label][i]) {
c.storeMatch(file)
c.storeMatch(f.filename)
}
search.checks[i] = c
}
@ -1332,9 +1388,9 @@ func (r *run) checkContent(file string) {
}
if !checksstatus[label][i] && c.inversematch {
debugprint("in search '%s' on file '%s', check '%s' has not matched and is set to inversematch, record this as a positive result\n",
label, file, c.value)
label, f.filename, c.value)
if c.wantThis(checksstatus[label][i]) {
c.storeMatch(file)
c.storeMatch(f.filename)
}
// adjust check status to true because the check did in fact match as an inverse
checksstatus[label][i] = true
@ -1350,7 +1406,7 @@ func (r *run) checkContent(file string) {
// check hasn't matched, or has matched and we didn't want it to, deactivate the search
if !checksstatus[label][i] || (checksstatus[label][i] && c.inversematch) {
if c.wantThis(checksstatus[label][i]) {
c.storeMatch(file)
c.storeMatch(f.filename)
search.checks[i] = c
}
search.deactivate()
@ -1362,7 +1418,7 @@ func (r *run) checkContent(file string) {
return
}
func (r *run) checkHash(file string, hashtype checkType) {
func (r *run) checkHash(f fileEntry, hashtype checkType) {
var (
err error
)
@ -1382,7 +1438,7 @@ func (r *run) checkHash(file string, hashtype checkType) {
if nothingToDo {
return
}
hash, err := getHash(file, hashtype)
hash, err := getHash(f, hashtype)
if err != nil {
panic(err)
}
@ -1395,10 +1451,10 @@ func (r *run) checkHash(file string, hashtype checkType) {
match := false
if c.value == hash {
match = true
debugprint("checkHash: file '%s' matches checksum '%s'\n", file, c.value)
debugprint("checkHash: file '%s' matches checksum '%s'\n", f.filename, c.value)
}
if c.wantThis(match) {
c.storeMatch(file)
c.storeMatch(f.filename)
} else if search.Options.MatchAll {
search.deactivate()
}
@ -1411,19 +1467,15 @@ func (r *run) checkHash(file string, hashtype checkType) {
}
// getHash calculates the hash of a file.
func getHash(file string, hashType checkType) (hexhash string, err error) {
func getHash(f fileEntry, hashType checkType) (hexhash string, err error) {
defer func() {
if e := recover(); e != nil {
err = fmt.Errorf("getHash() -> %v", e)
}
}()
fd, err := os.Open(file)
if err != nil {
stats.Openfailed++
panic(err)
}
defer fd.Close()
debugprint("getHash: computing hash for '%s'\n", fd.Name())
reader := f.getReader()
defer f.Close()
debugprint("getHash: computing hash for '%s'\n", f.fd.Name())
var h hash.Hash
switch hashType {
case checkMD5:
@ -1449,9 +1501,8 @@ func getHash(file string, hashType checkType) (hexhash string, err error) {
panic(err)
}
buf := make([]byte, 4096)
var offset int64 = 0
for {
block, err := fd.ReadAt(buf, offset)
block, err := reader.Read(buf)
if err != nil && err != io.EOF {
panic(err)
}
@ -1459,7 +1510,6 @@ func getHash(file string, hashType checkType) (hexhash string, err error) {
break
}
h.Write(buf[:block])
offset += int64(block)
}
hexhash = fmt.Sprintf("%X", h.Sum(nil))
return
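Because getHash now consumes a plain io.Reader instead of calling ReadAt with an offset, the manual read loop above could equivalently be written with io.Copy, since hash.Hash implements io.Writer. A minimal sketch of that alternative, not part of the commit:

    package main

    import (
        "crypto/sha256"
        "fmt"
        "hash"
        "io"
        "strings"
    )

    // hashReader streams r into h and returns the uppercase hex digest,
    // mirroring the %X formatting used by getHash.
    func hashReader(h hash.Hash, r io.Reader) (string, error) {
        if _, err := io.Copy(h, r); err != nil {
            return "", err
        }
        return fmt.Sprintf("%X", h.Sum(nil)), nil
    }

    func main() {
        digest, err := hashReader(sha256.New(), strings.NewReader("mig"))
        if err != nil {
            fmt.Println(err)
            return
        }
        fmt.Println(digest)
    }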
@ -1554,7 +1604,8 @@ func (r *run) buildResults(t0 time.Time) (resStr string, err error) {
mf.FileInfo.Mode = fi.Mode().String()
mf.FileInfo.Mtime = fi.ModTime().UTC().String()
if search.Options.ReturnSHA256 {
mf.FileInfo.SHA256, err = getHash(mf.File, checkSHA256)
f := fileEntry{filename: mf.File}
mf.FileInfo.SHA256, err = getHash(f, checkSHA256)
if err != nil {
panic(err)
}

View file

@ -102,6 +102,48 @@ func TestContentSearch(t *testing.T) {
}
}
func TestDecompressedContentSearch(t *testing.T) {
for _, tp := range TESTDATA {
var (
r run
s search
)
var expectedfiles = []string{
basedir + "/" + tp.name,
basedir + subdirs + tp.name,
}
// testfile8 is a corrupt gzip and should fail to decompress
if tp.name == "testfile8" {
expectedfiles = []string{""}
}
r.Parameters = *newParameters()
s.Paths = append(s.Paths, basedir)
if tp.decompressedcontent != "" {
s.Contents = append(s.Contents, tp.decompressedcontent)
} else {
s.Contents = append(s.Contents, tp.content)
}
s.Contents = append(s.Contents, "!^FOOBAR$")
s.Options.MatchAll = true
s.Options.Decompress = true
r.Parameters.Searches["s1"] = s
msg, err := modules.MakeMessage(modules.MsgClassParameters, r.Parameters)
if err != nil {
t.Fatal(err)
}
t.Logf("%s\n", msg)
out := r.Run(bytes.NewBuffer(msg))
if len(out) == 0 {
t.Fatal("run failed")
}
t.Log(out)
err = evalResults([]byte(out), expectedfiles)
if err != nil {
t.Fatal(err)
}
}
}
func TestSize(t *testing.T) {
for _, tp := range TESTDATA {
var (
@ -241,6 +283,46 @@ func TestHashes(t *testing.T) {
}
}
func TestDecompressedHash(t *testing.T) {
for _, tp := range TESTDATA {
var (
r run
s search
)
var expectedfiles = []string{
basedir + "/" + tp.name,
basedir + subdirs + tp.name,
}
// testfile8 is a corrupt gzip and should fail to decompress
if tp.name == "testfile8" {
expectedfiles = []string{""}
}
r.Parameters = *newParameters()
s.Paths = append(s.Paths, basedir)
if tp.decompressedmd5 != "" {
s.MD5 = append(s.MD5, tp.decompressedmd5)
} else {
s.MD5 = append(s.MD5, tp.md5)
}
s.Options.Decompress = true
r.Parameters.Searches["s1"] = s
msg, err := modules.MakeMessage(modules.MsgClassParameters, r.Parameters)
if err != nil {
t.Fatal(err)
}
t.Logf("%s\n", msg)
out := r.Run(bytes.NewBuffer(msg))
if len(out) == 0 {
t.Fatal("run failed")
}
t.Log(out)
err = evalResults([]byte(out), expectedfiles)
if err != nil {
t.Fatal(err)
}
}
}
func TestAllHashes(t *testing.T) {
for _, tp := range TESTDATA {
var (
@ -387,7 +469,7 @@ type mismatchtest struct {
func TestMismatch(t *testing.T) {
var MismatchTestCases = []mismatchtest{
mismatchtest{
desc: "want files that don't match name '^testfile0' with maxdepth=1, should find testfile1, 2, 3, 4 & 5",
desc: "want files that don't match name '^testfile0' with maxdepth=1, should find testfile1, 2, 3, 4, 5, 6, 7 & 8",
search: search{
Paths: []string{basedir},
Names: []string{"^" + TESTDATA[0].name + "$"},
@ -401,10 +483,13 @@ func TestMismatch(t *testing.T) {
basedir + "/" + TESTDATA[2].name,
basedir + "/" + TESTDATA[3].name,
basedir + "/" + TESTDATA[4].name,
basedir + "/" + TESTDATA[5].name},
basedir + "/" + TESTDATA[5].name,
basedir + "/" + TESTDATA[6].name,
basedir + "/" + TESTDATA[7].name,
basedir + "/" + TESTDATA[8].name},
},
mismatchtest{
desc: "want files that don't have a size of 190 bytes or larger than 10{k,m,g,t} or smaller than 10 bytes, should find testfile1, 2 & 3",
desc: "want files that don't have a size of 190 bytes or larger than 10{k,m,g,t} or smaller than 10 bytes, should find testfile1, 2, 3, 4, 5, 6, 7 & 8",
search: search{
Paths: []string{basedir},
Sizes: []string{"190", ">10k", ">10m", ">10g", ">10t", "<10"},
@ -419,7 +504,10 @@ func TestMismatch(t *testing.T) {
basedir + "/" + TESTDATA[2].name,
basedir + "/" + TESTDATA[3].name,
basedir + "/" + TESTDATA[4].name,
basedir + "/" + TESTDATA[5].name},
basedir + "/" + TESTDATA[5].name,
basedir + "/" + TESTDATA[6].name,
basedir + "/" + TESTDATA[7].name,
basedir + "/" + TESTDATA[8].name},
},
mismatchtest{
desc: "want files that have not been modified in the last hour ago, should find nothing",
@ -473,7 +561,7 @@ func TestMismatch(t *testing.T) {
basedir + subdirs + TESTDATA[1].name},
},
mismatchtest{
desc: "want files that don't match the hashes of testfile2, should find testfile0, 1, 3, 4, & 5",
desc: "want files that don't match the hashes of testfile2, should find testfile0, 1, 3, 4, 5, 6, 7 & 8",
search: search{
Paths: []string{basedir},
MD5: []string{TESTDATA[2].md5},
@ -491,7 +579,10 @@ func TestMismatch(t *testing.T) {
basedir + "/" + TESTDATA[1].name,
basedir + "/" + TESTDATA[3].name,
basedir + "/" + TESTDATA[4].name,
basedir + "/" + TESTDATA[5].name},
basedir + "/" + TESTDATA[5].name,
basedir + "/" + TESTDATA[6].name,
basedir + "/" + TESTDATA[7].name,
basedir + "/" + TESTDATA[8].name},
},
}
@ -543,6 +634,7 @@ func TestParamsParser(t *testing.T) {
args = append(args, "-matchlimit", "10")
args = append(args, "-maxdepth", "2")
args = append(args, "-verbose")
args = append(args, "-decompress")
t.Logf("%s\n", args)
_, err = r.ParamsParser(args)
if err != nil {
@ -633,7 +725,8 @@ const subdirs string = `/a/b/c/d/e/f/g/h/i/j/k/l/m/n/`
type testParams struct {
data []byte
name, size, mode, mtime, content,
md5, sha1, sha2, sha3 string
md5, sha1, sha2, sha3,
decompressedcontent, decompressedmd5 string
}
var TESTDATA = []testParams{
@ -787,4 +880,77 @@ some other text`),
sha2: `8871b2ff047be05571549398e54c1f36163ae171e05a89900468688ea3bac4f9f3d7c922f0bebc24fdac28d0b2d38fb2718209fb5976c9245e7c837170b79819`,
sha3: `cb086f02b728d57e299651f89e1fb0f89c659db50c7c780ec2689a8143e55c8e5e63ab47fe20897be7155e409151c190`,
},
testParams{
data: []byte("\x1f\x8b\x08\x08\xd9\xdc\x88\x56\x00\x03\x74\x65\x73\x74\x00\x8d" +
"\x8e\xcd\x0a\xc3\x30\x0c\x83\xef\x79\x0a\xc1\xae\xf3\x43\x65\xad" +
"\x4a\x0c\x49\x1c\x1a\xb3\xbf\xa7\x5f\x96\xb2\x4b\x4f\x33\x42\x18" +
"\xf3\x49\x58\x44\x90\x18\x57\xee\xd8\x6c\xc7\x5b\x5b\xe3\x8a\x4d" +
"\x33\x21\x22\xe1\x02\x4f\xda\x31\x14\xb1\x58\x29\xac\x1e\xf0\xdf" +
"\x8c\x6c\xbc\xd9\x9d\x33\x5c\x91\xb5\xf2\xdb\x9b\x47\xfd\x43\x3d" +
"\xa1\xb7\xb8\xb0\x87\x13\xc6\xd2\xfc\x35\xe1\x2b\xaa\xfd\xa0\x6e" +
"\x85\x70\x3e\xfd\xd8\xcc\xd3\xf8\xf7\xf0\x79\xfd\x00\x4c\x08\xa4" +
"\x7a\xc6\x00\x00\x00"),
name: `testfile6`,
size: `133`,
mode: `-rw-r--r--`,
mtime: `<1m`,
content: `KO3B`,
md5: `31d38eee231318166538e1569631aba9`,
sha1: `bd1f24d8cbb000bbf7bcd618c2aec73280388721`,
sha2: `bb4e449df74edae0292d60d2733a3b1801d90ae23560484b1e04fb52f111a14f`,
sha3: `433b84f162d1b00481e6da022c5738fb4d04c3bb4317f73266746dd1`,
decompressedcontent: `^--- header for zipped file ---$`,
decompressedmd5: `5bb23d3b1eaaddc6e108e3fb0ee80a61`,
},
testParams{
data: []byte("\x1f\x8b\x08\x00\xd9\xe3\x8f\x56\x00\x03\xed\xd4\xcb\x6a\x84\x30" +
"\x14\x06\x60\xd7\xf3\x14\x07\xba\xad\x60\xe2\xed\x09\xfa\x02\x5d" +
"\x14\xba\x4c\xf5\x0c\x86\x6a\x22\xe6\xf4\xfa\xf4\x8d\xce\x74\x28" +
"\x03\x65\xba\xb1\x45\xfa\x7f\x44\x22\x7a\x12\x43\xf0\x8f\x70\x90" +
"\xbd\xed\x59\x25\xeb\xc9\xa2\xaa\x2a\xe6\x5e\xd5\x65\xf6\xb5\x5f" +
"\xe4\xaa\x4c\x94\xae\x8a\xbc\xae\x54\xa1\x55\x92\x29\xad\xeb\x2c" +
"\xa1\x6c\xc5\x35\x9d\x3c\x05\x31\x13\x51\xf2\x68\x43\xe7\xa7\xef" +
"\xeb\x2e\xbd\xdf\xa8\x34\x4d\xa9\x63\xd3\xf2\x44\x7b\x1f\x2f\x3b" +
"\x05\xa1\x77\x3b\x8e\xdc\xd2\xfc\x5f\x50\x2c\xd8\x5d\x91\x74\x36" +
"\x50\x6c\x86\x1a\x3f\x0c\xec\x64\x47\x3f\x13\xc7\x9a\x07\xff\xcc" +
"\xcb\x60\x47\xbd\x75\x3c\xcf\xdb\xc7\xe9\x5f\xac\x74\x14\x46\xd3" +
"\x70\xd8\x9d\x95\xf1\x30\xca\xdb\x52\x7c\x4d\xce\x7f\x16\x05\x3f" +
"\x30\x09\xbf\xca\xe1\xce\x4b\x17\x57\x3d\x19\xd7\xfa\xe1\xf0\xf8" +
"\xaf\x37\x73\x83\xe4\x98\x7f\xbd\xe2\x37\x2e\xe6\x5f\xe7\xa7\xfc" +
"\x97\xc5\x31\xff\x39\xf2\xff\x1b\xce\xf2\x1f\xb8\xf1\xae\xdd\xd4" +
"\x01\x70\x77\x73\x7b\x8f\x53\x00\x00\x00\x00\x00\x00\x00\x00\x00" +
"\x00\x00\x00\x00\x00\x00\x00\xfe\xaf\x0f\x60\x69\x1f\x15\x00\x28" +
"\x00\x00"),
name: `testfile7`,
size: `274`,
mode: `-rw-r--r--`,
mtime: `<1m`,
content: `t6Pl`,
md5: `52fa96013b5c6aa9302d39ee7fe2f6a5`,
sha1: `31952c0d2772c302ec94b303c2b80b67cf830060`,
sha2: `f6032dc9b4ba112397a6f8bcb778ab10708c1acd38e48f637ace15c3ae417ded`,
sha3: `3f4dacf0b2347d0a0ab6f09b7d7c98fd12cb2030d4af8baeacaf55a9`,
decompressedcontent: `ustar`,
decompressedmd5: `7f82b4c1613fd10208ad1f71de17ebb5`,
},
testParams{
data: []byte("\x1f\x8b\x08\x08\x2c\xe8\x8f\x56\x00\x03\x74\x65\x73\x74\x66\x69" +
"\x6c\x65\x33\x00\x8d\x8e\x4b\x0a\x03\x31\x0c\x43\xf7\x73\x0a\x41" +
"\xb7\xf5\xa1\xd2\x19\x0d\x09\x24\x71\x48\xdc\xef\xe9\xeb\xa6\xcc" +
"\xa6\xab\x1a\x81\x6c\x78\x12\x16\x11\x44\x86\x8d\x1d\xbb\x76\xac" +
"\xda\xfb\xb5\x19\x37\xbc\x52\x6b\x00\x00\xca\x84\x88\x2c\x27\x58" +
"\x4c\x03\xae\xe0\x54\x29\xac\xb6\xe0\xbf\xf1\x6c\xb8\xe8\x8d\x33" +
"\x5c\x91\x53\xe5\xa7\x37\x7b\xfd\x3d\x59\xc4\x68\x61\xe5\x58\x7e" +
"\x30\x96\x66\xcf\x09\x9f\x51\xf5\x80\x86\x16\xc2\xf8\xb0\xef\xa6" +
"\x16\xfd\xf3\x79\xbf\x01\x7b\xae\xde\x84\xca\x00\x00\x00"),
name: `testfile8`,
size: `142`,
mode: `-rw-r--r--`,
mtime: `<1m`,
content: `,'XL`,
md5: `df7b577ceb59f700d5b03db9d12d174e`,
sha1: `ea033d30e996ac443bc50e9c37eb25b37505302e`,
sha2: `2f4f81c0920501f178085032cd2784b8aa811b8c8e94da7ff85a43a361cd96cc`,
sha3: `d171566f8026a4ca6b4cdf8e6491a651625f98fbc15f9cb601833b64`,
},
}
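
The gzip blobs embedded above (testfile6, testfile7 and the deliberately corrupted testfile8) are stored as raw byte strings. A fixture of this kind could be regenerated with compress/gzip; the sketch below is illustrative, using the plain-text header matched by testfile6's decompressedcontent regex rather than the exact bytes committed here.

    package main

    import (
        "bytes"
        "compress/gzip"
        "fmt"
    )

    func main() {
        // compress a small plain-text payload into an in-memory gzip blob
        var buf bytes.Buffer
        gz := gzip.NewWriter(&buf)
        gz.Write([]byte("--- header for zipped file ---\n"))
        gz.Close()
        // print the blob as a quoted Go string, ready to paste into a testParams data field
        fmt.Printf("%q\n", buf.Bytes())
        // truncating or flipping bytes in the blob would produce a corrupt
        // fixture similar to testfile8, which should fail to decompress
    }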

View file

@ -76,6 +76,8 @@ Options
is reached.
%sreturnsha256 - include sha256 hash for matched files.
ex: %sreturnsha256
%sdecompress - decompress file before inspection
ex: %sdecompress
Module documentation is at http://mig.mozilla.org/doc/module_file.html
Cheatsheet and examples are at http://mig.mozilla.org/doc/cheatsheet.rst.html
@ -100,6 +102,7 @@ func (r *run) ParamsCreator() (interface{}, error) {
search.Options.MatchAll = true
search.Options.MaxDepth = 1000
search.Options.MatchLimit = 1000
search.Options.Decompress = false
for {
fmt.Println("Give a name to this search, or 'done' to exit")
fmt.Printf("label> ")
@ -347,6 +350,12 @@ func (r *run) ParamsCreator() (interface{}, error) {
continue
}
search.Options.MatchLimit = v
case "decompress":
if checkValue != "" {
fmt.Println("This option doesn't take arguments, try again")
continue
}
search.Options.Decompress = true
default:
fmt.Printf("Invalid method!\n")
continue
@ -369,9 +378,9 @@ func (r *run) ParamsParser(args []string) (interface{}, error) {
err error
paths, names, sizes, modes, mtimes, contents, md5s, sha1s, sha2s,
sha3s, mismatch flagParam
maxdepth, matchlimit float64
returnsha256, matchall, matchany, macroal, verbose bool
fs flag.FlagSet
maxdepth, matchlimit float64
returnsha256, matchall, matchany, macroal, verbose, decompress bool
fs flag.FlagSet
)
if len(args) < 1 || args[0] == "" || args[0] == "help" {
printHelp(true)
@ -396,6 +405,7 @@ func (r *run) ParamsParser(args []string) (interface{}, error) {
fs.BoolVar(&macroal, "macroal", false, "see help")
fs.BoolVar(&debug, "verbose", false, "see help")
fs.BoolVar(&returnsha256, "returnsha256", false, "see help")
fs.BoolVar(&decompress, "decompress", false, "see help")
err = fs.Parse(args)
if err != nil {
return nil, err
@ -417,6 +427,7 @@ func (r *run) ParamsParser(args []string) (interface{}, error) {
s.Options.Mismatch = mismatch
s.Options.MatchAll = matchall
s.Options.ReturnSHA256 = returnsha256
s.Options.Decompress = decompress
if matchany {
s.Options.MatchAll = false
}