path: root/framework/src/suricata/qa
Diffstat (limited to 'framework/src/suricata/qa')
-rw-r--r--  framework/src/suricata/qa/Makefile.am                                         |   2
-rw-r--r--  framework/src/suricata/qa/coccinelle/Makefile.am                              |  21
-rw-r--r--  framework/src/suricata/qa/coccinelle/access-pkt-packet.cocci                  |  55
-rw-r--r--  framework/src/suricata/qa/coccinelle/action-pkt.cocci                         |  15
-rw-r--r--  framework/src/suricata/qa/coccinelle/banned-functions.cocci                   |  15
-rw-r--r--  framework/src/suricata/qa/coccinelle/direct-packet.cocci                      |  15
-rw-r--r--  framework/src/suricata/qa/coccinelle/malloc-error-check.cocci                 |  63
-rw-r--r--  framework/src/suricata/qa/coccinelle/pktnotset-packet.cocci                   |  29
-rw-r--r--  framework/src/suricata/qa/coccinelle/realloc.cocci                            |  18
-rwxr-xr-x  framework/src/suricata/qa/coccinelle/run_check.sh                             |  40
-rw-r--r--  framework/src/suricata/qa/coccinelle/size_t.cocci                             |  44
-rw-r--r--  framework/src/suricata/qa/coccinelle/struct-flags.cocci                       |  77
-rwxr-xr-x  framework/src/suricata/qa/coccinelle/struct-flags.py                          |  55
-rw-r--r--  framework/src/suricata/qa/coccinelle/sz3.cocci                                |  48
-rw-r--r--  framework/src/suricata/qa/docker/buildbot.cfg                                 | 235
-rw-r--r--  framework/src/suricata/qa/docker/pcaps/tls.pcap                               | bin 160944 -> 0 bytes
-rw-r--r--  framework/src/suricata/qa/drmemory.suppress                                   |  16
-rwxr-xr-x  framework/src/suricata/qa/gnuplot/plot-csv-large-all.sh                       |  24
-rwxr-xr-x  framework/src/suricata/qa/gnuplot/plot-csv-large-pcap-file-stream-vs-http.sh  |  26
-rwxr-xr-x  framework/src/suricata/qa/gnuplot/plot-csv-large-pcap-file.sh                 |  31
-rwxr-xr-x  framework/src/suricata/qa/gnuplot/plot-csv-small-pcap-file-stream-vs-http.sh  |  26
-rwxr-xr-x  framework/src/suricata/qa/gnuplot/plot-csv-small-pcap-file.sh                 |  31
-rwxr-xr-x  framework/src/suricata/qa/prscript.py                                         | 360
-rwxr-xr-x  framework/src/suricata/qa/sock_to_gzip_file.py                                |  57
-rwxr-xr-x  framework/src/suricata/qa/travis-libhtp.sh                                    |   3
-rw-r--r--  framework/src/suricata/qa/valgrind.suppress                                   |  69
-rwxr-xr-x  framework/src/suricata/qa/wirefuzz.pl                                         | 645
27 files changed, 0 insertions, 2020 deletions
diff --git a/framework/src/suricata/qa/Makefile.am b/framework/src/suricata/qa/Makefile.am
deleted file mode 100644
index e5f77dc8..00000000
--- a/framework/src/suricata/qa/Makefile.am
+++ /dev/null
@@ -1,2 +0,0 @@
-SUBDIRS = coccinelle
-EXTRA_DIST = wirefuzz.pl sock_to_gzip_file.py drmemory.suppress
diff --git a/framework/src/suricata/qa/coccinelle/Makefile.am b/framework/src/suricata/qa/coccinelle/Makefile.am
deleted file mode 100644
index 8ceebe06..00000000
--- a/framework/src/suricata/qa/coccinelle/Makefile.am
+++ /dev/null
@@ -1,21 +0,0 @@
-EXTRA_DIST= access-pkt-packet.cocci \
- action-pkt.cocci \
- banned-functions.cocci \
- direct-packet.cocci \
- malloc-error-check.cocci \
- pktnotset-packet.cocci \
- size_t.cocci \
- struct-flags.cocci \
- sz3.cocci \
- run_check.sh struct-flags.py
-
-if HAVE_COCCINELLE
-struct-flags.cocci:
- $(srcdir)/struct-flags.py > $(top_builddir)/qa/coccinelle/struct-flags.cocci
-
-check:
- $(top_srcdir)/qa/coccinelle/run_check.sh
-
-distclean-local:
- -rm $(top_builddir)/qa/coccinelle/struct-flags.cocci
-endif
diff --git a/framework/src/suricata/qa/coccinelle/access-pkt-packet.cocci b/framework/src/suricata/qa/coccinelle/access-pkt-packet.cocci
deleted file mode 100644
index 681848ec..00000000
--- a/framework/src/suricata/qa/coccinelle/access-pkt-packet.cocci
+++ /dev/null
@@ -1,55 +0,0 @@
-@init@
-typedef Packet;
-Packet *p;
-expression E;
-statement S;
-@@
-
-(
-memset(p, ...);
-p->pkt = E;
-|
-p = SCCalloc(...);
-S
-p->pkt = E;
-)
-
-@pktfield depends on !init@
-identifier func !~ "^PacketCopyDataOffset$";
-Packet *p;
-position p1;
-@@
-
-func(...) {
-<...
-p->pkt@p1
-...>
-}
-
-@ script:python @
-p1 << pktfield.p1;
-@@
-
-print "Invalid Packet->pkt usage, GET_PKT_DATA macro must be used at %s:%s" % (p1[0].file, p1[0].line)
-import sys
-sys.exit(1)
-
-@pktlenfield@
-identifier func !~ "^PacketCopyDataOffset$";
-Packet *p;
-position p1;
-@@
-
-func(...) {
-<...
-p->pktlen@p1
-...>
-}
-
-@ script:python @
-p1 << pktlenfield.p1;
-@@
-
-print "Invalid Packet->pktlen usage, GET_PKT_LEN macro must be used at %s:%s" % (p1[0].file, p1[0].line)
-import sys
-sys.exit(1)
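
The rule above rejects direct reads of Packet->pkt and Packet->pktlen outside PacketCopyDataOffset. A minimal hypothetical sketch of the rejected and accepted forms (HypotheticalDecode is an invented name; GET_PKT_DATA/GET_PKT_LEN are the macros named in the rule messages):

static uint32_t HypotheticalDecode(Packet *p)
{
    /* uint8_t *data = p->pkt;    -- would be flagged: GET_PKT_DATA(p) must be used */
    /* uint32_t len  = p->pktlen; -- would be flagged: GET_PKT_LEN(p) must be used  */
    uint8_t *data = GET_PKT_DATA(p);
    uint32_t len = GET_PKT_LEN(p);
    return (data != NULL) ? len : 0;
}
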
diff --git a/framework/src/suricata/qa/coccinelle/action-pkt.cocci b/framework/src/suricata/qa/coccinelle/action-pkt.cocci
deleted file mode 100644
index 1a66721a..00000000
--- a/framework/src/suricata/qa/coccinelle/action-pkt.cocci
+++ /dev/null
@@ -1,15 +0,0 @@
-@action@
-typedef Packet;
-Packet *p;
-position p1;
-@@
-
-p->action@p1
-
-@ script:python @
-p1 << action.p1;
-@@
-
-print "Invalid usage of p->action, please use macro at %s:%s" % (p1[0].file, p1[0].line)
-import sys
-sys.exit(1)
diff --git a/framework/src/suricata/qa/coccinelle/banned-functions.cocci b/framework/src/suricata/qa/coccinelle/banned-functions.cocci
deleted file mode 100644
index 5913521c..00000000
--- a/framework/src/suricata/qa/coccinelle/banned-functions.cocci
+++ /dev/null
@@ -1,15 +0,0 @@
-@banned@
-identifier i;
-position p1;
-@@
-
-\(strtok@i\|sprintf@i\|strcat@i\|strcpy@i\|strncpy@i\|strncat@i\|strndup@i\|strchrdup@i\)(...)@p1
-
-@script:python@
-p1 << banned.p1;
-i << banned.i;
-@@
-
-print("Banned function '%s' used at %s:%s" % (i, p1[0].file, p1[0].line))
-import sys
-sys.exit(1)
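
The rule above bans the unbounded string functions listed in its pattern. A minimal hypothetical sketch (CopyName is an invented name; strlcpy is assumed to be available in the tree as the bounded replacement):

static void CopyName(char *dst, size_t dstlen, const char *src)
{
    /* strcpy(dst, src);  -- would be flagged as a banned function */
    strlcpy(dst, src, dstlen);   /* bounded copy; availability assumed */
}
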
diff --git a/framework/src/suricata/qa/coccinelle/direct-packet.cocci b/framework/src/suricata/qa/coccinelle/direct-packet.cocci
deleted file mode 100644
index dbe1f98b..00000000
--- a/framework/src/suricata/qa/coccinelle/direct-packet.cocci
+++ /dev/null
@@ -1,15 +0,0 @@
-@directpacket@
-identifier p;
-typedef Packet;
-position p1;
-@@
-
-Packet p@p1;
-
-@ script:python @
-p1 << directpacket.p1;
-@@
-
-print "Invalid Packet definition, explicit allocation must be used at %s:%s" % (p1[0].file, p1[0].line)
-import sys
-sys.exit(1)
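
The rule above forbids declaring a Packet directly on the stack. A minimal hypothetical sketch of the flagged and expected forms (HypotheticalTest is an invented name; SCCalloc appears in the other rules here, and SCFree is assumed from the same allocation layer):

static int HypotheticalTest(void)
{
    /* Packet pkt;  -- would be flagged: direct Packet definition */
    Packet *p = SCCalloc(1, sizeof(Packet));   /* explicit allocation instead */
    if (unlikely(p == NULL))
        return 0;
    SCFree(p);
    return 1;
}
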
diff --git a/framework/src/suricata/qa/coccinelle/malloc-error-check.cocci b/framework/src/suricata/qa/coccinelle/malloc-error-check.cocci
deleted file mode 100644
index b245189a..00000000
--- a/framework/src/suricata/qa/coccinelle/malloc-error-check.cocci
+++ /dev/null
@@ -1,63 +0,0 @@
-@malloced@
-expression x;
-position p1;
-identifier func =~ "(SCMalloc|SCStrdup|SCCalloc|SCMallocAligned|SCRealloc)";
-@@
-
-x@p1 = func(...)
-
-@inlinetested@
-expression x, E;
-statement S;
-position malloced.p1;
-identifier func =~ "(SCMalloc|SCStrdup|SCCalloc|SCMallocAligned|SCRealloc)";
-@@
-
-(
-if ((x@p1 = func(...)) == NULL) S
-|
-if (E && (x@p1 = func(...)) == NULL) S
-)
-
-@realloc exists@
-position malloced.p1;
-expression x, E1;
-identifier func =~ "(SCMalloc|SCCalloc|SCMallocAligned)";
-@@
-
-x@p1 = func(...)
-... when != x
-x = SCRealloc(x, E1)
-
-@istested depends on !realloc exists@
-expression x, E1;
-position malloced.p1;
-statement S1, S2;
-identifier func =~ "(SCMalloc|SCStrdup|SCCalloc|SCMallocAligned|SCRealloc)";
-@@
-
-x@p1 = func(...)
-... when != x
-(
-if (unlikely(x == NULL)) S1
-|
-if (unlikely(x == NULL)) S1 else S2
-|
-if (likely(x != NULL)) S1
-|
-if (x == NULL) S1
-|
-if (x != NULL) S1 else S2
-|
-if (x && E1) S1
-|
-BUG_ON(x == NULL)
-)
-
-
-@script:python depends on !realloc && !istested && !inlinetested@
-p1 << malloced.p1;
-@@
-print "Structure malloced at %s:%s but error is not checked." % (p1[0].file, p1[0].line)
-import sys
-sys.exit(1)
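
The rule above requires every SCMalloc/SCCalloc/SCStrdup/SCRealloc result to be error-checked. A minimal hypothetical sketch (SomeCtx and SomeCtxAlloc are invented names; the unlikely() test matches one of the accepted patterns listed above):

typedef struct SomeCtx_ { int id; } SomeCtx;

static SomeCtx *SomeCtxAlloc(void)
{
    SomeCtx *ctx = SCMalloc(sizeof(*ctx));   /* flagged if the result is never tested   */
    if (unlikely(ctx == NULL))               /* accepted: checked right after the call  */
        return NULL;
    memset(ctx, 0, sizeof(*ctx));
    return ctx;
}
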
diff --git a/framework/src/suricata/qa/coccinelle/pktnotset-packet.cocci b/framework/src/suricata/qa/coccinelle/pktnotset-packet.cocci
deleted file mode 100644
index ab6a98c1..00000000
--- a/framework/src/suricata/qa/coccinelle/pktnotset-packet.cocci
+++ /dev/null
@@ -1,29 +0,0 @@
-@zeroed@
-typedef Packet;
-typedef uint8_t;
-Packet *p;
-position p1;
-@@
-
-memset(p@p1, 0, ...);
-
-@isset@
-Packet *p;
-position zeroed.p1;
-@@
-
-memset(p@p1, 0, ...);
-... when != p
-(
-p->pkt
-|
-PACKET_INITIALIZE(p)
-)
-
-@script:python depends on !isset@
-p1 << zeroed.p1;
-@@
-
-print "Packet zeroed at %s:%s but pkt field is not set afterward." % (p1[0].file, p1[0].line)
-import sys
-sys.exit(1)
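
The rule above flags a memset() of a Packet that is not followed by setting p->pkt or calling PACKET_INITIALIZE(). A minimal hypothetical sketch (RecyclePacket is an invented name):

static void RecyclePacket(Packet *p)
{
    memset(p, 0, sizeof(*p));   /* flagged on its own: the pkt field is wiped...         */
    PACKET_INITIALIZE(p);       /* ...so this macro (or a p->pkt assignment) must follow */
}
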
diff --git a/framework/src/suricata/qa/coccinelle/realloc.cocci b/framework/src/suricata/qa/coccinelle/realloc.cocci
deleted file mode 100644
index 0b828807..00000000
--- a/framework/src/suricata/qa/coccinelle/realloc.cocci
+++ /dev/null
@@ -1,18 +0,0 @@
-@realloc@
-expression x, E;
-type ty;
-position p1;
-@@
-
-(
-x@p1 = SCRealloc(x, E)
-|
-x@p1 = (ty *) SCRealloc(x, E)
-)
-
-@script:python@
-p1 << realloc.p1;
-@@
-print "Structure reallocated at %s:%s but original pointer is lost and not freed in case of error." % (p1[0].file, p1[0].line)
-import sys
-sys.exit(1)
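
The rule above flags x = SCRealloc(x, n), where the original allocation leaks if SCRealloc fails. A minimal hypothetical sketch of the safer pattern (GrowBuffer is an invented name; SCFree is assumed from the same allocation layer):

static int GrowBuffer(void **pbuf, size_t newsize)
{
    /* *pbuf = SCRealloc(*pbuf, newsize);  -- would be flagged: old pointer lost on failure */
    void *tmp = SCRealloc(*pbuf, newsize);
    if (unlikely(tmp == NULL)) {
        SCFree(*pbuf);
        *pbuf = NULL;
        return -1;
    }
    *pbuf = tmp;
    return 0;
}
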
diff --git a/framework/src/suricata/qa/coccinelle/run_check.sh b/framework/src/suricata/qa/coccinelle/run_check.sh
deleted file mode 100755
index 79ec9cc6..00000000
--- a/framework/src/suricata/qa/coccinelle/run_check.sh
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/sh
-
-if [ $1 ]; then
- case $1 in
- *[ch])
- LIST=$@;
- ;;
- *..*)
- LIST=$(git diff --pretty="format:" --name-only $1 | grep -E '[ch]$')
- PREFIX=$(git rev-parse --show-toplevel)/
- ;;
- *)
- LIST=$(git show --pretty="format:" --name-only $1 | grep -E '[ch]$')
- PREFIX=$(git rev-parse --show-toplevel)/
- ;;
- esac
-else
- LIST=$(git ls-tree -r --name-only --full-tree HEAD src/ | grep -E '*.c$')
- PREFIX=$(git rev-parse --show-toplevel)/
-fi
-
-if [ -z "$CONCURRENCY_LEVEL" ]; then
- CONCURRENCY_LEVEL=1
- echo "No concurrency"
-else
- echo "Using concurrency level $CONCURRENCY_LEVEL"
-fi
-
-for SMPL in $(git rev-parse --show-toplevel)/qa/coccinelle/*.cocci; do
- echo "Testing cocci file: $SMPL"
- if command -v parallel >/dev/null; then
- echo -n $LIST | parallel -d ' ' -j $CONCURRENCY_LEVEL spatch --very-quiet -sp_file $SMPL --undefined UNITTESTS $PREFIX{} || if [ -z "$NOT_TERMINAL" ]; then exit 1; fi
- else
- for FILE in $LIST ; do
- spatch --very-quiet -sp_file $SMPL --undefined UNITTESTS $PREFIX$FILE || if [ -z "$NOT_TERMINAL" ]; then exit 1; fi
- done
- fi
-done
-
-exit 0
diff --git a/framework/src/suricata/qa/coccinelle/size_t.cocci b/framework/src/suricata/qa/coccinelle/size_t.cocci
deleted file mode 100644
index 4bd5b9f2..00000000
--- a/framework/src/suricata/qa/coccinelle/size_t.cocci
+++ /dev/null
@@ -1,44 +0,0 @@
-@sizet@
-size_t p;
-identifier func =~ "^(sprintf|printf|SCLog.*)$";
-identifier funcn =~ "^.*nprintf$";
-position p1;
-typedef uint16_t;
-typedef uint32_t;
-typedef uint64_t;
-expression E1, E2;
-@@
-
-(
-func(..., p, ...)@p1;
-|
-func(..., (int) p, ...)@p1;
-|
-func(..., (unsigned int) p, ...)@p1;
-|
-func(..., (uint16_t) p, ...)@p1;
-|
-func(..., (uint32_t) p, ...)@p1;
-|
-func(..., (uint64_t) p, ...)@p1;
-|
-funcn(E1, E2,..., p, ...)@p1;
-|
-funcn(E1, E2,..., (int) p, ...)@p1;
-|
-funcn(E1, E2,..., (unsigned int) p, ...)@p1;
-|
-funcn(E1, E2,..., (uint16_t) p, ...)@p1;
-|
-funcn(E1, E2,..., (uint32_t) p, ...)@p1;
-|
-funcn(E1, E2,..., (uint64_t) p, ...)@p1;
-)
-
-@ script:python @
-p1 << sizet.p1;
-@@
-
-print "Invalid printf with size_t (not casted to uintmax_t) at %s:%s" % (p1[0].file, p1[0].line)
-import sys
-sys.exit(1)
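
The rule above flags printing a size_t without a cast to uintmax_t. A minimal hypothetical sketch (LogLen is an invented name; SCLogInfo is one of the SCLog* functions matched by the rule, and PRIuMAX comes from <inttypes.h>):

#include <inttypes.h>

static void LogLen(size_t len)
{
    /* SCLogInfo("len %u", (unsigned int)len);  -- would be flagged: narrowing cast */
    SCLogInfo("len %" PRIuMAX, (uintmax_t)len);  /* accepted: cast to uintmax_t */
}
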
diff --git a/framework/src/suricata/qa/coccinelle/struct-flags.cocci b/framework/src/suricata/qa/coccinelle/struct-flags.cocci
deleted file mode 100644
index 45fab734..00000000
--- a/framework/src/suricata/qa/coccinelle/struct-flags.cocci
+++ /dev/null
@@ -1,77 +0,0 @@
-@flags@
-SignatureHeader *struct0;
-identifier struct_flags0 =~ "^(?!SIG_FLAG).+";
-Signature *struct1;
-identifier struct_flags1 =~ "^(?!SIG_FLAG).+";
-Signature *struct2;
-identifier struct_flags2 =~ "^(?!SIG_FLAG_INIT_).+";
-Flow *struct3;
-identifier struct_flags3 =~ "^(?!FLOW_).+";
-TcpSegment *struct4;
-identifier struct_flags4 =~ "^(?!SEGMENTTCP_FLAG).+";
-TcpStream *struct5;
-identifier struct_flags5 =~ "^(?!STREAMTCP_STREAM_FLAG_).+";
-TcpSession *struct6;
-identifier struct_flags6 =~ "^(?!STREAMTCP_FLAG).+";
-Packet *struct7;
-identifier struct_flags7 =~ "^(?!FLOW_PKT_).+";
-position p1;
-@@
-
-(
-struct0->flags@p1 |= struct_flags0
-|
-struct0->flags@p1 & struct_flags0
-|
-struct0->flags@p1 &= ~struct_flags0
-|
-struct1->flags@p1 |= struct_flags1
-|
-struct1->flags@p1 & struct_flags1
-|
-struct1->flags@p1 &= ~struct_flags1
-|
-struct2->init_flags@p1 |= struct_flags2
-|
-struct2->init_flags@p1 & struct_flags2
-|
-struct2->init_flags@p1 &= ~struct_flags2
-|
-struct3->flags@p1 |= struct_flags3
-|
-struct3->flags@p1 & struct_flags3
-|
-struct3->flags@p1 &= ~struct_flags3
-|
-struct4->flags@p1 |= struct_flags4
-|
-struct4->flags@p1 & struct_flags4
-|
-struct4->flags@p1 &= ~struct_flags4
-|
-struct5->flags@p1 |= struct_flags5
-|
-struct5->flags@p1 & struct_flags5
-|
-struct5->flags@p1 &= ~struct_flags5
-|
-struct6->flags@p1 |= struct_flags6
-|
-struct6->flags@p1 & struct_flags6
-|
-struct6->flags@p1 &= ~struct_flags6
-|
-struct7->flowflags@p1 |= struct_flags7
-|
-struct7->flowflags@p1 & struct_flags7
-|
-struct7->flowflags@p1 &= ~struct_flags7
-)
-
-@script:python@
-p1 << flags.p1;
-@@
-
-print "Invalid usage of flags field at %s:%s, flags value is incorrect (wrong family)." % (p1[0].file, p1[0].line)
-import sys
-sys.exit(1)
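
The rule above checks that each flags field only receives constants from its own family (FLOW_PKT_* for Packet->flowflags, SIG_FLAG* for Signature->flags, and so on). A minimal hypothetical sketch (MarkToServer is an invented name; FLOW_PKT_TOSERVER stands in for a constant of the FLOW_PKT_ family):

static void MarkToServer(Packet *p)
{
    p->flowflags |= FLOW_PKT_TOSERVER;   /* accepted: FLOW_PKT_ family constant             */
    /* p->flowflags |= SIG_FLAG_...;        would be flagged: constant from another family  */
}
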
diff --git a/framework/src/suricata/qa/coccinelle/struct-flags.py b/framework/src/suricata/qa/coccinelle/struct-flags.py
deleted file mode 100755
index 3a91157b..00000000
--- a/framework/src/suricata/qa/coccinelle/struct-flags.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env python
-import re
-from os import listdir
-
-SRC_DIR="../../src/"
-
-class Structure:
- def __init__(self, string):
- (self.struct, self.flags, self.values) = string.split(":")
-
-cmd = "grep -h coccinelle ../../src/*[ch] | sed -e 's/.*coccinelle: \(.*\) \*\//\1/'"
-
-struct_list = []
-
-dirList = listdir(SRC_DIR)
-for fname in dirList:
- if re.search("\.[ch]$", fname):
- for line in open(SRC_DIR + fname):
- if "coccinelle:" in line:
- m = re.search("coccinelle: (.*) \*\/", line)
- struct = Structure(m.group(1))
- struct_list.append(struct)
-
-header = "@flags@"
-body = []
-
-i = 0
-for struct in struct_list:
- header += """
-%s *struct%d;
-identifier struct_flags%d =~ "^(?!%s).+";""" % ( struct.struct, i, i, struct.values)
-
- body.append("""
-struct%d->%s@p1 |= struct_flags%d
-|
-struct%d->%s@p1 & struct_flags%d
-|
-struct%d->%s@p1 &= ~struct_flags%d
-""" % (i, struct.flags, i, i, struct.flags, i, i, struct.flags, i))
-
- i+=1
-
-print header
-print "position p1;"
-print "@@"
-print ""
-print "(" + "|".join(body) + ")"
-print ""
-print """@script:python@
-p1 << flags.p1;
-@@
-
-print "Invalid usage of flags field at %s:%s, flags value is incorrect (wrong family)." % (p1[0].file, p1[0].line)
-import sys
-sys.exit(1)"""
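
The generator above builds its struct/field/prefix triples from "coccinelle:" annotations found in the C sources; judging from the regex and the colon split, an annotation is expected to look roughly like the following (field placement is illustrative, not taken from the tree):

typedef struct Packet_ {
    /* ... */
    uint8_t flowflags;   /* coccinelle: Packet:flowflags:FLOW_PKT_ */
    /* ... */
} Packet;
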
diff --git a/framework/src/suricata/qa/coccinelle/sz3.cocci b/framework/src/suricata/qa/coccinelle/sz3.cocci
deleted file mode 100644
index 37a8877b..00000000
--- a/framework/src/suricata/qa/coccinelle/sz3.cocci
+++ /dev/null
@@ -1,48 +0,0 @@
-//
-// Take size of pointed value, not pointer
-//
-// Target: Linux, Generic
-// Copyright: 2012 - LIP6/INRIA
-// License: Licensed under GPLv2 or any later version.
-// Author: Julia Lawall <Julia.Lawall@lip6.fr>
-// URL: http://coccinelle.lip6.fr/
-// URL: http://coccinellery.org/
-// Modified by Eric Leblond <eric@regit.org> for suricata test system
-
-@preuse@
-expression *e;
-type T;
-identifier f;
-position p1;
-@@
-
-f(...,
-sizeof(e@p1)
-,...,(T)e,...)
-
-@ script:python @
-p1 << preuse.p1;
-@@
-
-print "Size of pointed value not pointer used at %s:%s" % (p1[0].file, p1[0].line)
-import sys
-sys.exit(1)
-
-@postuse@
-expression *e;
-type T;
-identifier f;
-position p1;
-@@
-
-f(...,(T)e,...,
-sizeof(e@p1)
-,...)
-
-@ script:python @
-p1 << postuse.p1;
-@@
-
-print "Size of pointed value not pointer used at %s:%s" % (p1[0].file, p1[0].line)
-import sys
-sys.exit(1)
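
The rule above targets the classic mistake of passing sizeof on a pointer where the pointed-to object was meant. A minimal hypothetical sketch of the mistake and the intended form (ClearPacket is an invented name; whether a particular call matches depends on the cast patterns in the rule):

static void ClearPacket(Packet *p)
{
    /* memset(p, 0, sizeof(p));  -- the mistake: size of the pointer itself     */
    memset(p, 0, sizeof(*p));    /* intended: size of the pointed-to structure  */
}
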
diff --git a/framework/src/suricata/qa/docker/buildbot.cfg b/framework/src/suricata/qa/docker/buildbot.cfg
deleted file mode 100644
index b2063ac9..00000000
--- a/framework/src/suricata/qa/docker/buildbot.cfg
+++ /dev/null
@@ -1,235 +0,0 @@
-# -*- python -*-
-# ex: set syntax=python:
-
-# This is a sample buildmaster config file. It must be installed as
-# 'master.cfg' in your buildmaster's base directory.
-
-# This is the dictionary that the buildmaster pays attention to. We also use
-# a shorter alias to save typing.
-c = BuildmasterConfig = {}
-
-####### BUILDSLAVES
-
-# The 'slaves' list defines the set of recognized buildslaves. Each element is
-# a BuildSlave object, specifying a unique slave name and password. The same
-# slave name and password must be configured on the slave.
-from buildbot.buildslave import BuildSlave
-c['slaves'] = [BuildSlave("buildslave", "Suridocker")]
-
-# 'slavePortnum' defines the TCP port to listen on for connections from slaves.
-# This must match the value configured into the buildslaves (with their
-# --master option)
-c['slavePortnum'] = 9989
-
-####### CHANGESOURCES
-
-# the 'change_source' setting tells the buildmaster how it should find out
-# about source code changes. Here we point to the buildbot clone of pyflakes.
-
-from buildbot.changes.gitpoller import GitPoller
-c['change_source'] = []
-c['change_source'].append(GitPoller(
- '/data/oisf/.git/',
- workdir='gitpoller-workdir', branches = ['master'],
- pollinterval=300, project='suricata'))
-
-####### SCHEDULERS
-
-# Configure the Schedulers, which decide how to react to incoming changes. In this
-# case, just kick off a 'runtests' build
-
-from buildbot.schedulers.basic import SingleBranchScheduler
-#from buildbot.schedulers.forcesched import ForceScheduler
-from buildbot.changes import filter
-c['schedulers'] = []
-c['schedulers'].append(SingleBranchScheduler(
- name="master",
- change_filter=filter.ChangeFilter(branch='master'),
- treeStableTimer=None,
- builderNames=["features","profiling","clang"]))
-
-#c['schedulers'].append(ForceScheduler(
-# name="force",
-# builderNames=["builds","debug"]))
-
-####### BUILDERS
-
-# The 'builders' list defines the Builders, which tell Buildbot how to perform a build:
-# what steps, and which slaves can execute them. Note that any particular build will
-# only take place on one slave.
-
-from buildbot.process.factory import BuildFactory
-#from buildbot.steps.source.git import Git
-from buildbot.steps.source import Git
-from buildbot.steps.shell import ShellCommand
-
-def SuriBuildFactory(repo='/data/oisf/'):
- factory = BuildFactory()
- factory.addStep(Git(repourl=repo, mode='copy'))
- factory.addStep(ShellCommand(command=["rm", "-rf", "libhtp"]))
- factory.addStep(ShellCommand(command=["git", "clone", "-b", "0.5.x", "/data/oisf/libhtp/.git/", "libhtp"]))
- return factory
-
-factory = SuriBuildFactory()
-# run the tests (note that this will require that 'trial' is installed)
-factory.addStep(ShellCommand(command=["./autogen.sh"]))
-factory.addStep(ShellCommand(command=["./configure"]))
-factory.addStep(ShellCommand(command=["make"]))
-factory.addStep(ShellCommand(command=["make", "clean"]))
-#factory.addStep(ShellCommand(command=["make", "distcheck"]))
-
-factory_devel = SuriBuildFactory()
-# run the tests (note that this will require that 'trial' is installed)
-factory_devel.addStep(ShellCommand(command=["./autogen.sh"]))
-factory_devel.addStep(ShellCommand(command=["./configure","--enable-debug","--enable-unittests"]))
-factory_devel.addStep(ShellCommand(command=["make"]))
-factory_devel.addStep(ShellCommand(command=["make", "clean"]))
-#factory_devel.addStep(ShellCommand(command=["make", "distcheck"], env={'DISTCHECK_CONFIGURE_FLAGS': "--enable-debug --enable-unittests"}))
-
-factory_profiling = SuriBuildFactory()
-# run the tests (note that this will require that 'trial' is installed)
-factory_profiling.addStep(ShellCommand(command=["./autogen.sh"]))
-factory_profiling.addStep(ShellCommand(command=["./configure","--enable-debug","--enable-profiling","--enable-unittests"]))
-factory_profiling.addStep(ShellCommand(command=["make"]))
-factory_profiling.addStep(ShellCommand(command=["make", "clean"]))
-#factory_profiling.addStep(ShellCommand(command=["make", "distcheck"],env={'DISTCHECK_CONFIGURE_FLAGS': "--enable-debug --enable-profiling --enable-unittests"}))
-
-factory_clang = SuriBuildFactory()
-# run the tests (note that this will require that 'trial' is installed)
-factory_clang.addStep(ShellCommand(command=["./autogen.sh"]))
-#factory_clang.addStep(ShellCommand(command=["./configure","--enable-debug","--enable-unittests","CC=clang","CFLAGS=-fsanitize=address"]))
-factory_clang.addStep(ShellCommand(command=["./configure","--enable-debug","--enable-unittests","CC=clang","ac_cv_func_malloc_0_nonnull=yes","ac_cv_func_realloc_0_nonnull=yes"]))
-factory_clang.addStep(ShellCommand(command=["make"]))
-factory_clang.addStep(ShellCommand(command=["make", "clean"]))
-
-factory_clang_32 = SuriBuildFactory()
-# run the tests (note that this will require that 'trial' is installed)
-factory_clang_32.addStep(ShellCommand(command=["./autogen.sh"]))
-factory_clang_32.addStep(ShellCommand(command=["./configure","--enable-debug","--enable-unittests","CC=clang","CFLAGS=-fsanitize=address","ac_cv_func_malloc_0_nonnull=yes","ac_cv_func_realloc_0_nonnull=yes"]))
-factory_clang_32.addStep(ShellCommand(command=["make"]))
-factory_clang_32.addStep(ShellCommand(command=["make", "clean"]))
-
-factory_features = SuriBuildFactory()
-# run the tests (note that this will require that 'trial' is installed)
-factory_features.addStep(ShellCommand(command=["./autogen.sh"]))
-factory_features.addStep(ShellCommand(command=["./configure","--enable-debug","--enable-unittests","--enable-nfqueue","--enable-nflog", "--enable-lua", "--enable-prelude"]))
-factory_features.addStep(ShellCommand(command=["make"]))
-factory_features.addStep(ShellCommand(command=["make", "clean"]))
-import psutil
-factory_features.addStep(ShellCommand(command=["make", "distcheck"],env={'DISTCHECK_CONFIGURE_FLAGS': "--enable-debug --enable-unittests --enable-nfqueue --enable-nflog --enable-lua --enable-prelude", "CONCURRENCY_LEVEL": str(psutil.cpu_count())}))
-
-import os
-PCAP_PATH='/data/oisf/qa/docker/pcaps/'
-(_, _, pcaps_list) = os.walk(PCAP_PATH).next()
-pcaps_list = [ os.path.join(PCAP_PATH, pcap) for pcap in pcaps_list if pcap.endswith(".pcap") ]
-
-factory_stress_pcap = SuriBuildFactory()
-# run the tests (note that this will require that 'trial' is installed)
-factory_stress_pcap.addStep(ShellCommand(command=["./autogen.sh"]))
-factory_stress_pcap.addStep(ShellCommand(command=["./configure","--enable-debug-validation"],env={"CFLAGS" : "-fsanitize=address -fno-omit-frame-pointer"}))
-factory_stress_pcap.addStep(ShellCommand(command=["make"]))
-factory_stress_pcap.addStep(ShellCommand(command=["sudo", "make","install"]))
-factory_stress_pcap.addStep(ShellCommand(command=["sudo", "rm", "-f", "/usr/local/etc/suricata/suricata.yaml"]))
-factory_stress_pcap.addStep(ShellCommand(command=["sudo", "make","install-conf"]))
-factory_stress_pcap.addStep(ShellCommand(command=["make","clean"]))
-factory_stress_pcap.addStep(ShellCommand(command=["sudo", "ldconfig"]))
-for pfile in pcaps_list:
- factory_stress_pcap.addStep(ShellCommand(command=["sudo", "/usr/local/bin/suricata","-r",pfile,"--init-errors-fatal","-S","/data/oisf/rules/http-events.rules"]))
-factory_stress_pcap.addStep(ShellCommand(command=["sudo", "rm", "-rf", "/usr/local/var/log/suricata/"]))
-
-from buildbot.config import BuilderConfig
-
-def SuriBuilderConfig(*args, **kwargs):
- if not kwargs.has_key('category'):
- kwargs['category']='default'
- return BuilderConfig(*args, **kwargs)
-
-c['builders'] = []
-
-c['builders'].append(
- SuriBuilderConfig(name="gcc",
- slavename="buildslave",
- factory=factory))
-c['schedulers'].append(SingleBranchScheduler(
- name="build",
- change_filter=filter.ChangeFilter(branch='master'),
- treeStableTimer=None,
- builderNames=["gcc"]))
-
-c['builders'].append(
- SuriBuilderConfig(name="debug",
- slavename="buildslave",
- factory=factory_devel))
-c['schedulers'].append(SingleBranchScheduler(
- name="debug",
- change_filter=filter.ChangeFilter(branch='master'),
- treeStableTimer=None,
- builderNames=["debug"]))
-
-c['builders'].append(
- SuriBuilderConfig(name="profiling",
- slavename="buildslave",
- factory=factory_profiling))
-c['builders'].append(
- SuriBuilderConfig(name="clang",
- slavename="buildslave",
- factory=factory_clang_32))
-c['builders'].append(
- SuriBuilderConfig(name="features",
- slavename="buildslave",
- factory=factory_features))
-c['builders'].append(
- SuriBuilderConfig(name="pcaps",
- slavename="buildslave",
- factory=factory_stress_pcap))
-
-from buildbot import locks
-build_lock = locks.SlaveLock("slave_builds", maxCount = 1)
-
-
-from buildbot.schedulers.forcesched import *
-c['schedulers'].append(ForceScheduler(name="force", builderNames = [ builder.getConfigDict()['name'] for builder in c['builders'] ]))
-
-c['status'] = []
-
-from buildbot.status import html
-from buildbot.status.web import authz, auth
-
-authz_cfg=authz.Authz(
- # change any of these to True to enable; see the manual for more
- # options
- #auth=auth.BasicAuth(users),
- gracefulShutdown = False,
- forceBuild = True, # use this to test your slave once it is set up
- forceAllBuilds = True,
- pingBuilder = True,
- stopBuild = True,
- stopAllBuilds = True,
- cancelPendingBuild = True,
-)
-c['status'].append(html.WebStatus(http_port=8010, authz=authz_cfg))
-
-####### PROJECT IDENTITY
-
-# the 'title' string will appear at the top of this buildbot
-# installation's html.WebStatus home page (linked to the
-# 'titleURL') and is embedded in the title of the waterfall HTML page.
-
-c['title'] = "Suricata"
-c['titleURL'] = "https://redmine.openinfosecfoundation.org/projects/suricata"
-
-# the 'buildbotURL' string should point to the location where the buildbot's
-# internal web server (usually the html.WebStatus page) is visible. This
-# typically uses the port number set in the Waterfall 'status' entry, but
-# with an externally-visible host name which the buildbot cannot figure out
-# without some help.
-
-c['buildbotURL'] = "http://localhost:8010/"
-
-####### DB URL
-
-c['db'] = {
- # This specifies what database buildbot uses to store its state. You can leave
- # this at its default for all but the largest installations.
- 'db_url' : "sqlite:///state.sqlite",
-}
diff --git a/framework/src/suricata/qa/docker/pcaps/tls.pcap b/framework/src/suricata/qa/docker/pcaps/tls.pcap
deleted file mode 100644
index 8aca2186..00000000
--- a/framework/src/suricata/qa/docker/pcaps/tls.pcap
+++ /dev/null
Binary files differ
diff --git a/framework/src/suricata/qa/drmemory.suppress b/framework/src/suricata/qa/drmemory.suppress
deleted file mode 100644
index fd79b022..00000000
--- a/framework/src/suricata/qa/drmemory.suppress
+++ /dev/null
@@ -1,16 +0,0 @@
-UNADDRESSABLE ACCESS
-name=magic issue, not important
-libc.so.6!__strnlen_sse2
-libc.so.6!_IO_vfprintf_internal
-libc.so.6!__GI___vasprintf_chk
-libc.so.6!__asprintf_chk
-libmagic.so.1!?
-libmagic.so.1!file_apprentice
-libmagic.so.1!magic_load
-suricata!main
-
-LEAK
-name=useless warning, likely DrMemory bug
-*
-libpcre.so.3!pcre_get_substring
-
diff --git a/framework/src/suricata/qa/gnuplot/plot-csv-large-all.sh b/framework/src/suricata/qa/gnuplot/plot-csv-large-all.sh
deleted file mode 100755
index f3484fd7..00000000
--- a/framework/src/suricata/qa/gnuplot/plot-csv-large-all.sh
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/bin/bash
-#
-#
-if [ "$1" = "" ]; then
- echo "call with location of csv file."
- exit 1;
-fi
-
-gnuplot << EOF
-set datafile separator ","
-set terminal png size 1024,768
-set output "$1.png"
-set title "$1 ticks"
-set key autotitle columnhead
-set yrange [:]
-set xrange [:]
-set logscale y
-#set pointsize 4
-plot "$1" using $2 with $4, for [i in $3] '' using i with $4
-EOF
-RESULT=$?
-if [ "$RESULT" = "0" ]; then
- echo "PNG $1.png written"
-fi
diff --git a/framework/src/suricata/qa/gnuplot/plot-csv-large-pcap-file-stream-vs-http.sh b/framework/src/suricata/qa/gnuplot/plot-csv-large-pcap-file-stream-vs-http.sh
deleted file mode 100755
index 9942307a..00000000
--- a/framework/src/suricata/qa/gnuplot/plot-csv-large-pcap-file-stream-vs-http.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/bash
-#
-#
-if [ "$1" = "" ]; then
- echo "call with location of csv file."
- exit 1;
-fi
-
-DRAW="dots"
-gnuplot << EOF
-set datafile separator ","
-set terminal png size 1024,768
-set output "$1.png"
-set title "$1 ticks"
-set key autotitle columnhead
-set yrange [:]
-set xrange [:]
-set logscale y
-plot "$1" using :32 with $DRAW, \
- "" using :54 with $DRAW, \
- "" using :42 with $DRAW
-EOF
-RESULT=$?
-if [ "$RESULT" = "0" ]; then
- echo "PNG $1.png written"
-fi
diff --git a/framework/src/suricata/qa/gnuplot/plot-csv-large-pcap-file.sh b/framework/src/suricata/qa/gnuplot/plot-csv-large-pcap-file.sh
deleted file mode 100755
index 08c601e3..00000000
--- a/framework/src/suricata/qa/gnuplot/plot-csv-large-pcap-file.sh
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/bin/bash
-#
-#
-if [ "$1" = "" ]; then
- echo "call with location of csv file."
- exit 1;
-fi
-
-DRAW="dots"
-gnuplot << EOF
-set datafile separator ","
-set terminal png size 1024,768
-set output "$1.png"
-set title "$1 ticks"
-set key autotitle columnhead
-set yrange [:]
-set xrange [:]
-set logscale y
-plot "$1" using :4 with $DRAW, \
- "" using :11 with $DRAW, \
- "" using :14 with $DRAW, \
- "" using :15 with $DRAW, \
- "" using :20 with $DRAW, \
- "" using :28 with $DRAW, \
- "" using :32 with $DRAW, \
- "" using :40 with $DRAW
-EOF
-RESULT=$?
-if [ "$RESULT" = "0" ]; then
- echo "PNG $1.png written"
-fi
diff --git a/framework/src/suricata/qa/gnuplot/plot-csv-small-pcap-file-stream-vs-http.sh b/framework/src/suricata/qa/gnuplot/plot-csv-small-pcap-file-stream-vs-http.sh
deleted file mode 100755
index 81a53136..00000000
--- a/framework/src/suricata/qa/gnuplot/plot-csv-small-pcap-file-stream-vs-http.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/bash
-#
-#
-if [ "$1" = "" ]; then
- echo "call with location of csv file."
- exit 1;
-fi
-
-DRAW="boxes"
-gnuplot << EOF
-set datafile separator ","
-set terminal png size 1024,768
-set output "$1.png"
-set title "$1 ticks"
-set key autotitle columnhead
-set yrange [:]
-set xrange [:]
-set logscale y
-plot "$1" using :32 with $DRAW, \
- "" using :54 with $DRAW, \
- "" using :42 with points
-EOF
-RESULT=$?
-if [ "$RESULT" = "0" ]; then
- echo "PNG $1.png written"
-fi
diff --git a/framework/src/suricata/qa/gnuplot/plot-csv-small-pcap-file.sh b/framework/src/suricata/qa/gnuplot/plot-csv-small-pcap-file.sh
deleted file mode 100755
index 01c5ba10..00000000
--- a/framework/src/suricata/qa/gnuplot/plot-csv-small-pcap-file.sh
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/bin/bash
-#
-#
-if [ "$1" = "" ]; then
- echo "call with location of csv file."
- exit 1;
-fi
-
-DRAW="lines"
-gnuplot << EOF
-set datafile separator ","
-set terminal png size 1024,768
-set output "$1.png"
-set title "$1 ticks"
-set key autotitle columnhead
-set yrange [:]
-set xrange [:]
-set logscale y
-plot "$1" using :4 with $DRAW, \
- "" using :11 with $DRAW, \
- "" using :14 with $DRAW, \
- "" using :15 with $DRAW, \
- "" using :20 with $DRAW, \
- "" using :28 with $DRAW, \
- "" using :32 with $DRAW, \
- "" using :40 with $DRAW
-EOF
-RESULT=$?
-if [ "$RESULT" = "0" ]; then
- echo "PNG $1.png written"
-fi
diff --git a/framework/src/suricata/qa/prscript.py b/framework/src/suricata/qa/prscript.py
deleted file mode 100755
index 02dd0ad9..00000000
--- a/framework/src/suricata/qa/prscript.py
+++ /dev/null
@@ -1,360 +0,0 @@
-#!/usr/bin/env python
-# Copyright(C) 2013, 2014, 2015 Open Information Security Foundation
-
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, version 2 of the License.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
-
-# Note to Docker users:
-# If you are running SELinux in enforced mode, you may want to run
-# chcon -Rt svirt_sandbox_file_t SURICATA_ROOTSRC_DIR
-# or the buildbot will not be able to access the data in /data/oisf
-# and the git step will fail.
-
-import urllib, urllib2, cookielib
-try:
- import simplejson as json
-except:
- import json
-import time
-import argparse
-import sys
-import os
-import copy
-
-GOT_NOTIFY = True
-try:
- import pynotify
-except:
- GOT_NOTIFY = False
-
-GOT_DOCKER = True
-try:
- from docker import Client
-except:
- GOT_DOCKER = False
-# variables
-# - github user
-# - buildbot user and password
-
-BASE_URI="https://buildbot.openinfosecfoundation.org/"
-GITHUB_BASE_URI = "https://api.github.com/repos/"
-GITHUB_MASTER_URI = "https://api.github.com/repos/inliniac/suricata/commits?sha=master"
-
-if GOT_DOCKER:
- parser = argparse.ArgumentParser(prog='prscript', description='Script checking validity of branch before PR')
-else:
- parser = argparse.ArgumentParser(prog='prscript', description='Script checking validity of branch before PR',
- epilog='You need to install Python docker module to enable docker container handling options.')
-parser.add_argument('-u', '--username', dest='username', help='github and buildbot user')
-parser.add_argument('-p', '--password', dest='password', help='buildbot password')
-parser.add_argument('-c', '--check', action='store_const', const=True, help='only check last build', default=False)
-parser.add_argument('-v', '--verbose', action='store_const', const=True, help='verbose output', default=False)
-parser.add_argument('--norebase', action='store_const', const=True, help='do not test if branch is in sync with master', default=False)
-parser.add_argument('-r', '--repository', dest='repository', default='suricata', help='name of suricata repository on github')
-parser.add_argument('-l', '--local', action='store_const', const=True, help='local testing before github push', default=False)
-if GOT_NOTIFY:
- parser.add_argument('-n', '--notify', action='store_const', const=True, help='send desktop notification', default=False)
-
-docker_deps = ""
-if not GOT_DOCKER:
- docker_deps = " (disabled)"
-parser.add_argument('-d', '--docker', action='store_const', const=True, help='use docker based testing', default=False)
-parser.add_argument('-C', '--create', action='store_const', const=True, help='create docker container' + docker_deps, default=False)
-parser.add_argument('-s', '--start', action='store_const', const=True, help='start docker container' + docker_deps, default=False)
-parser.add_argument('-S', '--stop', action='store_const', const=True, help='stop docker container' + docker_deps, default=False)
-parser.add_argument('-R', '--rm', action='store_const', const=True, help='remove docker container and image' + docker_deps, default=False)
-parser.add_argument('branch', metavar='branch', help='github branch to build', nargs='?')
-args = parser.parse_args()
-username = args.username
-password = args.password
-cookie = None
-
-if args.create or args.start or args.stop:
- if GOT_DOCKER:
- args.docker = True
- args.local = True
- else:
- print "You need to install python docker to use docker handling features."
- sys.exit(-1)
-
-if not args.local:
- if not args.username:
- print "You need to specify a github username (-u option) for this mode (or use -l to disable)"
- sys.exit(-1)
-
-if args.docker:
- BASE_URI="http://localhost:8010/"
- BUILDERS_LIST = ["gcc", "clang", "debug", "features", "profiling", "pcaps"]
-else:
- BUILDERS_LIST = [username, username + "-pcap"]
-
-BUILDERS_URI=BASE_URI+"builders/"
-JSON_BUILDERS_URI=BASE_URI+"json/builders/"
-
-if GOT_NOTIFY:
- if args.notify:
- pynotify.init("PRscript")
-
-def SendNotification(title, text):
- if not GOT_NOTIFY:
- return
- if not args.notify:
- return
- n = pynotify.Notification(title, text)
- n.show()
-
-def TestRepoSync(branch):
- request = urllib2.Request(GITHUB_MASTER_URI)
- page = urllib2.urlopen(request)
- json_result = json.loads(page.read())
- sha_orig = json_result[0]["sha"]
- request = urllib2.Request(GITHUB_BASE_URI + username + "/" + args.repository + "/commits?sha=" + branch + "&per_page=100")
- page = urllib2.urlopen(request)
- json_result = json.loads(page.read())
- found = -1
- for commit in json_result:
- if commit["sha"] == sha_orig:
- found = 1
- break
- return found
-
-def OpenBuildbotSession():
- auth_params = { 'username':username,'passwd':password, 'name':'login'}
- cookie = cookielib.LWPCookieJar()
- params = urllib.urlencode(auth_params)
- opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie))
- urllib2.install_opener(opener)
- request = urllib2.Request(BASE_URI + 'login', params)
- page = urllib2.urlopen(request)
- return cookie
-
-
-def SubmitBuild(branch, extension = "", builder_name = None):
- raw_params = {'branch':branch,'reason':'Testing ' + branch, 'name':'force_build', 'forcescheduler':'force'}
- params = urllib.urlencode(raw_params)
- if not args.docker:
- opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cookie))
- urllib2.install_opener(opener)
- if builder_name == None:
- builder_name = username + extension
- request = urllib2.Request(BUILDERS_URI + builder_name + '/force', params)
- page = urllib2.urlopen(request)
-
- result = page.read()
- if args.verbose:
- print "=== response ==="
- print result
- print "=== end of response ==="
- if args.docker:
- if "<h2>Pending Build Requests:</h2>" in result:
- print "Build '" + builder_name + "' submitted"
- return 0
- else:
- return -1
- if "Current Builds" in result:
- print "Build '" + builder_name + "' submitted"
- return 0
- else:
- return -1
-
-# TODO honor the branch argument
-def FindBuild(branch, extension = "", builder_name = None):
- if builder_name == None:
- request = urllib2.Request(JSON_BUILDERS_URI + username + extension + '/')
- else:
- request = urllib2.Request(JSON_BUILDERS_URI + builder_name + '/')
- page = urllib2.urlopen(request)
- json_result = json.loads(page.read())
- # Pending build is unnumbered
- if json_result["pendingBuilds"]:
- return -1
- if json_result["currentBuilds"]:
- return json_result["currentBuilds"][0]
- if json_result["cachedBuilds"]:
- return json_result["cachedBuilds"][-1]
- return -2
-
-def GetBuildStatus(builder, buildid, extension="", builder_name = None):
- if builder_name == None:
- builder_name = username + extension
- # https://buildbot.suricata-ids.org/json/builders/build%20deb6/builds/11
- request = urllib2.Request(JSON_BUILDERS_URI + builder_name + '/builds/' + str(buildid))
- page = urllib2.urlopen(request)
- result = page.read()
- if args.verbose:
- print "=== response ==="
- print result
- print "=== end of response ==="
- json_result = json.loads(result)
- if json_result["currentStep"]:
- return 1
- if 'successful' in json_result["text"]:
- return 0
- return -1
-
-def WaitForBuildResult(builder, buildid, extension="", builder_name = None):
- # fetch result every 10 secs till task is over
- if builder_name == None:
- builder_name = username + extension
- res = 1
- while res == 1:
- res = GetBuildStatus(username,buildid, builder_name = builder_name)
- if res == 1:
- time.sleep(10)
-
- # return the result
- if res == 0:
- print "Build successful for " + builder_name
- else:
- print "Build failure for " + builder_name + ": " + BUILDERS_URI + builder_name + '/builds/' + str(buildid)
- return res
-
- # check that the github branch and inliniac's master branch are in sync
-if not args.local and TestRepoSync(args.branch) == -1:
- if args.norebase:
- print "Branch " + args.branch + " is not in sync with inliniac's master branch. Continuing due to --norebase option."
- else:
- print "Branch " + args.branch + " is not in sync with inliniac's master branch. Rebase needed."
- sys.exit(-1)
-
-def CreateContainer():
- cli = Client()
- # FIXME check if existing
-    print "Pulling docker image, first run may take a while"
- cli.pull('regit/suri-buildbot')
- cli.create_container(name='suri-buildbot', image='regit/suri-buildbot', ports=[8010, 22], volumes=['/data/oisf', '/data/buildbot/master/master.cfg'])
- sys.exit(0)
-
-def StartContainer():
- cli = Client()
- suri_src_dir = os.path.split(os.path.dirname(os.path.realpath(__file__)))[0]
- print "Using base src dir: " + suri_src_dir
- cli.start('suri-buildbot', port_bindings={8010:8010, 22:None}, binds={suri_src_dir: { 'bind': '/data/oisf', 'ro': True}, os.path.join(suri_src_dir,'qa','docker','buildbot.cfg'): { 'bind': '/data/buildbot/master/master.cfg', 'ro': True}} )
- sys.exit(0)
-
-def StopContainer():
- cli = Client()
- cli.stop('suri-buildbot')
- sys.exit(0)
-
-def RmContainer():
- cli = Client()
- try:
- cli.remove_container('suri-buildbot')
- except:
- print "Unable to remove suri-buildbot container"
- pass
- try:
- cli.remove_image('regit/suri-buildbot:latest')
- except:
- print "Unable to remove suri-buildbot images"
- pass
- sys.exit(0)
-
-if GOT_DOCKER:
- if args.create:
- CreateContainer()
- if args.start:
- StartContainer()
- if args.stop:
- StopContainer()
- if args.rm:
- RmContainer()
-
-if not args.branch:
- print "You need to specify a branch for this mode"
- sys.exit(-1)
-
-# submit buildbot form to build current branch on the devel builder
-if not args.check:
- if not args.docker:
- cookie = OpenBuildbotSession()
- if cookie == None:
- print "Unable to connect to buildbot with provided credentials"
- sys.exit(-1)
- for build in BUILDERS_LIST:
- res = SubmitBuild(args.branch, builder_name = build)
- if res == -1:
- print "Unable to start build. Check command line parameters"
- sys.exit(-1)
-
-buildids = {}
-
-if args.docker:
- time.sleep(2)
-
-# get build number and exit if we don't have
-for build in BUILDERS_LIST:
- buildid = FindBuild(args.branch, builder_name = build)
- if buildid == -1:
- print "Pending build tracking is not supported. Follow build by browsing " + BUILDERS_URI + build
- elif buildid == -2:
- print "No build found for " + BUILDERS_URI + build
- sys.exit(0)
- else:
- if not args.docker:
- print "You can watch build progress at " + BUILDERS_URI + build + "/builds/" + str(buildid)
- buildids[build] = buildid
-
-if args.docker:
- print "You can watch build progress at " + BASE_URI + "waterfall"
-
-if len(buildids):
- print "Waiting for build completion"
-else:
- sys.exit(0)
-
-res = 0
-if args.docker:
- while len(buildids):
- up_buildids = copy.copy(buildids)
- for build in buildids:
- ret = GetBuildStatus(build, buildids[build], builder_name = build)
- if ret == -1:
- res = -1
- up_buildids.pop(build, None)
- if len(up_buildids):
- remains = " (remaining builds: " + ', '.join(up_buildids.keys()) + ")"
- else:
- remains = ""
- print "Build failure for " + build + ": " + BUILDERS_URI + build + '/builds/' + str(buildids[build]) + remains
- elif ret == 0:
- up_buildids.pop(build, None)
- if len(up_buildids):
- remains = " (remaining builds: " + ', '.join(up_buildids.keys()) + ")"
- else:
- remains = ""
- print "Build successful for " + build + remains
- time.sleep(5)
- buildids = up_buildids
- if res == -1:
- SendNotification("PRscript failure", "Some builds have failed. Check <a href='" + BASE_URI + "waterfall'>waterfall</a> for results.")
- sys.exit(-1)
- else:
- print "PRscript completed successfully"
- SendNotification("PRscript success", "Congrats! All builds have passed.")
- sys.exit(0)
-else:
- for build in buildids:
- res = WaitForBuildResult(build, buildids[build], builder_name = build)
-
-if res == 0:
- if not args.norebase and not args.docker:
- print "You can copy/paste following lines into github PR"
- for build in buildids:
- print "- PR " + build + ": " + BUILDERS_URI + build + "/builds/" + str(buildids[build])
- SendNotification("OISF PRscript success", "Congrats! All builds have passed.")
- sys.exit(0)
-else:
- SendNotification("OISF PRscript failure", "Some builds have failed. Check <a href='" + BASE_URI + "waterfall'>waterfall</a> for results.")
- sys.exit(-1)
diff --git a/framework/src/suricata/qa/sock_to_gzip_file.py b/framework/src/suricata/qa/sock_to_gzip_file.py
deleted file mode 100755
index 4c51782e..00000000
--- a/framework/src/suricata/qa/sock_to_gzip_file.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/python
-#I love the python Power Glove. It's so bad!
-#Usage: sudo -u suricata ./sock_to_gzip_file.py --output-file="http.log.gz" --listen-sock="http.log.sock"
-
-import socket,os
-import gzip
-import sys
-from optparse import OptionParser
-
-if __name__ == "__main__":
- parser = OptionParser()
- #Path to the socket
- parser.add_option("--listen-sock", dest="lsock", type="string", help="Path to the socket we will listen on.")
- #Path to gzip file we will write
- parser.add_option("--output-file", dest="output", type="string", help="Path to file name to output gzip file we will write to.")
-
- #parse the opts
- (options, args) = parser.parse_args()
-
- options.usage = "example: sudo -u suricata ./sock_to_gzip_file.py --output-file=\"http.log.gz\" --listen-sock=\"http.log.sock\"\n"
- #Open the output file
- if options.output:
- try:
- f = gzip.open(options.output, 'wb')
- except Exception,e:
-            print("Error: could not open output file %s:\n%s\n" % (options.output, e))
- sys.exit(-1)
- else:
- print("Error: --output-file option required and was not specified\n%s" % (options.usage))
- sys.exit(-1)
-
- #Open our socket and bind
- if options.lsock:
- if os.path.exists(options.lsock):
- try:
- os.remove(options.lsock)
- except OSError:
- pass
- try:
- s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
- s.bind(options.lsock)
- s.listen(1)
- conn, addr = s.accept()
- except Exception,e:
-            print("Error: Failed to bind socket %s\n%s\n" % (options.lsock, e))
- sys.exit(-1)
- else:
- print("Error: --listen-sock option required and was not specified\n%s" % (options.usage))
- sys.exit(-1)
-
- #Read data from the socket and write to the file
- while 1:
- data = conn.recv(1024)
- if not data: break
- f.write(data)
- conn.close()
- f.close()
diff --git a/framework/src/suricata/qa/travis-libhtp.sh b/framework/src/suricata/qa/travis-libhtp.sh
deleted file mode 100755
index febe1fe7..00000000
--- a/framework/src/suricata/qa/travis-libhtp.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/sh
-set -ex
-git clone https://github.com/ironbee/libhtp -b 0.5.x
diff --git a/framework/src/suricata/qa/valgrind.suppress b/framework/src/suricata/qa/valgrind.suppress
deleted file mode 100644
index 3d2aebdf..00000000
--- a/framework/src/suricata/qa/valgrind.suppress
+++ /dev/null
@@ -1,69 +0,0 @@
-{
- Known issue with libmagic
- Memcheck:Addr1
- obj:/usr/lib/x86_64-linux-gnu/libmagic.so.1.0.0
- fun:file_softmagic
- fun:file_buffer
- fun:magic_buffer
- fun:MagicGlobalLookup
- fun:MagicDetectTest10ValgrindError
- fun:UtRunTests
- fun:RunUnittests
- fun:main
-}
-{
- Known issue with libmagic on Ubuntu 14.04-64bit
- Memcheck:Addr1
- obj:/usr/lib/x86_64-linux-gnu/libmagic.so.1.0.0
- obj:/usr/lib/x86_64-linux-gnu/libmagic.so.1.0.0
- obj:/usr/lib/x86_64-linux-gnu/libmagic.so.1.0.0
- obj:/usr/lib/x86_64-linux-gnu/libmagic.so.1.0.0
- fun:magic_buffer
- fun:MagicGlobalLookup
- fun:MagicDetectTest10ValgrindError
- fun:UtRunTests
- fun:RunUnittests
- fun:main
-}
-{
- Known warning, see Bug 1083
- Memcheck:Param
- socketcall.setsockopt(optval)
- fun:setsockopt
- fun:pfring_mod_set_bpf_filter
- fun:ReceivePfringThreadInit
- fun:TmThreadsSlotPktAcqLoop
- fun:start_thread
- fun:clone
-}
-{
- Known warning, see Bug 1084
- Memcheck:Leak
- fun:malloc
- obj:/usr/lib/x86_64-linux-gnu/libpcap.so.1.1.1
- fun:pcap_compile
- fun:pcap_compile_nopcap
- fun:pfring_mod_set_bpf_filter
- fun:ReceivePfringThreadInit
- fun:TmThreadsSlotPktAcqLoop
- fun:start_thread
- fun:clone
-}
-{
- Warning on ARM, not Suricata related
- Memcheck:Addr4
- fun:strlen
- fun:_dl_open
- fun:do_dlopen
- fun:_dl_catch_error
- fun:dlerror_run
- fun:__libc_dlopen_mode
- fun:__nss_lookup_function
- fun:__nss_lookup
- fun:getprotobyname_r@@GLIBC_2.4
- fun:getprotobyname
- fun:DetectIPProtoParse
- fun:DetectIPProtoTestParse02
-}
-
-
diff --git a/framework/src/suricata/qa/wirefuzz.pl b/framework/src/suricata/qa/wirefuzz.pl
deleted file mode 100755
index 63708f0b..00000000
--- a/framework/src/suricata/qa/wirefuzz.pl
+++ /dev/null
@@ -1,645 +0,0 @@
-#!/usr/bin/perl -w
-#Author:William Metcalf <william.metcalf@gmail.com>
-#File:wirefuzz.pl
-
-#Copyright (C) 2010 Open Information Security Foundation
-
-#You can copy, redistribute or modify this Program under the terms of
-#the GNU General Public License version 2 as published by the Free
-#Software Foundation.
-#
-#This program is distributed in the hope that it will be useful,
-#but WITHOUT ANY WARRANTY; without even the implied warranty of
-#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-#GNU General Public License for more details.
-#
-#You should have received a copy of the GNU General Public License
-#version 2 along with this program; if not, write to the Free Software
-#Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
-#02110-1301, USA.
-
-#This script is useful if you want to fuzz and/or otherwise try to make suricata explode during decoding/proto parsing using saved pcaps.
-#It is simply a reimplementation of the technique described here, hence the name:
-#http://wiki.wireshark.org/FuzzTesting
-#
-#Options for getting the required perl modules:
-#Ubuntu 9.10
-#sudo apt-get install libdevel-gdb-perl libcapture-tiny-perl
-#
-#RedHatES/CentOS 5
-#yum -y install cpanspec perl-Module-Build
-#cpanspec --packager OISF -v -s --follow Capture::Tiny
-#cpanspec --packager OISF -v -s --follow Devel::GDB
-#rpmbuild --rebuild *.src.rpm
-#rpm -ivh /usr/src/redhat/RPMS/noarch/perl-Devel-GDB*.rpm
-#rpm -ivh /usr/src/redhat/RPMS/noarch/perl-Capture-Tiny*.rpm
-#
-#Fedora Core 12
-#yum -y install perl-Capture-Tiny perl-Devel-GDB
-#
-#Other Debian-based versions: try the Ubuntu instructions; if that doesn't work, try the following.
-#sudo apt-get install dh-make-perl
-#mkdir fuzzmodules && cd fuzzmodules
-#dh-make-perl --cpan Devel-GDB --build
-#dh-make-perl --cpan Capture-Tiny --build
-#sudo dpkg -i *.deb
-
-#TODO: Figure out a better way to deal with signal handling.
-#TODO: Try to determine flow/stream that caused segv by extracting from the bt and extract it from the pcap.
-#TODO: E-mail notification on segv?
-#TODO: Parse Valgrind output and alert on errors
-
-use strict;
-use warnings;
-use Capture::Tiny 'capture';
-use List::Util 'shuffle';
-use Devel::GDB;
-use File::Find;
-use Getopt::Long;
-use File::Basename;
-
-#globals
-my %config;
-my @tmpfiles;
-my @files;
-my $suricatabin;
-my $loopnum;
-my $rules;
-my $logdir;
-my $configfile;
-my $editeratio;
-my $valgrindopt;
-my $shuffle;
-my $useltsuri;
-my $ltsuribin;
-my $core_dump;
-my $excluderegex;
-my %excludefuzz;
-my $timestamp;
-my $keeplogs;
-my $file_was_fuzzed = 0;
-
-Getopt::Long::Configure("prefix_pattern=(-|--)");
-GetOptions( \%config, qw(n=s r=s c=s e=s v=s p=s l=s s=s x=s k y z=s h help) );
-
-&parseopts();
-
-#Parse the options
-sub parseopts {
-
- #display help if asked
- if ( $config{h} || $config{help} ) {
- &printhelp();
- }
-
- #filemask of pcaps to read?
- if ( $config{r} ) {
- @tmpfiles = <$config{r}>;
- if(@tmpfiles eq 0){
- print "parseopts: Pcap filemask was invalid we couldn't find any matching files\n";
- exit;
- } else {
- #escapes for filenames
- foreach my $file (@tmpfiles) {
- $file =~ s/\(/\\(/g;
- $file =~ s/\)/\\)/g;
- $file =~ s/\&/\\&/g;
- }
- }
- }
- else {
- print "parseopts: Pcap filemask not specified or doesn't exist\n";
- &printhelp();
- }
-
- #filemask do we have a path to suricata bin?
- if ( $config{p} && -e $config{p} ) {
- $suricatabin = $config{p};
-
- #do wrapper script detection lt-suricata won't be created until first run but .libs/suricata should exist.
- if ( -T $suricatabin ) {
- open my $in, '<', $suricatabin or die "Can't read old file: $!";
- while (<$in>) {
- if ( $_ =~
- m/suricata \- temporary wrapper script for \.libs\/suricata/
- )
- {
- print "parseopts: suricata bin file appears to be a wrapper script going to try to find the real bin for gdb.\n";
- my $tmpdirname = dirname $suricatabin;
- my $tmpltsuriname = $tmpdirname . "/.libs/suricata";
- if ( -e $tmpltsuriname && -B $tmpltsuriname ) {
- $ltsuribin = $tmpltsuriname;
- print "parseopts: telling gdb to use " . $ltsuribin . "\n";
- $useltsuri = "yes";
- }
- last;
- }
- }
- close $in;
- }
- elsif ( -B $suricatabin ) {
- print "parseopts: suricata bin file checks out\n";
- }
- else {
- print "parseopts: suricata bin file is not a text or a bin exiting.\n";
- exit;
- }
- }
- else {
- print "parseopts: Path to suricata bin not provided or doesn't exist\n";
- &printhelp();
- }
-
- #number of times to loop
- if ( $config{n} ) {
- $loopnum = $config{n};
- print "parseopts: looping through the pcaps " . $loopnum . " times or until we have an error\n";
- }
- else {
- print "parseopts: looping through the pcaps forever or until we have an error\n";
- $loopnum = "infinity";
- }
-
- #rules file do we have a path and does it exist
- if ( $config{s} && -e $config{s} ) {
- $rules = $config{s};
- print "parseopts: telling suricata to use rules file " . $rules . "\n";
- }
- else {
- print("parseopts: rules file not specified or doesn't exist\n");
- }
-
- #log dir does it exist
- if ( $config{l} && -e $config{l} ) {
- $logdir = $config{l};
- print "parseopts: using log dir " . $logdir . "\n";
- }
- else {
- $logdir = "./";
- }
-
- #config file do we have a path and does it exist
- if ( $config{c} && -e $config{c} ) {
- $configfile = $config{c};
- print "parseopts: telling suricata to use the config file " . $configfile . "\n";
- }
- else {
- print "parseopts: config file not specified or doesn't exist\n";
- &printhelp();
- }
-
- #% chance that a byte will be modified.
- if ( $config{e} ) {
-
- #valid range?
- my $tmperatio = $config{e} * 100;
- if ( $tmperatio <= 100 && $tmperatio >= 0 ) {
- $editeratio = $config{e};
- print "parseopts: using error ratio " . $editeratio . "\n";
- }
- else {
- print "parseopts: error ratio specified but outside of range. Valid range is 0.00-1.0\n";
- exit;
- }
- }
- else {
- print("parseopts: not going to fuzz pcap(s)\n");
- }
-
- #parse the valgrind opts
- if ( $config{v} ) {
- if ( $config{v} =~ /^(memcheck|drd|helgrind|callgrind)$/ ) {
- $valgrindopt = $config{v};
- print "parseopts: using valgrind opt " . $valgrindopt . "\n";
- }
- else {
- print "invalid valgrind opt " . $valgrindopt . "\n";
- }
- }
-
- #shuffle the array if we are starting multiple fuzzers at once. GO-GO gadget shuffle
- if ( $config{y} ) {
- print "parseopts: going to shuffle the array\n";
- $shuffle = "yes";
- }
-
- #keep logs instead of removing them after each run
- if ( $config{k} ) {
- print "parseopts: going to keep logs instead of removing them\n";
- $keeplogs = "yes";
- }
- else {
- $keeplogs = "no";
- }
-
- #we want to process some files but not fuzz them add them to a hash and check it later
- if ( $config{z} ) {
- print "will process but not fuzz files that match regex of " . $config{z} . "\n";
- $excluderegex = $config{z};
-
- my $tmpfilepos = 0;
- while ($tmpfilepos <= $#tmpfiles) {
- if ($tmpfiles[$tmpfilepos] =~ m/$excluderegex/) {
- print "adding " . $tmpfiles[$tmpfilepos] . " to fuzz_exclude_hash because it matches our regex\n";
- $excludefuzz{$tmpfiles[$tmpfilepos]} = 1;
- }
- $tmpfilepos++
- }
- }
-
- #maybe we want to exclude a file based on some regex so we can restart the fuzzer after an error
- #and not have to worry about hitting the same file.
- if ( $config{x} ) {
- print "excluding files that match regex of " . $config{x} . "\n";
- $excluderegex = $config{x};
-
- my $tmpfilepos = 0;
- while ($tmpfilepos <= $#tmpfiles) {
- if ($tmpfiles[$tmpfilepos] =~ m/$excluderegex/) {
- print "removing " . $tmpfiles[$tmpfilepos] . " because it matches our exclude regex\n";
- splice(@tmpfiles, $tmpfilepos, 1);
- }
- else {
- $tmpfilepos++
- }
- }
- }
-
- print "******************Initialization Complete**********************\n";
- return;
-
-}
-
-sub printhelp {
- print "
- -h or help <this output>
- -r=<filemask for pcaps to read>
- -n=<(optional) number of iterations or if not specified will run until error>
- -s=<(optional) path to ids rules file will be passed as -s to suricata>
- -e=<(optional) editcap error ratio to introduce if not specified will not fuzz. Valid range for this is 0.00 - 1.0>
- -p=<path to the suricata bin>
- -l=<(optional) log dir for output if not specified will use current directory.>
- -v=<(optional) (memcheck|drd|helgrind|callgrind) will run the command through one of the specified valgrind tools.>
- -x=<(optional) regex for excluding certain files in case something blows up but we want to continue fuzzing.>
- -z=<(optional) regex for excluding certain files from fuzzing but still process them. Note: the original files will be processed and not removed.>
- -y <shuffle the array, this is useful if running multiple instances of this script.>
- -k <will keep alert-debug.log fast.log http.log and stats.log instead of removing them at the end of each run. Note unified logs are still removed>
- Example usage:
- First thing to do is download and build suricata from git with -O0 so vars don't get optimized out. See the example below:
- git clone git://phalanx.openinfosecfoundation.org/oisf.git suricatafuzz1 && cd suricatafuzz1 && ./autogen.sh && CFLAGS=\"-g -O0\" ./configure && make
-
- Second thing to do is to edit suricata.yaml to fit your environment.
-
- Third go ahead and run the script.
-
- In the example below the script will loop forever until an error is encountered and will behave in the following way:
- 1.-r Process all pcaps in subdirectories of /home/somepath/pcaps/
- 2.-s Tell suricata to use the rules file /home/somepath/current-all.rules
- 3.-y Shuffle the array of pcaps; this is useful if running multiple instances of this script.
- 4.-c Tell suricata to use the suricata.yaml in the current dir.
- 5.-e Tell editcap to introduce a 2% error ratio, i.e. there is a 2% chance that a byte will be fuzzed; see http://wiki.wireshark.org/FuzzTesting for more info.
- 6.-p Use src/suricata as our suricata bin file. The script will determine if the argument passed is a bin file or a txt wrapper and will adjust accordingly.
-
- /usr/bin/wirefuzz.pl -r=/home/somepath/pcaps/*/* -s=/home/somepath/current-all.rules -y -c=suricata.yaml -e=0.02 -p src/suricata
-
- If an error is encountered, a file named <fuzzedfile>ERR.txt will be created in the log dir (current dir in this example) that will contain output from stderr, stdout, and gdb.
-
- Take a look at the opts, make it work for your environment, and from the OISF QA team thanks for helping us make our meerkat fuzzier! ;-)\n";
- exit;
-}
-
-my $logfile = $logdir . "wirefuzzlog.txt";
-open( LOGFILE, ">>$logfile" )
-|| die( "error: Could not open logfile! $logfile\n" );
-
-my $successcnt = 0;
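-# main loop: keep running until $loopnum successful suricata runs have completed;
-# each pass walks the pcap list, reshuffling it first when -y was given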
-while ( $successcnt < $loopnum ) {
- if ( defined $shuffle ) {
- @files = shuffle(@tmpfiles);
- }
- else {
- @files = @tmpfiles;
- }
-
- foreach my $file (@files) {
- my $file_was_fuzzed = 0;
- #split out the path from the filename
- my $filedir = dirname $file;
- my $filename = basename $file;
- my ( $fuzzedfile, $editcapcmd, $editcapout, $editcaperr, $editcapexit,
- $editcap_sys_signal, $editcap_sys_coredump );
- my ( $fuzzedfiledir, $fuzzedfilename, $fullcmd, $out, $err, $exit,
- $suricata_sys_signal, $suricata_sys_coredump, $report);
- print "Going to work with file: $file\n";
- my ( $sec, $min, $hour, $mday, $mon, $year, $wday, $yday, $isdst ) =
- localtime(time);
- $timestamp = sprintf "%4d-%02d-%02d-%02d-%02d-%02d", $year + 1900,
- $mon + 1, $mday, $hour, $min, $sec;
- if ( defined $editeratio and !exists $excludefuzz{$file}) {
- $file_was_fuzzed = 1;
- $fuzzedfile = $logdir . $filename . "-fuzz-" . $timestamp;
- $editcapcmd =
- "editcap -E " . $editeratio . " " . $file . " " . $fuzzedfile;
- print( "editcap: " . $editcapcmd . "\n" );
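- # run editcap under capture() to collect its stdout/stderr; $? is decoded into
- # the exit status (>> 8), the terminating signal (& 127) and the core-dump flag (& 128)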
- ( $editcapout, $editcaperr ) = capture {
- system $editcapcmd;
- $editcapexit = $? >> 8;
- $editcap_sys_signal = $? & 127;
- $editcap_sys_coredump = $? & 128;
- };
- if ( $editcapexit ne 0 ) {
-
- #this could still cause us to loop forever if all pcaps are bad but it's better than nothing.
- if ( @files < 2 ) {
- print "editcap: had an error and this was our only pcap:" . $editcaperr . "\n";
- exit;
- }
- else {
- print "editcap: had an error going to the next pcap:" . $editcaperr . "\n";
- next;
- }
- }
- elsif ( $editcap_sys_signal eq 2 ) {
- print "editcap: system() got a ctrl+c, we are bailing as well\n";
- exit;
-
- }
- else {
- print("editcap: ran successfully\n");
- print
- "******************Editcap Complete**********************\n";
- }
- }
- else {
- $fuzzedfile = $file;
- }
-
- #split out the path from the filename
- $fuzzedfiledir = dirname $fuzzedfile;
- $fuzzedfilename = basename $fuzzedfile;
-
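- # enable core dumps for the suricata child so process_core_dump() can pull a backtrace with gdb if it crashes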
- $fullcmd = "ulimit -c unlimited; ";
-
- if ( defined $valgrindopt ) {
- if ( $valgrindopt eq "memcheck" ) {
- $fullcmd =
- $fullcmd
- . "valgrind -v --log-file="
- . $logdir
- . $fuzzedfilename
- . $timestamp
- . "-memcheck-vg.log ";
- }
- elsif ( $valgrindopt eq "drd" ) {
- $fullcmd =
- $fullcmd
- . "valgrind --tool=drd --var-info=yes -v --log-file="
- . $logdir
- . $fuzzedfilename
- . $timestamp
- . "-drd-vg.log ";
- }
- elsif ( $valgrindopt eq "helgrind" ) {
- $fullcmd =
- $fullcmd
- . "valgrind --tool=helgrind -v --log-file="
- . $logdir
- . $fuzzedfilename
- . $timestamp
- . "-helgrind-vg.log ";
- }
- elsif ( $valgrindopt eq "callgrind" ) {
- $fullcmd =
- $fullcmd
- . "valgrind --tool=callgrind -v --callgrind-out-file="
- . $logdir
- . $fuzzedfilename
- . $timestamp
- . "-callgrind-vg.log ";
- }
- }
-
- $fullcmd =
- $fullcmd
- . $suricatabin . " -c "
- . $configfile . " -r "
- . $fuzzedfile . " -l "
- . $logdir;
- if ( defined $rules ) {
- $fullcmd = $fullcmd . " -s " . $rules;
- }
- print "suricata: $fullcmd \n";
- my $starttime = time();
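- # run suricata under capture(), timing the run; the exit code, signal and core-dump
- # flag are appended to wirefuzzlog.txt and checked below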
- ( $out, $err ) = capture {
- system $fullcmd;
- $exit = $? >> 8;
- $suricata_sys_signal = $? & 127;
- $suricata_sys_coredump = $? & 128;
- };
-
- my $stoptime = time();
- my $timetotal = $stoptime - $starttime;
- print LOGFILE $fullcmd . ","
- . $timetotal . ","
- . $exit . ","
- . $suricata_sys_signal . ","
- . $suricata_sys_coredump . "\n";
- print "suricata: exit value $exit\n";
-
- if ( $exit ne 0 ) {
- my $knownerr = 0;
-
- #fuzzer generated some random link type we can't deal with
- if ( $err =~
- /datalink type \d+ not \(yet\) supported in module PcapFile\./ )
- {
- print "suricata: we matched a known error going to the next file\n";
- $knownerr = 1;
- }
- if ( $knownerr eq 1 ) {
- $successcnt++;
- print "suricata: we have run with success " . $successcnt . " times\n";
- if( $keeplogs eq "yes" ) {
- &keep_logs($fuzzedfilename);
- $report = $logdir . $fuzzedfilename . "-OUT.txt";
- &generate_report($report, $fullcmd, $out, $err, $exit, "none");
- }
- &clean_logs($fuzzedfilename,$file_was_fuzzed);
- }
- else {
- my $report = $logdir . $fuzzedfilename . "-ERR.txt";
-
- &process_core_dump();
- if ($core_dump) {
- print "core dump \n $core_dump";
- system( "mv "
- . $ENV{'PWD'}
- . "/core* "
- . $logdir
- . $fuzzedfilename
- . ".core" );
- &generate_report($report, $fullcmd, $out, $err, $exit, $core_dump);
- }else{
- &generate_report($report, $fullcmd, $out, $err, $exit, "none");
- }
- exit;
- }
- }
- elsif ( $suricata_sys_signal eq 2 ) {
- print "suricata: system() got a ctrl+c, we are bailing as well\n";
- if( $keeplogs eq "yes" ) {
- &keep_logs($fuzzedfilename);
- }
- &clean_logs($fuzzedfilename,$file_was_fuzzed);
- exit;
- }
- else {
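- # clean exit: check the stream engine's memuse counter reported on stdout;
- # a non-zero value at shutdown is treated as a stream memory leak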
- if ( $out =~ /Max memuse of stream engine \d+ \(in use (\d+)\)/ ) {
- if ($1 != 0) {
- $report = $logdir . $fuzzedfilename . "-OUT.txt";
- &generate_report($report, $fullcmd, $out, $err, $exit, "none");
- print "Stream leak detected: " . $1 . " was still in use at exit, see " . $report . " for more details\n";
- exit;
- }
- } else {
- print "Stream mem counter could not be found in output\n";
- }
-
- $successcnt++;
- print "suricata: we have run with success " . $successcnt . " times\n";
- print "******************Suricata Complete**********************\n";
- if( $keeplogs eq "yes" ) {
- &keep_logs($fuzzedfilename);
- $report = $logdir . $fuzzedfilename . "-OUT.txt";
- &generate_report($report, $fullcmd, $out, $err, $exit, "none");
- }
- &clean_logs($fuzzedfilename,$file_was_fuzzed);
- print "******************Next Packet or Exit *******************\n";
- }
- }
-}
-
-sub process_core_dump {
- my $gdbbin;
- my $gdb = new Devel::GDB();
- my $coremask = $ENV{'PWD'} . "/core*";
- my @coredumps = <${coremask}>;
- if ( @coredumps == 1 ) {
- my $corefile = $coredumps[0];
- print "gdb: core dump found $corefile processing with";
- if ( $useltsuri eq "yes" ) {
- $gdbbin = $ltsuribin;
- }
- else {
- $gdbbin = $suricatabin;
- }
- print " the following bin file: " . $gdbbin . "\n";
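- # drive gdb through Devel::GDB: load the binary and the core, then collect the
- # thread list and a full backtrace of every thread for the error report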
- $core_dump .= join '',
- $gdb->get("file $gdbbin"), $gdb->get("core $corefile"),
- $gdb->get('info threads'), $gdb->get('thread apply all bt full');
- print "gdb: core dump \n $core_dump";
-
- }
- elsif ( @coredumps > 1 ) {
- print "gdb: multiple core dumps, please clear all core dumps and try the test again. We found:\n";
- foreach my $corefile (@coredumps) {
- print $corefile . "\n";
- }
- }
- else {
- print "gdb: no coredumps found, returning.\n";
- print @coredumps;
- print " $#coredumps" . "\n";
- }
- print "******************GDB Complete**********************\n";
- return;
-}
-
-sub clean_logs {
- my $deleteme = shift;
- my $file_was_fuzzed = shift;
- my $deletemerge = $logdir . $deleteme;
- my $rmcmd;
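- # only the fuzzed copy that editcap wrote into the log dir is removed; the original pcap is never touched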
- if ( defined $editeratio and $file_was_fuzzed) {
- if ( unlink($deletemerge) == 1 ) {
- print "clean_logs: " . $deletemerge . " deleted successfully.\n";
- }
- else {
- print "clean_logs: error " . $deletemerge . " was not deleted. You may have to delete the file manually.\n";
- }
- }
-
- if ( defined $valgrindopt ) {
- #uncomment the following lines if you want to remove valgrind logs
- #$rmcmd = "rm -f " . $deletemerge . "*vg.log";
- #print( "running " . $rmcmd . "\n" );
- #system("$rmcmd");
- }
-
- if ( unlink(glob($logdir . "unified*")) > 0 ) {
- print "clean_logs: removed unified logs for next run \n";
- }
- else {
- print "clean_logs: failed to delete unified logs\n";
- }
- print "******************Log Cleanup Complete**********************\n";
- return;
-
-}
-
-sub keep_logs {
- my $saveme = shift;
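- # when the pcaps are not being fuzzed and more than one iteration was requested, the same
- # file name will come around again, so tag the saved logs with the run timestamp to avoid
- # overwriting earlier ones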
- unless(defined($editeratio) || $loopnum eq '1'){
- $saveme = $saveme . "-" . $timestamp;
- }
- my $savecmd;
-
- if (-e $logdir . "alert-debug.log"){
- $savecmd = "mv -f " . $logdir
- . "alert-debug.log "
- . $logdir
- . $saveme
- . "-alert-debug.log";
- system($savecmd);
- }
- if (-e $logdir . "fast.log"){
- $savecmd = "mv -f " . $logdir
- . "fast.log "
- . $logdir
- . $saveme
- . "-fast.log";
- system($savecmd);
- }
- if (-e $logdir . "http.log"){
- $savecmd = "mv -f " . $logdir
- . "http.log "
- . $logdir
- . $saveme
- . "-http.log";
- system($savecmd);
- }
- if (-e $logdir . "stats.log"){
- $savecmd = "mv -f " . $logdir
- . "stats.log "
- . $logdir
- . $saveme
- . "-stats.log";
- system($savecmd);
- }
- print "******************Log Move Complete**********************\n";
- return;
-}
-
-sub generate_report {
- my ($report, $fullcmd, $stdout, $stderr, $exit, $coredump) = @_;
-
- open( REPORT, ">$report" ) || ( print "Could not open report file! $report\n" );
- print REPORT "COMMAND:$fullcmd\n";
- print REPORT "EXITVAL:$exit\n";
- print REPORT "STDERR:$stderr\n";
- print REPORT "STDOUT:$stdout\n";
- if($coredump ne "none"){
- print REPORT "COREDUMP:$coredump\n";
- }
- close(REPORT);
-}