author     Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-19 17:40:14 +0000
committer  Daniel Baumann <daniel.baumann@progress-linux.org>  2024-04-19 17:40:14 +0000
commit     666c8268e8bdf745f609f7f389e5c67be45f0065
tree       93948ffd948bd0f56bd3b454cef09bd00a01224e /suricata-update
parent     Adding upstream version 1:7.0.3.

Adding upstream version 1:7.0.4. (tag: upstream/1%7.0.4)
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>

Diffstat
-rw-r--r-- | suricata-update/.github/PULL_REQUEST_TEMPLATE.md | 5
-rw-r--r-- | suricata-update/.github/workflows/tests.yml | 20
-rw-r--r-- | suricata-update/.readthedocs.yaml | 17
-rw-r--r-- | suricata-update/CHANGELOG.md | 20
-rw-r--r-- | suricata-update/Makefile | 6
-rw-r--r-- | suricata-update/doc/quickstart.rst | 2
-rw-r--r-- | suricata-update/suricata/update/data/index.py | 200
-rw-r--r-- | suricata-update/suricata/update/engine.py | 2
-rw-r--r-- | suricata-update/suricata/update/main.py | 54
-rw-r--r-- | suricata-update/suricata/update/matchers.py | 6
-rw-r--r-- | suricata-update/suricata/update/osinfo.py | 2
-rw-r--r-- | suricata-update/suricata/update/rule.py | 2
-rw-r--r-- | suricata-update/suricata/update/version.py | 2
-rwxr-xr-x | suricata-update/tests/integration_tests.py | 9
-rw-r--r-- | suricata-update/tests/suricata-test-rules.zip | bin 0 -> 2391 bytes
-rw-r--r-- | suricata-update/tests/test_main.py | 4
-rw-r--r-- | suricata-update/tox.ini | 2
17 files changed, 309 insertions, 44 deletions
diff --git a/suricata-update/.github/PULL_REQUEST_TEMPLATE.md b/suricata-update/.github/PULL_REQUEST_TEMPLATE.md
index 40471df..5bf7005 100644
--- a/suricata-update/.github/PULL_REQUEST_TEMPLATE.md
+++ b/suricata-update/.github/PULL_REQUEST_TEMPLATE.md
@@ -2,10 +2,9 @@ Make sure these boxes are signed before submitting your Pull Request -- thank you.
 
 - [ ] I have read the contributing guide lines at
-  https://redmine.openinfosecfoundation.org/projects/suricata/wiki/Contributing
+  https://docs.suricata.io/en/latest/devguide/codebase/contributing/contribution-process.html
 - [ ] I have signed the Open Information Security Foundation
-  contribution agreement at
-  https://suricata-ids.org/about/contribution-agreement/
+  contribution agreement at https://suricata.io/about/contribution-agreement/
 - [ ] I have updated the user guide (in doc/userguide/) to reflect the
   changes made (if applicable)
diff --git a/suricata-update/.github/workflows/tests.yml b/suricata-update/.github/workflows/tests.yml
index 96a72d5..22a19f3 100644
--- a/suricata-update/.github/workflows/tests.yml
+++ b/suricata-update/.github/workflows/tests.yml
@@ -4,6 +4,12 @@ on:
   - push
   - pull_request
 
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+permissions: read-all
+
 jobs:
 
   alma-9:
@@ -70,26 +76,26 @@ jobs:
     - name: Python 3 integration tests
       run: PYTHONPATH=. python3 ./tests/integration_tests.py
 
-  fedora-38:
-    name: Fedora 38
+  fedora-39:
+    name: Fedora 39
     runs-on: ubuntu-latest
-    container: fedora:38
+    container: fedora:39
     steps:
       - run: |
           dnf -y install \
             python3 \
             python3-pytest \
             python3-pyyaml
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
       - name: Python 3 unit tests
         run: PYTHONPATH=. pytest-3
       - name: Python 3 integration tests
         run: PYTHONPATH=. python3 ./tests/integration_tests.py
 
-  fedora-37:
-    name: Fedora 37
+  fedora-38:
+    name: Fedora 38
     runs-on: ubuntu-latest
-    container: fedora:37
+    container: fedora:38
     steps:
       - run: |
           dnf -y install \
diff --git a/suricata-update/.readthedocs.yaml b/suricata-update/.readthedocs.yaml
new file mode 100644
index 0000000..635dca4
--- /dev/null
+++ b/suricata-update/.readthedocs.yaml
@@ -0,0 +1,17 @@
+version: 2
+
+build:
+  os: ubuntu-22.04
+  tools:
+    python: "3.11"
+
+python:
+  install:
+    - requirements: ./requirements.txt
+
+sphinx:
+  builder: html
+  configuration: doc/conf.py
+  fail_on_warning: false
+
+formats: all
diff --git a/suricata-update/CHANGELOG.md b/suricata-update/CHANGELOG.md
index a033b02..03310ca 100644
--- a/suricata-update/CHANGELOG.md
+++ b/suricata-update/CHANGELOG.md
@@ -1,5 +1,25 @@
 # Change Log
 
+## 1.3.2 - 2024-03-14
+- Fix copying of file hash lists which was broken in the dataset fix
+  as part of ticket #6833:
+  https://redmine.openinfosecfoundation.org/issues/6854
+
+## 1.3.1 - 2024-03-11
+- Fix detecting dataset "load" when preceded by a space:
+  https://redmine.openinfosecfoundation.org/issues/6777
+- If no Suricata is found, Suricata-Update will assume version 6.0.0
+  instead of 4.0.0.
+- Handle URLs of bare files that don't end in .rules:
+  https://redmine.openinfosecfoundation.org/issues/3664
+- Don't base dataset filenames on the contents of the file, but
+  instead the filename path:
+  https://redmine.openinfosecfoundation.org/issues/6763
+- Give each file in a source a unique filename by prefixing the files
+  with a hash of the URL to prevent duplicate filenames from
+  cloberring each other, in particular dataset files:
+  https://redmine.openinfosecfoundation.org/issues/6833
+
 ## 1.3.0 - 2023-07-07
 
 - Fix loading of configuration files specified in update.yaml:
diff --git a/suricata-update/Makefile b/suricata-update/Makefile
index 26f2753..e0ceb8f 100644
--- a/suricata-update/Makefile
+++ b/suricata-update/Makefile
@@ -253,10 +253,10 @@ OTOOL64 =
 PACKAGE = suricata
 PACKAGE_BUGREPORT =
 PACKAGE_NAME = suricata
-PACKAGE_STRING = suricata 7.0.3
+PACKAGE_STRING = suricata 7.0.4
 PACKAGE_TARNAME = suricata
 PACKAGE_URL =
-PACKAGE_VERSION = 7.0.3
+PACKAGE_VERSION = 7.0.4
 PATH_SEPARATOR = :
 PCAP_CFLAGS = -I/usr/include
 PCAP_LIBS = -lpcap
@@ -280,7 +280,7 @@ SHELL = /bin/bash
 SPHINX_BUILD = /usr/bin/sphinx-build
 STRIP = strip
 SURICATA_UPDATE_DIR = suricata-update
-VERSION = 7.0.3
+VERSION = 7.0.4
 abs_builddir = /builds/dev/suricata/suricata-update
 abs_srcdir = /builds/dev/suricata/suricata-update
 abs_top_builddir = /builds/dev/suricata
diff --git a/suricata-update/doc/quickstart.rst b/suricata-update/doc/quickstart.rst
index 012b4e3..bf57de5 100644
--- a/suricata-update/doc/quickstart.rst
+++ b/suricata-update/doc/quickstart.rst
@@ -120,7 +120,7 @@ This command will:
   and do not need to exist.
 
 * Download the Emerging Threats Open ruleset for your version of
-  Suricata, defaulting to 4.0.0 if not found.
+  Suricata, defaulting to 6.0.0 if not found.
 
 * Apply enable, disable, drop and modify filters as loaded above.
diff --git a/suricata-update/suricata/update/data/index.py b/suricata-update/suricata/update/data/index.py
index 48d4ebb..02a9c4f 100644
--- a/suricata-update/suricata/update/data/index.py
+++ b/suricata-update/suricata/update/data/index.py
@@ -51,6 +51,28 @@ index = { 'sources': { 'et/open': { 'description': 'Proofpoint ET Open is
                                     'support-url': 'https://redmine.openinfosecfoundation.org/',
                                     'url': 'https://openinfosecfoundation.org/rules/trafficid/trafficid.rules',
                                     'vendor': 'OISF'},
+              'pawpatrules': { 'checksum': False,
+                               'description': 'PAW Patrules ruleset '
+                                              'permit to detect many '
+                                              'events on\n'
+                                              'network. Suspicious '
+                                              'flow, malicious tool, '
+                                              'unsuported and\n'
+                                              'vulnerable system, known '
+                                              'threat actors with '
+                                              'various IOCs,\n'
+                                              'lateral movement, bad '
+                                              'practice, shadow IT... '
+                                              'Rules are\n'
+                                              'frequently updated.\n',
+                               'homepage': 'https://pawpatrules.fr/',
+                               'license': 'CC-BY-SA-4.0',
+                               'min-version': '6.0.0',
+                               'summary': 'PAW Patrules is a collection '
+                                          'of rules for IDPS / NSM '
+                                          'Suricata engine',
+                               'url': 'https://rules.pawpatrules.fr/suricata/paw-patrules.tar.gz',
+                               'vendor': 'pawpatrules'},
               'ptresearch/attackdetection': { 'description': 'The '
                                                              'Attack '
                                                              'Detection '
@@ -261,6 +283,184 @@ index = { 'sources': { 'et/open': { 'description': 'Proofpoint ET Open is
                                       'support-url': 'https://discord.com/channels/911231224448712714/911238451842666546',
                                       'url': 'https://ti.stamus-networks.io/open/stamus-lateral-rules.tar.gz',
                                       'vendor': 'Stamus Networks'},
+              'stamus/nrd-14-open': { 'description': 'Newly Registered '
+                                                     'Domains list '
+                                                     '(last 14 days) to '
+                                                     'match on DNS, TLS '
+                                                     'and HTTP '
+                                                     'communication.\n'
+                                                     'Produced by '
+                                                     'Stamus Labs '
+                                                     'research team.\n',
+                                      'license': 'Commercial',
+                                      'min-version': '6.0.0',
+                                      'parameters': { 'secret-code': { 'prompt': 'Stamus '
+                                                                                 'Networks '
+                                                                                 'License '
+                                                                                 'code'}},
+                                      'subscribe-url': 'https://www.stamus-networks.com/stamus-labs/subscribe-to-threat-intel-feed',
+                                      'summary': 'Newly Registered '
+                                                 'Domains Open only - '
+                                                 '14 day list, complete',
+                                      'url': 'https://ti.stamus-networks.io/%(secret-code)s/sti-domains-nrd-14.tar.gz',
+                                      'vendor': 'Stamus Networks'},
+              'stamus/nrd-30-open': { 'description': 'Newly Registered '
+                                                     'Domains list '
+                                                     '(last 30 days) to '
+                                                     'match on DNS, TLS '
+                                                     'and HTTP '
+                                                     'communication.\n'
+                                                     'Produced by '
+                                                     'Stamus Labs '
+                                                     'research team.\n',
+                                      'license': 'Commercial',
+                                      'min-version': '6.0.0',
+                                      'parameters': { 'secret-code': { 'prompt': 'Stamus '
+                                                                                 'Networks '
+                                                                                 'License '
+                                                                                 'code'}},
+                                      'subscribe-url': 'https://www.stamus-networks.com/stamus-labs/subscribe-to-threat-intel-feed',
+                                      'summary': 'Newly Registered '
+                                                 'Domains Open only - '
+                                                 '30 day list, complete',
+                                      'url': 'https://ti.stamus-networks.io/%(secret-code)s/sti-domains-nrd-30.tar.gz',
+                                      'vendor': 'Stamus Networks'},
+              'stamus/nrd-entropy-14-open': { 'description': 'Suspicious '
+                                                             'Newly '
+                                                             'Registered '
+                                                             'Domains '
+                                                             'list with '
+                                                             'high '
+                                                             'entropy '
+                                                             '(last 14 '
+                                                             'days) to '
+                                                             'match on '
+                                                             'DNS, TLS '
+                                                             'and HTTP '
+                                                             'communication.\n'
+                                                             'Produced '
+                                                             'by Stamus '
+                                                             'Labs '
+                                                             'research '
+                                                             'team.\n',
+                                              'license': 'Commercial',
+                                              'min-version': '6.0.0',
+                                              'parameters': { 'secret-code': { 'prompt': 'Stamus '
+                                                                                         'Networks '
+                                                                                         'License '
+                                                                                         'code'}},
+                                              'subscribe-url': 'https://www.stamus-networks.com/stamus-labs/subscribe-to-threat-intel-feed',
+                                              'summary': 'Newly '
+                                                         'Registered '
+                                                         'Domains Open '
+                                                         'only - 14 day '
+                                                         'list, high '
+                                                         'entropy',
+                                              'url': 'https://ti.stamus-networks.io/%(secret-code)s/sti-domains-entropy-14.tar.gz',
+                                              'vendor': 'Stamus '
+                                                        'Networks'},
+              'stamus/nrd-entropy-30-open': { 'description': 'Suspicious '
+                                                             'Newly '
+                                                             'Registered '
+                                                             'Domains '
+                                                             'list with '
+                                                             'high '
+                                                             'entropy '
+                                                             '(last 30 '
+                                                             'days) to '
+                                                             'match on '
+                                                             'DNS, TLS '
+                                                             'and HTTP '
+                                                             'communication.\n'
+                                                             'Produced '
+                                                             'by Stamus '
+                                                             'Labs '
+                                                             'research '
+                                                             'team.\n',
+                                              'license': 'Commercial',
+                                              'min-version': '6.0.0',
+                                              'parameters': { 'secret-code': { 'prompt': 'Stamus '
+                                                                                         'Networks '
+                                                                                         'License '
+                                                                                         'code'}},
+                                              'subscribe-url': 'https://www.stamus-networks.com/stamus-labs/subscribe-to-threat-intel-feed',
+                                              'summary': 'Newly '
+                                                         'Registered '
+                                                         'Domains Open '
+                                                         'only - 30 day '
+                                                         'list, high '
+                                                         'entropy',
+                                              'url': 'https://ti.stamus-networks.io/%(secret-code)s/sti-domains-entropy-30.tar.gz',
+                                              'vendor': 'Stamus '
+                                                        'Networks'},
+              'stamus/nrd-phishing-14-open': { 'description': 'Suspicious '
+                                                              'Newly '
+                                                              'Registered '
+                                                              'Domains '
+                                                              'Phishing '
+                                                              'list '
+                                                              '(last 14 '
+                                                              'days) to '
+                                                              'match on '
+                                                              'DNS, TLS '
+                                                              'and HTTP '
+                                                              'communication.\n'
+                                                              'Produced '
+                                                              'by '
+                                                              'Stamus '
+                                                              'Labs '
+                                                              'research '
+                                                              'team.\n',
+                                               'license': 'Commercial',
+                                               'min-version': '6.0.0',
+                                               'parameters': { 'secret-code': { 'prompt': 'Stamus '
+                                                                                          'Networks '
+                                                                                          'License '
+                                                                                          'code'}},
+                                               'subscribe-url': 'https://www.stamus-networks.com/stamus-labs/subscribe-to-threat-intel-feed',
+                                               'summary': 'Newly '
+                                                          'Registered '
+                                                          'Domains Open '
+                                                          'only - 14 '
+                                                          'day list, '
+                                                          'phishing',
+                                               'url': 'https://ti.stamus-networks.io/%(secret-code)s/sti-domains-phishing-14.tar.gz',
+                                               'vendor': 'Stamus '
+                                                         'Networks'},
+              'stamus/nrd-phishing-30-open': { 'description': 'Suspicious '
+                                                              'Newly '
+                                                              'Registered '
+                                                              'Domains '
+                                                              'Phishing '
+                                                              'list '
+                                                              '(last 30 '
+                                                              'days) to '
+                                                              'match on '
+                                                              'DNS, TLS '
+                                                              'and HTTP '
+                                                              'communication.\n'
+                                                              'Produced '
+                                                              'by '
+                                                              'Stamus '
+                                                              'Labs '
+                                                              'research '
+                                                              'team.\n',
+                                               'license': 'Commercial',
+                                               'min-version': '6.0.0',
+                                               'parameters': { 'secret-code': { 'prompt': 'Stamus '
+                                                                                          'Networks '
+                                                                                          'License '
+                                                                                          'code'}},
+                                               'subscribe-url': 'https://www.stamus-networks.com/stamus-labs/subscribe-to-threat-intel-feed',
+                                               'summary': 'Newly '
+                                                          'Registered '
+                                                          'Domains Open '
+                                                          'only - 30 '
+                                                          'day list, '
+                                                          'phishing',
+                                               'url': 'https://ti.stamus-networks.io/%(secret-code)s/sti-domains-phishing-30.tar.gz',
+                                               'vendor': 'Stamus '
+                                                         'Networks'},
               'tgreen/hunting': { 'checksum': False,
                                   'description': 'Heuristic ruleset for '
                                                  'hunting. Focus on '
diff --git a/suricata-update/suricata/update/engine.py b/suricata-update/suricata/update/engine.py
index c57da82..22ad9b3 100644
--- a/suricata-update/suricata/update/engine.py
+++ b/suricata-update/suricata/update/engine.py
@@ -135,7 +135,7 @@ def get_path(program="suricata"):
     return None
 
 def parse_version(buf):
-    m = re.search("((\d+)\.(\d+)(\.(\d+))?([\w\-]+)?)", str(buf).strip())
+    m = re.search(r"((\d+)\.(\d+)(\.(\d+))?([\w\-]+)?)", str(buf).strip())
     if m:
         full = m.group(1)
         major = int(m.group(2))
diff --git a/suricata-update/suricata/update/main.py b/suricata-update/suricata/update/main.py
index 4a0e7a6..18af7a8 100644
--- a/suricata-update/suricata/update/main.py
+++ b/suricata-update/suricata/update/main.py
@@ -88,7 +88,7 @@ else:
 logger = logging.getLogger()
 
 # If Suricata is not found, default to this version.
-DEFAULT_SURICATA_VERSION = "4.0.0"
+DEFAULT_SURICATA_VERSION = "6.0.0"
 
 # The default filename to use for the output rule file. This is a
 # single file concatenating all input rule files together.
@@ -235,6 +235,8 @@
 
         # The file is not an archive, treat it as an individual file.
         basename = os.path.basename(filename).split("-", 1)[1]
+        if not basename.endswith(".rules"):
+            basename = "{}.rules".format(basename)
         files = {}
         files[basename] = open(filename, "rb").read()
         return files
@@ -435,8 +437,7 @@ def manage_classification(suriconf, files):
 def handle_dataset_files(rule, dep_files):
     if not rule.enabled:
         return
-
-    dataset_load = [el.strip() for el in rule.dataset.split(",") if el.startswith("load")]
+    dataset_load = [el for el in (el.strip() for el in rule.dataset.split(",")) if el.startswith("load")]
     if not dataset_load:
         # No dataset load found.
         return
@@ -446,7 +447,7 @@ def handle_dataset_files(rule, dep_files):
     prefix = os.path.dirname(rule.group)
 
     # Construct the source filename.
-    source_filename = "{}/{}".format(prefix, dataset_filename)
+    source_filename = os.path.join(prefix, dataset_filename)
 
     # If a source filename starts with a "/", look for it on the filesystem. The archive
     # unpackers will take care of removing a leading / so this shouldn't happen for
@@ -464,9 +465,9 @@
         return
 
     dataset_contents = dep_files[source_filename]
-    content_hash = hashlib.md5(dataset_contents).hexdigest()
-    new_rule = re.sub("(dataset.*?load\s+){}".format(dataset_filename), "\g<1>datasets/{}".format(content_hash), rule.format())
-    dest_filename = os.path.join(config.get_output_dir(), "datasets", content_hash)
+    source_filename_hash = hashlib.md5(source_filename.encode()).hexdigest()
+    new_rule = re.sub(r"(dataset.*?load\s+){}".format(dataset_filename), r"\g<1>datasets/{}".format(source_filename_hash), rule.format())
+    dest_filename = os.path.join(config.get_output_dir(), "datasets", source_filename_hash)
     dest_dir = os.path.dirname(dest_filename)
     logger.debug("Copying dataset file {} to {}".format(dataset_filename, dest_filename))
     try:
@@ -482,10 +483,19 @@ def handle_filehash_files(rule, dep_files, fhash):
     if not rule.enabled:
         return
     filehash_fname = rule.get(fhash)
-    filename = [fname for fname, content in dep_files.items() if os.path.join(*(fname.split(os.path.sep)[1:])) == filehash_fname]
-    if filename:
+
+    # Get the directory name the rule is from.
+    prefix = os.path.dirname(rule.group)
+
+    source_filename = os.path.join(prefix, filehash_fname)
+    dest_filename = source_filename[len(prefix) + len(os.path.sep):]
+    logger.debug("dest_filename={}".format(dest_filename))
+
+    if source_filename not in dep_files:
+        logger.error("{} file {} was not found".format(fhash, filehash_fname))
+    else:
         logger.debug("Copying %s file %s to output directory" % (fhash, filehash_fname))
-        filepath = os.path.join(config.get_state_dir(), os.path.dirname(filename[0]))
+        filepath = os.path.join(config.get_output_dir(), os.path.dirname(dest_filename))
         logger.debug("filepath: %s" % filepath)
         try:
             os.makedirs(filepath)
@@ -493,11 +503,10 @@
             if oserr.errno != errno.EEXIST:
                 logger.error(oserr)
                 sys.exit(1)
-        logger.debug("output fname: %s" % os.path.join(filepath, os.path.basename(filehash_fname)))
-        with open(os.path.join(filepath, os.path.basename(filehash_fname)), "w+") as fp:
-            fp.write(dep_files[os.path.join("rules", filehash_fname)].decode("utf-8"))
-    else:
-        logger.error("{} file {} was not found".format(fhash, filehash_fname))
+        output_filename = os.path.join(filepath, os.path.basename(filehash_fname))
+        logger.debug("output fname: %s" % output_filename)
+        with open(output_filename, "w") as fp:
+            fp.write(dep_files[source_filename].decode("utf-8"))
 
 
 def write_merged(filename, rulemap, dep_files):
@@ -700,9 +709,9 @@ def resolve_flowbits(rulemap, disabled_rules):
 class ThresholdProcessor:
 
     patterns = [
-        re.compile("\s+(re:\"(.*)\")"),
-        re.compile("\s+(re:(.*?)),.*"),
-        re.compile("\s+(re:(.*))"),
+        re.compile(r"\s+(re:\"(.*)\")"),
+        re.compile(r"\s+(re:(.*?)),.*"),
+        re.compile(r"\s+(re:(.*))"),
     ]
 
     def extract_regex(self, buf):
@@ -984,9 +993,14 @@ def load_sources(suricata_version):
     # Now download each URL.
     files = []
     for url in urls:
+
+        # To de-duplicate filenames, add a prefix that is a hash of the URL.
+        prefix = hashlib.md5(url[0].encode()).hexdigest()
         source_files = Fetch().run(url)
         for key in source_files:
-            files.append(SourceFile(key, source_files[key]))
+            content = source_files[key]
+            key = os.path.join(prefix, key)
+            files.append(SourceFile(key, content))
 
     # Now load local rules.
     if config.get("local") is not None:
@@ -1184,7 +1198,7 @@ def _main():
     # Disable rule that are for app-layers that are not enabled.
     if suriconf:
         for key in suriconf.keys():
-            m = re.match("app-layer\.protocols\.([^\.]+)\.enabled", key)
+            m = re.match(r"app-layer\.protocols\.([^\.]+)\.enabled", key)
             if m:
                 proto = m.group(1)
                 if not suriconf.is_true(key, ["detection-only"]):
diff --git a/suricata-update/suricata/update/matchers.py b/suricata-update/suricata/update/matchers.py
index e886c79..56a9e29 100644
--- a/suricata-update/suricata/update/matchers.py
+++ b/suricata-update/suricata/update/matchers.py
@@ -251,7 +251,7 @@ class ModifyRuleFilter(object):
         pattern = re.compile(a)
 
         # Convert Oinkmaster backticks to Python.
-        b = re.sub("\$\{(\d+)\}", "\\\\\\1", b)
+        b = re.sub(r"\$\{(\d+)\}", "\\\\\\1", b)
 
         return cls(matcher, pattern, b)
 
@@ -269,7 +269,7 @@ class DropRuleFilter(object):
 
     def run(self, rule):
         drop_rule = suricata.update.rule.parse(re.sub(
-            "^\w+", "drop", rule.raw))
+            r"^\w+", "drop", rule.raw))
         drop_rule.enabled = rule.enabled
         return drop_rule
 
@@ -284,7 +284,7 @@ class AddMetadataFilter(object):
         return self.matcher.match(rule)
 
     def run(self, rule):
-        new_rule_string = re.sub(";\s*\)$", "; metadata: {} {};)".format(self.key, self.val), rule.format())
+        new_rule_string = re.sub(r";\s*\)$", "; metadata: {} {};)".format(self.key, self.val), rule.format())
         new_rule = suricata.update.rule.parse(new_rule_string, rule.group)
         if not new_rule:
             logger.error("Rule is not valid after adding metadata: [{}]: {}".format(rule.idstr, new_rule_string))
diff --git a/suricata-update/suricata/update/osinfo.py b/suricata-update/suricata/update/osinfo.py
index 82816bc..c3e417b 100644
--- a/suricata-update/suricata/update/osinfo.py
+++ b/suricata-update/suricata/update/osinfo.py
@@ -27,7 +27,7 @@ def parse_os_release(filename="/etc/os-release"):
     with open(filename) as fileobj:
         for line in fileobj:
             line = line.strip()
-            m = re.match("^(\w+)=\"?(.*?)\"?$", line)
+            m = re.match(r"^(\w+)=\"?(.*?)\"?$", line)
             if m:
                 os_release[m.group(1)] = m.group(2)
     return os_release
diff --git a/suricata-update/suricata/update/rule.py b/suricata-update/suricata/update/rule.py
index 42c673e..169af6c 100644
--- a/suricata-update/suricata/update/rule.py
+++ b/suricata-update/suricata/update/rule.py
@@ -436,4 +436,4 @@ def parse_var_names(var):
     """ Parse out the variable names from a string. """
     if var is None:
         return []
-    return re.findall("\$([\w_]+)", var)
+    return re.findall(r"\$([\w_]+)", var)
diff --git a/suricata-update/suricata/update/version.py b/suricata-update/suricata/update/version.py
index 1cdf5a1..75d1205 100644
--- a/suricata-update/suricata/update/version.py
+++ b/suricata-update/suricata/update/version.py
@@ -4,4 +4,4 @@
 # Alpha: 1.0.0a1
 # Development: 1.0.0dev0
 # Release candidate: 1.0.0rc1
-version = "1.3.0"
+version = "1.3.2"
diff --git a/suricata-update/tests/integration_tests.py b/suricata-update/tests/integration_tests.py
index 8970585..c4b119b 100755
--- a/suricata-update/tests/integration_tests.py
+++ b/suricata-update/tests/integration_tests.py
@@ -118,6 +118,15 @@ run(common_args + [
     "testing-header-with-spaces",
     "file:///doesnotexist"
 ])
 
+run(common_args + [
+    "add-source",
+    "suricata-test-rules",
+    "file://{}/tests/suricata-test-rules.zip".format(os.getcwd()),
+])
+run(common_args)
+assert(os.path.exists(os.path.join(DATA_DIR, "rules/testmyids.md5")))
+assert(os.path.exists(os.path.join(DATA_DIR, "rules/testmyids.sha1")))
+assert(os.path.exists(os.path.join(DATA_DIR, "rules/testmyids.sha256")))
 
 class IntegrationTest:
     def __init__(self, configs={}):
diff --git a/suricata-update/tests/suricata-test-rules.zip b/suricata-update/tests/suricata-test-rules.zip
new file mode 100644
index 0000000..4f834f8
--- /dev/null
+++ b/suricata-update/tests/suricata-test-rules.zip
Binary files differ
diff --git a/suricata-update/tests/test_main.py b/suricata-update/tests/test_main.py
index 86fa486..919b88b 100644
--- a/suricata-update/tests/test_main.py
+++ b/suricata-update/tests/test_main.py
@@ -127,7 +127,7 @@ class ModifyRuleFilterTestCase(unittest.TestCase):
 
     def test_id_match(self):
         rule0 = suricata.update.rule.parse(self.rule_string)
-        line = '2020757 "\|0d 0a\|" "|ff ff|"'
+        line = r'2020757 "\|0d 0a\|" "|ff ff|"'
         rule_filter = matchers_mod.ModifyRuleFilter.parse(line)
         self.assertTrue(rule_filter != None)
         self.assertTrue(rule_filter.match(rule0))
@@ -138,7 +138,7 @@ class ModifyRuleFilterTestCase(unittest.TestCase):
 
     def test_re_match(self):
         rule0 = suricata.update.rule.parse(self.rule_string)
-        line = 're:classtype:trojan-activity "\|0d 0a\|" "|ff ff|"'
+        line = r're:classtype:trojan-activity "\|0d 0a\|" "|ff ff|"'
         rule_filter = matchers_mod.ModifyRuleFilter.parse(line)
         self.assertTrue(rule_filter != None)
         self.assertTrue(rule_filter.match(rule0))
diff --git a/suricata-update/tox.ini b/suricata-update/tox.ini
index 5ce1245..3200b2d 100644
--- a/suricata-update/tox.ini
+++ b/suricata-update/tox.ini
@@ -4,7 +4,7 @@
 # and then run "tox" from this directory.
 
 [tox]
-envlist = py27, py36, py37, py38
+envlist = py27, py36, py37, py38, py39, py310, py311
 
 [testenv]
 commands = pytest