diff --git a/.copr/Makefile b/.copr/Makefile new file mode 100644 index 0000000..919eb2b --- /dev/null +++ b/.copr/Makefile @@ -0,0 +1,24 @@ +top=.. + +all: srpm + +prereq: $(top)/rpmbuild + rpm -q git rpm-build >/dev/null || dnf -y install git rpm-build + +update-dist-tools: $(top)/dist-tools + ( cd "$(top)/dist-tools" && git pull ) + +$(top)/dist-tools: + git clone https://github.com/jelu/dist-tools.git "$(top)/dist-tools" + +$(top)/rpmbuild: + mkdir -p "$(top)"/rpmbuild/{BUILD,RPMS,SOURCES,SPECS,SRPMS} + +srpm: prereq update-dist-tools + git config --global safe.directory "*" + test -f .gitmodules && git submodule update --init || true + echo "$(spec)" | grep -q "develop.spec" && auto_build_number=`date --utc +%s` message="Auto build `date --utc --iso-8601=seconds`" "$(top)/dist-tools/spec-new-changelog-entry" || true + overwrite=yes nosign=yes "$(top)/dist-tools/create-source-packages" rpm + cp ../*.orig.tar.gz "$(top)/rpmbuild/SOURCES/" + echo "$(spec)" | grep -q "develop.spec" && rpmbuild -bs --define "%_topdir $(top)/rpmbuild" --undefine=dist rpm/*.spec || rpmbuild -bs --define "%_topdir $(top)/rpmbuild" --undefine=dist "$(spec)" + cp "$(top)"/rpmbuild/SRPMS/*.src.rpm "$(outdir)" diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 0000000..38cc1c4 --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1 @@ +custom: https://www.dns-oarc.net/donate diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 0000000..985ca02 --- /dev/null +++ b/.github/workflows/codeql.yml @@ -0,0 +1,41 @@ +name: "CodeQL" + +on: + push: + branches: [ "develop", "master" ] + pull_request: + branches: [ "develop" ] + schedule: + - cron: "41 0 * * 3" + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [ python ] + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Initialize CodeQL + uses: github/codeql-action/init@v3 + with: + languages: ${{ matrix.language }} + queries: +security-and-quality + + - name: Autobuild + uses: github/codeql-action/autobuild@v3 + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 + with: + category: "/language:${{ matrix.language }}" diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml new file mode 100644 index 0000000..805f7a8 --- /dev/null +++ b/.github/workflows/test.yml @@ -0,0 +1,32 @@ +name: "Test" + +on: + push: + branches: [ "develop" ] + pull_request: + branches: [ "develop" ] + +permissions: + contents: read + +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.10 + uses: actions/setup-python@v3 + with: + python-version: "3.10" + - name: Install dependencies + run: | + sudo apt-get install python3-maxminddb python3-yaml wget + python -m pip install --upgrade pip + pip install . 
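+      # The test step below removes the IANA address-space CSVs kept under
+      # tests/ and fetches fresh copies from iana.org before running
+      # tests/test.sh, so the tests run against current registry data.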
+ - name: Test + run: | + cd tests + rm ipv4-address-space.csv ipv6-unicast-address-assignments.csv + wget https://www.iana.org/assignments/ipv4-address-space/ipv4-address-space.csv + wget https://www.iana.org/assignments/ipv6-unicast-address-assignments/ipv6-unicast-address-assignments.csv + ./test.sh diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..a703a82 --- /dev/null +++ b/.gitignore @@ -0,0 +1,16 @@ +/venv/ + +*.pyc +__pycache__/ + +.pytest_cache/ +.coverage +htmlcov/ + +dist/ +build/ +*.egg-info/ + +tests/test.out +tests/test.*.tmp +coverage.xml diff --git a/CHANGES b/CHANGES new file mode 100644 index 0000000..99808c4 --- /dev/null +++ b/CHANGES @@ -0,0 +1,223 @@ +2024-06-18 Jerry Lundström + + Release 1.4.2 + + This release fixes issues with IANA's IPv6 parameters file, + dsc-datatool expected a RIR in the Designation field but IANA recently + added a title for SRv6 reservation which caused an exception. + + Other updates are related to packages and GitHub workflows. + + 7560d82 Tests + 8568c84 Fix client subnet authority + a8c58a9 Workflow + fd8915c RPM SUSE + +2023-12-06 Jerry Lundström + + Release 1.4.1 + + This release fixes issue with InfluxDB quoting, was missing to quote + the quote character. + + Other changes: + - Dependency correction for SLE 15.5 + - Tweaks to test layouts + + b44b874 Tests + eef3ae0 SLE 15.5 + 75c7fc1 Influx quoting + +2023-06-15 Jerry Lundström + + Release 1.4.0 + + This release adds the option `--encoding` to set an encoding to use + for reading and writing files. + + f64c8b6 encoding man-page + 09c0ce9 Encoding + +2022-11-10 Jerry Lundström + + Release 1.3.0 + + This release adds option `nonstrict` to `client_subnet_authority` + generator for skipping bad data in datasets. + + The contrib DSC+Grafana test site dashboards has been moved to its + own repository, feel free to contribute your own creations to it: + https://github.com/DNS-OARC/dsc-datatool-grafana + + 90b232d Add CodeQL workflow for GitHub code scanning + e4fa3b0 Test site + 474f97d client_subnet_authority non-strict mode + +2022-06-13 Jerry Lundström + + Release 1.2.0 + + This release fixes handling of base64'ed strings in DSC XML and will + now decode them back into text when reading, the selected output will + then handling any quoting or escaping needed. + Added a new option for Prometheus output to set a prefix for metrics so + that they can be easily separated from other metrics if needed, see + `man dsc-datatool-output prometheus`. + + 5f9f972 Fix COPR + 3d72019 Prometheus metric prefix + bdc992e base64 labels + +2022-04-05 Jerry Lundström + + Release 1.1.0 + + This release adds support for Prometheus' node_exporter using it's + Textfile Collector (see `man dsc-datatool-output prometheus`) and + fixes a bug in InfluxDB output when selecting what timestamp to use. + Also updates packages and Grafana test site dashboards. + + 4381541 RPM + 19bc153 Typo/clarification + 2a32dd8 Prometheus, InfluxDB, Copyright + dd5323e debhelper + 7352c1e Bye Travis + 32b3bbe Grafana dashboards + 304ab76 Info + +2020-10-21 Jerry Lundström + + Release 1.0.2 + + This release fixed a bug in DAT file parsing that was discovered when + adding coverage tests. + + 45b1aa3 Coverage + 7aedc1a Coverage + 64957b9 DAT, Coverage + 370fb86 Coverage + 891cb7c Coverage + 9374faa Coverage + +2020-08-07 Jerry Lundström + + Release 1.0.1 + + This release adds compatibility with Python v3.5 which allows + packages to be built for Ubuntu Xenial. 
+ + bc0be5b python 3.5 + +2020-08-03 Jerry Lundström + + Release 1.0.0 + + This release brings a complete rewrite of the tool, from Perl to + Python. This rewrite was made possible thanks to funding from EURid, + and will help with maintainability and packaging. + + Core design and command line syntax is kept the same but as the + libraries the generators use have been changed additional command line + options must be used. + + - client_subnet_authority (generator) + + This generator now uses IANA's IP address space registry CSVs to + look up the network authority, therefor it needs either to fetch + the CSV files or be given them on command line. + + See `man dsc-datatool-generator client_subnet_authority` for more + information. + + - client_subnet_country (generator) + + This generator now uses MaxMind databases to look up country based + on subnet. + + See `man dsc-datatool generator client_subnet_country` for more + information and setup guide of the MaxMind databases. + + 589ea8b Badges + c32038b nonstrict + 0ea3e32 LGTM + cff2e1c COPR + 02c31b0 COPR + e8332fd COPR + 6d9f71c Input, YAML + 93ba755 EPEL 8 packages + 3e2df6f Authority + f5d023f Debian packaging + 1a59f09 Documentation + 85cb1e1 restructure + decd3f6 man-pages, URLs + f264854 man-pages + d73c319 man-pages + f5ca007 man-pages + 7bfaf53 Fedora dependencies + 3452b48 RPM dependencies + 7a4edbc Test + ed43406 client_subnet_authority + 62c7d9d Server, node + e0c6419 RPM package + 938f154 Rewrite + 5400464 README + 968ccb1 COPR, spec + 14d987f RPM requires + ee10efb Package + a25870f Funding + +Revision history for App::DSC::DataTool + +0.05 2019-05-31 + Release 0.05 + + Fixed issue with empty values in InfluxDB output, they are now + quoted as an empty string. + + 9917c4e InfluxDB quote keys/values + +0.04 2019-01-21 + Release 0.04 + + Package dependency fix and update of example Grafana dashboards. + + d3babc9 Copyright years + 9955c88 Travis Perl versions + 134a8b3 Debian dependency + 2d2114d Fix #23: Rework Grafana dashboards to hopefully show more + correct numbers and also split them up. + 9bca0d3 Prepare SPEC for OSB/COPR + +0.03 2016-12-16 + Release 0.03 + + Support processing of 25 of the 37 DAT files that the Extractor + can produce, the others can not be converted into time series data + since they lack timestamps. Processing of XML is the recommended + approach to secure all information. + + 72e829c Implement processing of DAT directories + 45294d0 RPM spec + 4e8ff69 Fix 5.24 forbidden keys usage + 7589ad2 Use perl 5.24 also + cfac110 Fix #16: Handle directories in --xml and warn that --dat is + not implemented yet + +0.02 2016-11-11 + Release 0.02 + + First release of `dsc-datatool` with support for: + - Reading DSC XML files + - Transformer: + - Labler: convert indexes/keys to textual names such as QTYPE + - ReRanger: (re)compile lists/ranges/buckets into new buckets + - NetRemap: (re)compile IP addresses and subets into new subnets + - Generator: + - client_subnet_authority: Create a dataset with IP Authority for subnets + - client_subnet_country: Create a dataset with Countries for subnets + - Output: + - Graphite + - InfluxDB + + See `dsc-datatool -h` for options and wiki article: + https://github.com/DNS-OARC/dsc-datatool/wiki/Setting-up-a-test-Grafana diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..6e6115d --- /dev/null +++ b/LICENSE @@ -0,0 +1,33 @@ +DSC DataTool + +Copyright (c) 2016-2024 OARC, Inc. +All rights reserved. 
+ +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in + the documentation and/or other materials provided with the + distribution. + +3. Neither the name of the copyright holder nor the names of its + contributors may be used to endorse or promote products derived + from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +POSSIBILITY OF SUCH DAMAGE. diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..7712d2c --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,3 @@ +graft dsc_datatool +global-exclude *.pyc +include CHANGES diff --git a/README.md b/README.md new file mode 100644 index 0000000..2ef09e3 --- /dev/null +++ b/README.md @@ -0,0 +1,37 @@ +# DSC DataTool + +[![Total alerts](https://img.shields.io/lgtm/alerts/g/DNS-OARC/dsc-datatool.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/DNS-OARC/dsc-datatool/alerts/) [![Bugs](https://sonarcloud.io/api/project_badges/measure?project=dns-oarc%3Adsc-datatool&metric=bugs)](https://sonarcloud.io/dashboard?id=dns-oarc%3Adsc-datatool) [![Security Rating](https://sonarcloud.io/api/project_badges/measure?project=dns-oarc%3Adsc-datatool&metric=security_rating)](https://sonarcloud.io/dashboard?id=dns-oarc%3Adsc-datatool) + +Tool for converting, exporting, merging and transforming DSC data. + +Please have a look at the man-page(s) `dsc-datatool` (1) on how to use or +[the wiki article](https://github.com/DNS-OARC/dsc-datatool/wiki/Setting-up-a-test-Grafana) +on how to set this up using Influx DB and Grafana. + +More information about DSC may be found here: +- https://www.dns-oarc.net/tools/dsc +- https://www.dns-oarc.net/oarc/data/dsc + +Issues should be reported here: +- https://github.com/DNS-OARC/dsc-datatool/issues + +General support and discussion: +- Mattermost: https://chat.dns-oarc.net/community/channels/oarc-software +- mailing-list: https://lists.dns-oarc.net/mailman/listinfo/dsc + +## Dependencies + +`dsc-datatool` requires the following Python libraries: +- PyYAML +- maxminddb + +## Python Development Environment + +Using Ubuntu/Debian: + +``` +sudo apt-get install python3-maxminddb python3-yaml python3-venv +python3 -m venv venv --system-site-packages +. venv/bin/activate +pip install -e . 
--no-deps +``` diff --git a/contrib/iana-dns-params-toyaml.py b/contrib/iana-dns-params-toyaml.py new file mode 100644 index 0000000..bf482c6 --- /dev/null +++ b/contrib/iana-dns-params-toyaml.py @@ -0,0 +1,44 @@ +import yaml +import csv +from urllib.request import Request, urlopen +from io import StringIO + +rcode = {} +qtype = {} +opcode = {} + +for row in csv.reader(StringIO(urlopen(Request('http://www.iana.org/assignments/dns-parameters/dns-parameters-6.csv')).read().decode('utf-8'))): + if row[0] == 'RCODE': + continue + rcode[row[0]] = row[1] + +for row in csv.reader(StringIO(urlopen(Request('http://www.iana.org/assignments/dns-parameters/dns-parameters-4.csv')).read().decode('utf-8'))): + if row[0] == 'TYPE': + continue + qtype[row[1]] = row[0] + +for row in csv.reader(StringIO(urlopen(Request('http://www.iana.org/assignments/dns-parameters/dns-parameters-5.csv')).read().decode('utf-8'))): + if row[0] == 'OpCode': + continue + opcode[row[0]] = row[1] + +y = {} + +for n in ['rcode', 'client_addr_vs_rcode', 'rcode_vs_replylen']: + y[n] = { 'Rcode': {} } + for k, v in rcode.items(): + y[n]['Rcode'][k] = v + +for n in ['qtype', 'transport_vs_qtype', 'certain_qnames_vs_qtype', 'qtype_vs_tld', 'qtype_vs_qnamelen', 'chaos_types_and_names', 'dns_ip_version_vs_qtype']: + y[n] = { 'Qtype': {} } + for k, v in qtype.items(): + if v == '*': + v = 'wildcard' + y[n]['Qtype'][k] = v + +for n in ['opcode']: + y[n] = { 'Opcode': {} } + for k, v in rcode.items(): + y[n]['Opcode'][k] = v + +print(yaml.dump(y, explicit_start=True, default_flow_style=False)) diff --git a/dsc_datatool/__init__.py b/dsc_datatool/__init__.py new file mode 100644 index 0000000..e12b4dc --- /dev/null +++ b/dsc_datatool/__init__.py @@ -0,0 +1,484 @@ +"""dsc_datatool + +The main Python module for the command line tool `dsc-datatool`, see +`man dsc-datatool` on how to run it. + +On runtime it will load all plugins under the following module path: +- dsc_datatool.input +- dsc_datatool.output +- dsc_datatool.generator +- dsc_datatool.transformer + +Each plugin category should base it class on one of the follow superclasses: +- dsc_datatool.Input +- dsc_datatool.Output +- dsc_datatool.Generator +- dsc_datatool.Transformer + +Doing so it will be automatically registered as available and indexed in +the following public dicts using the class name: +- inputs +- outputs +- generators +- transformers + +Example of an output: + + from dsc_datatool import Output + class ExampleOutput(Output): + def process(self, datasets) + ... + +:copyright: 2024 OARC, Inc. +""" + +__version__ = '1.4.2' + +import argparse +import logging +import os +import importlib +import pkgutil +import sys +import traceback +import re + +args = argparse.Namespace() +inputs = {} +outputs = {} +generators = {} +transformers = {} +process_dataset = {} +encoding = 'utf-8' + + +class Dataset(object): + """A representation of a DSC dataset + + A DSC dataset is one to two dimensional structure where the last + dimension holds an array of values and counters. 
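+
+    A minimal sketch of building a dataset by hand (all names and counts
+    are illustrative):
+
+        dataset = Dataset()
+        dataset.name = 'qtype'
+        dataset.start_time = 1563520560
+        dataset.stop_time = dataset.start_time + 60
+        d1 = Dimension('All')
+        d1.value = 'ALL'
+        d2 = Dimension('Qtype')
+        d2.values = {'1': 120, '28': 30}
+        d1.dimensions.append(d2)
+        dataset.dimensions.append(d1)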
+ + It is based on the XML structure of DSC: + + + + + + + + + + + + + + Attributes: + - name: The name of the dataset + - start_time: The start time of the dataset in seconds + - stop_time: The stop time of the dataset in seconds + - dimensions: An array with `Dimension`, the first dimension + """ + name = None + start_time = None + stop_time = None + dimensions = None + + + def __init__(self): + self.dimensions = [] + + + def __repr__(self): + return '' % (self.name, self.dimensions) + + +class Dimension(object): + """A representation of a DSC dimension + + A DSC dataset dimension which can be the first or second dimension, + see `Dataset` for more information. + + Attributes: + - name: The name of the dimension + - value: Is set to the value of the dimension if it's the first dimension + - values: A dict of values with corresponding counters if it's the second dimension + """ + name = None + value = None + values = None + dimensions = None + + + def __init__(self, name): + self.name = name + self.values = {} + self.dimensions = [] + + + def __repr__(self): + return '' % (self.name, self.values or self.value, self.dimensions) + + +class Input(object): + """Base class of an input plugin""" + + + def process(self, file): + """Input.process(...) -> [ Dataset, ... ] + + Called to process a file and return an array of `Dataset`'s found in it. + """ + raise Exception('process() not overloaded') + + + def __init_subclass__(cls): + """This method is called when a class is subclassed and it will + register the input plugin in `inputs`.""" + global inputs + if cls.__name__ in inputs: + raise Exception('Duplicate input module: %s already exists' % cls.__name__) + inputs[cls.__name__] = cls + + +class Output(object): + """Base class of an output plugin""" + + + def process(self, datasets): + """Output.process([ Dataset, ... ]) + + Called to output the `Dataset`'s in the given array.""" + raise Exception('process() not overloaded') + + + def __init__(self, opts): + """instance = Output({ 'opt': value, ... }) + + Called to create an instance of the output plugin, will get a dict + with options provided on command line.""" + pass + + + def __init_subclass__(cls): + """This method is called when a class is subclassed and it will + register the output plugin in `outputs`.""" + global outputs + if cls.__name__ in outputs: + raise Exception('Duplicate output module: %s already exists' % cls.__name__) + outputs[cls.__name__] = cls + + +class Generator(object): + """Base class of a generator plugin""" + + + def process(self, datasets): + """Generator.process([ Dataset, ... ]) -> [ Dataset, ... ] + + Called to generate additional `Dataset`'s based on the given array + of `Dataset`'s.""" + raise Exception('process() not overloaded') + + + def __init__(self, opts): + """instance = Generator({ 'opt': value, ... }) + + Called to create an instance of the generator plugin, will get a dict + with options provided on command line.""" + pass + + + def __init_subclass__(cls): + """This method is called when a class is subclassed and it will + register the generator plugin in `generators`.""" + global generators + if cls.__name__ in generators: + raise Exception('Duplicate generator module: %s already exists' % cls.__name__) + generators[cls.__name__] = cls + + +class Transformer(object): + """Base class of a transformer plugin""" + + + def process(self, datasets): + """Transformer.process([ Dataset, ... 
]) + + Called to do transformation of the given `Dataset`'s, as in modifying + them directly.""" + raise Exception('process() not overloaded') + + + def __init__(self, opts): + """instance = Transformer({ 'opt': value, ... }) + + Called to create an instance of the transformer plugin, will get a dict + with options provided on command line.""" + pass + + + def __init_subclass__(cls): + """This method is called when a class is subclassed and it will + register the transformer plugin in `transformers`.""" + global transformers + if cls.__name__ in transformers: + raise Exception('Duplicate transformer module: %s already exists' % cls.__name__) + transformers[cls.__name__] = cls + + +def main(): + """Called when running `dsc-datatool`.""" + def iter_namespace(ns_pkg): + return pkgutil.iter_modules(ns_pkg.__path__, ns_pkg.__name__ + ".") + + + def split_arg(arg, num=1): + sep = arg[0] + p = arg.split(sep) + p.pop(0) + ret = () + while num > 0: + ret += (p.pop(0),) + num -= 1 + ret += (p,) + return ret + + + def parse_opts(opts): + ret = {} + for opt in opts: + p = opt.split('=', maxsplit=1) + if len(p) > 1: + if p[0] in ret: + if isinstance(ret[p[0]], list): + ret[p[0]].append(p[1]) + else: + ret[p[0]] = [ ret[p[0]], p[1] ] + else: + ret[p[0]] = p[1] + elif len(p) > 0: + ret[p[0]] = True + return ret + + + def _process(datasets, generators, transformers, outputs): + gen_datasets = [] + for generator in generators: + try: + gen_datasets += generator.process(datasets) + except Exception as e: + logging.warning('Generator %s failed: %s' % (generator, e)) + exc_type, exc_value, exc_traceback = sys.exc_info() + for tb in traceback.format_tb(exc_traceback): + logging.warning(str(tb)) + return 2 + + datasets += gen_datasets + + if '*' in transformers: + for transformer in transformers['*']: + try: + transformer.process(datasets) + except Exception as e: + logging.warning('Transformer %s failed: %s' % (transformer, e)) + exc_type, exc_value, exc_traceback = sys.exc_info() + for tb in traceback.format_tb(exc_traceback): + logging.warning(str(tb)) + return 2 + for dataset in datasets: + if dataset.name in transformers: + for transformer in transformers[dataset.name]: + try: + transformer.process([dataset]) + except Exception as e: + logging.warning('Transformer %s failed: %s' % (transformer, e)) + exc_type, exc_value, exc_traceback = sys.exc_info() + for tb in traceback.format_tb(exc_traceback): + logging.warning(str(tb)) + return 2 + + for output in outputs: + try: + output.process(datasets) + except Exception as e: + logging.warning('Output %s failed: %s' % (output, e)) + exc_type, exc_value, exc_traceback = sys.exc_info() + for tb in traceback.format_tb(exc_traceback): + logging.warning(str(tb)) + return 2 + + return 0 + + + global args, inputs, outputs, generators, transformers, process_dataset + + parser = argparse.ArgumentParser(prog='dsc-datatool', + description='Export DSC data into various formats and databases.', + epilog='See man-page dsc-datatool(1) and dsc-datatool-[generator|transformer|output] (5) for more information') + parser.add_argument('-c', '--conf', nargs=1, + help='Not implemented') + # help='Specify the YAML configuration file to use (default to ~/.dsc-datatool.conf), any command line option will override the options in the configuration file. See dsc-datatool.conf(5)for more information.') + parser.add_argument('-s', '--server', nargs=1, + help='Specify the server for where the data comes from. 
(required)') + parser.add_argument('-n', '--node', nargs=1, + help='Specify the node for where the data comes from. (required)') + parser.add_argument('-x', '--xml', action='append', + help='Read DSC data from the given file or directory, can be specified multiple times. If a directory is given then all files ending with .xml will be read.') + parser.add_argument('-d', '--dat', action='append', + help='Read DSC data from the given directory, can be specified multiple times. Note that the DAT format is depended on the filename to know what type of data it is.') + parser.add_argument('--dataset', action='append', + help='Specify that only the list of datasets will be processed, the list is comma separated and the option can be given multiple times.') + parser.add_argument('-o', '--output', action='append', + help='"[option=value...]>" Output data to and use as an options separator.') + parser.add_argument('-t', '--transform', action='append', + help='"[option=value...]>" Use the transformer to change the list of datasets in .') + parser.add_argument('-g', '--generator', action='append', + help='"[,,...]" or "[option=value...]>" Use the specified generators to generate additional datasets.') + parser.add_argument('--list', action='store_true', + help='List the available generators, transformers and outputs then exit.') + parser.add_argument('--skipped-key', nargs=1, default='-:SKIPPED:-', + help='Set the special DSC skipped key. (default to "-:SKIPPED:-")') + parser.add_argument('--skipped-sum-key', nargs=1, default='-:SKIPPED_SUM:-', + help='Set the special DSC skipped sum key. (default to "-:SKIPPED_SUM:-")') + parser.add_argument('--encoding', nargs=1, default='utf-8', + help='Encoding to use for all files, default utf-8.') + parser.add_argument('-v', '--verbose', action='count', default=0, + help='Increase the verbose level, can be given multiple times.') + parser.add_argument('-V', '--version', action='version', version='%(prog)s v'+__version__, + help='Display version and exit.') + + args = parser.parse_args() + + log_level = 30 - (args.verbose * 10) + if log_level < 0: + log_level = 0 + logging.basicConfig(format='%(asctime)s %(levelname)s %(module)s: %(message)s', level=log_level, stream=sys.stderr) + + import dsc_datatool.input + import dsc_datatool.output + import dsc_datatool.generator + import dsc_datatool.transformer + + for finder, name, ispkg in iter_namespace(dsc_datatool.input): + importlib.import_module(name) + for finder, name, ispkg in iter_namespace(dsc_datatool.output): + importlib.import_module(name) + for finder, name, ispkg in iter_namespace(dsc_datatool.generator): + importlib.import_module(name) + for finder, name, ispkg in iter_namespace(dsc_datatool.transformer): + importlib.import_module(name) + + if args.list: + print('Generators:') + for name in generators: + print('',name) + print('Transformers:') + for name in transformers: + print('',name) + print('Outputs:') + for name in outputs: + print('',name) + return 0 + + if not args.server or not args.node: + raise Exception('--server and --node must be given') + + if isinstance(args.server, list): + args.server = ' '.join(args.server) + elif not isinstance(args.server, str): + raise Exception('Invalid argument for --server: %r' % args.server) + if isinstance(args.node, list): + args.node = ' '.join(args.node) + elif not isinstance(args.node, str): + raise Exception('Invalid argument for --node: %r' % args.node) + + gens = [] + if args.generator: + for arg in args.generator: + if not re.match(r'^\w', arg): + name, 
opts = split_arg(arg) + if not name in generators: + logging.critical('Generator %s does not exist' % name) + return 1 + gens.append(generators[name](parse_opts(opts))) + continue + for name in arg.split(','): + if not name in generators: + logging.critical('Generator %s does not exist' % name) + return 1 + gens.append(generators[name]({})) + + trans = {} + if args.transform: + for arg in args.transform: + name, datasets, opts = split_arg(arg, num=2) + if not name in transformers: + logging.critical('Transformer %s does not exist' % name) + return 1 + for dataset in datasets.split(','): + if not dataset in trans: + trans[dataset] = [] + trans[dataset].append(transformers[name](parse_opts(opts))) + + out = [] + if args.output: + for arg in args.output: + name, opts = split_arg(arg) + if not name in outputs: + logging.critical('Output %s does not exist' % name) + return 1 + out.append(outputs[name](parse_opts(opts))) + + if args.dataset: + for dataset in args.dataset: + for p in dataset.split(','): + process_dataset[p] = True + + xml = [] + if args.xml: + for entry in args.xml: + if os.path.isfile(entry): + xml.append(entry) + elif os.path.isdir(entry): + with os.scandir(entry) as dir: + for file in dir: + if not file.name.startswith('.') and file.is_file() and file.name.lower().endswith('.xml'): + xml.append(file.path) + else: + logging.error('--xml %r is not a file or directory' % entry) + + dat = [] + if args.dat: + for entry in args.dat: + if os.path.isdir(entry): + dat.append(entry) + else: + logging.error('--dat %r is not a directory' % entry) + + if not xml and not dat: + logging.error('No valid --xml or --dat given') + return 1 + + xml_input = inputs['XML']() + for file in xml: + try: + datasets = xml_input.process(file) + except Exception as e: + logging.critical('Unable to process XML file %s: %s' % (file, e)) + return 1 + + ret = _process(datasets, gens, trans, out) + if ret > 0: + return ret + + dat_input = inputs['DAT']() + for dir in dat: + try: + datasets = dat_input.process(dir) + except Exception as e: + logging.critical('Unable to process DAT files in %s: %s' % (dir, e)) + return 1 + + ret = _process(datasets, gens, trans, out) + if ret > 0: + return ret diff --git a/dsc_datatool/generator/client_subnet_authority.py b/dsc_datatool/generator/client_subnet_authority.py new file mode 100644 index 0000000..2c9e919 --- /dev/null +++ b/dsc_datatool/generator/client_subnet_authority.py @@ -0,0 +1,181 @@ +"""dsc_datatool.generator.client_subnet_authority + +See `man dsc-datatool-generator client_subnet_authority`. + +Part of dsc_datatool. + +:copyright: 2024 OARC, Inc. 
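+
+Illustrative invocations (values are examples only): either let the generator
+fetch the IANA address-space registries itself, or point it at local copies
+of the registry CSV files:
+
+    dsc-datatool ... --generator ';client_subnet_authority;fetch=yes'
+    dsc-datatool ... --generator ';client_subnet_authority;csv=ipv4-address-space.csv;csv=ipv6-unicast-address-assignments.csv'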
+""" + +import csv +import ipaddress +import logging +from urllib.request import Request, urlopen +from io import StringIO + +from dsc_datatool import Generator, Dataset, Dimension, args, encoding + + +_whois2rir = { + 'whois.apnic.net': 'APNIC', + 'whois.arin.net': 'ARIN', + 'whois.ripe.net': 'RIPE NCC', + 'whois.lacnic.net': 'LACNIC', + 'whois.afrinic.net': 'AFRINIC', +} + +_desig2rir = { + 'apnic': 'APNIC', + 'arin': 'ARIN', + 'ripe ncc': 'RIPE NCC', + 'lacnic': 'LACNIC', + 'afrinic': 'AFRINIC', + 'iana': 'IANA', + '6to4': 'IANA', +} + +class client_subnet_authority(Generator): + auth = None + nonstrict = False + + + def _read(self, input): + global _whois2rir, _desig2rir + for row in csv.reader(input): + prefix, designation, date, whois, rdap, status, note = row + if prefix == 'Prefix': + continue + rir = designation.replace('Administered by ', '').lower() + + whois = whois.lower() + if whois in _whois2rir: + rir = _whois2rir[whois] + else: + if rir in _desig2rir: + rir = _desig2rir[rir] + else: + found = None + for k, v in _desig2rir.items(): + if k in rir: + found = v + break + if found: + rir = found + else: + if status == 'RESERVED': + rir = 'IANA' + elif designation == 'Segment Routing (SRv6) SIDs': + # TODO: How to better handle this weird allocation? + rir = 'IANA' + else: + raise Exception('Unknown whois/designation: %r/%r' % (whois, designation)) + + try: + net = ipaddress.ip_network(prefix) + except Exception: + ip, net = prefix.split('/') + net = ipaddress.ip_network('%s.0.0.0/%s' % (int(ip), net)) + + if net.version == 4: + idx = ipaddress.ip_network('%s/8' % net.network_address, strict=False) + else: + idx = ipaddress.ip_network('%s/24' % net.network_address, strict=False) + + if idx.network_address in self.auth: + self.auth[idx.network_address].append({'net': net, 'auth': rir}) + else: + self.auth[idx.network_address] = [{'net': net, 'auth': rir}] + + + def __init__(self, opts): + Generator.__init__(self, opts) + self.auth = {} + csvs = opts.get('csv', None) + urlv4 = opts.get('urlv4', 'https://www.iana.org/assignments/ipv4-address-space/ipv4-address-space.csv') + urlv6 = opts.get('urlv6', 'https://www.iana.org/assignments/ipv6-unicast-address-assignments/ipv6-unicast-address-assignments.csv') + if opts.get('nonstrict', False): + self.nonstrict = True + + if csvs: + if not isinstance(csvs, list): + csvs = [ csvs ] + for file in csvs: + with open(file, newline='', encoding=encoding) as csvfile: + self._read(csvfile) + elif opts.get('fetch', 'no').lower() == 'yes': + urls = opts.get('url', [ urlv4, urlv6 ]) + if urls and not isinstance(urls, list): + urls = [ urls ] + logging.info('bootstrapping client subnet authority using URLs') + for url in urls: + logging.info('fetching %s' % url) + self._read(StringIO(urlopen(Request(url)).read().decode('utf-8'))) + else: + raise Exception('No authorities bootstrapped, please specify csv= or fetch=yes') + + + def process(self, datasets): + gen_datasets = [] + + for dataset in datasets: + if dataset.name != 'client_subnet': + continue + + subnets = {} + for d1 in dataset.dimensions: + for d2 in d1.dimensions: + for k, v in d2.values.items(): + if k == args.skipped_key: + continue + elif k == args.skipped_sum_key: + continue + + if k in subnets: + subnets[k] += v + else: + subnets[k] = v + + auth = {} + for subnet in subnets: + try: + ip = ipaddress.ip_address(subnet) + except Exception as e: + if not self.nonstrict: + raise e + continue + if ip.version == 4: + idx = ipaddress.ip_network('%s/8' % ip, strict=False) + ip = 
ipaddress.ip_network('%s/32' % ip) + else: + idx = ipaddress.ip_network('%s/24' % ip, strict=False) + ip = ipaddress.ip_network('%s/128' % ip) + if not idx.network_address in self.auth: + idx = '??' + else: + for entry in self.auth[idx.network_address]: + if entry['net'].overlaps(ip): + idx = entry['auth'] + break + + if idx in auth: + auth[idx] += subnets[subnet] + else: + auth[idx] = subnets[subnet] + + if auth: + authd = Dataset() + authd.name = 'client_subnet_authority' + authd.start_time = dataset.start_time + authd.stop_time = dataset.stop_time + gen_datasets.append(authd) + + authd1 = Dimension('ClientAuthority') + authd1.values = auth + authd.dimensions.append(authd1) + + return gen_datasets + + +import sys +if sys.version_info[0] == 3 and sys.version_info[1] == 5: # pragma: no cover + Generator.__init_subclass__(client_subnet_authority) diff --git a/dsc_datatool/generator/client_subnet_country.py b/dsc_datatool/generator/client_subnet_country.py new file mode 100644 index 0000000..30d67ca --- /dev/null +++ b/dsc_datatool/generator/client_subnet_country.py @@ -0,0 +1,98 @@ +"""dsc_datatool.generator.client_subnet_country + +See `man dsc-datatool-generator client_subnet_country`. + +Part of dsc_datatool. + +:copyright: 2024 OARC, Inc. +""" + +import maxminddb +import os +import logging + +from dsc_datatool import Generator, Dataset, Dimension, args + + +class client_subnet_country(Generator): + reader = None + nonstrict = False + + + def __init__(self, opts): + Generator.__init__(self, opts) + paths = opts.get('path', ['/var/lib/GeoIP', '/usr/share/GeoIP', '/usr/local/share/GeoIP']) + if not isinstance(paths, list): + paths = [ paths ] + filename = opts.get('filename', 'GeoLite2-Country.mmdb') + db = opts.get('db', None) + + if db is None: + for path in paths: + db = '%s/%s' % (path, filename) + if os.path.isfile(db) and os.access(db, os.R_OK): + break + db = None + if db is None: + raise Exception('Please specify valid Maxmind database with path=,filename= or db=') + + logging.info('Using %s' % db) + self.reader = maxminddb.open_database(db) + + if opts.get('nonstrict', False): + self.nonstrict = True + + + def process(self, datasets): + gen_datasets = [] + + for dataset in datasets: + if dataset.name != 'client_subnet': + continue + + subnets = {} + for d1 in dataset.dimensions: + for d2 in d1.dimensions: + for k, v in d2.values.items(): + if k == args.skipped_key: + continue + elif k == args.skipped_sum_key: + continue + + if k in subnets: + subnets[k] += v + else: + subnets[k] = v + + cc = {} + for subnet in subnets: + try: + c = self.reader.get(subnet) + except Exception as e: + if not self.nonstrict: + raise e + continue + if c: + iso_code = c.get('country', {}).get('iso_code', '??') + if iso_code in cc: + cc[iso_code] += subnets[subnet] + else: + cc[iso_code] = subnets[subnet] + + if cc: + ccd = Dataset() + ccd.name = 'client_subnet_country' + ccd.start_time = dataset.start_time + ccd.stop_time = dataset.stop_time + gen_datasets.append(ccd) + + ccd1 = Dimension('ClientCountry') + ccd1.values = cc + ccd.dimensions.append(ccd1) + + return gen_datasets + + +import sys +if sys.version_info[0] == 3 and sys.version_info[1] == 5: # pragma: no cover + Generator.__init_subclass__(client_subnet_country) diff --git a/dsc_datatool/input/dat.py b/dsc_datatool/input/dat.py new file mode 100644 index 0000000..5667151 --- /dev/null +++ b/dsc_datatool/input/dat.py @@ -0,0 +1,177 @@ +"""dsc_datatool.input.dat + +Input plugin to generate `Dataset`'s from DSC DAT files. + +Part of dsc_datatool. 
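+
+Each DAT file is named after the dataset it holds and contains whitespace
+separated rows whose first column is a UNIX timestamp; each row becomes a
+`Dataset` covering 60 seconds from that timestamp.
+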
+ +:copyright: 2024 OARC, Inc. +""" + +import re + +from dsc_datatool import Input, Dataset, Dimension, process_dataset, encoding + + +_dataset1d = [ + 'client_subnet_count', + 'ipv6_rsn_abusers_count', +] + +_dataset2d = { + 'qtype': 'Qtype', + 'rcode': 'Rcode', + 'do_bit': 'D0', + 'rd_bit': 'RD', + 'opcode': 'Opcode', + 'dnssec_qtype': 'Qtype', + 'edns_version': 'EDNSVersion', + 'client_subnet2_count': 'Class', + 'client_subnet2_trace': 'Class', + 'edns_bufsiz': 'EDNSBufSiz', + 'idn_qname': 'IDNQname', + 'client_port_range': 'PortRange', + 'priming_responses': 'ReplyLen', +} + +_dataset3d = { + 'chaos_types_and_names': [ 'Qtype', 'Qname' ], + 'certain_qnames_vs_qtype': [ 'CertainQnames', 'Qtype' ], + 'direction_vs_ipproto': [ 'Direction', 'IPProto' ], + 'pcap_stats': [ 'pcap_stat', 'ifname' ], + 'transport_vs_qtype': [ 'Transport', 'Qtype' ], + 'dns_ip_version': [ 'IPVersion', 'Qtype' ], + 'priming_queries': [ 'Transport', 'EDNSBufSiz' ], + 'qr_aa_bits': [ 'Direction', 'QRAABits' ], +} + + +class DAT(Input): + def process(self, dir): + global _dataset1d, _dataset2d, _dataset3d + + datasets = [] + + for d in _dataset1d: + if process_dataset and not d in process_dataset: + continue + try: + datasets += self.process1d('%s/%s.dat' % (dir, d), d) + except FileNotFoundError: + pass + for k, v in _dataset2d.items(): + if process_dataset and not k in process_dataset: + continue + try: + datasets += self.process2d('%s/%s.dat' % (dir, k), k, v) + except FileNotFoundError: + pass + for k, v in _dataset3d.items(): + if process_dataset and not k in process_dataset: + continue + try: + datasets += self.process3d('%s/%s.dat' % (dir, k), k, v[0], v[1]) + except FileNotFoundError: + pass + + return datasets + + + def process1d(self, file, name): + datasets = [] + with open(file, 'r', encoding=encoding) as f: + for l in f.readlines(): + if re.match(r'^#', l): + continue + l = re.sub(r'[\r\n]+$', '', l) + dat = re.split(r'\s+', l) + if len(dat) != 2: + raise Exception('DAT %r dataset %r: invalid number of elements for a 1d dataset' % (file, name)) + + dataset = Dataset() + dataset.name = name + dataset.start_time = int(dat.pop(0)) + dataset.stop_time = dataset.start_time + 60 + + d1 = Dimension('All') + d1.values = { 'ALL': int(dat[0]) } + dataset.dimensions.append(d1) + + datasets.append(dataset) + + return datasets + + + def process2d(self, file, name, field): + datasets = [] + with open(file, 'r', encoding=encoding) as f: + for l in f.readlines(): + if re.match(r'^#', l): + continue + l = re.sub(r'[\r\n]+$', '', l) + dat = re.split(r'\s+', l) + + dataset = Dataset() + dataset.name = name + dataset.start_time = int(dat.pop(0)) + dataset.stop_time = dataset.start_time + 60 + + d1 = Dimension('All') + d1.value = 'ALL' + dataset.dimensions.append(d1) + + d2 = Dimension(field) + while dat: + if len(dat) < 2: + raise Exception('DAT %r dataset %r: invalid number of elements for a 2d dataset' % (file, name)) + k = dat.pop(0) + v = dat.pop(0) + d2.values[k] = int(v) + d1.dimensions.append(d2) + + datasets.append(dataset) + + return datasets + + + def process3d(self, file, name, first, second): + datasets = [] + with open(file, 'r', encoding=encoding) as f: + for l in f.readlines(): + if re.match(r'^#', l): + continue + l = re.sub(r'[\r\n]+$', '', l) + dat = re.split(r'\s+', l) + + dataset = Dataset() + dataset.name = name + dataset.start_time = int(dat.pop(0)) + dataset.stop_time = dataset.start_time + 60 + + while dat: + if len(dat) < 2: + raise Exception('DAT %r dataset %r: invalid number of elements for a 2d 
dataset' % (file, name)) + k = dat.pop(0) + v = dat.pop(0) + + d1 = Dimension(first) + d1.value = k + dataset.dimensions.append(d1) + + d2 = Dimension(second) + dat2 = v.split(':') + while dat2: + if len(dat2) < 2: + raise Exception('DAT %r dataset %r: invalid number of elements for a 2d dataset' % (file, name)) + k2 = dat2.pop(0) + v2 = dat2.pop(0) + d2.values[k2] = int(v2) + d1.dimensions.append(d2) + + datasets.append(dataset) + + return datasets + + +import sys +if sys.version_info[0] == 3 and sys.version_info[1] == 5: # pragma: no cover + Input.__init_subclass__(DAT) diff --git a/dsc_datatool/input/xml.py b/dsc_datatool/input/xml.py new file mode 100644 index 0000000..bb8c0ff --- /dev/null +++ b/dsc_datatool/input/xml.py @@ -0,0 +1,71 @@ +"""dsc_datatool.input.xml + +Input plugin to generate `Dataset`'s from DSC XML files. + +Part of dsc_datatool. + +:copyright: 2024 OARC, Inc. +""" + +import logging +from xml.dom import minidom +import base64 + +from dsc_datatool import Input, Dataset, Dimension, process_dataset + + +class XML(Input): + def process(self, file): + dom = minidom.parse(file) + datasets = [] + for array in dom.getElementsByTagName('array'): + if process_dataset and not array.getAttribute('name') in process_dataset: + continue + + dataset = Dataset() + dataset.name = array.getAttribute('name') + dataset.start_time = int(array.getAttribute('start_time')) + dataset.stop_time = int(array.getAttribute('stop_time')) + + dimensions = [None, None] + for dimension in array.getElementsByTagName('dimension'): + if dimension.getAttribute('number') == '1': + if dimensions[0]: + logging.warning('Overwriting dimension 1 for %s' % dataset.name) + dimensions[0] = dimension.getAttribute('type') + elif dimension.getAttribute('number') == '2': + if dimensions[1]: + logging.warning('Overwriting dimension 2 for %s' % dataset.name) + dimensions[1] = dimension.getAttribute('type') + else: + logging.warning('Invalid dimension number %r for %s' % (dimension.getAttribute('number'), dataset.name)) + + for node1 in array.getElementsByTagName(dimensions[0]): + d1 = Dimension(dimensions[0]) + d1.value = node1.getAttribute('val') + try: + if node1.getAttribute('base64'): + d1.value = base64.b64decode(d1.value).decode('utf-8') + except Exception as e: + pass + dataset.dimensions.append(d1) + + d2 = Dimension(dimensions[1]) + d1.dimensions.append(d2) + for node2 in node1.getElementsByTagName(dimensions[1]): + val = node2.getAttribute('val') + try: + if node2.getAttribute('base64'): + val = base64.b64decode(val).decode('utf-8') + except Exception as e: + pass + d2.values[val] = int(node2.getAttribute('count')) + + datasets.append(dataset) + + return datasets + + +import sys +if sys.version_info[0] == 3 and sys.version_info[1] == 5: # pragma: no cover + Input.__init_subclass__(XML) diff --git a/dsc_datatool/output/influxdb.py b/dsc_datatool/output/influxdb.py new file mode 100644 index 0000000..bb4d2e6 --- /dev/null +++ b/dsc_datatool/output/influxdb.py @@ -0,0 +1,103 @@ +"""dsc_datatool.output.influxdb + +See `man dsc-datatool-output influxdb`. + +Part of dsc_datatool. + +:copyright: 2024 OARC, Inc. 
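+
+An illustrative line as produced by this output for a `qtype` dataset
+(server, node and values are made up; measurement and tag keys are the
+lower cased dataset and dimension names):
+
+    qtype,server=myserver,node=mynode,qtype=A value=120 1563520560000000000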
+""" + +import re +import sys +import atexit + +from dsc_datatool import Output, args, encoding + + +_re = re.compile(r'([,=\\\s])') + + +def _key(key): + return re.sub(_re, r'\\\1', key) + + +def _val(val): + ret = re.sub(_re, r'\\\1', val) + if ret == '': + return '""' + return ret + + +def _process(tags, timestamp, dimension, fh): + if dimension.dimensions is None: + return + + if len(dimension.dimensions) > 0: + if not (dimension.name == 'All' and dimension.value == 'ALL'): + tags += ',%s=%s' % (_key(dimension.name.lower()), _val(dimension.value)) + for d2 in dimension.dimensions: + _process(tags, timestamp, d2, fh) + return + + if dimension.values is None: + return + + if len(dimension.values) > 0: + tags += ',%s=' % _key(dimension.name.lower()) + + for k, v in dimension.values.items(): + print('%s%s value=%s %s' % (tags, _val(k), v, timestamp), file=fh) + + +class InfluxDB(Output): + start_timestamp = True + fh = None + + + def __init__(self, opts): + Output.__init__(self, opts) + timestamp = opts.get('timestamp', 'start') + if timestamp == 'start': + pass + elif timestamp == 'stop': + self.start_timestamp = False + else: + raise Exception('timestamp option invalid') + file = opts.get('file', None) + append = opts.get('append', False) + if file: + if append: + self.fh = open(file, 'a', encoding=encoding) + else: + self.fh = open(file, 'w', encoding=encoding) + atexit.register(self.close) + else: + self.fh = sys.stdout + + if opts.get('dml', False): + print('# DML', file=self.fh) + database = opts.get('database', None) + if database: + print('# CONTEXT-DATABASE: %s' % database, file=self.fh) + + + def close(self): + if self.fh: + self.fh.close() + self.fh = None + + + def process(self, datasets): + for dataset in datasets: + tags = '%s,server=%s,node=%s' % (_key(dataset.name.lower()), args.server, args.node) + if self.start_timestamp: + timestamp = dataset.start_time * 1000000000 + else: + timestamp = dataset.end_time * 1000000000 + + for d in dataset.dimensions: + _process(tags, timestamp, d, self.fh) + + +if sys.version_info[0] == 3 and sys.version_info[1] == 5: # pragma: no cover + Output.__init_subclass__(InfluxDB) diff --git a/dsc_datatool/output/prometheus.py b/dsc_datatool/output/prometheus.py new file mode 100644 index 0000000..c3f2dfb --- /dev/null +++ b/dsc_datatool/output/prometheus.py @@ -0,0 +1,112 @@ +"""dsc_datatool.output.prometheus + +See `man dsc-datatool-output prometheus`. + +Part of dsc_datatool. + +:copyright: 2024 OARC, Inc. 
+""" + +import re +import sys +import atexit + +from dsc_datatool import Output, args, encoding + + +_re = re.compile(r'([\\\n"])') + + +def _key(key): + return re.sub(_re, r'\\\1', key) + + +def _val(val): + ret = re.sub(_re, r'\\\1', val) + if ret == '': + return '""' + return '"%s"' % ret + + +class Prometheus(Output): + show_timestamp = True + start_timestamp = True + fh = None + type_def = '' + type_printed = False + prefix = '' + + + def __init__(self, opts): + Output.__init__(self, opts) + timestamp = opts.get('timestamp', 'start') + if timestamp == 'hide': + self.show_timestamp = False + elif timestamp == 'start': + pass + elif timestamp == 'stop': + self.start_timestamp = False + else: + raise Exception('timestamp option invalid') + file = opts.get('file', None) + append = opts.get('append', False) + if file: + if append: + self.fh = open(file, 'a', encoding=encoding) + else: + self.fh = open(file, 'w', encoding=encoding) + atexit.register(self.close) + else: + self.fh = sys.stdout + self.prefix = opts.get('prefix', '') + + + def close(self): + if self.fh: + self.fh.close() + self.fh = None + + + def _process(self, tags, timestamp, dimension, fh): + if dimension.dimensions is None: + return + + if len(dimension.dimensions) > 0: + if not (dimension.name == 'All' and dimension.value == 'ALL'): + tags += ',%s=%s' % (_key(dimension.name.lower()), _val(dimension.value)) + for d2 in dimension.dimensions: + self._process(tags, timestamp, d2, fh) + return + + if dimension.values is None: + return + + if len(dimension.values) > 0: + tags += ',%s=' % _key(dimension.name.lower()) + + for k, v in dimension.values.items(): + if not self.type_printed: + print(self.type_def, file=fh) + self.type_printed = True + if self.show_timestamp: + print('%s%s} %s %s' % (tags, _val(k), v, timestamp), file=fh) + else: + print('%s%s} %s' % (tags, _val(k), v), file=fh) + + + def process(self, datasets): + for dataset in datasets: + self.type_def = '# TYPE %s gauge' % _key(dataset.name.lower()) + self.type_printed = False + tags = '%s%s{server=%s,node=%s' % (self.prefix, _key(dataset.name.lower()), _val(args.server), _val(args.node)) + if self.start_timestamp: + timestamp = dataset.start_time * 1000 + else: + timestamp = dataset.end_time * 1000 + + for d in dataset.dimensions: + self._process(tags, timestamp, d, self.fh) + + +if sys.version_info[0] == 3 and sys.version_info[1] == 5: # pragma: no cover + Output.__init_subclass__(Prometheus) diff --git a/dsc_datatool/transformer/labler.py b/dsc_datatool/transformer/labler.py new file mode 100644 index 0000000..e510511 --- /dev/null +++ b/dsc_datatool/transformer/labler.py @@ -0,0 +1,69 @@ +"""dsc_datatool.transformer.labler + +See `man dsc-datatool-transformer labler`. + +Part of dsc_datatool. + +:copyright: 2024 OARC, Inc. 
+""" + +import yaml + +from dsc_datatool import Transformer, encoding + + +def _process(label, d): + l = label.get(d.name, None) + if d.values: + if l is None: + return + + values = d.values + d.values = {} + + for k, v in values.items(): + nk = l.get(k, None) + d.values[nk or k] = v + + return + + if l: + v = l.get(d.value, None) + if v: + d.value = v + for d2 in d.dimensions: + _process(label, d2) + + +class Labler(Transformer): + label = None + + + def __init__(self, opts): + Transformer.__init__(self, opts) + if not 'yaml' in opts: + raise Exception('yaml=file option required') + f = open(opts.get('yaml'), 'r', encoding=encoding) + try: + self.label = yaml.full_load(f) + except AttributeError: + self.label = yaml.load(f) + f.close() + + + def process(self, datasets): + if self.label is None: + return + + for dataset in datasets: + label = self.label.get(dataset.name, None) + if label is None: + continue + + for d in dataset.dimensions: + _process(label, d) + + +import sys +if sys.version_info[0] == 3 and sys.version_info[1] == 5: # pragma: no cover + Transformer.__init_subclass__(Labler) diff --git a/dsc_datatool/transformer/net_remap.py b/dsc_datatool/transformer/net_remap.py new file mode 100644 index 0000000..01a9aa5 --- /dev/null +++ b/dsc_datatool/transformer/net_remap.py @@ -0,0 +1,77 @@ +"""dsc_datatool.transformer.net_remap + +See `man dsc-datatool-transformer netremap`. + +Part of dsc_datatool. + +:copyright: 2024 OARC, Inc. +""" + +import ipaddress + +from dsc_datatool import Transformer, args + + +class NetRemap(Transformer): + v4net = None + v6net = None + nonstrict = False + + + def __init__(self, opts): + Transformer.__init__(self, opts) + net = opts.get('net', None) + self.v4net = opts.get('v4net', net) + self.v6net = opts.get('v6net', net) + + if not self.v4net: + raise Exception('v4net (or net) must be given') + if not self.v6net: + raise Exception('v6net (or net) must be given') + + if opts.get('nonstrict', False): + self.nonstrict = True + + + def _process(self, dimension): + if not dimension.values: + for d2 in dimension.dimensions: + self._process(d2) + return + + values = dimension.values + dimension.values = {} + + for k, v in values.items(): + if k == args.skipped_key: + continue + elif k == args.skipped_sum_key: + dimension.values['0'] = v + continue + + try: + ip = ipaddress.ip_address(k) + except Exception as e: + if not self.nonstrict: + raise e + continue + if ip.version == 4: + nkey = str(ipaddress.IPv4Network('%s/%s' % (ip, self.v4net), strict=False).network_address) + else: + nkey = str(ipaddress.IPv6Network('%s/%s' % (ip, self.v6net), strict=False).network_address) + + if not nkey in dimension.values: + dimension.values[nkey] = v + else: + dimension.values[nkey] += v + + + def process(self, datasets): + for dataset in datasets: + for dimension in dataset.dimensions: + self._process(dimension) + + +import sys +if sys.version_info[0] == 3 and sys.version_info[1] == 5: # pragma: no cover + Transformer.__init_subclass__(NetRemap) diff --git a/dsc_datatool/transformer/re_ranger.py b/dsc_datatool/transformer/re_ranger.py new file mode 100644 index 0000000..b2d3691 --- /dev/null +++ b/dsc_datatool/transformer/re_ranger.py @@ -0,0 +1,123 @@ +"""dsc_datatool.transformer.re_ranger + +See `man dsc-datatool-transformer reranger`. + +Part of dsc_datatool. + +:copyright: 2024 OARC, Inc. 
+""" + +import re + +from dsc_datatool import Transformer, args + + +_key_re = re.compile(r'^(?:(\d+)|(\d+)-(\d+))$') + + +class ReRanger(Transformer): + key = None + func = None + allow_invalid_keys = None + range = None + split_by = None + + + def __init__(self, opts): + Transformer.__init__(self, opts) + self.key = opts.get('key', 'mid') + self.func = opts.get('func', 'sum') + self.allow_invalid_keys = opts.get('allow_invalid_keys', False) + self.range = opts.get('range', None) + + if self.allow_invalid_keys != False: + self.allow_invalid_keys = True + + if self.range is None: + raise Exception('range must be given') + m = re.match(r'^/(\d+)$', self.range) + if m is None: + raise Exception('invalid range') + self.split_by = int(m.group(1)) + + if self.key != 'low' and self.key != 'mid' and self.key != 'high': + raise Exception('invalid key %r' % self.key) + + if self.func != 'sum': + raise Exception('invalid func %r' % self.func) + + + def _process(self, dimension): + global _key_re + + if not dimension.values: + for d2 in dimension.dimensions: + self._process(d2) + return + + values = dimension.values + dimension.values = {} + skipped = None + + for k, v in values.items(): + low = None + high = None + + m = _key_re.match(k) + if m: + low, low2, high = m.group(1, 2, 3) + if high is None: + low = int(low) + high = low + else: + low = int(low2) + high = int(high) + elif k == args.skipped_key: + continue + elif k == args.skipped_sum_key: + if skipped is None: + skipped = v + else: + skipped += v + continue + elif self.allow_invalid_keys: + dimension.values[k] = v + continue + else: + raise Exception('invalid key %r' % k) + + if self.key == 'low': + nkey = low + elif self.key == 'mid': + nkey = int(low + ( (high - low) / 2 )) + else: + nkey = high + + nkey = int(nkey / self.split_by) * self.split_by + low = nkey + high = nkey + self.split_by - 1 + + if self.func == 'sum': + if low != high: + nkey = '%d-%d' % (low, high) + else: + nkey = str(nkey) + + if nkey in dimension.values: + dimension.values[nkey] += v + else: + dimension.values[nkey] = v + + if skipped: + dimension.values['skipped'] = skipped + + + def process(self, datasets): + for dataset in datasets: + for dimension in dataset.dimensions: + self._process(dimension) + + +import sys +if sys.version_info[0] == 3 and sys.version_info[1] == 5: # pragma: no cover + Transformer.__init_subclass__(ReRanger) diff --git a/man/man1/dsc-datatool.1 b/man/man1/dsc-datatool.1 new file mode 100644 index 0000000..262f786 --- /dev/null +++ b/man/man1/dsc-datatool.1 @@ -0,0 +1,188 @@ +.TH "dsc-datatool" "1" +.SH NAME +dsc-datatool \- Tool for converting, exporting, merging and transforming DSC data. +.SH SYNOPSIS +.SY dsc-datatool +.OP \-h +.OP \-c CONF +.OP \-s SERVER +.OP \-n NODE +.OP \-x XML +.OP \-d DAT +.OP \-\-dataset DATASET +.OP \-o OUTPUT +.OP \-t TRANSFORM] +.OP \-g GENERATOR +.OP \-\-list +.OP \-\-skipped\-key SKIPPED_KEY +.OP \-\-skipped\-sum\-key SKIPPED_SUM_KEY +.OP \-v +.OP \-V +.YS +.SH DESCRIPTION +Tool for converting, exporting, merging and transforming DSC data. + +Please have a look at the wiki article on how to set this up using +Influx DB and Grafana. + +https://github.com/DNS-OARC/dsc-datatool/wiki/Setting-up-a-test-Grafana +.SH OPTIONS +.TP +.B -h, --help +show this help message and exit +.TP +.BI "-c " CONF ", --conf " CONF +Not implemented +.TP +.BI "-s " SERVER ", --server " SERVER +Specify the server for where the data comes from. 
(required) +.TP +.BI "-n " NODE ", --node " NODE +Specify the node for where the data comes from. (required) +.TP +.BI "-x " XML ", --xml " XML +Read DSC data from the given file or directory, can be specified multiple +times. +If a directory is given then all files ending with .xml will be read. +.TP +.BI "-d " DAT ", --dat " DAT +Read DSC data from the given directory, can be specified multiple times. +Note that the DAT format is depended on the filename to know what type of +data it is. +.TP +.BI "--dataset " DATASET +Specify that only the list of datasets will be processed, the list is +comma separated and the option can be given multiple times. +.TP +.BI "-o " OUTPUT ", --output " OUTPUT +.I OUTPUT +has the following format that uses +.I output +to specify the output module and +.I sep +as an options separator. + +.EX + [option=value...]> +.EE + +Can be specified multiple times to output to more then one. + +Use +.B dsc-datatool --list +to see a list of modules and the man-page of each output for information +about options. +.TP +.BI "-t " TRANSFORM ", --transform " TRANSFORM +.I TRANSFORM +has the following format that uses +.I name +to specify the transformer module and +.I sep +as an options separator. +The +.I datasets +field can specify which dataset to run the transformer on, or use +.I * +to specify all datasets. + +.EX + [option=value...]> +.EE + +Can be specific multiple times to chain transformation, the chain will be +executed in the order on command line with one exception. +All transformations specified for dataset +.I * +will be executed before named dataset transformations. + +Use +.B dsc-datatool --list +to see a list of modules and the man-page of each transformer for +information about options. +For a list of datasets see the DSC configuration that creates the XML files +or the documentation for the Presenter that creates the DAT files. +.TP +.BI "-g " GENERATOR ", --generator " GENERATOR +.I GENERATOR +has two formats, one to specify a comma separated list of generators +and one that uses +.I name +to specify the generator module and +.I sep +as an options separator. + +.EX + [,,...] + + [option=value...]> +.EE + +This option can be given multiple times. + +Use +.B dsc-datatool --list +to see a list of modules and the man-page of each generator for +information about options. +.TP +.B --list +List the available generators, transformers and outputs then exit. +.TP +.BI "--skipped-key " SKIPPED_KEY +Set the special DSC skipped key. (default to "-:SKIPPED:-") +.TP +.BI "--skipped-sum-key " SKIPPED_SUM_KEY +Set the special DSC skipped sum key. (default to "-:SKIPPED_SUM:-") +.TP +.BI "--encoding " ENCODING +Set the encoding to use when reading and writing files, default to utf-8. +.TP +.B -v, --verbose +Increase the verbose level, can be given multiple times. +.TP +.B -V, --version +Display version and exit. 
+.LP +.SH EXAMPLE +.EX +dsc-datatool \\ + --server "$SERVER" \\ + --node "$NODE" \\ + --output ";InfluxDB;file=influx.txt;dml=1;database=dsc" \\ + --transform ";Labler;*;yaml=$HOME/labler.yaml" \\ + --transform ";ReRanger;rcode_vs_replylen;range=/64;pad_to=5" \\ + --transform ";ReRanger;qtype_vs_qnamelen;range=/16;pad_to=3" \\ + --transform ";ReRanger;client_port_range;key=low;range=/2048;pad_to=5" \\ + --transform ";ReRanger;edns_bufsiz,priming_queries;key=low;range=/512;pad_to=5;allow_invalid_keys=1" \\ + --transform ";ReRanger;priming_responses;key=low;range=/128;pad_to=4" \\ + --transform ";NetRemap;client_subnet,client_subnet2,client_addr_vs_rcode,ipv6_rsn_abusers;net=8" \\ + --generator client_subnet_country \\ + --generator ";client_subnet_authority;fetch=yes" \\ + --xml "$XML" +.EE +.SH "SEE ALSO" +.BR dsc-datatool.conf (5), +.BI dsc-datatool- [ generator | transformer | output ] +.BR (7) +.SH AUTHORS +Jerry Lundström, DNS-OARC +.LP +Maintained by DNS-OARC +.LP +.RS +.I https://www.dns-oarc.net/tools/dsc +.RE +.LP +.SH BUGS +For issues and feature requests please use: +.LP +.RS +\fIhttps://github.com/DNS-OARC/dsc-datatool/issues\fP +.RE +.LP +For question and help please use: +.LP +.RS +\fIhttps://lists.dns-oarc.net/mailman/listinfo/dsc\fP +.RE +.LP diff --git a/man/man5/dsc-datatool.conf.5 b/man/man5/dsc-datatool.conf.5 new file mode 100644 index 0000000..2e165db --- /dev/null +++ b/man/man5/dsc-datatool.conf.5 @@ -0,0 +1,29 @@ +.TH "dsc-datatool.conf" "5" +.SH NAME +dsc-datatool.conf \- Configuration file for dsc-datatool. +.SH DESCRIPTION +Not implemented. +.SH "SEE ALSO" +.BR dsc-datatool (1) +.SH AUTHORS +Jerry Lundström, DNS-OARC +.LP +Maintained by DNS-OARC +.LP +.RS +.I https://www.dns-oarc.net/tools/dsc +.RE +.LP +.SH BUGS +For issues and feature requests please use: +.LP +.RS +\fIhttps://github.com/DNS-OARC/dsc-datatool/issues\fP +.RE +.LP +For question and help please use: +.LP +.RS +\fIhttps://lists.dns-oarc.net/mailman/listinfo/dsc\fP +.RE +.LP diff --git a/man/man7/dsc-datatool-generator-client_subnet_authority.7 b/man/man7/dsc-datatool-generator-client_subnet_authority.7 new file mode 100644 index 0000000..de47d5c --- /dev/null +++ b/man/man7/dsc-datatool-generator-client_subnet_authority.7 @@ -0,0 +1,66 @@ +.TH "dsc-datatool-generator client_subnet_authority" "7" +.SH NAME +client_subnet_authority \- Generates network authority information (RIR) based on client subnet. +.SH SYNOPSIS +.SY dsc-datatool +.B \-\-generator +.I """;client_subnet_authority;""" +.YS +.SH DESCRIPTION +This generator will lookup the network authority (RIR) based network +information in the +.I client_subnet +dataset and generate a new dataset +.IR client_subnet_authority . + +The network authority lookup will be done from imports of CSV files or by +fetching the required data from IANA. +.SH OPTIONS +.TP +.BR csv = +Read the network authority information from the given +.IR file(s) . +This option can be a single file, a list of comma separated files or given +multiple times. +.TP +.BR fetch =yes +Fetch the network authority information from IANA IP address space registry +for IPv4 and IPv6. +.TP +.BR urlv4 =url +URL for IANA IPv4 address space registry, default to +.IR https://www.iana.org/assignments/ipv4-address-space/ipv4-address-space.csv . +.TP +.BR urlv6 =url +URL for IANA IPv6 address space registry, default to +.IR https://www.iana.org/assignments/ipv6-unicast-address-assignments/ipv6-unicast-address-assignments.csv . 
+.TP +.BR nonstrict = +If set (to any value) then a non-strict mode is enabled which will disregard +any bad data in the dataset, skipping it completely. +Default is strict mode. +.LP +.SH "SEE ALSO" +.BR dsc-datatool (1) +.SH AUTHORS +Jerry Lundström, DNS-OARC +.LP +Maintained by DNS-OARC +.LP +.RS +.I https://www.dns-oarc.net/tools/dsc +.RE +.LP +.SH BUGS +For issues and feature requests please use: +.LP +.RS +\fIhttps://github.com/DNS-OARC/dsc-datatool/issues\fP +.RE +.LP +For question and help please use: +.LP +.RS +\fIhttps://lists.dns-oarc.net/mailman/listinfo/dsc\fP +.RE +.LP diff --git a/man/man7/dsc-datatool-generator-client_subnet_country.7 b/man/man7/dsc-datatool-generator-client_subnet_country.7 new file mode 100644 index 0000000..49671c8 --- /dev/null +++ b/man/man7/dsc-datatool-generator-client_subnet_country.7 @@ -0,0 +1,67 @@ +.TH "dsc-datatool-generator client_subnet_country" "7" +.SH NAME +client_subnet_country \- Generates country information based on client subnet using Maxmind database. +.SH SYNOPSIS +.SY dsc-datatool +.B \-\-generator +.I """;client_subnet_country;""" +.YS +.SH DESCRIPTION +This generator looks up country code in a Maxmind database for the subnets +in the +.I client_subnet +dataset and the dataset +.I client_subnet_country +from that. +.SH OPTIONS +.TP +.BR path = +Search for the Maxmind database in the specified +.IR directory . +This option can be given multiple times to search in multiple directories. + +Default search paths are: +.IR /var/lib/GeoIP , +.IR /usr/share/GeoIP , +.IR /usr/local/share/GeoIP . +.TP +.BR filename =filename +The database filename to search for, default to +.IR GeoLite2-Country.mmdb . +.TP +.BR db =/path/to/database.mmdb +Full path to Maxmind database, this option overrides +.I path +and +.IR filename . +.TP +.BR nonstrict = +If set (to any value) then a non-strict mode is enabled which will disregard +any bad data in the dataset, skipping it completely. +Default is strict mode. +.LP +.SH "SEE ALSO" +.BR dsc-datatool (1), +.BR geoipupdate (1) +.SH AUTHORS +Jerry Lundström, DNS-OARC +.LP +Maintained by DNS-OARC +.LP +.RS +.I https://www.dns-oarc.net/tools/dsc +.RE +.LP +.SH BUGS +For issues and feature requests please use: +.LP +.RS +\fIhttps://github.com/DNS-OARC/dsc-datatool/issues\fP +.RE +.LP +For question and help please use: +.LP +.RS +\fIhttps://lists.dns-oarc.net/mailman/listinfo/dsc\fP +.RE +.LP diff --git a/man/man7/dsc-datatool-output-influxdb.7 b/man/man7/dsc-datatool-output-influxdb.7 new file mode 100644 index 0000000..bf7fa38 --- /dev/null +++ b/man/man7/dsc-datatool-output-influxdb.7 @@ -0,0 +1,64 @@ +.TH "dsc-datatool-output influxdb" "7" +.SH NAME +InfluxDB \- InfluxDB output. +.SH SYNOPSIS +.SY dsc-datatool +.B \-\-output +.I """;InfluxDB;""" +.YS +.SH DESCRIPTION +This output generates InfluxDB importable output to stdout or to a specified +file. +.SH OPTIONS +.TP +.BR timestamp =[start|stop] +Choose which timestamp from the dataset to use in the InfluxDB output to +specify when the metrics took place. + +Default to +.I start +timestamp. +.TP +.BR file = +Specify a file to output to instead of stdout. +.TP +.BR append +If given, the output will be appended to the file specified rather then +overwritten. +.TP +.BR dml +Add a +.I "# DML" +header to the output. +.TP +.BR database =name +If +.I dml +is used, this can be used to specify which database the output should be +imported into. 
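To make the dml and database options above more concrete: files fed to `influx -import` start with a `# DML` section header and a `# CONTEXT-DATABASE:` line naming the target database, followed by line-protocol points. The snippet below is a minimal assumed illustration of that layout; the write_influx_import helper and the sample measurement are hypothetical, not dsc-datatool's exact output.

```python
# Assumed layout of an `influx -import` file; not dsc-datatool's exact output.
def write_influx_import(path, database, points):
    with open(path, 'w') as f:
        f.write('# DML\n')                              # section header the dml option adds
        f.write('# CONTEXT-DATABASE: %s\n' % database)  # target database for the import
        for line in points:                             # points already in line protocol
            f.write(line + '\n')

# Hypothetical point: measurement, tags, one field, nanosecond timestamp.
write_influx_import('influx.txt', 'dsc', [
    'rcode,server=test-server,node=test-node,rcode=0 value=416i 1563520560000000000',
])
```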
+.LP +.SH "SEE ALSO" +.BR dsc-datatool (1), +.BR influx (1) +.SH AUTHORS +Jerry Lundström, DNS-OARC +.LP +Maintained by DNS-OARC +.LP +.RS +.I https://www.dns-oarc.net/tools/dsc +.RE +.LP +.SH BUGS +For issues and feature requests please use: +.LP +.RS +\fIhttps://github.com/DNS-OARC/dsc-datatool/issues\fP +.RE +.LP +For question and help please use: +.LP +.RS +\fIhttps://lists.dns-oarc.net/mailman/listinfo/dsc\fP +.RE +.LP diff --git a/man/man7/dsc-datatool-output-prometheus.7 b/man/man7/dsc-datatool-output-prometheus.7 new file mode 100644 index 0000000..c27bcc7 --- /dev/null +++ b/man/man7/dsc-datatool-output-prometheus.7 @@ -0,0 +1,78 @@ +.TH "dsc-datatool-output prometheus" "7" +.SH NAME +Prometheus \- Prometheus output. +.SH SYNOPSIS +.SY dsc-datatool +.B \-\-output +.I """;Prometheus;""" +.YS +.SH DESCRIPTION +This output generates Prometheus importable output to stdout or to a specified +file. +.SS Prometheus' node_exporter +This output can be used together with Prometheus' +.IR node_exporter 's +Textfile Collector to automate statistics gathering but some specific +setup and requirements must be meet. + +You must hide the timestamp with option +.B timestamp=hide +because timestamps are not supported by the Textfile Collector. + +You must make sure only one XML file from a server+node combination is +processed at a time. +Because otherwise you will get multiple data point for the same metric +and this will generate errors from the Textfile Collector, since it does +not support timestamps and cannot separate the measurements. + +You must make sure that only one file (per server+node combo) is generated +for the Textfile Collector to read, and it should be the same between runs. +See Textfile Collectors documentation how to setup that atomically. +.SH OPTIONS +.TP +.BR timestamp =[hide|start|stop] +Choose which timestamp from the dataset to use in the Prometheus output to +specify when the metrics took place. + +Default to +.I start +timestamp. +.TP +.BR file = +Specify a file to output to instead of stdout. +.TP +.BR append +If given, the output will be appended to the file specified rather then +overwritten. +.TP +.BR prefix = +Use the given string as prefix on all metric names. +.LP +.SH "SEE ALSO" +.BR dsc-datatool (1) + +.I https://prometheus.io/docs/guides/node-exporter/ + +.I https://github.com/prometheus/node_exporter#textfile-collector +.SH AUTHORS +Jerry Lundström, DNS-OARC +.LP +Maintained by DNS-OARC +.LP +.RS +.I https://www.dns-oarc.net/tools/dsc +.RE +.LP +.SH BUGS +For issues and feature requests please use: +.LP +.RS +\fIhttps://github.com/DNS-OARC/dsc-datatool/issues\fP +.RE +.LP +For question and help please use: +.LP +.RS +\fIhttps://lists.dns-oarc.net/mailman/listinfo/dsc\fP +.RE +.LP diff --git a/man/man7/dsc-datatool-transformer-labler.7 b/man/man7/dsc-datatool-transformer-labler.7 new file mode 100644 index 0000000..53e428d --- /dev/null +++ b/man/man7/dsc-datatool-transformer-labler.7 @@ -0,0 +1,62 @@ +.TH "dsc-datatool-transformer labler" "7" +.SH NAME +Labler \- Rewrite numeric labels to textual labels using the provided YAML data as lookup tables. +.SH SYNOPSIS +.SY dsc-datatool +.B \-\-transform +.I """;Labler;;yaml=""" +.YS +.SH DESCRIPTION +This transformer rewrites labels in datasets based on information provided +in a YAML file. 
+ +The YAML structure is as follows: +.EX +--- +dataset_name: + DimentionName: + DimentionValue: RewriteTo +.EE + +The +.I dataset_name +is the name of the dataset, +.I DimentionName +is the name of the dimension to rewrite and +.I DimensionValue +is the value to change to +.IR RewriteTo . +.SH OPTIONS +.TP +.B dataset +See +.IR dsc-datatool (1) +on how to specify which dataset(s) to run the transformer on. +.TP +.BR yaml = +The YAML file to load rewrite data from (required). +.LP +.SH "SEE ALSO" +.BR dsc-datatool (1) +.SH AUTHORS +Jerry Lundström, DNS-OARC +.LP +Maintained by DNS-OARC +.LP +.RS +.I https://www.dns-oarc.net/tools/dsc +.RE +.LP +.SH BUGS +For issues and feature requests please use: +.LP +.RS +\fIhttps://github.com/DNS-OARC/dsc-datatool/issues\fP +.RE +.LP +For question and help please use: +.LP +.RS +\fIhttps://lists.dns-oarc.net/mailman/listinfo/dsc\fP +.RE +.LP diff --git a/man/man7/dsc-datatool-transformer-netremap.7 b/man/man7/dsc-datatool-transformer-netremap.7 new file mode 100644 index 0000000..9d0dee3 --- /dev/null +++ b/man/man7/dsc-datatool-transformer-netremap.7 @@ -0,0 +1,60 @@ +.TH "dsc-datatool-transformer netremap" "7" +.SH NAME +NetRemap \- Remap network addresses to other ranges/subnets. +.SH SYNOPSIS +.SY dsc-datatool +.B \-\-transform +.I """;NetRemap;;""" +.YS +.SH DESCRIPTION +This transformer can remap network ranges on IP addresses and subnets. +.SH OPTIONS +.TP +.B dataset +See +.IR dsc-datatool (1) +on how to specify which dataset(s) to run the transformer on. +.TP +.BR net = +The network to remap both IPv4 and IPv6 addresses and networks, specify +only the number in a short-form network (e.g. "/"). +.TP +.BR v4net = +The IPv4 network to use for IPv4 addresses and networks, default to +.I net +if that is specified. +.TP +.BR v6net = +The IPv6 network to use for IPv6 addresses and networks, default to +.I net +if that is specified. +.TP +.BR nonstrict = +If set (to any value) then a non-strict mode is enabled which will disregard +any bad data in the dataset, skipping it completely. +Default is strict mode. +.LP +.SH "SEE ALSO" +.BR dsc-datatool (1) +.SH AUTHORS +Jerry Lundström, DNS-OARC +.LP +Maintained by DNS-OARC +.LP +.RS +.I https://www.dns-oarc.net/tools/dsc +.RE +.LP +.SH BUGS +For issues and feature requests please use: +.LP +.RS +\fIhttps://github.com/DNS-OARC/dsc-datatool/issues\fP +.RE +.LP +For question and help please use: +.LP +.RS +\fIhttps://lists.dns-oarc.net/mailman/listinfo/dsc\fP +.RE +.LP diff --git a/man/man7/dsc-datatool-transformer-reranger.7 b/man/man7/dsc-datatool-transformer-reranger.7 new file mode 100644 index 0000000..410e384 --- /dev/null +++ b/man/man7/dsc-datatool-transformer-reranger.7 @@ -0,0 +1,75 @@ +.TH "dsc-datatool-transformer reranger" "7" +.SH NAME +ReRanger \- Rewrite ranged or numerical statistics into other ranges. +.SH SYNOPSIS +.SY dsc-datatool +.B \-\-transform +.I """;ReRanger;;""" +.YS +.SH DESCRIPTION +This transformer can re-range values in datasets to new rangers. + +It supports both values that are a single numerical value or one that is +already a range itself +.RI ( num\-num ). +.SH OPTIONS +.TP +.B dataset +See +.IR dsc-datatool (1) +on how to specify which dataset(s) to run the transformer on. +.TP +.BR key =[low|mid|high] +Specify what value to use when re-ranging an already ranged value +.RI (low\-high). + +Default to +.IR mid , +which is high minus low then divided by 2. +.TP +.BR func = +The function to use when aggregating multiple values into a new ranger, +default to +.IR sum . 
+ +Currently only one function exists, +.IR sum , +which adds the sum of all values into the new range. +.TP +.B allow_invalid_keys +If given the "invalid" keys/values that is not numerical or a range will be +passed through unmodified. +.TP +.BR range = +The new range given as +.I /number +(required). + +For example +.I range=/64 +will re-range all values into buckets of 64, 0-63, 64-127 and so on. +.LP +.SH "SEE ALSO" +.BR dsc-datatool (1) +.SH AUTHORS +Jerry Lundström, DNS-OARC +.LP +Maintained by DNS-OARC +.LP +.RS +.I https://www.dns-oarc.net/tools/dsc +.RE +.LP +.SH BUGS +For issues and feature requests please use: +.LP +.RS +\fIhttps://github.com/DNS-OARC/dsc-datatool/issues\fP +.RE +.LP +For question and help please use: +.LP +.RS +\fIhttps://lists.dns-oarc.net/mailman/listinfo/dsc\fP +.RE +.LP diff --git a/rpm/dsc-datatool.spec b/rpm/dsc-datatool.spec new file mode 100644 index 0000000..6863b87 --- /dev/null +++ b/rpm/dsc-datatool.spec @@ -0,0 +1,259 @@ +Name: dsc-datatool +Version: 1.4.2 +Release: 1%{?dist} +Summary: Export DSC data to other formats and/or databases +Group: Productivity/Networking/DNS/Utilities + +License: BSD-3-Clause +URL: https://www.dns-oarc.net/oarc/data/dsc +# Source needs to be generated by dist-tools/create-source-packages, see +# https://github.com/jelu/dist-tools +Source0: https://github.com/DNS-OARC/dsc-datatool/archive/v%{version}.tar.gz?/%{name}_%{version}.orig.tar.gz + +BuildArch: noarch + +BuildRequires: python3-devel +BuildRequires: python3-setuptools +BuildRequires: python-rpm-macros +%if 0%{?el7} +BuildRequires: python36-maxminddb +BuildRequires: python36-PyYAML +%else +%if (0%{?sle_version} == 150500 && !0%{?is_opensuse}) || 0%{?sle_version} >= 150600 +BuildRequires: python311-maxminddb +%else +BuildRequires: python3-maxminddb +%endif +BuildRequires: python3-PyYAML +%endif + +%if 0%{?el7} +Requires: python36-maxminddb +Requires: python36-PyYAML +%else +%if (0%{?sle_version} == 150500 && !0%{?is_opensuse}) || 0%{?sle_version} >= 150600 +Requires: python311-maxminddb +%else +Requires: python3-maxminddb +%endif +Requires: python3-PyYAML +%endif + +%package doc +Summary: Documentation files for %{name} +Group: Documentation + + +%description +Tool for converting, exporting, merging and transforming DSC data. + + +%description doc +Tool for converting, exporting, merging and transforming DSC data. + +This package contains the documentation for dsc-datatool. 
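Returning to the ReRanger man page above: the re-ranging it describes boils down to integer bucketing plus summing. Below is a small, purely illustrative sketch (the rerange helper is hypothetical; it mirrors the ReRanger transformer code included earlier in this patch) of what key=mid, func=sum and range=/64 do to a few reply-length counters like those in tests/20190719/rcode_vs_replylen.dat.

```python
import re

# Hypothetical, simplified mirror of the ReRanger logic for
# key=mid, func=sum, range=/64; not the transformer module itself.
def rerange(values, split_by=64):
    out = {}
    for key, count in values.items():
        m = re.match(r'^(?:(\d+)|(\d+)-(\d+))$', key)
        if m is None:
            continue  # non-numeric keys are simply skipped in this sketch
        if m.group(1) is not None:
            low = high = int(m.group(1))
        else:
            low, high = int(m.group(2)), int(m.group(3))
        mid = low + (high - low) // 2            # key=mid
        bucket = (mid // split_by) * split_by    # range=/64
        label = '%d-%d' % (bucket, bucket + split_by - 1)
        out[label] = out.get(label, 0) + count   # func=sum
    return out

print(rerange({'42': 4, '89': 1, '112': 1, '232': 31}))
# -> {'0-63': 4, '64-127': 2, '192-255': 31}
```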
+ + +%prep +%setup -q -n %{name}_%{version} + + +%build +python3 setup.py build + + +%install +python3 setup.py install --prefix=%{_prefix} --root=%{buildroot} +mkdir -p %{buildroot}%{_mandir}/man1/ +install -m644 man/man1/dsc-datatool.1 %{buildroot}%{_mandir}/man1/ +mkdir -p %{buildroot}%{_mandir}/man5/ +install -m644 man/man5/dsc-datatool.conf.5 %{buildroot}%{_mandir}/man5/ +mkdir -p %{buildroot}%{_mandir}/man7/ +install -m644 man/man7/dsc-datatool-transformer-reranger.7 %{buildroot}%{_mandir}/man7/ +install -m644 man/man7/dsc-datatool-generator-client_subnet_country.7 %{buildroot}%{_mandir}/man7/ +install -m644 man/man7/dsc-datatool-generator-client_subnet_authority.7 %{buildroot}%{_mandir}/man7/ +install -m644 man/man7/dsc-datatool-output-influxdb.7 %{buildroot}%{_mandir}/man7/ +install -m644 man/man7/dsc-datatool-output-prometheus.7 %{buildroot}%{_mandir}/man7/ +install -m644 man/man7/dsc-datatool-transformer-labler.7 %{buildroot}%{_mandir}/man7/ +install -m644 man/man7/dsc-datatool-transformer-netremap.7 %{buildroot}%{_mandir}/man7/ + + +%files +%license LICENSE +%{_bindir}/dsc-datatool +%{_mandir}/man1/dsc-datatool.1* +%{_mandir}/man5/dsc-datatool.conf.5* +%{_mandir}/man7/dsc-datatool*.7* +%{python3_sitelib}/dsc_datatool* + + +%files doc +%doc CHANGES README.md +%license LICENSE + + +%changelog +* Tue Jun 18 2024 Jerry Lundström 1.4.2-1 +- Release 1.4.2 + * This release fixes issues with IANA's IPv6 parameters file, + dsc-datatool expected a RIR in the Designation field but IANA recently + added a title for SRv6 reservation which caused an exception. + * Other updates are related to packages and GitHub workflows. + * Commits: + 7560d82 Tests + 8568c84 Fix client subnet authority + a8c58a9 Workflow + fd8915c RPM SUSE +* Wed Dec 06 2023 Jerry Lundström 1.4.1-1 +- Release 1.4.1 + * This release fixes issue with InfluxDB quoting, was missing to quote + the quote character. + * Other changes: + - Dependency correction for SLE 15.5 + - Tweaks to test layouts + * Commits: + b44b874 Tests + eef3ae0 SLE 15.5 + 75c7fc1 Influx quoting +* Thu Jun 15 2023 Jerry Lundström 1.4.0-1 +- Release 1.4.0 + * This release adds the option `--encoding` to set an encoding to use + for reading and writing files. + * Commits: + f64c8b6 encoding man-page + 09c0ce9 Encoding +* Thu Nov 10 2022 Jerry Lundström 1.3.0-1 +- Release 1.3.0 + * This release adds option `nonstrict` to `client_subnet_authority` + generator for skipping bad data in datasets. + * The contrib DSC+Grafana test site dashboards has been moved to its + own repository, feel free to contribute your own creations to it: + https://github.com/DNS-OARC/dsc-datatool-grafana + * Commits: + 90b232d Add CodeQL workflow for GitHub code scanning + e4fa3b0 Test site + 474f97d client_subnet_authority non-strict mode +* Mon Jun 13 2022 Jerry Lundström 1.2.0-1 +- Release 1.2.0 + * This release fixes handling of base64'ed strings in DSC XML and will + now decode them back into text when reading, the selected output will + then handling any quoting or escaping needed. + * Added a new option for Prometheus output to set a prefix for metrics so + that they can be easily separated from other metrics if needed, see + `man dsc-datatool-output prometheus`. 
+ * Commits: + 5f9f972 Fix COPR + 3d72019 Prometheus metric prefix + bdc992e base64 labels +* Tue Apr 05 2022 Jerry Lundström 1.1.0-1 +- Release 1.1.0 + * This releases adds support for Prometheus' node_exporter using it's + Textfile Collector (see `man dsc-datatool-output prometheus`) and + fixes a bug in InfluxDB output when selecting what timestamp to use. + Also updates packages and Grafana test site dashboards. + * Commits: + 4381541 RPM + 19bc153 Typo/clarification + 2a32dd8 Prometheus, InfluxDB, Copyright + dd5323e debhelper + 7352c1e Bye Travis + 32b3bbe Grafana dashboards + 304ab76 Info +* Wed Oct 21 2020 Jerry Lundström 1.0.2-1 +- Release 1.0.2 + * This release fixed a bug in DAT file parsing that was discovered when + adding coverage tests. + * Commits: + 45b1aa3 Coverage + 7aedc1a Coverage + 64957b9 DAT, Coverage + 370fb86 Coverage + 891cb7c Coverage + 9374faa Coverage +* Fri Aug 07 2020 Jerry Lundström 1.0.1-1 +- Release 1.0.1 + * This release adds compatibility with Python v3.5 which allows + packages to be built for Ubuntu Xenial. + * Commits: + bc0be5b python 3.5 +* Mon Aug 03 2020 Jerry Lundström 1.0.0-2 +- Release 1.0.0 + * This release brings a complete rewrite of the tool, from Perl to + Python. This rewrite was made possible thanks to funding from EURid, + and will help with maintainability and packaging. + * Core design and command line syntax is kept the same but as the + libraries the generators use have been changed additional command line + options must be used. + - client_subnet_authority (generator) + This generator now uses IANA's IP address space registry CSVs to + look up the network authority, therefor it needs either to fetch + the CSV files or be given them on command line. + See `man dsc-datatool-generator client_subnet_authority` for more + information. + - client_subnet_country (generator) + This generator now uses MaxMind databases to look up country based + on subnet. + See `man dsc-datatool generator client_subnet_country` for more + information and setup guide of the MaxMind databases. + * Commits: + 589ea8b Badges + c32038b nonstrict + 0ea3e32 LGTM + cff2e1c COPR + 02c31b0 COPR + e8332fd COPR + 6d9f71c Input, YAML + 93ba755 EPEL 8 packages + 3e2df6f Authority + f5d023f Debian packaging + 1a59f09 Documentation + 85cb1e1 restructure + decd3f6 man-pages, URLs + f264854 man-pages + d73c319 man-pages + f5ca007 man-pages + 7bfaf53 Fedora dependencies + 3452b48 RPM dependencies + 7a4edbc Test + ed43406 client_subnet_authority + 62c7d9d Server, node + e0c6419 RPM package + 938f154 Rewrite + 5400464 README + 968ccb1 COPR, spec + 14d987f RPM requires + ee10efb Package + a25870f Funding +* Wed Apr 15 2020 Jerry Lundström 1.0.0-1 +- Prepare for v1.0.0 +* Fri May 31 2019 Jerry Lundström 0.05-1 +- Release 0.05 + * Fixed issue with empty values in InfluxDB output, they are now + quoted as an empty string. + * Commits: + 9917c4e InfluxDB quote keys/values +* Mon Jan 21 2019 Jerry Lundström 0.04-1 +- Release 0.04 + * Package dependency fix and update of example Grafana dashboards. + * Commits: + d3babc9 Copyright years + 9955c88 Travis Perl versions + 134a8b3 Debian dependency + 2d2114d Fix #23: Rework Grafana dashboards to hopefully show more + correct numbers and also split them up. + 9bca0d3 Prepare SPEC for OSB/COPR +* Fri Dec 16 2016 Jerry Lundström 0.03-1 +- Release 0.03 + * Support processing of 25 of the 37 DAT files that the Extractor + can produce, the others can not be converted into time series data + since they lack timestamps. 
Processing of XML is the recommended + approach to secure all information. + * Commits: + 72e829c Implement processing of DAT directories + 45294d0 RPM spec + 4e8ff69 Fix 5.24 forbidden keys usage + 7589ad2 Use perl 5.24 also + cfac110 Fix #16: Handle directories in --xml and warn that --dat is + not implemented yet +* Thu Dec 15 2016 Jerry Lundström 0.02-1 +- Initial package diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..3dcdd2e --- /dev/null +++ b/setup.cfg @@ -0,0 +1,7 @@ +[tool:pytest] +testpaths = tests + +[coverage:run] +branch = True +source = + dsc_datatool diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..7970564 --- /dev/null +++ b/setup.py @@ -0,0 +1,28 @@ +from setuptools import find_packages, setup + +setup( + name='dsc_datatool', + version='1.4.2', + packages=find_packages(), + include_package_data=True, + zip_safe=False, + python_requires='>=3.5.1', + install_requires=[ + 'maxminddb>=1.2.0', + 'PyYAML>=3.11', + ], + extras_require={ + 'dev': [ + 'pytest>=4', + 'coverage', + 'watchdog', + ], + }, + entry_points={ + 'console_scripts': [ + 'dsc-datatool = dsc_datatool:main', + ], + }, + scripts=[ + ], +) diff --git a/sonar-project.properties.local b/sonar-project.properties.local new file mode 100644 index 0000000..eaff419 --- /dev/null +++ b/sonar-project.properties.local @@ -0,0 +1 @@ +sonar.sources=dsc_datatool diff --git a/tests/1458044657.xml b/tests/1458044657.xml new file mode 100644 index 0000000..e2a2a0f --- /dev/null +++ b/tests/1458044657.xml @@ -0,0 +1,336 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/1563520620.dscdata.xml b/tests/1563520620.dscdata.xml new file mode 100644 index 0000000..0069368 --- /dev/null +++ b/tests/1563520620.dscdata.xml @@ -0,0 +1,507 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/20190719/certain_qnames_vs_qtype.dat b/tests/20190719/certain_qnames_vs_qtype.dat new file mode 100644 index 
0000000..ab38436 --- /dev/null +++ b/tests/20190719/certain_qnames_vs_qtype.dat @@ -0,0 +1,2 @@ +1563520560 else else:48:1:128:28:71:15:9:12:78:2:7:6:4 +#MD5 4bc33435cfa929c19592fa5bbfd4bb69 diff --git a/tests/20190719/chaos_types_and_names.dat b/tests/20190719/chaos_types_and_names.dat new file mode 100644 index 0000000..bcc589c --- /dev/null +++ b/tests/20190719/chaos_types_and_names.dat @@ -0,0 +1,2 @@ +1563520560 +#MD5 d0a25752e8594d8c013f127947cbc5f9 diff --git a/tests/20190719/client_addr_vs_rcode_accum.dat b/tests/20190719/client_addr_vs_rcode_accum.dat new file mode 100644 index 0000000..68b5268 --- /dev/null +++ b/tests/20190719/client_addr_vs_rcode_accum.dat @@ -0,0 +1,8 @@ +-:SKIPPED:- 0 131 +2620:ff:c000::198 0 50 +2620:ff:c000::198 3 2 +-:SKIPPED_SUM:- 0 131 +64.191.0.198 2 6 +64.191.0.198 0 78 +64.191.0.198 3 3 +#MD5 69367713c72c2e8901daffc14632c2d9 diff --git a/tests/20190719/client_port_range.dat b/tests/20190719/client_port_range.dat new file mode 100644 index 0000000..6abf275 --- /dev/null +++ b/tests/20190719/client_port_range.dat @@ -0,0 +1,2 @@ +1563520560 58368-59391 8 1024-2047 3 45056-46079 7 31744-32767 5 15360-16383 7 30720-31743 4 9216-10239 3 5120-6143 4 25600-26623 4 41984-43007 9 43008-44031 6 62464-63487 4 22528-23551 4 10240-11263 4 39936-40959 5 34816-35839 7 16384-17407 5 32768-33791 6 37888-38911 7 53248-54271 3 3072-4095 7 47104-48127 5 21504-22527 2 49152-50175 6 6144-7167 2 33792-34815 12 61440-62463 8 51200-52223 2 27648-28671 3 4096-5119 3 46080-47103 7 56320-57343 11 63488-64511 8 50176-51199 4 2048-3071 5 8192-9215 3 57344-58367 3 14336-15359 6 52224-53247 6 55296-56319 10 12288-13311 5 24576-25599 11 60416-61439 1 26624-27647 4 17408-18431 4 7168-8191 4 48128-49151 4 38912-39935 4 23552-24575 4 28672-29695 5 20480-21503 3 40960-41983 7 54272-55295 5 13312-14335 6 11264-12287 4 19456-20479 8 36864-37887 6 29696-30719 4 35840-36863 12 18432-19455 4 44032-45055 3 59392-60415 7 +#MD5 21f1a5b2da887ab9b0c608f34992d7de diff --git a/tests/20190719/client_subnet2_accum.dat b/tests/20190719/client_subnet2_accum.dat new file mode 100644 index 0000000..97213e7 --- /dev/null +++ b/tests/20190719/client_subnet2_accum.dat @@ -0,0 +1,5 @@ +2620:ff:c000::198 rfc1918-ptr 1 +2620:ff:c000::198 ok 5 +64.191.0.0 ok 180 +64.191.0.0 non-auth-tld 18 +#MD5 dddb87e8a8963e86cb8a503630d4357f diff --git a/tests/20190719/client_subnet2_count.dat b/tests/20190719/client_subnet2_count.dat new file mode 100644 index 0000000..05c71b6 --- /dev/null +++ b/tests/20190719/client_subnet2_count.dat @@ -0,0 +1,2 @@ +1563520560 rfc1918-ptr 1 ok 2 non-auth-tld 1 +#MD5 e883a01b1b022817133db1995aa2c10b diff --git a/tests/20190719/client_subnet2_trace.dat b/tests/20190719/client_subnet2_trace.dat new file mode 100644 index 0000000..0f347ec --- /dev/null +++ b/tests/20190719/client_subnet2_trace.dat @@ -0,0 +1,2 @@ +1563520560 non-auth-tld 18 rfc1918-ptr 1 ok 185 +#MD5 b4a4ddc379bfe93124277fc6e2c330cb diff --git a/tests/20190719/client_subnet_accum.dat b/tests/20190719/client_subnet_accum.dat new file mode 100644 index 0000000..bc4b41e --- /dev/null +++ b/tests/20190719/client_subnet_accum.dat @@ -0,0 +1,3 @@ +2620:ff:c000:: 5 +64.191.0.0 198 +#MD5 7b9f4f6c1fad290e8d9f0f6ef2905b0f diff --git a/tests/20190719/client_subnet_count.dat b/tests/20190719/client_subnet_count.dat new file mode 100644 index 0000000..bfe6b3a --- /dev/null +++ b/tests/20190719/client_subnet_count.dat @@ -0,0 +1,2 @@ +1563520560 2 +#MD5 387ea7cf05e936539c3ec40a63d29dbf diff --git a/tests/20190719/direction_vs_ipproto.dat 
b/tests/20190719/direction_vs_ipproto.dat new file mode 100644 index 0000000..a3fcd30 --- /dev/null +++ b/tests/20190719/direction_vs_ipproto.dat @@ -0,0 +1,2 @@ +1563520560 recv udp:219 sent udp:289 else udp:267 +#MD5 425e11a1b3d2f17c5e87e54ff5ecbeb3 diff --git a/tests/20190719/dnssec_qtype.dat b/tests/20190719/dnssec_qtype.dat new file mode 100644 index 0000000..2749e86 --- /dev/null +++ b/tests/20190719/dnssec_qtype.dat @@ -0,0 +1,2 @@ +1563520560 else 315 48 25 43 5 +#MD5 2c7018cb7254ee62b7097fcb6239a57d diff --git a/tests/20190719/do_bit.dat b/tests/20190719/do_bit.dat new file mode 100644 index 0000000..b975c7e --- /dev/null +++ b/tests/20190719/do_bit.dat @@ -0,0 +1,2 @@ +1563520560 set 134 clr 211 +#MD5 7bb564b9b0ebd370f10339a349e82e42 diff --git a/tests/20190719/edns_bufsiz.dat b/tests/20190719/edns_bufsiz.dat new file mode 100644 index 0000000..4874477 --- /dev/null +++ b/tests/20190719/edns_bufsiz.dat @@ -0,0 +1,2 @@ +1563520560 3584-4095 12 4096-4607 73 2048-2559 1 512-1023 26 None 204 8192-8703 1 1536-2047 6 1024-1535 22 +#MD5 e70ac75f31b3de1176f1975ca1ab03d7 diff --git a/tests/20190719/edns_version.dat b/tests/20190719/edns_version.dat new file mode 100644 index 0000000..8fd36ec --- /dev/null +++ b/tests/20190719/edns_version.dat @@ -0,0 +1,2 @@ +1563520560 0 141 none 204 +#MD5 4c2f5b7124dea738c503bb5dccb232e2 diff --git a/tests/20190719/idn_qname.dat b/tests/20190719/idn_qname.dat new file mode 100644 index 0000000..9d711f3 --- /dev/null +++ b/tests/20190719/idn_qname.dat @@ -0,0 +1,2 @@ +1563520560 normal 345 +#MD5 df6cf377b07e083c57953eda1e9100ae diff --git a/tests/20190719/idn_vs_tld.dat b/tests/20190719/idn_vs_tld.dat new file mode 100644 index 0000000..e69de29 diff --git a/tests/20190719/ipv6_rsn_abusers_accum.dat b/tests/20190719/ipv6_rsn_abusers_accum.dat new file mode 100644 index 0000000..e69de29 diff --git a/tests/20190719/ipv6_rsn_abusers_count.dat b/tests/20190719/ipv6_rsn_abusers_count.dat new file mode 100644 index 0000000..b271a98 --- /dev/null +++ b/tests/20190719/ipv6_rsn_abusers_count.dat @@ -0,0 +1,2 @@ +1563520560 0 +#MD5 9b8bcef0308351ed7d24cbed894c2217 diff --git a/tests/20190719/opcode.dat b/tests/20190719/opcode.dat new file mode 100644 index 0000000..76c5e4f --- /dev/null +++ b/tests/20190719/opcode.dat @@ -0,0 +1,2 @@ +1563520560 0 345 +#MD5 3a960c4f0c7ba6d9062f78477f11f1e8 diff --git a/tests/20190719/pcap_stats.dat b/tests/20190719/pcap_stats.dat new file mode 100644 index 0000000..27c6b34 --- /dev/null +++ b/tests/20190719/pcap_stats.dat @@ -0,0 +1,2 @@ +1563520560 filter_received eth0:5625 kernel_dropped eth0:731 pkts_captured eth0:4894 +#MD5 6dbbefe12290b8e7f589f86ec9c30556 diff --git a/tests/20190719/qtype.dat b/tests/20190719/qtype.dat new file mode 100644 index 0000000..334b71e --- /dev/null +++ b/tests/20190719/qtype.dat @@ -0,0 +1,2 @@ +1563520560 12 78 2 7 6 4 15 9 else 48 1 128 28 71 +#MD5 afc958bfa6de0678f87f62fe9c9c134f diff --git a/tests/20190719/qtype_vs_qnamelen.dat b/tests/20190719/qtype_vs_qnamelen.dat new file mode 100644 index 0000000..1203daa --- /dev/null +++ b/tests/20190719/qtype_vs_qnamelen.dat @@ -0,0 +1,46 @@ +2 23 1 +2 9 1 +2 21 2 +2 12 3 +43 26 2 +43 17 1 +43 15 2 +12 28 1 +12 72 2 +12 23 35 +12 26 16 +12 25 21 +12 27 1 +12 24 2 +6 9 1 +6 21 2 +6 17 1 +1 18 1 +1 46 1 +1 12 1 +1 20 84 +1 23 1 +1 16 8 +1 24 2 +1 15 28 +1 22 1 +1 28 1 +28 16 7 +28 24 2 +28 15 12 +28 18 1 +28 35 1 +28 20 45 +28 26 1 +28 23 1 +28 59 1 +48 12 2 +48 13 10 +48 23 11 +48 17 2 +16 74 9 +16 15 1 +16 21 8 +15 26 1 +15 13 8 +#MD5 
1cf280838630abd4d8cc5fe322bbd8ff diff --git a/tests/20190719/qtype_vs_tld.dat b/tests/20190719/qtype_vs_tld.dat new file mode 100644 index 0000000..2482fab --- /dev/null +++ b/tests/20190719/qtype_vs_tld.dat @@ -0,0 +1,13 @@ +arpa 2 3 +arpa 12 78 +arpa 6 2 +... 1 128 +org 2 1 +asia 15 8 +net 28 71 +net 1 128 +net 15 1 +net 6 2 +net 2 2 +com 2 1 +#MD5 bebe5fd28997da768999fb36bd299c05 diff --git a/tests/20190719/rcode.dat b/tests/20190719/rcode.dat new file mode 100644 index 0000000..4ae351d --- /dev/null +++ b/tests/20190719/rcode.dat @@ -0,0 +1,2 @@ +1563520560 0 416 3 8 2 6 +#MD5 7ae0768474205df1d88a4fae8d03b56d diff --git a/tests/20190719/rcode_vs_replylen.dat b/tests/20190719/rcode_vs_replylen.dat new file mode 100644 index 0000000..2cc577f --- /dev/null +++ b/tests/20190719/rcode_vs_replylen.dat @@ -0,0 +1,88 @@ +2 31 6 +0 293 3 +0 112 1 +0 232 31 +0 396 2 +0 89 1 +0 302 1 +0 94 1 +0 298 3 +0 1118 10 +0 85 1 +0 563 6 +0 132 2 +0 245 3 +0 233 7 +0 389 1 +0 174 1 +0 88 4 +0 241 4 +0 253 2 +0 412 1 +0 42 4 +0 104 4 +0 75 40 +0 66 45 +0 54 84 +0 155 1 +0 131 2 +0 151 1 +0 229 1 +0 328 1 +0 362 1 +0 1100 2 +0 98 1 +0 722 4 +0 368 7 +0 111 1 +0 237 1 +0 235 1 +0 1141 1 +0 62 3 +0 99 2 +0 84 2 +0 102 2 +0 345 1 +0 61 2 +0 239 2 +0 261 2 +0 259 1 +0 309 5 +0 244 17 +0 80 1 +0 69 1 +0 1472 6 +0 587 3 +0 77 1 +0 125 1 +0 127 1 +0 110 2 +0 324 1 +0 591 1 +0 96 2 +0 271 9 +0 50 2 +0 296 8 +0 202 1 +0 299 1 +0 268 6 +0 1146 1 +0 272 11 +0 41 2 +0 57 1 +0 123 2 +0 49 7 +0 294 1 +0 478 3 +0 326 1 +0 82 5 +0 379 4 +0 64 1 +0 87 2 +0 574 2 +3 130 1 +3 101 2 +3 92 2 +3 795 1 +3 694 2 +#MD5 f7092a6510cfbece8835bc3b41f8e20d diff --git a/tests/20190719/rd_bit.dat b/tests/20190719/rd_bit.dat new file mode 100644 index 0000000..63c2d1e --- /dev/null +++ b/tests/20190719/rd_bit.dat @@ -0,0 +1,2 @@ +1563520560 clr 159 set 186 +#MD5 2dd58158fe3ebb4176faabd9199d06fe diff --git a/tests/20190719/transport_vs_qtype.dat b/tests/20190719/transport_vs_qtype.dat new file mode 100644 index 0000000..5a3ded9 --- /dev/null +++ b/tests/20190719/transport_vs_qtype.dat @@ -0,0 +1,2 @@ +1563520560 udp else:48:28:71:1:128:15:9:12:78:2:7:6:4 +#MD5 9712453c813073917ee807bfb91dc07a diff --git a/tests/broken.xml b/tests/broken.xml new file mode 100644 index 0000000..45eaf59 --- /dev/null +++ b/tests/broken.xml @@ -0,0 +1,3 @@ + + + /dev/null + +! 
dsc-datatool \ + -vvv \ + -s test-server \ + -n test-node \ + --output ";InfluxDB;dml=1;database=dsc" \ + --transform ";Labler;*;yaml=$base/labler.yaml" \ + --transform ";ReRanger;rcode_vs_replylen;range=/64;pad_to=5" \ + --transform ";ReRanger;qtype_vs_qnamelen;range=/16;pad_to=3" \ + --transform ";ReRanger;client_port_range;key=low;range=/2048;pad_to=5" \ + --transform ";ReRanger;edns_bufsiz,priming_queries;key=low;range=/512;pad_to=5;allow_invalid_keys=1" \ + --transform ";ReRanger;priming_responses;key=low;range=/128;pad_to=4" \ + --transform ";NetRemap;client_subnet,client_subnet2,client_addr_vs_rcode,ipv6_rsn_abusers;net=16" \ + --generator ";client_subnet_authority;csv=$base/ipv4-address-space.csv;csv=$base/ipv6-unicast-address-assignments.csv" \ + --generator ";client_subnet_country;path=$HOME/GeoIP" \ + --xml "$base" >/dev/null + +dsc-datatool \ + -vvv \ + -s test-server \ + -n test-node \ + --output ";InfluxDB;dml=1;database=dsc" \ + --transform ";ReRanger;rcode_vs_replylen;range=/64;pad_to=5" \ + --transform ";ReRanger;qtype_vs_qnamelen;range=/16;pad_to=3" \ + --transform ";ReRanger;client_port_range;key=low;range=/2048;pad_to=5" \ + --transform ";ReRanger;edns_bufsiz,priming_queries;key=low;range=/512;pad_to=5;allow_invalid_keys=1" \ + --transform ";ReRanger;priming_responses;key=low;range=/128;pad_to=4" \ + --transform ";NetRemap;client_subnet,client_subnet2,client_addr_vs_rcode,ipv6_rsn_abusers;net=16" \ + --generator ";client_subnet_authority;csv=$base/ipv4-address-space.csv;csv=$base/ipv6-unicast-address-assignments.csv" \ + --generator ";client_subnet_country;path=$HOME/GeoIP" \ + --dat "$base/20190719" >/dev/null + +dsc-datatool -vvvvvvv --list >/dev/null +! dsc-datatool -s test -n test --output ";InfluxDB;test=a;test=b;test=c" >/dev/null +! dsc-datatool -s test -n test --generator does_not_exist >/dev/null +! dsc-datatool -s test -n test --generator does_not_exist,really_does_not_exist >/dev/null +! dsc-datatool -s test -n test --transform ";does_not_exist;*" >/dev/null +! dsc-datatool -s test -n test --transform ";ReRanger;a,a,a;range=/8" >/dev/null +! dsc-datatool -s test -n test --output does_not_exists >/dev/null +! dsc-datatool -s test -n test --dataset a --dataset b --dataset c,d,e >/dev/null +! 
dsc-datatool -s test -n test --dat "$base/coverage.sh" >/dev/null diff --git a/tests/dsc-datatool b/tests/dsc-datatool new file mode 100755 index 0000000..7ad998c --- /dev/null +++ b/tests/dsc-datatool @@ -0,0 +1,6 @@ +#!/bin/sh -e + +base=`dirname $0` +export PYTHONPATH="$base/..:$PYTHONPATH" + +exec python3-coverage run -a "$base/dsc-datatool.py" "$@" diff --git a/tests/dsc-datatool.py b/tests/dsc-datatool.py new file mode 100644 index 0000000..bd00474 --- /dev/null +++ b/tests/dsc-datatool.py @@ -0,0 +1,6 @@ +#!/usr/bin/python3 +import sys +from dsc_datatool import main + +if __name__ == "__main__": + sys.exit(main()) diff --git a/tests/ipv4-address-space.csv b/tests/ipv4-address-space.csv new file mode 100644 index 0000000..170db8d --- /dev/null +++ b/tests/ipv4-address-space.csv @@ -0,0 +1,374 @@ +Prefix,Designation,Date,WHOIS,RDAP,Status [1],Note +000/8,IANA - Local Identification,1981-09,,,RESERVED,[2][3] +001/8,APNIC,2010-01,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +002/8,RIPE NCC,2009-09,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +003/8,Administered by ARIN,1994-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +004/8,Administered by ARIN,1992-12,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +005/8,RIPE NCC,2010-11,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +006/8,Army Information Systems Center,1994-02,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +007/8,Administered by ARIN,1995-04,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +008/8,Administered by ARIN,1992-12,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +009/8,Administered by ARIN,1992-08,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +010/8,IANA - Private Use,1995-06,,,RESERVED,[4] +011/8,DoD Intel Information Systems,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +012/8,AT&T Bell Laboratories,1995-06,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +013/8,Administered by ARIN,1991-09,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +014/8,APNIC,2010-04,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED,[5] +015/8,Administered by ARIN,1994-07,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +016/8,Administered by ARIN,1994-11,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +017/8,Apple Computer Inc.,1992-07,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +018/8,Administered by ARIN,1994-01,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +019/8,Ford Motor Company,1995-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +020/8,Administered by ARIN,1994-10,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +021/8,DDN-RVN,1991-07,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +022/8,Defense Information Systems Agency,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +023/8,ARIN,2010-11,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, 
+024/8,ARIN,2001-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +025/8,Administered by RIPE NCC,1995-01,whois.ripe.net,https://rdap.db.ripe.net/,LEGACY, +026/8,Defense Information Systems Agency,1995-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +027/8,APNIC,2010-01,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +028/8,DSI-North,1992-07,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +029/8,Defense Information Systems Agency,1991-07,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +030/8,Defense Information Systems Agency,1991-07,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +031/8,RIPE NCC,2010-05,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +032/8,Administered by ARIN,1994-06,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +033/8,DLA Systems Automation Center,1991-01,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +034/8,Administered by ARIN,1993-03,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +035/8,Administered by ARIN,1994-04,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +036/8,APNIC,2010-10,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +037/8,RIPE NCC,2010-11,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +038/8,"PSINet, Inc.",1994-09,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +039/8,APNIC,2011-01,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +040/8,Administered by ARIN,1994-06,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +041/8,AFRINIC,2005-04,whois.afrinic.net,"https://rdap.afrinic.net/rdap/ +http://rdap.afrinic.net/rdap/",ALLOCATED, +042/8,APNIC,2010-10,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +043/8,Administered by APNIC,1991-01,whois.apnic.net,https://rdap.apnic.net/,LEGACY, +044/8,Administered by ARIN,1992-07,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +045/8,Administered by ARIN,1995-01,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +046/8,RIPE NCC,2009-09,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +047/8,Administered by ARIN,1991-01,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +048/8,Administered by ARIN,1995-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +049/8,APNIC,2010-08,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +050/8,ARIN,2010-02,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +051/8,Administered by RIPE NCC,1994-08,whois.ripe.net,https://rdap.db.ripe.net/,LEGACY, +052/8,Administered by ARIN,1991-12,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +053/8,Daimler AG,1993-10,whois.ripe.net,https://rdap.db.ripe.net/,LEGACY, +054/8,Administered by ARIN,1992-03,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +055/8,DoD Network Information Center,1995-04,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +056/8,Administered by ARIN,1994-06,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +057/8,Administered by RIPE 
NCC,1995-05,whois.ripe.net,https://rdap.db.ripe.net/,LEGACY, +058/8,APNIC,2004-04,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +059/8,APNIC,2004-04,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +060/8,APNIC,2003-04,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +061/8,APNIC,1997-04,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +062/8,RIPE NCC,1997-04,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +063/8,ARIN,1997-04,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +064/8,ARIN,1999-07,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +065/8,ARIN,2000-07,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +066/8,ARIN,2000-07,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +067/8,ARIN,2001-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +068/8,ARIN,2001-06,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +069/8,ARIN,2002-08,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +070/8,ARIN,2004-01,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +071/8,ARIN,2004-08,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +072/8,ARIN,2004-08,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +073/8,ARIN,2005-03,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +074/8,ARIN,2005-06,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +075/8,ARIN,2005-06,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +076/8,ARIN,2005-06,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +077/8,RIPE NCC,2006-08,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +078/8,RIPE NCC,2006-08,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +079/8,RIPE NCC,2006-08,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +080/8,RIPE NCC,2001-04,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +081/8,RIPE NCC,2001-04,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +082/8,RIPE NCC,2002-11,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +083/8,RIPE NCC,2003-11,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +084/8,RIPE NCC,2003-11,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +085/8,RIPE NCC,2004-04,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +086/8,RIPE NCC,2004-04,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +087/8,RIPE NCC,2004-04,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +088/8,RIPE NCC,2004-04,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +089/8,RIPE NCC,2005-06,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +090/8,RIPE NCC,2005-06,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +091/8,RIPE NCC,2005-06,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +092/8,RIPE NCC,2007-03,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +093/8,RIPE NCC,2007-03,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +094/8,RIPE NCC,2007-07,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +095/8,RIPE NCC,2007-07,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +096/8,ARIN,2006-10,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, 
+097/8,ARIN,2006-10,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +098/8,ARIN,2006-10,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +099/8,ARIN,2006-10,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +100/8,ARIN,2010-11,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED,[6] +101/8,APNIC,2010-08,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +102/8,AFRINIC,2011-02,whois.afrinic.net,"https://rdap.afrinic.net/rdap/ +http://rdap.afrinic.net/rdap/",ALLOCATED, +103/8,APNIC,2011-02,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +104/8,ARIN,2011-02,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +105/8,AFRINIC,2010-11,whois.afrinic.net,"https://rdap.afrinic.net/rdap/ +http://rdap.afrinic.net/rdap/",ALLOCATED, +106/8,APNIC,2011-01,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +107/8,ARIN,2010-02,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +108/8,ARIN,2008-12,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +109/8,RIPE NCC,2009-01,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +110/8,APNIC,2008-11,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +111/8,APNIC,2008-11,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +112/8,APNIC,2008-05,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +113/8,APNIC,2008-05,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +114/8,APNIC,2007-10,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +115/8,APNIC,2007-10,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +116/8,APNIC,2007-01,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +117/8,APNIC,2007-01,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +118/8,APNIC,2007-01,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +119/8,APNIC,2007-01,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +120/8,APNIC,2007-01,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +121/8,APNIC,2006-01,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +122/8,APNIC,2006-01,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +123/8,APNIC,2006-01,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +124/8,APNIC,2005-01,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +125/8,APNIC,2005-01,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +126/8,APNIC,2005-01,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +127/8,IANA - Loopback,1981-09,,,RESERVED,[7] +128/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +129/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +130/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +131/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +132/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +133/8,Administered by APNIC,1997-03,whois.apnic.net,https://rdap.apnic.net/,LEGACY, +134/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +135/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +136/8,Administered by 
ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +137/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +138/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +139/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +140/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +141/8,Administered by RIPE NCC,1993-05,whois.ripe.net,https://rdap.db.ripe.net/,LEGACY, +142/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +143/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +144/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +145/8,Administered by RIPE NCC,1993-05,whois.ripe.net,https://rdap.db.ripe.net/,LEGACY, +146/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +147/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +148/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +149/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +150/8,Administered by APNIC,1993-05,whois.apnic.net,https://rdap.apnic.net/,LEGACY, +151/8,Administered by RIPE NCC,1993-05,whois.ripe.net,https://rdap.db.ripe.net/,LEGACY, +152/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +153/8,Administered by APNIC,1993-05,whois.apnic.net,https://rdap.apnic.net/,LEGACY, +154/8,Administered by AFRINIC,1993-05,whois.afrinic.net,"https://rdap.afrinic.net/rdap/ +http://rdap.afrinic.net/rdap/",LEGACY, +155/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +156/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +157/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +158/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +159/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +160/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +161/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +162/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +163/8,Administered by APNIC,1993-05,whois.apnic.net,https://rdap.apnic.net/,LEGACY, +164/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +165/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +166/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry 
+http://rdap.arin.net/registry",LEGACY, +167/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +168/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +169/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY,[8] +170/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +171/8,Administered by APNIC,1993-05,whois.apnic.net,https://rdap.apnic.net/,LEGACY, +172/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY,[9] +173/8,ARIN,2008-02,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +174/8,ARIN,2008-02,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +175/8,APNIC,2009-08,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +176/8,RIPE NCC,2010-05,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +177/8,LACNIC,2010-06,whois.lacnic.net,https://rdap.lacnic.net/rdap/,ALLOCATED, +178/8,RIPE NCC,2009-01,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +179/8,LACNIC,2011-02,whois.lacnic.net,https://rdap.lacnic.net/rdap/,ALLOCATED, +180/8,APNIC,2009-04,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +181/8,LACNIC,2010-06,whois.lacnic.net,https://rdap.lacnic.net/rdap/,ALLOCATED, +182/8,APNIC,2009-08,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +183/8,APNIC,2009-04,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +184/8,ARIN,2008-12,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +185/8,RIPE NCC,2011-02,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +186/8,LACNIC,2007-09,whois.lacnic.net,https://rdap.lacnic.net/rdap/,ALLOCATED, +187/8,LACNIC,2007-09,whois.lacnic.net,https://rdap.lacnic.net/rdap/,ALLOCATED, +188/8,Administered by RIPE NCC,1993-05,whois.ripe.net,https://rdap.db.ripe.net/,LEGACY, +189/8,LACNIC,1995-06,whois.lacnic.net,https://rdap.lacnic.net/rdap/,ALLOCATED, +190/8,LACNIC,1995-06,whois.lacnic.net,https://rdap.lacnic.net/rdap/,ALLOCATED, +191/8,Administered by LACNIC,1993-05,whois.lacnic.net,https://rdap.lacnic.net/rdap/,LEGACY, +192/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY,[10][11] +193/8,RIPE NCC,1993-05,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +194/8,RIPE NCC,1993-05,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +195/8,RIPE NCC,1993-05,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +196/8,Administered by AFRINIC,1993-05,whois.afrinic.net,"https://rdap.afrinic.net/rdap/ +http://rdap.afrinic.net/rdap/",LEGACY, +197/8,AFRINIC,2008-10,whois.afrinic.net,"https://rdap.afrinic.net/rdap/ +http://rdap.afrinic.net/rdap/",ALLOCATED, +198/8,Administered by ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY,[12] +199/8,ARIN,1993-05,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +200/8,LACNIC,2002-11,whois.lacnic.net,https://rdap.lacnic.net/rdap/,ALLOCATED, +201/8,LACNIC,2003-04,whois.lacnic.net,https://rdap.lacnic.net/rdap/,ALLOCATED, +202/8,APNIC,1993-05,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +203/8,APNIC,1993-05,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED,[13] +204/8,ARIN,1994-03,whois.arin.net,"https://rdap.arin.net/registry 
+http://rdap.arin.net/registry",ALLOCATED, +205/8,ARIN,1994-03,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +206/8,ARIN,1995-04,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +207/8,ARIN,1995-11,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +208/8,ARIN,1996-04,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +209/8,ARIN,1996-06,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +210/8,APNIC,1996-06,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +211/8,APNIC,1996-06,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +212/8,RIPE NCC,1997-10,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +213/8,RIPE NCC,1993-10,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +214/8,US-DOD,1998-03,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +215/8,US-DOD,1998-03,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",LEGACY, +216/8,ARIN,1998-04,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +217/8,RIPE NCC,2000-06,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +218/8,APNIC,2000-12,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +219/8,APNIC,2001-09,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +220/8,APNIC,2001-12,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +221/8,APNIC,2002-07,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +222/8,APNIC,2003-02,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +223/8,APNIC,2010-04,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +224/8,Multicast,1981-09,,,RESERVED,[14] +225/8,Multicast,1981-09,,,RESERVED,[14] +226/8,Multicast,1981-09,,,RESERVED,[14] +227/8,Multicast,1981-09,,,RESERVED,[14] +228/8,Multicast,1981-09,,,RESERVED,[14] +229/8,Multicast,1981-09,,,RESERVED,[14] +230/8,Multicast,1981-09,,,RESERVED,[14] +231/8,Multicast,1981-09,,,RESERVED,[14] +232/8,Multicast,1981-09,,,RESERVED,[14] +233/8,Multicast,1981-09,,,RESERVED,[14] +234/8,Multicast,1981-09,,,RESERVED,[14][15] +235/8,Multicast,1981-09,,,RESERVED,[14] +236/8,Multicast,1981-09,,,RESERVED,[14] +237/8,Multicast,1981-09,,,RESERVED,[14] +238/8,Multicast,1981-09,,,RESERVED,[14] +239/8,Multicast,1981-09,,,RESERVED,[14][16] +240/8,Future use,1981-09,,,RESERVED,[17] +241/8,Future use,1981-09,,,RESERVED,[17] +242/8,Future use,1981-09,,,RESERVED,[17] +243/8,Future use,1981-09,,,RESERVED,[17] +244/8,Future use,1981-09,,,RESERVED,[17] +245/8,Future use,1981-09,,,RESERVED,[17] +246/8,Future use,1981-09,,,RESERVED,[17] +247/8,Future use,1981-09,,,RESERVED,[17] +248/8,Future use,1981-09,,,RESERVED,[17] +249/8,Future use,1981-09,,,RESERVED,[17] +250/8,Future use,1981-09,,,RESERVED,[17] +251/8,Future use,1981-09,,,RESERVED,[17] +252/8,Future use,1981-09,,,RESERVED,[17] +253/8,Future use,1981-09,,,RESERVED,[17] +254/8,Future use,1981-09,,,RESERVED,[17] +255/8,Future use,1981-09,,,RESERVED,[17][18] diff --git a/tests/ipv6-unicast-address-assignments.csv b/tests/ipv6-unicast-address-assignments.csv new file mode 100644 index 0000000..23bb1fa --- /dev/null +++ b/tests/ipv6-unicast-address-assignments.csv @@ -0,0 +1,80 @@ +Prefix,Designation,Date,WHOIS,RDAP,Status,Note +2001:0000::/23,IANA,1999-07-01,whois.iana.org,,ALLOCATED,"2001:0000::/23 is reserved for IETF Protocol Assignments [RFC2928]. +2001:0000::/32 is reserved for TEREDO [RFC4380]. 
+2001:1::1/128 is reserved for Port Control Protocol Anycast [RFC7723]. +2001:1::2/128 is reserved for Traversal Using Relays around NAT Anycast [RFC8155]. +2001:1::3/128 is reserved for DNS-SD Service Registration Protocol Anycast [RFC-ietf-dnssd-srp-25]. +2001:2::/48 is reserved for Benchmarking [RFC5180][RFC Errata 1752]. +2001:3::/32 is reserved for AMT [RFC7450]. +2001:4:112::/48 is reserved for AS112-v6 [RFC7535]. +2001:10::/28 is deprecated (previously ORCHID) [RFC4843]. +2001:20::/28 is reserved for ORCHIDv2 [RFC7343]. +2001:30::/28 is reserved for Drone Remote ID Protocol Entity Tags (DETs) [RFC9374]. +For complete registration details, see [IANA registry iana-ipv6-special-registry]." +2001:0200::/23,APNIC,1999-07-01,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +2001:0400::/23,ARIN,1999-07-01,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +2001:0600::/23,RIPE NCC,1999-07-01,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +2001:0800::/22,RIPE NCC,2002-11-02,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED,2001:0800::/23 was allocated on 2002-05-02. The more recent allocation (2002-11-02) incorporates the previous allocation. +2001:0c00::/23,APNIC,2002-05-02,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED,"2001:db8::/32 reserved for Documentation [RFC3849]. +For complete registration details, see [IANA registry iana-ipv6-special-registry]." +2001:0e00::/23,APNIC,2003-01-01,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +2001:1200::/23,LACNIC,2002-11-01,whois.lacnic.net,https://rdap.lacnic.net/rdap/,ALLOCATED, +2001:1400::/22,RIPE NCC,2003-07-01,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED,2001:1400::/23 was allocated on 2003-02-01. The more recent allocation (2003-07-01) incorporates the previous allocation. +2001:1800::/23,ARIN,2003-04-01,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +2001:1a00::/23,RIPE NCC,2004-01-01,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +2001:1c00::/22,RIPE NCC,2004-05-04,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +2001:2000::/19,RIPE NCC,2019-03-12,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED,"2001:2000::/20, 2001:3000::/21, and 2001:3800::/22 were allocated on 2004-05-04. The more recent allocation (2019-03-12) incorporates all these previous allocations." +2001:4000::/23,RIPE NCC,2004-06-11,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +2001:4200::/23,AFRINIC,2004-06-01,whois.afrinic.net,"https://rdap.afrinic.net/rdap/ +http://rdap.afrinic.net/rdap/",ALLOCATED, +2001:4400::/23,APNIC,2004-06-11,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +2001:4600::/23,RIPE NCC,2004-08-17,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +2001:4800::/23,ARIN,2004-08-24,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +2001:4a00::/23,RIPE NCC,2004-10-15,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +2001:4c00::/23,RIPE NCC,2004-12-17,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +2001:5000::/20,RIPE NCC,2004-09-10,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +2001:8000::/19,APNIC,2004-11-30,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +2001:a000::/20,APNIC,2004-11-30,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +2001:b000::/20,APNIC,2006-03-08,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED, +2002:0000::/16,6to4,2001-02-01,,,ALLOCATED,"2002::/16 is reserved for 6to4 [RFC3056]. 
+For complete registration details, see [IANA registry iana-ipv6-special-registry]." +2003:0000::/18,RIPE NCC,2005-01-12,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +2400:0000::/12,APNIC,2006-10-03,whois.apnic.net,https://rdap.apnic.net/,ALLOCATED,"2400:0000::/19 was allocated on 2005-05-20. 2400:2000::/19 was allocated on 2005-07-08. 2400:4000::/21 was +allocated on 2005-08-08. 2404:0000::/23 was allocated on 2006-01-19. The more recent allocation (2006-10-03) +incorporates all these previous allocations." +2600:0000::/12,ARIN,2006-10-03,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED,"2600:0000::/22, 2604:0000::/22, 2608:0000::/22 and 260c:0000::/22 were allocated on 2005-04-19. The more +recent allocation (2006-10-03) incorporates all these previous allocations." +2610:0000::/23,ARIN,2005-11-17,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +2620:0000::/23,ARIN,2006-09-12,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +2630:0000::/12,ARIN,2019-11-06,whois.arin.net,"https://rdap.arin.net/registry +http://rdap.arin.net/registry",ALLOCATED, +2800:0000::/12,LACNIC,2006-10-03,whois.lacnic.net,https://rdap.lacnic.net/rdap/,ALLOCATED,"2800:0000::/23 was allocated on 2005-11-17. The more recent allocation (2006-10-03) incorporates the +previous allocation." +2a00:0000::/12,RIPE NCC,2006-10-03,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED,"2a00:0000::/21 was originally allocated on 2005-04-19. 2a01:0000::/23 was allocated on 2005-07-14. +2a01:0000::/16 (incorporating the 2a01:0000::/23) was allocated on 2005-12-15. The more recent allocation +(2006-10-03) incorporates these previous allocations." +2a10:0000::/12,RIPE NCC,2019-06-05,whois.ripe.net,https://rdap.db.ripe.net/,ALLOCATED, +2c00:0000::/12,AFRINIC,2006-10-03,whois.afrinic.net,"https://rdap.afrinic.net/rdap/ +http://rdap.afrinic.net/rdap/",ALLOCATED, +2d00:0000::/8,IANA,1999-07-01,,,RESERVED, +2e00:0000::/7,IANA,1999-07-01,,,RESERVED, +3000:0000::/4,IANA,1999-07-01,,,RESERVED, +3ffe::/16,IANA,2008-04,,,RESERVED,"3ffe:831f::/32 was used for Teredo in some old but widely distributed networking stacks. This usage is +deprecated in favor of 2001::/32, which was allocated for the purpose in [RFC4380]. +3ffe::/16 and 5f00::/8 were used for the 6bone but were returned. [RFC5156]" +5f00::/16,Segment Routing (SRv6) SIDs,2024-04-23,,,ALLOCATED,"5f00::/16 is reserved for Segment Routing (SRv6) SIDs [RFC-ietf-6man-sids-06]. +For complete registration details, see [IANA registry iana-ipv6-special-registry]." +5f01::/16,IANA,2008-04,,,RESERVED,3ffe::/16 and 5f00::/8 were used for the 6bone but were returned. [RFC5156]. +5f02::/15,IANA,2008-04,,,RESERVED,3ffe::/16 and 5f00::/8 were used for the 6bone but were returned. [RFC5156]. +5f04::/14,IANA,2008-04,,,RESERVED,3ffe::/16 and 5f00::/8 were used for the 6bone but were returned. [RFC5156]. +5f08::/13,IANA,2008-04,,,RESERVED,3ffe::/16 and 5f00::/8 were used for the 6bone but were returned. [RFC5156]. +5f10::/12,IANA,2008-04,,,RESERVED,3ffe::/16 and 5f00::/8 were used for the 6bone but were returned. [RFC5156]. +5f20::/11,IANA,2008-04,,,RESERVED,3ffe::/16 and 5f00::/8 were used for the 6bone but were returned. [RFC5156]. +5f40::/10,IANA,2008-04,,,RESERVED,3ffe::/16 and 5f00::/8 were used for the 6bone but were returned. [RFC5156]. +5f80::/9,IANA,2008-04,,,RESERVED,3ffe::/16 and 5f00::/8 were used for the 6bone but were returned. [RFC5156]. 
diff --git a/tests/labler.yaml b/tests/labler.yaml new file mode 100644 index 0000000..8653b72 --- /dev/null +++ b/tests/labler.yaml @@ -0,0 +1,782 @@ +--- +certain_qnames_vs_qtype: + Qtype: + '1': A + '10': 'NULL' + '100': UINFO + '101': UID + '102': GID + '103': UNSPEC + '104': NID + '105': L32 + '106': L64 + '107': LP + '108': EUI48 + '109': EUI64 + '11': WKS + 110-248: Unassigned + '12': PTR + '13': HINFO + '14': MINFO + '15': MX + '16': TXT + '17': RP + '18': AFSDB + '19': X25 + '2': NS + '20': ISDN + '21': RT + '22': NSAP + '23': NSAP-PTR + '24': SIG + '249': TKEY + '25': KEY + '250': TSIG + '251': IXFR + '252': AXFR + '253': MAILB + '254': MAILA + '255': wildcard + '256': URI + '257': CAA + '258': AVC + '259': DOA + '26': PX + '260': AMTRELAY + 261-32767: Unassigned + '27': GPOS + '28': AAAA + '29': LOC + '3': MD + '30': NXT + '31': EID + '32': NIMLOC + '32768': TA + '32769': DLV + 32770-65279: Unassigned + '33': SRV + '34': ATMA + '35': NAPTR + '36': KX + '37': CERT + '38': A6 + '39': DNAME + '4': MF + '40': SINK + '41': OPT + '42': APL + '43': DS + '44': SSHFP + '45': IPSECKEY + '46': RRSIG + '47': NSEC + '48': DNSKEY + '49': DHCID + '5': CNAME + '50': NSEC3 + '51': NSEC3PARAM + '52': TLSA + '53': SMIMEA + '54': Unassigned + '55': HIP + '56': NINFO + '57': RKEY + '58': TALINK + '59': CDS + '6': SOA + '60': CDNSKEY + '61': OPENPGPKEY + '62': CSYNC + '63': ZONEMD + 64-98: Unassigned + 65280-65534: Private use + '65535': Reserved + '7': MB + '8': MG + '9': MR + '99': SPF +chaos_types_and_names: + Qtype: + '1': A + '10': 'NULL' + '100': UINFO + '101': UID + '102': GID + '103': UNSPEC + '104': NID + '105': L32 + '106': L64 + '107': LP + '108': EUI48 + '109': EUI64 + '11': WKS + 110-248: Unassigned + '12': PTR + '13': HINFO + '14': MINFO + '15': MX + '16': TXT + '17': RP + '18': AFSDB + '19': X25 + '2': NS + '20': ISDN + '21': RT + '22': NSAP + '23': NSAP-PTR + '24': SIG + '249': TKEY + '25': KEY + '250': TSIG + '251': IXFR + '252': AXFR + '253': MAILB + '254': MAILA + '255': wildcard + '256': URI + '257': CAA + '258': AVC + '259': DOA + '26': PX + '260': AMTRELAY + 261-32767: Unassigned + '27': GPOS + '28': AAAA + '29': LOC + '3': MD + '30': NXT + '31': EID + '32': NIMLOC + '32768': TA + '32769': DLV + 32770-65279: Unassigned + '33': SRV + '34': ATMA + '35': NAPTR + '36': KX + '37': CERT + '38': A6 + '39': DNAME + '4': MF + '40': SINK + '41': OPT + '42': APL + '43': DS + '44': SSHFP + '45': IPSECKEY + '46': RRSIG + '47': NSEC + '48': DNSKEY + '49': DHCID + '5': CNAME + '50': NSEC3 + '51': NSEC3PARAM + '52': TLSA + '53': SMIMEA + '54': Unassigned + '55': HIP + '56': NINFO + '57': RKEY + '58': TALINK + '59': CDS + '6': SOA + '60': CDNSKEY + '61': OPENPGPKEY + '62': CSYNC + '63': ZONEMD + 64-98: Unassigned + 65280-65534: Private use + '65535': Reserved + '7': MB + '8': MG + '9': MR + '99': SPF +client_addr_vs_rcode: + Rcode: + '0': NoError + '1': FormErr + '10': NotZone + '11': DSOTYPENI + 12-15: Unassigned + '16': BADSIG + '17': BADKEY + '18': BADTIME + '19': BADMODE + '2': ServFail + '20': BADNAME + '21': BADALG + '22': BADTRUNC + '23': BADCOOKIE + 24-3840: Unassigned + '3': NXDomain + 3841-4095: Reserved for Private Use + '4': NotImp + 4096-65534: Unassigned + '5': Refused + '6': YXDomain + '65535': Reserved, can be allocated by Standards Action + '7': YXRRSet + '8': NXRRSet + '9': NotAuth +dns_ip_version_vs_qtype: + Qtype: + '1': A + '10': 'NULL' + '100': UINFO + '101': UID + '102': GID + '103': UNSPEC + '104': NID + '105': L32 + '106': L64 + '107': LP + '108': EUI48 + '109': EUI64 + 
'11': WKS + 110-248: Unassigned + '12': PTR + '13': HINFO + '14': MINFO + '15': MX + '16': TXT + '17': RP + '18': AFSDB + '19': X25 + '2': NS + '20': ISDN + '21': RT + '22': NSAP + '23': NSAP-PTR + '24': SIG + '249': TKEY + '25': KEY + '250': TSIG + '251': IXFR + '252': AXFR + '253': MAILB + '254': MAILA + '255': wildcard + '256': URI + '257': CAA + '258': AVC + '259': DOA + '26': PX + '260': AMTRELAY + 261-32767: Unassigned + '27': GPOS + '28': AAAA + '29': LOC + '3': MD + '30': NXT + '31': EID + '32': NIMLOC + '32768': TA + '32769': DLV + 32770-65279: Unassigned + '33': SRV + '34': ATMA + '35': NAPTR + '36': KX + '37': CERT + '38': A6 + '39': DNAME + '4': MF + '40': SINK + '41': OPT + '42': APL + '43': DS + '44': SSHFP + '45': IPSECKEY + '46': RRSIG + '47': NSEC + '48': DNSKEY + '49': DHCID + '5': CNAME + '50': NSEC3 + '51': NSEC3PARAM + '52': TLSA + '53': SMIMEA + '54': Unassigned + '55': HIP + '56': NINFO + '57': RKEY + '58': TALINK + '59': CDS + '6': SOA + '60': CDNSKEY + '61': OPENPGPKEY + '62': CSYNC + '63': ZONEMD + 64-98: Unassigned + 65280-65534: Private use + '65535': Reserved + '7': MB + '8': MG + '9': MR + '99': SPF +opcode: + Opcode: + '0': NoError + '1': FormErr + '10': NotZone + '11': DSOTYPENI + 12-15: Unassigned + '16': BADSIG + '17': BADKEY + '18': BADTIME + '19': BADMODE + '2': ServFail + '20': BADNAME + '21': BADALG + '22': BADTRUNC + '23': BADCOOKIE + 24-3840: Unassigned + '3': NXDomain + 3841-4095: Reserved for Private Use + '4': NotImp + 4096-65534: Unassigned + '5': Refused + '6': YXDomain + '65535': Reserved, can be allocated by Standards Action + '7': YXRRSet + '8': NXRRSet + '9': NotAuth +qtype: + Qtype: + '1': A + '10': 'NULL' + '100': UINFO + '101': UID + '102': GID + '103': UNSPEC + '104': NID + '105': L32 + '106': L64 + '107': LP + '108': EUI48 + '109': EUI64 + '11': WKS + 110-248: Unassigned + '12': PTR + '13': HINFO + '14': MINFO + '15': MX + '16': TXT + '17': RP + '18': AFSDB + '19': X25 + '2': NS + '20': ISDN + '21': RT + '22': NSAP + '23': NSAP-PTR + '24': SIG + '249': TKEY + '25': KEY + '250': TSIG + '251': IXFR + '252': AXFR + '253': MAILB + '254': MAILA + '255': wildcard + '256': URI + '257': CAA + '258': AVC + '259': DOA + '26': PX + '260': AMTRELAY + 261-32767: Unassigned + '27': GPOS + '28': AAAA + '29': LOC + '3': MD + '30': NXT + '31': EID + '32': NIMLOC + '32768': TA + '32769': DLV + 32770-65279: Unassigned + '33': SRV + '34': ATMA + '35': NAPTR + '36': KX + '37': CERT + '38': A6 + '39': DNAME + '4': MF + '40': SINK + '41': OPT + '42': APL + '43': DS + '44': SSHFP + '45': IPSECKEY + '46': RRSIG + '47': NSEC + '48': DNSKEY + '49': DHCID + '5': CNAME + '50': NSEC3 + '51': NSEC3PARAM + '52': TLSA + '53': SMIMEA + '54': Unassigned + '55': HIP + '56': NINFO + '57': RKEY + '58': TALINK + '59': CDS + '6': SOA + '60': CDNSKEY + '61': OPENPGPKEY + '62': CSYNC + '63': ZONEMD + 64-98: Unassigned + 65280-65534: Private use + '65535': Reserved + '7': MB + '8': MG + '9': MR + '99': SPF +qtype_vs_qnamelen: + Qtype: + '1': A + '10': 'NULL' + '100': UINFO + '101': UID + '102': GID + '103': UNSPEC + '104': NID + '105': L32 + '106': L64 + '107': LP + '108': EUI48 + '109': EUI64 + '11': WKS + 110-248: Unassigned + '12': PTR + '13': HINFO + '14': MINFO + '15': MX + '16': TXT + '17': RP + '18': AFSDB + '19': X25 + '2': NS + '20': ISDN + '21': RT + '22': NSAP + '23': NSAP-PTR + '24': SIG + '249': TKEY + '25': KEY + '250': TSIG + '251': IXFR + '252': AXFR + '253': MAILB + '254': MAILA + '255': wildcard + '256': URI + '257': CAA + '258': AVC + '259': DOA + '26': PX + 
'260': AMTRELAY + 261-32767: Unassigned + '27': GPOS + '28': AAAA + '29': LOC + '3': MD + '30': NXT + '31': EID + '32': NIMLOC + '32768': TA + '32769': DLV + 32770-65279: Unassigned + '33': SRV + '34': ATMA + '35': NAPTR + '36': KX + '37': CERT + '38': A6 + '39': DNAME + '4': MF + '40': SINK + '41': OPT + '42': APL + '43': DS + '44': SSHFP + '45': IPSECKEY + '46': RRSIG + '47': NSEC + '48': DNSKEY + '49': DHCID + '5': CNAME + '50': NSEC3 + '51': NSEC3PARAM + '52': TLSA + '53': SMIMEA + '54': Unassigned + '55': HIP + '56': NINFO + '57': RKEY + '58': TALINK + '59': CDS + '6': SOA + '60': CDNSKEY + '61': OPENPGPKEY + '62': CSYNC + '63': ZONEMD + 64-98: Unassigned + 65280-65534: Private use + '65535': Reserved + '7': MB + '8': MG + '9': MR + '99': SPF +qtype_vs_tld: + Qtype: + '1': A + '10': 'NULL' + '100': UINFO + '101': UID + '102': GID + '103': UNSPEC + '104': NID + '105': L32 + '106': L64 + '107': LP + '108': EUI48 + '109': EUI64 + '11': WKS + 110-248: Unassigned + '12': PTR + '13': HINFO + '14': MINFO + '15': MX + '16': TXT + '17': RP + '18': AFSDB + '19': X25 + '2': NS + '20': ISDN + '21': RT + '22': NSAP + '23': NSAP-PTR + '24': SIG + '249': TKEY + '25': KEY + '250': TSIG + '251': IXFR + '252': AXFR + '253': MAILB + '254': MAILA + '255': wildcard + '256': URI + '257': CAA + '258': AVC + '259': DOA + '26': PX + '260': AMTRELAY + 261-32767: Unassigned + '27': GPOS + '28': AAAA + '29': LOC + '3': MD + '30': NXT + '31': EID + '32': NIMLOC + '32768': TA + '32769': DLV + 32770-65279: Unassigned + '33': SRV + '34': ATMA + '35': NAPTR + '36': KX + '37': CERT + '38': A6 + '39': DNAME + '4': MF + '40': SINK + '41': OPT + '42': APL + '43': DS + '44': SSHFP + '45': IPSECKEY + '46': RRSIG + '47': NSEC + '48': DNSKEY + '49': DHCID + '5': CNAME + '50': NSEC3 + '51': NSEC3PARAM + '52': TLSA + '53': SMIMEA + '54': Unassigned + '55': HIP + '56': NINFO + '57': RKEY + '58': TALINK + '59': CDS + '6': SOA + '60': CDNSKEY + '61': OPENPGPKEY + '62': CSYNC + '63': ZONEMD + 64-98: Unassigned + 65280-65534: Private use + '65535': Reserved + '7': MB + '8': MG + '9': MR + '99': SPF +rcode: + Rcode: + '0': NoError + '1': FormErr + '10': NotZone + '11': DSOTYPENI + 12-15: Unassigned + '16': BADSIG + '17': BADKEY + '18': BADTIME + '19': BADMODE + '2': ServFail + '20': BADNAME + '21': BADALG + '22': BADTRUNC + '23': BADCOOKIE + 24-3840: Unassigned + '3': NXDomain + 3841-4095: Reserved for Private Use + '4': NotImp + 4096-65534: Unassigned + '5': Refused + '6': YXDomain + '65535': Reserved, can be allocated by Standards Action + '7': YXRRSet + '8': NXRRSet + '9': NotAuth +rcode_vs_replylen: + Rcode: + '0': NoError + '1': FormErr + '10': NotZone + '11': DSOTYPENI + 12-15: Unassigned + '16': BADSIG + '17': BADKEY + '18': BADTIME + '19': BADMODE + '2': ServFail + '20': BADNAME + '21': BADALG + '22': BADTRUNC + '23': BADCOOKIE + 24-3840: Unassigned + '3': NXDomain + 3841-4095: Reserved for Private Use + '4': NotImp + 4096-65534: Unassigned + '5': Refused + '6': YXDomain + '65535': Reserved, can be allocated by Standards Action + '7': YXRRSet + '8': NXRRSet + '9': NotAuth +transport_vs_qtype: + Qtype: + '1': A + '10': 'NULL' + '100': UINFO + '101': UID + '102': GID + '103': UNSPEC + '104': NID + '105': L32 + '106': L64 + '107': LP + '108': EUI48 + '109': EUI64 + '11': WKS + 110-248: Unassigned + '12': PTR + '13': HINFO + '14': MINFO + '15': MX + '16': TXT + '17': RP + '18': AFSDB + '19': X25 + '2': NS + '20': ISDN + '21': RT + '22': NSAP + '23': NSAP-PTR + '24': SIG + '249': TKEY + '25': KEY + '250': TSIG + '251': IXFR + 
'252': AXFR + '253': MAILB + '254': MAILA + '255': wildcard + '256': URI + '257': CAA + '258': AVC + '259': DOA + '26': PX + '260': AMTRELAY + 261-32767: Unassigned + '27': GPOS + '28': AAAA + '29': LOC + '3': MD + '30': NXT + '31': EID + '32': NIMLOC + '32768': TA + '32769': DLV + 32770-65279: Unassigned + '33': SRV + '34': ATMA + '35': NAPTR + '36': KX + '37': CERT + '38': A6 + '39': DNAME + '4': MF + '40': SINK + '41': OPT + '42': APL + '43': DS + '44': SSHFP + '45': IPSECKEY + '46': RRSIG + '47': NSEC + '48': DNSKEY + '49': DHCID + '5': CNAME + '50': NSEC3 + '51': NSEC3PARAM + '52': TLSA + '53': SMIMEA + '54': Unassigned + '55': HIP + '56': NINFO + '57': RKEY + '58': TALINK + '59': CDS + '6': SOA + '60': CDNSKEY + '61': OPENPGPKEY + '62': CSYNC + '63': ZONEMD + 64-98: Unassigned + 65280-65534: Private use + '65535': Reserved + '7': MB + '8': MG + '9': MR + '99': SPF + diff --git a/tests/test.gold b/tests/test.gold new file mode 100644 index 0000000..489b6e5 --- /dev/null +++ b/tests/test.gold @@ -0,0 +1,151 @@ +# DML +# CONTEXT-DATABASE: dsc +pcap_stats,server=test-server,node=test-node,ifname=eth0,pcap_stat=filter_received value=5625 1563520560000000000 +pcap_stats,server=test-server,node=test-node,ifname=eth0,pcap_stat=pkts_captured value=4894 1563520560000000000 +pcap_stats,server=test-server,node=test-node,ifname=eth0,pcap_stat=kernel_dropped value=731 1563520560000000000 +direction_vs_ipproto,server=test-server,node=test-node,direction=sent,ipproto=udp value=289 1563520560000000000 +direction_vs_ipproto,server=test-server,node=test-node,direction=recv,ipproto=udp value=219 1563520560000000000 +direction_vs_ipproto,server=test-server,node=test-node,direction=else,ipproto=udp value=267 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=32768-34815 value=18 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=34816-36863 value=19 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=24576-26623 value=15 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=55296-57343 value=21 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=40960-43007 value=16 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=18432-20479 value=12 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=57344-59391 value=11 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=61440-63487 value=12 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=63488-65535 value=8 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=2048-4095 value=12 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=14336-16383 value=13 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=36864-38911 value=13 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=45056-47103 value=14 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=59392-61439 value=8 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=12288-14335 value=11 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=43008-45055 value=9 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=49152-51199 value=10 1563520560000000000 
+client_port_range,server=test-server,node=test-node,portrange=51200-53247 value=8 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=16384-18431 value=9 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=28672-30719 value=9 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=30720-32767 value=9 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=38912-40959 value=9 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=47104-49151 value=9 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=53248-55295 value=8 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=4096-6143 value=7 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=6144-8191 value=6 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=10240-12287 value=8 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=22528-24575 value=8 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=26624-28671 value=7 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=0-2047 value=3 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=8192-10239 value=6 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=20480-22527 value=5 1563520560000000000 +transport_vs_qtype,server=test-server,node=test-node,transport=udp,qtype=A value=128 1563520560000000000 +transport_vs_qtype,server=test-server,node=test-node,transport=udp,qtype=PTR value=78 1563520560000000000 +transport_vs_qtype,server=test-server,node=test-node,transport=udp,qtype=AAAA value=71 1563520560000000000 +transport_vs_qtype,server=test-server,node=test-node,transport=udp,qtype=DNSKEY value=25 1563520560000000000 +transport_vs_qtype,server=test-server,node=test-node,transport=udp,qtype=TXT value=18 1563520560000000000 +transport_vs_qtype,server=test-server,node=test-node,transport=udp,qtype=MX value=9 1563520560000000000 +transport_vs_qtype,server=test-server,node=test-node,transport=udp,qtype=NS value=7 1563520560000000000 +transport_vs_qtype,server=test-server,node=test-node,transport=udp,qtype=DS value=5 1563520560000000000 +transport_vs_qtype,server=test-server,node=test-node,transport=udp,qtype=SOA value=4 1563520560000000000 +rd_bit,server=test-server,node=test-node,rd=set value=186 1563520560000000000 +rd_bit,server=test-server,node=test-node,rd=clr value=159 1563520560000000000 +do_bit,server=test-server,node=test-node,d0=clr value=211 1563520560000000000 +do_bit,server=test-server,node=test-node,d0=set value=134 1563520560000000000 +edns_bufsiz,server=test-server,node=test-node,ednsbufsiz=None value=204 1563520560000000000 +edns_bufsiz,server=test-server,node=test-node,ednsbufsiz=4096-4607 value=73 1563520560000000000 +edns_bufsiz,server=test-server,node=test-node,ednsbufsiz=512-1023 value=26 1563520560000000000 +edns_bufsiz,server=test-server,node=test-node,ednsbufsiz=1024-1535 value=22 1563520560000000000 +edns_bufsiz,server=test-server,node=test-node,ednsbufsiz=3584-4095 value=12 1563520560000000000 +edns_bufsiz,server=test-server,node=test-node,ednsbufsiz=1536-2047 value=6 1563520560000000000 +edns_bufsiz,server=test-server,node=test-node,ednsbufsiz=2048-2559 value=1 1563520560000000000 +edns_bufsiz,server=test-server,node=test-node,ednsbufsiz=8192-8703 
value=1 1563520560000000000 +edns_version,server=test-server,node=test-node,ednsversion=none value=204 1563520560000000000 +edns_version,server=test-server,node=test-node,ednsversion=0 value=141 1563520560000000000 +idn_qname,server=test-server,node=test-node,idnqname=normal value=345 1563520560000000000 +client_addr_vs_rcode,server=test-server,node=test-node,rcode=NoError,clientaddr=64.191.0.0 value=78 1563520560000000000 +client_addr_vs_rcode,server=test-server,node=test-node,rcode=NoError,clientaddr=2620:: value=50 1563520560000000000 +client_addr_vs_rcode,server=test-server,node=test-node,rcode=NoError,clientaddr=0 value=131 1563520560000000000 +client_addr_vs_rcode,server=test-server,node=test-node,rcode=NXDomain,clientaddr=64.191.0.0 value=3 1563520560000000000 +client_addr_vs_rcode,server=test-server,node=test-node,rcode=NXDomain,clientaddr=2620:: value=2 1563520560000000000 +client_addr_vs_rcode,server=test-server,node=test-node,rcode=ServFail,clientaddr=64.191.0.0 value=6 1563520560000000000 +client_subnet2,server=test-server,node=test-node,class=ok,clientsubnet=64.191.0.0 value=180 1563520560000000000 +client_subnet2,server=test-server,node=test-node,class=ok,clientsubnet=2620:: value=5 1563520560000000000 +client_subnet2,server=test-server,node=test-node,class=non-auth-tld,clientsubnet=64.191.0.0 value=18 1563520560000000000 +client_subnet2,server=test-server,node=test-node,class=rfc1918-ptr,clientsubnet=2620:: value=1 1563520560000000000 +certain_qnames_vs_qtype,server=test-server,node=test-node,certainqnames=else,qtype=A value=128 1563520560000000000 +certain_qnames_vs_qtype,server=test-server,node=test-node,certainqnames=else,qtype=PTR value=78 1563520560000000000 +certain_qnames_vs_qtype,server=test-server,node=test-node,certainqnames=else,qtype=AAAA value=71 1563520560000000000 +certain_qnames_vs_qtype,server=test-server,node=test-node,certainqnames=else,qtype=DNSKEY value=25 1563520560000000000 +certain_qnames_vs_qtype,server=test-server,node=test-node,certainqnames=else,qtype=TXT value=18 1563520560000000000 +certain_qnames_vs_qtype,server=test-server,node=test-node,certainqnames=else,qtype=MX value=9 1563520560000000000 +certain_qnames_vs_qtype,server=test-server,node=test-node,certainqnames=else,qtype=NS value=7 1563520560000000000 +certain_qnames_vs_qtype,server=test-server,node=test-node,certainqnames=else,qtype=DS value=5 1563520560000000000 +certain_qnames_vs_qtype,server=test-server,node=test-node,certainqnames=else,qtype=SOA value=4 1563520560000000000 +qtype_vs_tld,server=test-server,node=test-node,qtype=A,tld=net value=128 1563520560000000000 +qtype_vs_tld,server=test-server,node=test-node,qtype=A,tld=... 
value=128 1563520560000000000 +qtype_vs_tld,server=test-server,node=test-node,qtype=PTR,tld=arpa value=78 1563520560000000000 +qtype_vs_tld,server=test-server,node=test-node,qtype=AAAA,tld=net value=71 1563520560000000000 +qtype_vs_tld,server=test-server,node=test-node,qtype=MX,tld=asia value=8 1563520560000000000 +qtype_vs_tld,server=test-server,node=test-node,qtype=MX,tld=net value=1 1563520560000000000 +qtype_vs_tld,server=test-server,node=test-node,qtype=NS,tld=arpa value=3 1563520560000000000 +qtype_vs_tld,server=test-server,node=test-node,qtype=NS,tld=net value=2 1563520560000000000 +qtype_vs_tld,server=test-server,node=test-node,qtype=NS,tld=com value=1 1563520560000000000 +qtype_vs_tld,server=test-server,node=test-node,qtype=NS,tld=org value=1 1563520560000000000 +qtype_vs_tld,server=test-server,node=test-node,qtype=SOA,tld=net value=2 1563520560000000000 +qtype_vs_tld,server=test-server,node=test-node,qtype=SOA,tld=arpa value=2 1563520560000000000 +qtype_vs_qnamelen,server=test-server,node=test-node,qtype=A,qnamelen=16-31 value=98 1563520560000000000 +qtype_vs_qnamelen,server=test-server,node=test-node,qtype=A,qnamelen=0-15 value=29 1563520560000000000 +qtype_vs_qnamelen,server=test-server,node=test-node,qtype=A,qnamelen=32-47 value=1 1563520560000000000 +qtype_vs_qnamelen,server=test-server,node=test-node,qtype=DS,qnamelen=0-15 value=2 1563520560000000000 +qtype_vs_qnamelen,server=test-server,node=test-node,qtype=DS,qnamelen=16-31 value=3 1563520560000000000 +qtype_vs_qnamelen,server=test-server,node=test-node,qtype=PTR,qnamelen=16-31 value=76 1563520560000000000 +qtype_vs_qnamelen,server=test-server,node=test-node,qtype=PTR,qnamelen=64-79 value=2 1563520560000000000 +qtype_vs_qnamelen,server=test-server,node=test-node,qtype=TXT,qnamelen=64-79 value=9 1563520560000000000 +qtype_vs_qnamelen,server=test-server,node=test-node,qtype=TXT,qnamelen=16-31 value=8 1563520560000000000 +qtype_vs_qnamelen,server=test-server,node=test-node,qtype=TXT,qnamelen=0-15 value=1 1563520560000000000 +qtype_vs_qnamelen,server=test-server,node=test-node,qtype=DNSKEY,qnamelen=16-31 value=13 1563520560000000000 +qtype_vs_qnamelen,server=test-server,node=test-node,qtype=DNSKEY,qnamelen=0-15 value=12 1563520560000000000 +qtype_vs_qnamelen,server=test-server,node=test-node,qtype=AAAA,qnamelen=16-31 value=57 1563520560000000000 +qtype_vs_qnamelen,server=test-server,node=test-node,qtype=AAAA,qnamelen=0-15 value=12 1563520560000000000 +qtype_vs_qnamelen,server=test-server,node=test-node,qtype=AAAA,qnamelen=32-47 value=1 1563520560000000000 +qtype_vs_qnamelen,server=test-server,node=test-node,qtype=AAAA,qnamelen=48-63 value=1 1563520560000000000 +qtype_vs_qnamelen,server=test-server,node=test-node,qtype=MX,qnamelen=0-15 value=8 1563520560000000000 +qtype_vs_qnamelen,server=test-server,node=test-node,qtype=MX,qnamelen=16-31 value=1 1563520560000000000 +qtype_vs_qnamelen,server=test-server,node=test-node,qtype=NS,qnamelen=0-15 value=4 1563520560000000000 +qtype_vs_qnamelen,server=test-server,node=test-node,qtype=NS,qnamelen=16-31 value=3 1563520560000000000 +qtype_vs_qnamelen,server=test-server,node=test-node,qtype=SOA,qnamelen=16-31 value=3 1563520560000000000 +qtype_vs_qnamelen,server=test-server,node=test-node,qtype=SOA,qnamelen=0-15 value=1 1563520560000000000 +client_subnet,server=test-server,node=test-node,clientsubnet=64.191.0.0 value=198 1563520560000000000 +client_subnet,server=test-server,node=test-node,clientsubnet=2620:: value=5 1563520560000000000 
+rcode_vs_replylen,server=test-server,node=test-node,rcode=NoError,replylen=0-63 value=105 1563520560000000000 +rcode_vs_replylen,server=test-server,node=test-node,rcode=NoError,replylen=64-127 value=124 1563520560000000000 +rcode_vs_replylen,server=test-server,node=test-node,rcode=NoError,replylen=192-255 value=70 1563520560000000000 +rcode_vs_replylen,server=test-server,node=test-node,rcode=NoError,replylen=256-319 value=51 1563520560000000000 +rcode_vs_replylen,server=test-server,node=test-node,rcode=NoError,replylen=1088-1151 value=14 1563520560000000000 +rcode_vs_replylen,server=test-server,node=test-node,rcode=NoError,replylen=320-383 value=16 1563520560000000000 +rcode_vs_replylen,server=test-server,node=test-node,rcode=NoError,replylen=512-575 value=8 1563520560000000000 +rcode_vs_replylen,server=test-server,node=test-node,rcode=NoError,replylen=1472-1535 value=6 1563520560000000000 +rcode_vs_replylen,server=test-server,node=test-node,rcode=NoError,replylen=704-767 value=4 1563520560000000000 +rcode_vs_replylen,server=test-server,node=test-node,rcode=NoError,replylen=448-511 value=3 1563520560000000000 +rcode_vs_replylen,server=test-server,node=test-node,rcode=NoError,replylen=576-639 value=4 1563520560000000000 +rcode_vs_replylen,server=test-server,node=test-node,rcode=NoError,replylen=128-191 value=7 1563520560000000000 +rcode_vs_replylen,server=test-server,node=test-node,rcode=NoError,replylen=384-447 value=4 1563520560000000000 +rcode_vs_replylen,server=test-server,node=test-node,rcode=NXDomain,replylen=64-127 value=4 1563520560000000000 +rcode_vs_replylen,server=test-server,node=test-node,rcode=NXDomain,replylen=640-703 value=2 1563520560000000000 +rcode_vs_replylen,server=test-server,node=test-node,rcode=NXDomain,replylen=128-191 value=1 1563520560000000000 +rcode_vs_replylen,server=test-server,node=test-node,rcode=NXDomain,replylen=768-831 value=1 1563520560000000000 +rcode_vs_replylen,server=test-server,node=test-node,rcode=ServFail,replylen=0-63 value=6 1563520560000000000 +opcode,server=test-server,node=test-node,opcode=NoError value=345 1563520560000000000 +rcode,server=test-server,node=test-node,rcode=NoError value=416 1563520560000000000 +rcode,server=test-server,node=test-node,rcode=NXDomain value=8 1563520560000000000 +rcode,server=test-server,node=test-node,rcode=ServFail value=6 1563520560000000000 +qtype,server=test-server,node=test-node,qtype=A value=128 1563520560000000000 +qtype,server=test-server,node=test-node,qtype=PTR value=78 1563520560000000000 +qtype,server=test-server,node=test-node,qtype=AAAA value=71 1563520560000000000 +qtype,server=test-server,node=test-node,qtype=DNSKEY value=25 1563520560000000000 +qtype,server=test-server,node=test-node,qtype=TXT value=18 1563520560000000000 +qtype,server=test-server,node=test-node,qtype=MX value=9 1563520560000000000 +qtype,server=test-server,node=test-node,qtype=NS value=7 1563520560000000000 +qtype,server=test-server,node=test-node,qtype=DS value=5 1563520560000000000 +qtype,server=test-server,node=test-node,qtype=SOA value=4 1563520560000000000 +client_subnet_authority,server=test-server,node=test-node,clientauthority=ARIN value=203 1563520560000000000 diff --git a/tests/test.gold2 b/tests/test.gold2 new file mode 100644 index 0000000..a09fc0f --- /dev/null +++ b/tests/test.gold2 @@ -0,0 +1,91 @@ +# DML +# CONTEXT-DATABASE: dsc +client_subnet_count,server=test-server,node=test-node,all=ALL value=2 1563520560000000000 +ipv6_rsn_abusers_count,server=test-server,node=test-node,all=ALL value=0 1563520560000000000 
+qtype,server=test-server,node=test-node,qtype=PTR value=78 1563520560000000000 +qtype,server=test-server,node=test-node,qtype=NS value=7 1563520560000000000 +qtype,server=test-server,node=test-node,qtype=SOA value=4 1563520560000000000 +qtype,server=test-server,node=test-node,qtype=MX value=9 1563520560000000000 +qtype,server=test-server,node=test-node,qtype=else value=48 1563520560000000000 +qtype,server=test-server,node=test-node,qtype=A value=128 1563520560000000000 +qtype,server=test-server,node=test-node,qtype=AAAA value=71 1563520560000000000 +rcode,server=test-server,node=test-node,rcode=NoError value=416 1563520560000000000 +rcode,server=test-server,node=test-node,rcode=NXDomain value=8 1563520560000000000 +rcode,server=test-server,node=test-node,rcode=ServFail value=6 1563520560000000000 +do_bit,server=test-server,node=test-node,d0=set value=134 1563520560000000000 +do_bit,server=test-server,node=test-node,d0=clr value=211 1563520560000000000 +rd_bit,server=test-server,node=test-node,rd=clr value=159 1563520560000000000 +rd_bit,server=test-server,node=test-node,rd=set value=186 1563520560000000000 +opcode,server=test-server,node=test-node,opcode=NoError value=345 1563520560000000000 +dnssec_qtype,server=test-server,node=test-node,qtype=else value=315 1563520560000000000 +dnssec_qtype,server=test-server,node=test-node,qtype=48 value=25 1563520560000000000 +dnssec_qtype,server=test-server,node=test-node,qtype=43 value=5 1563520560000000000 +edns_version,server=test-server,node=test-node,ednsversion=0 value=141 1563520560000000000 +edns_version,server=test-server,node=test-node,ednsversion=none value=204 1563520560000000000 +client_subnet2_count,server=test-server,node=test-node,class=rfc1918-ptr value=1 1563520560000000000 +client_subnet2_count,server=test-server,node=test-node,class=ok value=2 1563520560000000000 +client_subnet2_count,server=test-server,node=test-node,class=non-auth-tld value=1 1563520560000000000 +client_subnet2_trace,server=test-server,node=test-node,class=non-auth-tld value=18 1563520560000000000 +client_subnet2_trace,server=test-server,node=test-node,class=rfc1918-ptr value=1 1563520560000000000 +client_subnet2_trace,server=test-server,node=test-node,class=ok value=185 1563520560000000000 +edns_bufsiz,server=test-server,node=test-node,ednsbufsiz=3584-4095 value=12 1563520560000000000 +edns_bufsiz,server=test-server,node=test-node,ednsbufsiz=4096-4607 value=73 1563520560000000000 +edns_bufsiz,server=test-server,node=test-node,ednsbufsiz=2048-2559 value=1 1563520560000000000 +edns_bufsiz,server=test-server,node=test-node,ednsbufsiz=512-1023 value=26 1563520560000000000 +edns_bufsiz,server=test-server,node=test-node,ednsbufsiz=None value=204 1563520560000000000 +edns_bufsiz,server=test-server,node=test-node,ednsbufsiz=8192-8703 value=1 1563520560000000000 +edns_bufsiz,server=test-server,node=test-node,ednsbufsiz=1536-2047 value=6 1563520560000000000 +edns_bufsiz,server=test-server,node=test-node,ednsbufsiz=1024-1535 value=22 1563520560000000000 +idn_qname,server=test-server,node=test-node,idnqname=normal value=345 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=57344-59391 value=11 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=0-2047 value=3 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=45056-47103 value=14 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=30720-32767 value=9 1563520560000000000 
+client_port_range,server=test-server,node=test-node,portrange=14336-16383 value=13 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=8192-10239 value=6 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=4096-6143 value=7 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=24576-26623 value=15 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=40960-43007 value=16 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=43008-45055 value=9 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=61440-63487 value=12 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=22528-24575 value=8 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=10240-12287 value=8 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=38912-40959 value=9 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=34816-36863 value=19 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=16384-18431 value=9 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=32768-34815 value=18 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=36864-38911 value=13 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=53248-55295 value=8 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=2048-4095 value=12 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=47104-49151 value=9 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=20480-22527 value=5 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=49152-51199 value=10 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=6144-8191 value=6 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=51200-53247 value=8 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=26624-28671 value=7 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=55296-57343 value=21 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=63488-65535 value=8 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=12288-14335 value=11 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=59392-61439 value=8 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=28672-30719 value=9 1563520560000000000 +client_port_range,server=test-server,node=test-node,portrange=18432-20479 value=12 1563520560000000000 +certain_qnames_vs_qtype,server=test-server,node=test-node,certainqnames=else,qtype=else value=48 1563520560000000000 +certain_qnames_vs_qtype,server=test-server,node=test-node,certainqnames=else,qtype=A value=128 1563520560000000000 +certain_qnames_vs_qtype,server=test-server,node=test-node,certainqnames=else,qtype=AAAA value=71 1563520560000000000 +certain_qnames_vs_qtype,server=test-server,node=test-node,certainqnames=else,qtype=MX value=9 1563520560000000000 +certain_qnames_vs_qtype,server=test-server,node=test-node,certainqnames=else,qtype=PTR value=78 1563520560000000000 
+certain_qnames_vs_qtype,server=test-server,node=test-node,certainqnames=else,qtype=NS value=7 1563520560000000000 +certain_qnames_vs_qtype,server=test-server,node=test-node,certainqnames=else,qtype=SOA value=4 1563520560000000000 +direction_vs_ipproto,server=test-server,node=test-node,direction=recv,ipproto=udp value=219 1563520560000000000 +direction_vs_ipproto,server=test-server,node=test-node,direction=sent,ipproto=udp value=289 1563520560000000000 +direction_vs_ipproto,server=test-server,node=test-node,direction=else,ipproto=udp value=267 1563520560000000000 +pcap_stats,server=test-server,node=test-node,pcap_stat=filter_received,ifname=eth0 value=5625 1563520560000000000 +pcap_stats,server=test-server,node=test-node,pcap_stat=kernel_dropped,ifname=eth0 value=731 1563520560000000000 +pcap_stats,server=test-server,node=test-node,pcap_stat=pkts_captured,ifname=eth0 value=4894 1563520560000000000 +transport_vs_qtype,server=test-server,node=test-node,transport=udp,qtype=else value=48 1563520560000000000 +transport_vs_qtype,server=test-server,node=test-node,transport=udp,qtype=AAAA value=71 1563520560000000000 +transport_vs_qtype,server=test-server,node=test-node,transport=udp,qtype=A value=128 1563520560000000000 +transport_vs_qtype,server=test-server,node=test-node,transport=udp,qtype=MX value=9 1563520560000000000 +transport_vs_qtype,server=test-server,node=test-node,transport=udp,qtype=PTR value=78 1563520560000000000 +transport_vs_qtype,server=test-server,node=test-node,transport=udp,qtype=NS value=7 1563520560000000000 +transport_vs_qtype,server=test-server,node=test-node,transport=udp,qtype=SOA value=4 1563520560000000000 diff --git a/tests/test.gold3 b/tests/test.gold3 new file mode 100644 index 0000000..01c49ea --- /dev/null +++ b/tests/test.gold3 @@ -0,0 +1,56 @@ +# DML +# CONTEXT-DATABASE: dsc +pcap_stats,server=test-server,node=test-node,ifname=./1458044657.pcap.dist,pcap_stat=pkts_captured value=8 1458044655000000000 +label_count,server=test-server,node=test-node,labelcount=3 value=4 1458044655000000000 +label_count,server=test-server,node=test-node,labelcount=6 value=4 1458044655000000000 +third_ld_vs_rcode,server=test-server,node=test-node,rcode=0,thirdld=216.in-addr.arpa value=2 1458044655000000000 +third_ld_vs_rcode,server=test-server,node=test-node,rcode=0,thirdld=www.google.se value=1 1458044655000000000 +third_ld_vs_rcode,server=test-server,node=test-node,rcode=0,thirdld=www.google.com value=1 1458044655000000000 +second_ld_vs_rcode,server=test-server,node=test-node,rcode=0,secondld=in-addr.arpa value=2 1458044655000000000 +second_ld_vs_rcode,server=test-server,node=test-node,rcode=0,secondld=google.com value=1 1458044655000000000 +second_ld_vs_rcode,server=test-server,node=test-node,rcode=0,secondld=www.google.se value=1 1458044655000000000 +server,server=test-server,node=test-node,ip=8.8.8.8 value=8 1458044655000000000 +qr_aa_bits,server=test-server,node=test-node,direction=else,qraabits=qr\=0\,aa\=0 value=4 1458044655000000000 +qr_aa_bits,server=test-server,node=test-node,direction=else,qraabits=qr\=1\,aa\=0 value=4 1458044655000000000 +qname,server=test-server,node=test-node,name=100.209.58.216.in-addr.arpa value=2 1458044655000000000 +qname,server=test-server,node=test-node,name=www.google.se value=2 1458044655000000000 +qname,server=test-server,node=test-node,name=131.209.58.216.in-addr.arpa value=2 1458044655000000000 +qname,server=test-server,node=test-node,name=www.google.com value=2 1458044655000000000 +qclass,server=test-server,node=test-node,class=1 value=8 
1458044655000000000 +dns_ip_version,server=test-server,node=test-node,version=IPv4 value=8 1458044655000000000 +ip_version,server=test-server,node=test-node,version=IPv4 value=8 1458044655000000000 +direction_vs_ipproto,server=test-server,node=test-node,direction=else,ipproto=udp value=8 1458044655000000000 +client_port,server=test-server,node=test-node,port=59978 value=4 1458044655000000000 +client_port,server=test-server,node=test-node,port=0 value=1 1458044655000000000 +client_port,server=test-server,node=test-node,port=53 value=1 1458044655000000000 +client_port,server=test-server,node=test-node,port=44275 value=1 1458044655000000000 +client_port,server=test-server,node=test-node,port=57483 value=1 1458044655000000000 +client_port_range,server=test-server,node=test-node,portrange=43008-45055 value=1 1458044655000000000 +client_port_range,server=test-server,node=test-node,portrange=55296-57343 value=1 1458044655000000000 +client_port_range,server=test-server,node=test-node,portrange=57344-59391 value=2 1458044655000000000 +transport_vs_qtype,server=test-server,node=test-node,transport=udp,qtype=A value=2 1458044655000000000 +transport_vs_qtype,server=test-server,node=test-node,transport=udp,qtype=PTR value=2 1458044655000000000 +tc_bit,server=test-server,node=test-node,tc=clr value=8 1458044655000000000 +rd_bit,server=test-server,node=test-node,rd=set value=4 1458044655000000000 +do_bit,server=test-server,node=test-node,d0=clr value=4 1458044655000000000 +edns_bufsiz,server=test-server,node=test-node,ednsbufsiz=None value=4 1458044655000000000 +edns_version,server=test-server,node=test-node,ednsversion=none value=4 1458044655000000000 +idn_qname,server=test-server,node=test-node,idnqname=normal value=4 1458044655000000000 +client_addr_vs_rcode,server=test-server,node=test-node,rcode=NoError,clientaddr=172.17.0.0 value=4 1458044655000000000 +client_subnet2,server=test-server,node=test-node,class=ok,clientsubnet=172.17.0.0 value=3 1458044655000000000 +client_subnet2,server=test-server,node=test-node,class=non-auth-tld,clientsubnet=172.17.0.0 value=1 1458044655000000000 +certain_qnames_vs_qtype,server=test-server,node=test-node,certainqnames=else,qtype=A value=2 1458044655000000000 +certain_qnames_vs_qtype,server=test-server,node=test-node,certainqnames=else,qtype=PTR value=2 1458044655000000000 +qtype_vs_tld,server=test-server,node=test-node,qtype=A,tld=com value=1 1458044655000000000 +qtype_vs_tld,server=test-server,node=test-node,qtype=A,tld=google.se value=1 1458044655000000000 +qtype_vs_tld,server=test-server,node=test-node,qtype=PTR,tld=arpa value=2 1458044655000000000 +qtype_vs_qnamelen,server=test-server,node=test-node,qtype=A,qnamelen=0-15 value=2 1458044655000000000 +qtype_vs_qnamelen,server=test-server,node=test-node,qtype=PTR,qnamelen=16-31 value=2 1458044655000000000 +client_subnet,server=test-server,node=test-node,clientsubnet=172.17.0.0 value=4 1458044655000000000 +rcode_vs_replylen,server=test-server,node=test-node,rcode=NoError,replylen=128-191 value=2 1458044655000000000 +rcode_vs_replylen,server=test-server,node=test-node,rcode=NoError,replylen=0-63 value=2 1458044655000000000 +opcode,server=test-server,node=test-node,opcode=NoError value=4 1458044655000000000 +rcode,server=test-server,node=test-node,rcode=NoError value=4 1458044655000000000 +qtype,server=test-server,node=test-node,qtype=A value=2 1458044655000000000 +qtype,server=test-server,node=test-node,qtype=PTR value=2 1458044655000000000 
+client_subnet_authority,server=test-server,node=test-node,clientauthority=ARIN value=4 1458044655000000000 diff --git a/tests/test.gold4 b/tests/test.gold4 new file mode 100644 index 0000000..fa55d4f --- /dev/null +++ b/tests/test.gold4 @@ -0,0 +1,5 @@ +# CONTEXT-DATABASE: dsc +# DML +pcap_stats,server=test-server-åäö,node=test-node,ifname=eth0åäö,pcap_stat=filter_received value=5625 1563520560000000000 +pcap_stats,server=test-server-åäö,node=test-node,ifname=eth0åäö,pcap_stat=kernel_dropped value=731 1563520560000000000 +pcap_stats,server=test-server-åäö,node=test-node,ifname=eth0åäö,pcap_stat=pkts_captured value=4894 1563520560000000000 diff --git a/tests/test.sh b/tests/test.sh new file mode 100755 index 0000000..4983b9e --- /dev/null +++ b/tests/test.sh @@ -0,0 +1,75 @@ +#!/bin/sh -xe + +base=`dirname $0` + +dsc-datatool \ + -vvv \ + -s test-server \ + -n test-node \ + --output ";InfluxDB;dml=1;database=dsc" \ + --transform ";Labler;*;yaml=$base/labler.yaml" \ + --transform ";ReRanger;rcode_vs_replylen;range=/64;pad_to=5" \ + --transform ";ReRanger;qtype_vs_qnamelen;range=/16;pad_to=3" \ + --transform ";ReRanger;client_port_range;key=low;range=/2048;pad_to=5" \ + --transform ";ReRanger;edns_bufsiz,priming_queries;key=low;range=/512;pad_to=5;allow_invalid_keys=1" \ + --transform ";ReRanger;priming_responses;key=low;range=/128;pad_to=4" \ + --transform ";NetRemap;client_subnet,client_subnet2,client_addr_vs_rcode,ipv6_rsn_abusers;net=16" \ + --generator ";client_subnet_authority;csv=$base/ipv4-address-space.csv;csv=$base/ipv6-unicast-address-assignments.csv" \ + --xml "$base/1563520620.dscdata.xml" | sort -s > "$base/test.out" + +sort -s "$base/test.gold" > "$base/test.gold.tmp" +diff -u "$base/test.gold.tmp" "$base/test.out" + +dsc-datatool \ + -vvv \ + -s test-server \ + -n test-node \ + --output ";InfluxDB;dml=1;database=dsc" \ + --transform ";Labler;*;yaml=$base/labler.yaml" \ + --transform ";ReRanger;rcode_vs_replylen;range=/64;pad_to=5" \ + --transform ";ReRanger;qtype_vs_qnamelen;range=/16;pad_to=3" \ + --transform ";ReRanger;client_port_range;key=low;range=/2048;pad_to=5" \ + --transform ";ReRanger;edns_bufsiz,priming_queries;key=low;range=/512;pad_to=5;allow_invalid_keys=1" \ + --transform ";ReRanger;priming_responses;key=low;range=/128;pad_to=4" \ + --transform ";NetRemap;client_subnet,client_subnet2,client_addr_vs_rcode,ipv6_rsn_abusers;net=16" \ + --generator ";client_subnet_authority;csv=$base/ipv4-address-space.csv;csv=$base/ipv6-unicast-address-assignments.csv" \ + --dat "$base/20190719" | sort -s > "$base/test.out" + +sort -s "$base/test.gold2" > "$base/test.gold2.tmp" +diff -u "$base/test.gold2.tmp" "$base/test.out" + +dsc-datatool \ + -vvv \ + -s test-server \ + -n test-node \ + --output ";InfluxDB;dml=1;database=dsc" \ + --transform ";Labler;*;yaml=$base/labler.yaml" \ + --transform ";ReRanger;rcode_vs_replylen;range=/64;pad_to=5" \ + --transform ";ReRanger;qtype_vs_qnamelen;range=/16;pad_to=3" \ + --transform ";ReRanger;client_port_range;key=low;range=/2048;pad_to=5" \ + --transform ";ReRanger;edns_bufsiz,priming_queries;key=low;range=/512;pad_to=5;allow_invalid_keys=1" \ + --transform ";ReRanger;priming_responses;key=low;range=/128;pad_to=4" \ + --transform ";NetRemap;client_subnet,client_subnet2,client_addr_vs_rcode,ipv6_rsn_abusers;net=16" \ + --generator ";client_subnet_authority;csv=$base/ipv4-address-space.csv;csv=$base/ipv6-unicast-address-assignments.csv" \ + --xml "$base/1458044657.xml" | sort -s > "$base/test.out" + +sort -s "$base/test.gold3" > 
"$base/test.gold3.tmp" +diff -u "$base/test.gold3.tmp" "$base/test.out" + +dsc-datatool \ + -vvv \ + -s test-server-åäö \ + -n test-node \ + --output ";InfluxDB;dml=1;database=dsc" \ + --transform ";Labler;*;yaml=$base/labler.yaml" \ + --transform ";ReRanger;rcode_vs_replylen;range=/64;pad_to=5" \ + --transform ";ReRanger;qtype_vs_qnamelen;range=/16;pad_to=3" \ + --transform ";ReRanger;client_port_range;key=low;range=/2048;pad_to=5" \ + --transform ";ReRanger;edns_bufsiz,priming_queries;key=low;range=/512;pad_to=5;allow_invalid_keys=1" \ + --transform ";ReRanger;priming_responses;key=low;range=/128;pad_to=4" \ + --transform ";NetRemap;client_subnet,client_subnet2,client_addr_vs_rcode,ipv6_rsn_abusers;net=16" \ + --generator ";client_subnet_authority;csv=$base/ipv4-address-space.csv;csv=$base/ipv6-unicast-address-assignments.csv" \ + --xml "$base/utf8.xml" | sort -s > "$base/test.out" + +sort -s "$base/test.gold4" > "$base/test.gold4.tmp" +diff -u "$base/test.gold4.tmp" "$base/test.out" diff --git a/tests/test_main.py b/tests/test_main.py new file mode 100644 index 0000000..ee6bb27 --- /dev/null +++ b/tests/test_main.py @@ -0,0 +1,7 @@ +import pytest +import dsc_datatool as app + + +def test_main(): + with pytest.raises(Exception): + app.main() diff --git a/tests/test_objects.py b/tests/test_objects.py new file mode 100644 index 0000000..d574bb4 --- /dev/null +++ b/tests/test_objects.py @@ -0,0 +1,68 @@ +import pytest +from dsc_datatool import Dataset, Dimension, Input, Output, Generator, Transformer + + +def test_dataset(): + o = Dataset() + assert '%r' % o == '' + + +def test_dimension(): + o = Dimension('test') + assert '%r' % o == '' + + +def test_input(): + o = Input() + with pytest.raises(Exception): + o.process("test") + + class Input1(Input): + def process(self, file): + pass + with pytest.raises(Exception): + class Input1(Input): + def process(self, file): + pass + + +def test_output(): + o = Output({}) + with pytest.raises(Exception): + o.process([]) + + class Output1(Output): + def process(self, file): + pass + with pytest.raises(Exception): + class Output1(Output): + def process(self, file): + pass + + +def test_generator(): + o = Generator({}) + with pytest.raises(Exception): + o.process([]) + + class Generator1(Generator): + def process(self, file): + pass + with pytest.raises(Exception): + class Generator1(Generator): + def process(self, file): + pass + + +def test_transformer(): + o = Transformer({}) + with pytest.raises(Exception): + o.process([]) + + class Transformer1(Transformer): + def process(self, file): + pass + with pytest.raises(Exception): + class Transformer1(Transformer): + def process(self, file): + pass diff --git a/tests/utf8.xml b/tests/utf8.xml new file mode 100644 index 0000000..b5fcc95 --- /dev/null +++ b/tests/utf8.xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + +