diff --git a/.checkstyle.xml b/.checkstyle.xml index a214cc83f6..469a132853 100644 --- a/.checkstyle.xml +++ b/.checkstyle.xml @@ -42,7 +42,7 @@ - + diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml new file mode 100644 index 0000000000..fe101d0ee7 --- /dev/null +++ b/.github/workflows/tests.yml @@ -0,0 +1,75 @@ +# This workflow will install Python dependencies, run tests and lint with a single version of Python +# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions + +name: DAQ test suite + +on: + push: + pull_request: + schedule: + - cron: '0 */2 * * *' + +jobs: + integration_tests: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + test: [base, many, aux, topo, modules, dhcp] + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + - name: Set up Python 3.7 + uses: actions/setup-python@v2 + with: + python-version: 3.7 + - name: Set up JDK 1.11 + uses: actions/setup-java@v1 + with: + java-version: 1.11 + - name: Installing dependencies + run: | + bin/setup_daq + - name: Running ${{ matrix.test }} test + env: + DOCKER_STARTUP_TIMEOUT_MS: 60000 + GCP_BASE64_CRED: ${{ secrets.GCP_BASE64_CRED }} + run: | + bin/test_daq ${{ matrix.test }} + - name: Generated test report + if: ${{ always() }} + run: | + echo '************* Use sed to filter out timestamp prefix *************' + echo 'Download log archive, find the right job, and use:' + echo " sed -e 's/^[-:.0-9TZ]\+ //' 7_Generated\ test\ report.txt" + cat inst/test_${{ matrix.test }}.out + + unit_tests: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.7 + uses: actions/setup-python@v2 + with: + python-version: 3.7 + - name: Install dependencies + run: | + bin/setup_dev + - name: Check style + run: | + bin/check_style + - name: Unit test + run: | + testing/run_unit_tests.sh + + usi_tests: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: 
Set up JDK 1.11 + uses: actions/setup-java@v1 + with: + java-version: 1.11 + - name: Build with Maven + run: mvn -B clean compile test assembly:single --file usi/pom.xml diff --git a/.gitignore b/.gitignore index ada16ef8f5..0ee48512c9 100644 --- a/.gitignore +++ b/.gitignore @@ -21,16 +21,15 @@ validations/ *.save # Runtime or sub-module files -inst/ -faucet/ -forch/ -mininet/ -local/ -local_xxx -local.old -firebase/.firebaserc -firebase/.firebase -firebase/functions/package-lock.json +/inst/ +/faucet/ +/forch/ +/udmi/ +/mininet/ +/local/ +/firebase/.firebaserc +/firebase/.firebase +/firebase/functions/package-lock.json nohup.out **/node_modules/ .vscode/ diff --git a/.idea/codeStyles/codeStyleConfig.xml b/.idea/codeStyles/codeStyleConfig.xml new file mode 100644 index 0000000000..b9d18bf599 --- /dev/null +++ b/.idea/codeStyles/codeStyleConfig.xml @@ -0,0 +1,5 @@ + + + + \ No newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml index b8fa9bb7a4..3fc93409e7 100644 --- a/.idea/vcs.xml +++ b/.idea/vcs.xml @@ -36,6 +36,10 @@ + + + + \ No newline at end of file diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index bf7e2b9b6d..0000000000 --- a/.travis.yml +++ /dev/null @@ -1,30 +0,0 @@ -os: linux -dist: bionic -services: - - docker -language: python -python: - - "3.7" -cache: - pip: true -addons: - apt: - update: true - packages: - - openvswitch-switch -install: - - set -e - - bin/setup_daq -script: - - set -e - - bin/test_daq -env: - global: - - DOCKER_STARTUP_TIMEOUT_MS=60000 - jobs: - - DAQ_TEST=base - - DAQ_TEST=many - - DAQ_TEST=aux - - DAQ_TEST=topo - - DAQ_TEST=modules - - DAQ_TEST=dhcp diff --git a/README.md b/README.md index 7cc5495b1e..d162f17012 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,5 @@ +![CI Tests](https://github.com/faucetsdn/daq/workflows/DAQ%20test%20suite/badge.svg?branch=master) + # DAQ: Device Automated Qualification for IoT Devices. 
DAQ is a framework designed to test and operate IoT devices in an enterprise IoT environment. @@ -17,10 +19,6 @@ More details about the goals and objectives behind this can be found in the IEEE [SDN capabilities](https://queue.acm.org/detail.cfm?id=2560327), such as the [FAUCET OpenFlow controller](https://faucet.nz/), to orchestrate "microsegmentation" on the network for improved security. -* [_Universal Device Management Interface (UDMI)_](schemas/udmi/README.md): An interface -specification designed to normalize the management of IoT devices from different manufacturers. -This is a simple standard that provides for many of the common features not present in -existing protocols (e.g. BACnet). * _Device Management Tools_: A suite of tools, consoles, and dashboards that help operate a robust ecosystem of IoT devices. (Details forthcoming.) diff --git a/bin/alt_faucet b/bin/alt_faucet new file mode 100755 index 0000000000..757ad64218 --- /dev/null +++ b/bin/alt_faucet @@ -0,0 +1,24 @@ +#!/bin/bash -e + +ROOT=$(realpath $(dirname $0)/..) +cd $ROOT +source etc/config_base.sh + +if [ -z "$switch_setup_ext_br" ]; then + echo switch_setup.ext_br not defined for alternate faucet setup. + false +fi + +if [ -z "$switch_setup_alt_port" ]; then + echo switch_setup.alt_port not defined for alternate faucet setup. 
+ false +fi + +inst_name=$switch_setup_ext_br + +inst_dir=inst/faucet/daq-faucet-$inst_name +mkdir -p $inst_dir +cp config/faucet/faucet_$inst_name.yaml $inst_dir/faucet.yaml +echo Launching alternate faucet install $inst_name on $switch_setup_alt_port +echo DAQ autoclean docker kill daq-faucet-$inst_name +cmd/faucet $inst_name $switch_setup_alt_port diff --git a/bin/build_hash b/bin/build_hash index ef31e8ecca..4dcaa8e9a6 100755 --- a/bin/build_hash +++ b/bin/build_hash @@ -10,7 +10,7 @@ build_built=.build_built faucet_version=$(cd faucet; git rev-list -n 1 HEAD) echo "$faucet_version faucet/HEAD" > $build_files -find docker/ subset/ -type f | sort | xargs sha1sum >> $build_files +find docker/ subset/ usi/ -type f | sort | xargs sha1sum >> $build_files build_hash=`cat $build_files | sha256sum | awk '{print $1}'` if [ "$1" == check ]; then diff --git a/bin/build_proto b/bin/build_proto index ad07980e9e..cd20e074ef 100755 --- a/bin/build_proto +++ b/bin/build_proto @@ -32,6 +32,11 @@ sha1sum $proto_files > $WEB_ROOT/protos.hash gen_path=$ROOT/protoc-gen-doc/bin/protoc-gen-doc +if [ -d venv ]; then + echo Entering virtual python environment... + source venv/bin/activate +fi + mkdir -p build/daq/proto build/proto cp $proto_files build/daq/proto/ proto_files2= @@ -56,3 +61,5 @@ mkdir -p libs/proto/ touch libs/proto/__init__.py cp build/daq/proto/*.py libs/proto/ cp build/protos.html $WEB_ROOT/ + +python3 -m grpc_tools.protoc -I usi/src/main/proto/ --python_out=libs/proto/ --grpc_python_out=libs/proto/ usi/src/main/proto/usi.proto diff --git a/bin/build_release b/bin/build_release new file mode 100755 index 0000000000..a74a8c9a03 --- /dev/null +++ b/bin/build_release @@ -0,0 +1,75 @@ +#!/bin/bash -e + +if [ $# != 1 ]; then + echo $0 RELEASE_VERSION + false +fi + +VERSION=$1 +shift + +ROOT=$(realpath $(dirname $0)/..) +cd $ROOT + +changes=`git status --porcelain` +if [ -n "$changes" ]; then + echo Working tree not clean. 
+ false +fi + +git checkout release_stable +git fetch faucet +git merge faucet/release_stable +git checkout master + +changed=`git diff --name-only release_stable docs/changelog.md` +if [ -z "$changed" ]; then + git log release_stable..HEAD --pretty=oneline | sed -e 's/^[a-z0-9]\+ //g' + echo docs/changelog.md has not been updated since last release_stable + echo Use the log lines above for inspiration. + false +fi + +tagged=`git rev-list -n 1 $VERSION 2>/dev/null` || true +if [ -n "$tagged" ]; then + echo Tag $VERSION already exists. Try the next version. + false +fi + +source etc/config_base.sh + +if [ "$host_tests" != config/modules/all.conf ]; then + echo Configure your system with host_tests=config/modules/all.conf + false +fi + +cmd/build force $VERSION + +cmd/build push + +git commit -a -m "$VERSION release" +git tag -a $VERSION -m "$VERSION release" +firebase/deploy.sh bos-daq-testing + +git push +git push --tags + +# Check to see if a remote 'faucet' is defined, and if so, also update that. +faucetgit=`git config remote.faucet.url` +if [ -n "$faucetgit" ]; then + git push faucet + git push faucet --tags +fi + +git checkout release_testing && git reset --hard $VERSION +git push +if [ -n "$faucetgit" ]; then + git push faucet +fi + +git log -n 1 + +# QA pass to make sure everything is ok. 
+# `firebase/deploy.sh daq-qualification-labs` +# `git checkout release_stable && git reset --hard $VERSION` +# `git push` diff --git a/bin/combine_reports b/bin/combine_reports index e7ac571799..9d18b334b0 100755 --- a/bin/combine_reports +++ b/bin/combine_reports @@ -5,11 +5,4 @@ cd $ROOT source etc/config_base.sh -FAILED= -PYTHONPATH=daq python3 bin/python/combine_reports_from_date_range.py $conf_file $@ || FAILED=true - -if [ -n "$FAILED" ]; then - echo - echo Usage: $0 [from_time] [to_time] [from_gcp] - false -fi +PYTHONPATH=daq python3 bin/python/combine_reports.py $conf_file $@ diff --git a/bin/external_ovs b/bin/external_ovs index a289bd9cff..080fdcdecb 100755 --- a/bin/external_ovs +++ b/bin/external_ovs @@ -6,14 +6,19 @@ source etc/config_base.sh ext_intf=$switch_setup_data_intf ext_dpid=$switch_setup_of_dpid -ext_ofpt=$switch_setup_lo_port ext_brid=$switch_setup_ext_br ext_brpt=$switch_setup_uplink_port ext_pri=${ext_intf} ext_sec=${ext_intf%-pri}-sec -echo ext_dpid is $ext_dpid +if [ -z "$switch_setup_alt_port" ]; then + ext_ofpt=$switch_setup_lo_port +else + ext_ofpt=$switch_setup_alt_port +fi + +echo ext_dpid is $ext_dpid on port $ext_ofpt echo network_config is $network_config dpid=$(printf %016x $ext_dpid) diff --git a/bin/physical_sec b/bin/physical_sec index 82463e474d..939d6c4d5c 100755 --- a/bin/physical_sec +++ b/bin/physical_sec @@ -119,6 +119,9 @@ else sudo ip addr flush dev $ext_ctrl fi +echo Warmup ping for $ext_addr +ping -n -c 2 $ext_addr || true + echo Checking external connection to $ext_addr if ! 
ping -n -c 2 $ext_addr; then echo diff --git a/bin/python/combine_reports_from_date_range.py b/bin/python/combine_reports.py similarity index 88% rename from bin/python/combine_reports_from_date_range.py rename to bin/python/combine_reports.py index f5deb04b43..824099dbdf 100644 --- a/bin/python/combine_reports_from_date_range.py +++ b/bin/python/combine_reports.py @@ -11,7 +11,7 @@ from gcp import GcpManager from report import MdTable -LOGGER = logger.get_logger('combine_reports_from_date_range') +LOGGER = logger.get_logger('combine') DEFAULT_REPORTS_DIR = os.path.join('inst', 'reports') @@ -61,9 +61,9 @@ def _get_local_reports(device, reports_dir, start, end, count): LOGGER.info('Looking for reports locally') report_re = re.compile(r'^report_%s_(\d{4}-\d{2}-\d{2}T\d{6})\.json$' % device) json_files = [f for f in os.listdir(reports_dir) if report_re.match(f)] - json_files.sort() + json_files.sort(reverse=True) # Match gcp behavior if count and len(json_files) > count: - json_files = json_files[len(json_files) - count:] + json_files = json_files[:count] for json_file in json_files: timestamp = report_re.search(json_file).group(1) start_str = _iso_to_fname(start) @@ -76,7 +76,8 @@ def _get_local_reports(device, reports_dir, start, end, count): yield json.loads(json_file_handler.read()) -def main(device, start=None, end=None, gcp=None, reports_dir=DEFAULT_REPORTS_DIR, count=0): +def main(device, start=None, end=None, gcp=None, reports_dir=DEFAULT_REPORTS_DIR, + count=0, daq_run_id=None): # pylint: disable=too-many-arguments # pylint: disable=too-many-locals """Main script function""" @@ -85,9 +86,10 @@ def main(device, start=None, end=None, gcp=None, reports_dir=DEFAULT_REPORTS_DIR report_source = 'gcp' if gcp else 'local' if gcp: device_full = ":".join([device[i:i + 2] for i in range(0, len(device), 2)]) - json_reports = gcp.get_reports_from_date_range(device_full, start=start, end=end, - count=count) + json_reports = gcp.get_reports(device_full, start=start, 
end=end, + count=count, daq_run_id=daq_run_id) else: + assert not daq_run_id, 'daq_run_id not supported for local queries' json_reports = _get_local_reports(device, reports_dir, start, end, count) json_reports = list(json_reports) @@ -132,17 +134,23 @@ def _convert_iso(timestamp): GCP = None if CONFIG.get('gcp_cred') and CONFIG.get('from_gcp'): GCP = GcpManager(CONFIG, None) - assert all([attr in CONFIG for attr in ('from_time', 'to_time', 'device')]), """ + else: + assert not CONFIG.get('from_gcp'), 'missing gcp_cred definition' + + assert 'device' in CONFIG, """ Combines reports under inst/reports(default) or from GCP -Usage: combine_reports_from_date_range.py +Usage: combine_reports.py [local/system.yaml] device=xx:xx:xx:xx:xx:xx - from_time='YYYY-MM-DDThh:mm:ss' - to_time='YYYY-MM-DDThh:mm:ss' + [from_time='YYYY-MM-DDThh:mm:ss'] + [to_time='YYYY-MM-DDThh:mm:ss'] + [daq_run_id=] [count=N] [from_gcp='true'] """ FROM_TIME = _convert_iso(CONFIG.get('from_time')) TO_TIME = _convert_iso(CONFIG.get('to_time')) COUNT = int(CONFIG.get('count', 0)) - main(CONFIG.get('device'), start=FROM_TIME, end=TO_TIME, gcp=GCP, count=COUNT) + DAQ_RUN_ID = CONFIG.get('daq_run_id') + main(CONFIG.get('device'), start=FROM_TIME, end=TO_TIME, gcp=GCP, + count=COUNT, daq_run_id=DAQ_RUN_ID) diff --git a/bin/registrar b/bin/registrar deleted file mode 100755 index e2278951e4..0000000000 --- a/bin/registrar +++ /dev/null @@ -1,30 +0,0 @@ -#!/bin/bash -e - -ROOT=$(realpath $(dirname $0)/..) -cd $ROOT - -if [ $# != 1 ]; then - echo $0 [project_id] - false -fi -project_id=$1 -shift - -source etc/config_base.sh - -if [ -z "$site_path" ]; then - echo Need to define [site_path] config variable. - false -fi - -if [ -z "$schema_path" ]; then - echo Need to define [schema_path] config variable. - false -fi - -echo Building validator... 
-validator/bin/build > /dev/null - -echo Running tools version `git describe` - -validator/bin/registrar $project_id $site_path $schema_path $* diff --git a/bin/setup_base b/bin/setup_base index b965fa934f..42b5a7de7f 100755 --- a/bin/setup_base +++ b/bin/setup_base @@ -24,10 +24,7 @@ fi echo AG is $AG -# Hacky workaround for https://travis-ci.community/t/sometimes-build-fails-when-apt-is-updating-postgresql-apt-repository/4872/17 -# Need to remove the || true at the end of the update line. Same as for instance below. Also in bin/setup_dev. -$AG update || true -$AG install realpath || true # On newer versions this is included elsewhere. +$AG update $AG install expect lsb-release git curl sudo apt-transport-https software-properties-common gnupg-agent net-tools retry=bin/retry_cmd @@ -46,9 +43,7 @@ else echo "deb http://packages.wand.net.nz $release main" | sudo tee /etc/apt/sources.list.d/wand.list $retry sudo curl http://packages.wand.net.nz/keyring.gpg -o /etc/apt/trusted.gpg.d/wand.gpg - # Hacky workaround for https://travis-ci.community/t/sometimes-build-fails-when-apt-is-updating-postgresql-apt-repository/4872/17 - #Need to remove the || true at the end of the update line. Same as for instance above. Also in bin/setup_dev. - $AG update || true + $AG update $AG install docker-ce${DOCKER_VERSION} fi @@ -61,11 +56,12 @@ echo Adding username to docker group... sudo groupadd docker || true sudo usermod -aG docker $user -DEF_IFACE=`sudo route -n | fgrep UG | awk '{print $8}'` +DEF_IFACE=`sudo route -n | egrep '\sUG\s' | awk '{print $8}'` if [ -n "$DEF_IFACE" ]; then echo Allowing docker external access through interface $DEF_IFACE... sudo iptables -o docker0 -i $DEF_IFACE -A FORWARD -j ACCEPT sudo iptables -i docker0 -o $DEF_IFACE -A FORWARD -j ACCEPT fi +sudo iptables -A INPUT -i docker0 -j ACCEPT echo Logout and log back in to run tutorials without sudo! 
diff --git a/bin/setup_dev b/bin/setup_dev index 62223aa9c3..01aa1f620d 100755 --- a/bin/setup_dev +++ b/bin/setup_dev @@ -20,8 +20,13 @@ FORCHB=${DAQ_FORCH_BRANCH} FORCHX=$(cat etc/FORCH_VERSION) FORCHV=${DAQ_FORCH_VER:-$FORCHX} +UDMIR=${DAQ_UDMI_REPO:-https://github.com/faucetsdn/udmi} +UDMIB=${DAQ_UDMI_BRANCH} +UDMIX=$(cat etc/UDMI_VERSION) +UDMIV=${DAQ_UDMI_VER:-$UDMIX} + MININET=https://github.com/mininet/mininet -MININETV=2.3.0d6 +MININETV=$(cat etc/MININET_VERSION) if [ -f .daq.local ]; then echo Loading config from .daq.local @@ -36,6 +41,10 @@ if [ "$FORCHX" != "$FORCHV" ]; then echo $FORCHV > etc/FORCH_VERSION fi +if [ "$UDMIX" != "$UDMIV" ]; then + echo $UDMIV > etc/UDMI_VERSION +fi + if [ -z "$AG" ]; then AG="sudo apt-get -qqy --no-install-recommends" fi @@ -44,10 +53,6 @@ if [ -z "$PIP" ]; then PIP="python$PVERSION -m pip" fi -if [ -n "$TRAVIS" ]; then - DAQ_CONTAINER=travis -fi - echo AG is $AG echo PIP is $PIP echo Setup root is $PWD @@ -57,10 +62,8 @@ if [ "$DAQ_BUILD" == "no" ]; then exit 0 fi -# Hacky workaround for https://travis-ci.community/t/sometimes-build-fails-when-apt-is-updating-postgresql-apt-repository/4872/17 -# Need to remove the || true at the end of the update line. Also in bin/setup_base. echo $AG update -$AG update || true +$AG update echo $AG install $AG install lsb-release @@ -75,10 +78,6 @@ $AG install \ python$PVERSION python3-pkg-resources python3-setuptools \ python$PVERSION-dev python3-pip python emacs-nox python$PVERSION-venv -# Jump through some hoops for mininet, which still has some python2 deps. -$AG install python-pip -python2 -m pip install setuptools - if [ -d mininet ]; then echo Checking mininet version matches $MININETV... targetrev=$(cd mininet; git rev-parse $MININETV) @@ -107,7 +106,7 @@ else fi # Can't use venv inside of containers because of absolute paths. -if [ -n "$DAQ_CONTAINER" ]; then +if [ -n "$CI" ]; then echo Skipping venv activation. 
mkdir -p venv/bin touch venv/bin/activate @@ -133,12 +132,14 @@ $PIP install --upgrade --index-url=https://pypi.python.org/simple Jinja2 \ pylint==2.4.2 cryptography requests netifaces codecov coverage setuptools \ pyyaml cairocffi==1.0.2 WeasyPrint==50 pypandoc==1.4 \ firebase-admin==2.16.0 \ - google-cloud-pubsub==0.40.0 \ google-api-core==1.16.0 \ - google-cloud-storage==1.16.1 \ + google-cloud-core==1.3.0 \ google-cloud-firestore==1.6.0 \ - google-cloud-logging==1.14.0 - + google-cloud-logging==1.14.0 \ + google-cloud-pubsub==0.40.0 \ + google-cloud-storage==1.16.1 \ + grpcio-tools==1.30.0 + $PIP freeze echo Resetting .cache directory permissions... test -n "$USER" && sudo chown $USER -R $HOME/.cache @@ -167,6 +168,18 @@ else (cd forch; git fetch; git checkout $FORCHV) fi +if [ -z "$UDMIV" ]; then + echo No udmi version found, skipping. +else + if [ ! -d udmi ]; then + echo Cloning $UDMIR... + git clone $UDMIR udmi + fi + + echo Setting udmi version $UDMIV + (cd udmi; git fetch; git checkout $UDMIV) +fi + echo -n "DAQ commit " git log -n 1 --pretty=format:"%h - %an, %ar : %s" || true echo @@ -179,6 +192,10 @@ echo -n "Last FORCH commit " (cd forch; git log -n 1 --pretty=format:"%h - %an, %ar : %s" || true) echo +echo -n "Last UDMI commit " +(cd udmi; git log -n 1 --pretty=format:"%h - %an, %ar : %s" || true) +echo + docker --version if ! docker images > /dev/null; then diff --git a/bin/setup_testing b/bin/setup_testing deleted file mode 100755 index f5e73ca8b1..0000000000 --- a/bin/setup_testing +++ /dev/null @@ -1,63 +0,0 @@ -#!/bin/bash -e - -ROOT=$(dirname $0)/.. 
-cd $ROOT - -bin/build_hash check - -TARGET_ROOT=inst/faucet/daq-faucet-faucet - -for postfix in 1 2; do - TARGET=${TARGET_ROOT}$postfix - echo Preparing $TARGET - sudo rm -rf $TARGET && mkdir -p $TARGET - cp topology/alta-dev/faucet.yaml $TARGET/faucet.yaml - cp topology/alta-dev/gauge.yaml $TARGET/gauge.yaml -done - -cmd/faucet faucet1 6655 -cmd/faucet gauge faucet1 6656 9306 -cmd/faucet faucet2 6657 -cmd/faucet gauge faucet2 6658 9308 - -sudo ip addr flush ganga -sudo ip addr add 192.0.2.10/24 dev ganga - -sudo ovs-vsctl --if-exists del-br upstream -- add-br upstream -sudo ip link del daqnw || true -sudo ip link add daqnw type veth peer name t1bond -sudo ip link set daqnw up -sudo ip link set t1bond up -sudo ovs-vsctl add-port upstream daqnw - -sudo ip link del up_bond || true -sudo ip link add up_bond type bond mode 802.3ad lacp_rate fast -sudo ip link set up_bond up -sudo ip link set yamuna down -sudo ip link set yamuna master up_bond -sudo ip link set beas down -sudo ip link set beas master up_bond -sudo ovs-vsctl add-port upstream up_bond - -cmd/faux -n :t1bond -cmd/faux :satlej -cmd/faux :ravi -cmd/faux :tapti - -echo -docker exec daq-networking-t1bond ip addr -echo Waiting for DHCP... -sleep 30 -echo -docker exec daq-faux-satlej ip addr show dev satlej -echo -docker exec daq-faux-ravi ip addr show dev ravi -echo -docker exec daq-faux-tapti ip addr show dev tapti -echo -docker exec daq-faux-satlej ping -c 3 google.com -docker exec daq-faux-ravi ping -c 3 google.com -docker exec daq-faux-tapti ping -c 3 google.com -docker exec daq-faux-satlej ping -c 3 daq-faux-tapti -echo -echo Done with testing setup. diff --git a/bin/test_daq b/bin/test_daq index 279e3bc53b..416d027886 100755 --- a/bin/test_daq +++ b/bin/test_daq @@ -3,18 +3,20 @@ # Catch errors in diff piped through cat set -o pipefail -if [ -z "$DAQ_TEST" ]; then - echo DAQ_TEST not defined. 
+if [ $# != 1 ]; then + echo Usage: $0 test_name false fi -function delay_finish { - # Travis doesn't always wait for buffer to flush on exit, so give some time. - sleep 10 +DAQ_TEST=$1 +shift + +function test_finish { + echo Exiting test script. } -if [ -n "$TRAVIS" ]; then - trap delay_finish EXIT +if [ -n "$CI" ]; then + trap test_finish EXIT fi ROOT=$(realpath $(dirname $0)/..) @@ -31,6 +33,16 @@ echo -n "DAQ version " git describe --dirty --always echo +TAGGED_VERSION=`cat etc/docker_images.ver` +if ! git show $TAGGED_VERSION > /dev/null; then + echo + echo Tagged version $TAGGED_VERSION not found. + echo Maybe you need to fetch tags: git fetch --tags. + echo If this happens during CI testing, ensure tags were pushed to your repo. + echo + false +fi + if [ -d faucet ]; then echo -n "Last FAUCET commit " (cd $FAUCET; git log -n 1 --pretty=format:"%h - %an, %ar : %s" || true) @@ -59,9 +71,8 @@ GCP_RESULTS=inst/test_$DAQ_TEST.gcp CRED_FILE=inst/config/gcp_service_account.json echo Running test script $TEST_SCRIPT -# Combine stderr & stdout b/c travis has problems processing both. sudo PATH=$PATH TEST_RESULTS=$TEST_RESULTS GCP_RESULTS=$GCP_RESULTS \ - DAQ_CODECOV=y $TEST_SCRIPT 2>&1 + DAQ_CODECOV=y GCP_BASE64_CRED="$GCP_BASE64_CRED" $TEST_SCRIPT 2>&1 if [ -f .coverage ]; then codecov_tag=${DAQ_TEST##*/} diff --git a/bin/trigger_travis b/bin/trigger_travis deleted file mode 100755 index e32a6fe648..0000000000 --- a/bin/trigger_travis +++ /dev/null @@ -1,28 +0,0 @@ -#!/bin/bash -e - -OUTFILE=$HOME/.travis.out - -echo Trigger build at `date` > $OUTFILE - -echo Reading info from $HOME/.travis.env >> $OUTFILE -source $HOME/.travis.env - -if [ -z "$TRAVIS_API_TOKEN" ]; then - echo TRAVIS_API_TOKEN not defined. 
- false -fi - -body='{ -"request": { -"branch":"master" -}}' - -curl -s -X POST \ - -H "Content-Type: application/json" \ - -H "Accept: application/json" \ - -H "Travis-API-Version: 3" \ - -H "Authorization: token $TRAVIS_API_TOKEN" \ - -d "$body" \ - https://api.travis-ci.org/repo/grafnu%2Fdaq/requests >> $OUTFILE 2>&1 - -echo Trigger complete. >> $OUTFILE diff --git a/bin/troubleshoot b/bin/troubleshoot new file mode 100755 index 0000000000..06b3432bd3 --- /dev/null +++ b/bin/troubleshoot @@ -0,0 +1,29 @@ +#!/bin/bash + +ROOT=$(realpath $(dirname $0)/..) +cd $ROOT + +if [ ! -d inst ]; then + echo "Error: run this script after a test run completes" + exit 1 +fi + +# After the system settles (early on some dpid=1 messages are expected) if we see +# unknown dpid in faucet log, dpid might be misconfigured +unknown_dpid=`fgrep 'unknown datapath' inst/faucet.log | wc -l` +if [ "$unknown_dpid" -gt 20 ]; then + echo "Error: Faucet reports unknown datapath DPID:" + fgrep 'unknown datapath' inst/faucet.log | tail -n1 + echo "Check if switch_setup:of_dpid in config matches the DPID on the physical switch" +else + echo "Checking DPID misconfig: ok" +fi + +# If the switch test failed with a monitoring timeout, switch login info could be wrong +switch_timeout=`fgrep 'Monitoring timeout for switch' inst/cmdrun.log` +if [ -n "$switch_timeout" ]; then + echo "Error: Timeout connecting to physical switch" + echo "Check switch username/password configuration" +else + echo "Checking Switch timeout: ok" +fi diff --git a/bin/validate b/bin/validate deleted file mode 100755 index bd13b42612..0000000000 --- a/bin/validate +++ /dev/null @@ -1,35 +0,0 @@ -#!/bin/bash -e - -ROOT=$(realpath $(dirname $0)/..) 
-cd $ROOT - -source etc/config_base.sh - -if [ -z "$gcp_cred" ]; then - echo Please make sure gcp_cred is defined in local/system.conf - false -fi - -if [ -z "$gcp_topic" ]; then - echo Please make sure gcp_topic is defined in local/system.conf - false -fi - -if [ -z "$schema_path" ]; then - echo Please make sure schema_path is defined in local/system.conf - false -fi - -validator/bin/build - -unset GOOGLE_CLOUD_PROJECT -export GOOGLE_APPLICATION_CREDENTIALS=$PWD/$gcp_cred -echo Using credentials from $GOOGLE_APPLICATION_CREDENTIALS -echo Configured topic is $gcp_topic -echo Configured schema is $schema_path -if [ -n "$site_path" ]; then - echo Configured site path is $site_path -fi -echo - -validator/bin/validate $schema_path pubsub:$gcp_topic dev $site_path diff --git a/cmd/build b/cmd/build index dc428ba90d..5a2273d234 100755 --- a/cmd/build +++ b/cmd/build @@ -26,11 +26,12 @@ DOCKER_IMAGE_VER=docker_images.ver cd $ROOT source etc/config_base.sh -host_tests=$host_tests bin/docker_build_files +echo host_tests=$host_tests +test_targets=$(host_tests=$host_tests bin/docker_build_files) function pull_images { TAG=$1 declare -A test_set - for target in $(host_tests=$host_tests bin/docker_build_files); do + for target in $test_targets; do target=$(echo $target | sed 's|^.*/Dockerfile.||' | echo daqf/$( /dev/null 2>&1 || service docker start +bridges=`ovs-vsctl list-br` +for bridge in $bridges; do + echo Cleaning bridge $bridge... + # bug in ovs + timeout 10 ovs-vsctl --if-exists del-br $bridge || true + ovs-vsctl --if-exists del-br $bridge +done ovsctl=/usr/share/openvswitch/scripts/ovs-ctl $ovsctl status || sudo $ovsctl start @@ -99,12 +108,26 @@ if [ -n "$switch_setup_ext_br" ]; then autostart bin/external_ovs fi +if [ -n "$switch_setup_alt_port" ]; then + autostart bin/alt_faucet +fi + if [ -n "$switch_setup_model" ]; then autostart bin/physical_sec else echo No external switch model specified. 
fi +# USI related setup +docker rm -f daq-usi || true +docker0_ip=`sudo ifconfig docker0 | grep 'inet ' | awk '{print $2}'` +if [ -z "$usi_setup_url" -o "$usi_setup_url" == "localhost:5000" -o "$usi_setup_url" == "$docker0_ip:5000" ]; then + sudo iptables -C INPUT -i docker0 -j ACCEPT || sudo iptables -A INPUT -i docker0 -j ACCEPT + autostart gcp_cred=$gcp_cred cmd/usi + docker0_ip=`sudo ifconfig docker0 | grep 'inet ' | awk '{print $2}'` + uri_url_override="usi_setup.url=$docker0_ip:5000" +fi + # Kill any gateways so that they don't prematurely assign an IP address. gwids=$(docker ps --format '{{ .Image }} {{ .Names }}' | fgrep daqf/networking | awk '{print $2}') || true for gwid in $gwids; do @@ -149,7 +172,7 @@ export OVSVSCTL_ORIG=`which ovs-vsctl` export PATH=$ROOT/binhack:$PATH exit_code=0 -$runcmd daq/daq.py $conf_file $@ 2>&1 || exit_code=$? +$runcmd daq/daq.py $conf_file $uri_url_override $@ 2>&1 || exit_code=$? if [ -f "$cleanup_file" ]; then source $cleanup_file diff --git a/cmd/faux b/cmd/faux index 2ba40d83d3..dd4deb3eec 100755 --- a/cmd/faux +++ b/cmd/faux @@ -123,6 +123,7 @@ else sudo ip link del $intf 2>/dev/null || true echo Adding new interface to $pid... + sudo ip link add $intf type veth peer name faux-eth0 addr $intf_mac netns $pid sudo ip link set $intf up fi diff --git a/cmd/usi b/cmd/usi new file mode 100755 index 0000000000..44067356c1 --- /dev/null +++ b/cmd/usi @@ -0,0 +1,34 @@ +#!/bin/bash -e + +ROOT=$(realpath $(dirname $0)/..) 
+USI_DIR=$ROOT/inst/network +DEBUG="" + +debug_mode="true" #Always in debug mode for now +if [ -n $debug_mode ]; then + DEBUG="debug" + echo Starting USI in debug mode +else + echo Starting USI +fi + +if [ -f "$gcp_cred" ]; then + origin=`jq -r '.client_email' $gcp_cred | sed 's/@.*//'` + project=`jq -r '.project_id' $gcp_cred | sed 's/@.*//'` + log_driver="--log-driver=gcplogs --log-opt gcp-project=$project --log-opt labels=origin --label origin=$origin" + sudo mkdir -p /etc/systemd/system/docker.service.d +cat < /etc/systemd/system/docker.service.d/docker-override.conf +[Service] +Environment="GOOGLE_APPLICATION_CREDENTIALS=`realpath $gcp_cred`" +EOF + sudo systemctl daemon-reload + sudo systemctl restart docker +fi + +rm -rf $USI_DIR +mkdir -p $USI_DIR +args="-d -v $USI_DIR:/ovs --privileged --network=host -e DEBUG=$DEBUG --name daq-usi daqf/usi" +docker run $log_driver $args || docker run $args + +echo DAQ autoclean docker cp daq-usi:/root/logs.txt inst/cmdusi.log +echo DAQ autoclean docker kill daq-usi diff --git a/config/faucet/faucet_alt-switch.yaml b/config/faucet/faucet_alt-switch.yaml new file mode 100644 index 0000000000..059233aa4b --- /dev/null +++ b/config/faucet/faucet_alt-switch.yaml @@ -0,0 +1,22 @@ +dps: + alt-switch: + dp_id: 2 + interfaces: + 1: + native_vlan: 1002 + 2: + native_vlan: 1001 + 3: + native_vlan: 1003 + 4: + native_vlan: 1004 + 5: + native_vlan: 1005 + 100: + tagged_vlans: [1001, 1002, 1003, 1004, 1005] +vlans: + 1001: + 1002: + 1003: + 1004: + 1005: diff --git a/config/modules/all.conf b/config/modules/all.conf index 4d897ae7c5..37b581b695 100644 --- a/config/modules/all.conf +++ b/config/modules/all.conf @@ -3,8 +3,8 @@ include config/modules/host.conf # All contributed modules. 
include subset/switches/build.conf -include subset/connection/build.conf include subset/bacnet/build.conf include subset/security/build.conf include subset/cloud/build.conf -include subset/manual/build.conf \ No newline at end of file +include subset/manual/build.conf +include subset/network/build.conf diff --git a/config/modules/host.conf b/config/modules/host.conf index e6ec5e37ef..cd9e9421b4 100644 --- a/config/modules/host.conf +++ b/config/modules/host.conf @@ -12,6 +12,10 @@ add mudgee # Additional base modules include subset/pentests/build.conf +include usi/build.conf + +# Extended dhcp tests +add ipaddr # Example of how to remove something. remove unused diff --git a/config/modules/topo.conf b/config/modules/topo.conf index 4d47ec6cf9..ba02ce2374 100644 --- a/config/modules/topo.conf +++ b/config/modules/topo.conf @@ -3,3 +3,4 @@ build docker/modules # Use ping with runtime configuration for topo testing. add ping +include usi/build.conf diff --git a/config/system/all.conf b/config/system/all.conf index 0fe77da8fb..612cdc89cb 100644 --- a/config/system/all.conf +++ b/config/system/all.conf @@ -3,7 +3,7 @@ # Load defaults. source config/system/default.yaml -# Description description for dashboard. +# Description for dashboard. site_description="Multi-Device All-Tests Configuration" # Upstream dataplane port from the external (secondary) switch. diff --git a/config/system/alt.yaml b/config/system/alt.yaml new file mode 100644 index 0000000000..ab1f6945f2 --- /dev/null +++ b/config/system/alt.yaml @@ -0,0 +1,23 @@ +# Example configuration file for using an OVS switch not managed by DAQ. + +# Load defaults. +include: config/system/default.yaml + +# Description for dashboard. +site_description: "Alternate (not managed by DAQ) OVS switch configuration" + +# Network switch configuration. +switch_setup: + data_intf: alt-intf + alt_port: 6669 + uplink_port: 100 + ext_br: alt-switch + +# Faux device connection for testing. 
+interfaces: + faux: + opts: + port: 2 + +# use vlan trigger +run_trigger_type: VLAN diff --git a/config/system/default.yaml b/config/system/default.yaml index 644993b96e..3b3746f6f4 100644 --- a/config/system/default.yaml +++ b/config/system/default.yaml @@ -37,3 +37,13 @@ long_dhcp_response_sec: 105 # finish hook: executed at the end of every test finish_hook: bin/dump_network + +# topology hook: executed when device topology changes +topology_hook: bin/dump_network + +run_trigger_type: PORT + +# usi url for DAQ to connect to +usi_setup: + url: localhost:5000 + rpc_timeout_sec: 10 diff --git a/config/system/ext.conf b/config/system/ext.conf index 8625109734..28dc34b707 100644 --- a/config/system/ext.conf +++ b/config/system/ext.conf @@ -3,7 +3,7 @@ # Load defaults. source config/system/default.yaml -# Description description for dashboard. +# Description for dashboard. site_description="External (not integrated with DAQ) OVS switch configuration" # Network switch configuration. diff --git a/config/system/ext.yaml b/config/system/ext.yaml index 7ad626341e..4fef079c1e 100644 --- a/config/system/ext.yaml +++ b/config/system/ext.yaml @@ -3,7 +3,7 @@ # Load defaults. include: config/system/default.yaml -# Description description for dashboard. +# Description for dashboard. site_description: "External (not integrated with DAQ) OVS switch configuration" # Network switch configuration. diff --git a/config/system/muddy.conf b/config/system/muddy.conf index 6510e2b113..3d3a17b30c 100644 --- a/config/system/muddy.conf +++ b/config/system/muddy.conf @@ -3,7 +3,7 @@ # Load defaults. source config/system/default.yaml -# Description description for dashboard. +# Description for dashboard. site_description="Multi-Device Configuration" # Upstream dataplane port from the external (secondary) switch. 
diff --git a/config/system/multi.conf b/config/system/multi.conf index 185bbc40df..367a94e86b 100644 --- a/config/system/multi.conf +++ b/config/system/multi.conf @@ -3,7 +3,7 @@ # Load defaults. source config/system/default.yaml -# Description description for dashboard. +# Description for dashboard. site_description="Multi-Device Configuration" # Upstream dataplane port from the external (secondary) switch. diff --git a/daq/base_module.py b/daq/base_module.py new file mode 100644 index 0000000000..f8062e4dd8 --- /dev/null +++ b/daq/base_module.py @@ -0,0 +1,38 @@ +"""Host module base class""" + +from __future__ import absolute_import + +import datetime +import logger + + +LOGGER = logger.get_logger('module') + + +class HostModule: + """Base class for host test modules""" + + def __init__(self, host, tmpdir, test_name, module_config): + self.host = host + self.tmpdir = tmpdir + self.test_name = test_name + self.device = host.device + self.test_config = module_config.get('modules').get(test_name) + self.runner = host.runner + self.host_name = '%s%02d' % (test_name, host.device.set_id) + # Host name can't be more than 10 characters because it is also used to create a + # network interface with -eth0 on the end and there's a hard linux limit on length. 
+ assert len(self.host_name) <= 10, 'Hostname %s too long' + self.callback = None + self._finish_hook = None + self.start_time = None + + def start(self, port, params, callback, finish_hook): + """Start a test module""" + LOGGER.debug('Starting test module %s', self) + self.callback = callback + self._finish_hook = finish_hook + self.start_time = datetime.datetime.now() + + def __repr__(self): + return "Target device %s test %s" % (self.device, self.test_name) diff --git a/daq/dhcp_monitor.py b/daq/dhcp_monitor.py index 6a5beceae0..4c312cb89b 100644 --- a/daq/dhcp_monitor.py +++ b/daq/dhcp_monitor.py @@ -57,10 +57,12 @@ def _dhcp_line(self): if match: if match.group(2): self.target_ip = match.group(2) - if match.group(4) == "ACK": - self._dhcp_success() if match.group(6): self.target_mac = match.group(6) + if match.group(4) == "ACK": + if not self.target_ip or not self.target_mac: + LOGGER.warning('dhcp ACK incomplete: %s', dhcp_line) + self._dhcp_success() def cleanup(self): """Cleanup any ongoing dhcp activity""" diff --git a/daq/docker_test.py b/daq/docker_test.py index fdabebd0f7..9c678ee10d 100644 --- a/daq/docker_test.py +++ b/daq/docker_test.py @@ -3,6 +3,10 @@ import datetime import os import subprocess +import string +import random + +from base_module import HostModule import logger from clib import docker_host @@ -11,40 +15,27 @@ LOGGER = logger.get_logger('docker') -class DockerTest: +class DockerTest(HostModule): """Class for running docker tests""" IMAGE_NAME_FORMAT = 'daqf/test_%s' TAGGED_IMAGE_FORMAT = IMAGE_NAME_FORMAT + ':latest' CONTAINER_PREFIX = 'daq' - # pylint: disable=too-many-arguments - def __init__(self, runner, target_port, tmpdir, test_name, env_vars=None): - self.target_port = target_port - self.tmpdir = tmpdir - self.test_name = test_name - self.runner = runner - self.host_name = '%s%02d' % (test_name, self.target_port) + def __init__(self, host, tmpdir, test_name, module_config): + super().__init__(host, tmpdir, test_name, 
module_config) self.docker_log = None self.docker_host = None - self.callback = None - self.start_time = None self.pipe = None - self.env_vars = env_vars or [] - self._finish_hook = None def start(self, port, params, callback, finish_hook): """Start the docker test""" - LOGGER.debug('Target port %d starting docker test %s', self.target_port, self.test_name) - - self.start_time = datetime.datetime.now() - self.callback = callback - self._finish_hook = finish_hook + super().start(port, params, callback, finish_hook) def opt_param(key): return params.get(key) or '' # Substitute empty string for None - env_vars = self.env_vars + [ + env_vars = [ "TARGET_NAME=" + self.host_name, "TARGET_IP=" + params['target_ip'], "TARGET_MAC=" + params['target_mac'], @@ -61,7 +52,7 @@ def opt_param(key): self._map_if_exists(vol_maps, params, 'type') image = self.IMAGE_NAME_FORMAT % self.test_name - LOGGER.debug("Target port %d running docker test %s", self.target_port, image) + LOGGER.debug("%s running docker test %s", self, image) cls = docker_host.make_docker_host(image, prefix=self.CONTAINER_PREFIX) # Work around an instability in the faucet/clib/docker library, b/152520627. setattr(cls, 'pullImage', self._check_image) @@ -74,20 +65,20 @@ def opt_param(key): raise wrappers.DaqException(e) try: - LOGGER.debug("Target port %d activating docker test %s", self.target_port, image) + LOGGER.debug("%s activating docker test %s", self, image) pipe = host.activate(log_name=None) # Docker tests don't use DHCP, so manually set up DNS. 
host.cmd('echo nameserver $GATEWAY_IP > /etc/resolv.conf') self.docker_log = host.open_log() if self._should_raise_test_exception('initialize'): - LOGGER.error('Target port %d inducing initialization failure', self.target_port) + LOGGER.error('%s inducing initialization failure', self) raise Exception('induced initialization failure') self.runner.monitor_stream(self.host_name, pipe.stdout, copy_to=self.docker_log, hangup=self._docker_complete, error=self._docker_error) self.pipe = pipe if self._should_raise_test_exception('callback'): - LOGGER.error('Target port %d will induce callback failure', self.target_port) + LOGGER.error('%s will induce callback failure', self) # Closing this now will cause error when attempting to write output. self.docker_log.close() except Exception as e: @@ -98,7 +89,7 @@ def opt_param(key): self.runner.monitor_forget(self.pipe.stdout) self.pipe = None raise e - LOGGER.info("Target port %d test %s running", self.target_port, self.test_name) + LOGGER.info("%s running", self) def _check_image(self): lines = subprocess.check_output(["docker", "images", "--format", @@ -109,7 +100,7 @@ def _check_image(self): def terminate(self): """Forcibly terminate this container""" - LOGGER.info("Target port %d test %s terminating", self.target_port, self.test_name) + LOGGER.info("%s terminating", self) return self._docker_finalize() def _map_if_exists(self, vol_maps, params, kind): @@ -117,18 +108,17 @@ def _map_if_exists(self, vol_maps, params, kind): if base and os.path.exists(base): abs_base = os.path.abspath(base) vol_maps += ['%s:/config/%s' % (abs_base, kind)] - LOGGER.info('Target port %d mapping %s to /config/%s', self.target_port, abs_base, kind) + LOGGER.info('%s mapping %s to /config/%s', self, abs_base, kind) def _docker_error(self, exception): - LOGGER.error('Target port %d docker error: %s', self.target_port, str(exception)) + LOGGER.error('%s docker error: %s', self, str(exception)) if self._docker_finalize() is None: - 
LOGGER.warning('Target port %d docker already terminated.', self.target_port) + LOGGER.warning('%s docker already terminated.', self) else: self.callback(exception=exception) def _docker_finalize(self): - assert self.docker_host, 'docker host %s already finalized' % self.target_port - LOGGER.info('Target port %d docker finalize', self.target_port) + assert self.docker_host, 'docker host %s already finalized' % self if self._finish_hook: self._finish_hook() self.runner.remove_host(self.docker_host) @@ -136,16 +126,17 @@ def _docker_finalize(self): self.runner.monitor_forget(self.pipe.stdout) self.pipe = None return_code = self.docker_host.terminate() + LOGGER.info('%s docker finalize %d', self, return_code) self.docker_host = None self.docker_log.close() self.docker_log = None if self._should_raise_test_exception('finalize'): - LOGGER.error('Target port %d inducing finalize failure', self.target_port) + LOGGER.error('%s inducing finalize failure', self) raise Exception('induced finalize failure') return return_code def _should_raise_test_exception(self, trigger_value): - key = '%s_%02d' % (self.test_name, self.target_port) + key = "%s_%s" % (self.test_name, self.device.mac.replace(':', '')) return self.runner.config.get('fail_module', {}).get(key) == trigger_value def _docker_complete(self): @@ -159,12 +150,18 @@ def _docker_complete(self): exception = e LOGGER.exception(e) delay = (datetime.datetime.now() - self.start_time).total_seconds() - LOGGER.debug("Target port %d docker complete, return=%d (%s)", - self.target_port, return_code, exception) + LOGGER.debug("%s docker complete, return=%d (%s)", + self, return_code, exception) if return_code: - LOGGER.info("Target port %d test %s failed %ss: %s %s", - self.target_port, self.test_name, delay, return_code, exception) + LOGGER.info("%s failed %ss: %s %s", + self, delay, return_code, exception) else: - LOGGER.info("Target port %d test %s passed %ss", - self.target_port, self.test_name, delay) + LOGGER.info("%s 
passed %ss", + self, delay) self.callback(return_code=return_code, exception=exception) + + def _get_random_string(self, length): + return ''.join(random.choice(string.ascii_letters) for _ in range(length)) + + def ip_listener(self, target_ip): + """Do nothing b/c docker tests don't care about ip notifications""" diff --git a/daq/faucet_event_client.py b/daq/faucet_event_client.py index a97c8c9bb5..b2254b1cf9 100644 --- a/daq/faucet_event_client.py +++ b/daq/faucet_event_client.py @@ -184,11 +184,12 @@ def as_port_state(self, event): def as_port_learn(self, event): """Convert to port learning info, if applicable""" if not event or 'L2_LEARN' not in event: - return (None, None, None) + return [None] * 4 dpid = event['dp_id'] port_no = int(event['L2_LEARN']['port_no']) eth_src = event['L2_LEARN']['eth_src'] - return (dpid, port_no, eth_src) + vid = event['L2_LEARN']['vid'] + return (dpid, port_no, eth_src, vid) def close(self): """Close the faucet event socket""" diff --git a/daq/gateway.py b/daq/gateway.py index c7c54182a5..60b6abe193 100644 --- a/daq/gateway.py +++ b/daq/gateway.py @@ -12,6 +12,7 @@ LOGGER = logger.get_logger('gateway') + class Gateway(): """Gateway collection class for managing testing services""" @@ -37,8 +38,8 @@ def __init__(self, runner, name, port_set, network): self.dummy = None self.tmpdir = None self.targets = {} - self.test_ports = {} - self.ready = {} + self.test_ports = set() + self.ready = set() self.activated = False self.result_linger = False self._scan_monitor = None @@ -125,6 +126,18 @@ def request_new_ip(self, mac): """Requests a new ip for the device""" self.execute_script('new_ip', mac) + def change_dhcp_response_time(self, mac, time): + """Change dhcp response time for device mac""" + self.execute_script('change_dhcp_response_time', mac, time) + + def stop_dhcp_response(self, mac): + """Stops DHCP response for the device""" + self.change_dhcp_response_time(mac, -1) + + def change_dhcp_range(self, start, end, prefix_length): + 
"""Change dhcp range for devices""" + self.execute_script('change_dhcp_range', start, end, prefix_length) + def allocate_test_port(self): """Get the test port to use for this gateway setup""" test_port = self._switch_port(self.TEST_OFFSET_START) @@ -132,7 +145,7 @@ def allocate_test_port(self): test_port = test_port + 1 limit_port = self._switch_port(self.NUM_SET_PORTS) assert test_port < limit_port, 'no test ports available' - self.test_ports[test_port] = True + self.test_ports.add(test_port) return test_port def _startup_scan(self, host): @@ -160,7 +173,7 @@ def _scan_error(self, e): def release_test_port(self, test_port): """Release the given port from the gateway""" assert test_port in self.test_ports, 'test port not allocated' - del self.test_ports[test_port] + self.test_ports.remove(test_port) def _switch_port(self, offset): return self.port_set * self.SET_SPACING + offset @@ -169,9 +182,8 @@ def _is_target_expected(self, target): if not target: return False target_mac = target['mac'] - for target_port in self.targets: - if self.targets[target_port]['mac'] == target_mac: - return True + if target_mac in self.targets: + return True LOGGER.warning('No target match found for %s in %s', target_mac, self.name) return False @@ -179,7 +191,7 @@ def _dhcp_callback(self, state, target, exception=None): if exception: LOGGER.error('Gateway DHCP exception %s', exception) if self._is_target_expected(target) or exception: - self.runner.ip_notify(state, target, self.port_set, exception=exception) + self.runner.ip_notify(state, target, self, exception=exception) def _setup_tmpdir(self, base_name): tmpdir = os.path.join('inst', base_name) @@ -188,26 +200,26 @@ def _setup_tmpdir(self, base_name): os.makedirs(tmpdir) return tmpdir - def attach_target(self, target_port, target): + def attach_target(self, device): """Attach the given target to this gateway; return number of attached targets.""" - assert target_port not in self.targets, 'target already attached to gw' - 
LOGGER.info('Attaching target %d to gateway group %s', target_port, self.name) - self.targets[target_port] = target + assert device.mac not in self.targets, 'target %s already attached to gw' % device + LOGGER.info('Attaching target %s to gateway group %s', device, self.name) + self.targets[device.mac] = device return len(self.targets) - def detach_target(self, target_port): + def detach_target(self, device): """Detach the given target from this gateway; return number of remaining targets.""" - assert target_port in self.targets, 'target not attached to gw' - LOGGER.info('Detach target %d from gateway group %s: %s', - target_port, self.name, list(self.targets.keys())) - del self.targets[target_port] + assert device.mac in self.targets, 'target %s not attached to gw' % device + LOGGER.info('Detach target %s from gateway group %s: %s', + device, self.name, list(self.targets.keys())) + del self.targets[device.mac] return len(self.targets) - def target_ready(self, target_mac): + def target_ready(self, device): """Mark a target ready, and return set of ready targets""" - if not target_mac in self.ready: - LOGGER.info('Ready target %s from gateway group %s', target_mac, self.name) - self.ready[target_mac] = True + if device not in self.ready: + LOGGER.info('Ready target %s from gateway group %s', device, self.name) + self.ready.add(device) return self.ready def get_targets(self): @@ -240,3 +252,6 @@ def terminate(self): def _ping_test(self, src, dst, src_addr=None): return self.runner.ping_test(src, dst, src_addr=src_addr) + + def __repr__(self): + return 'Gateway group %s set %d' % (self.name, self.port_set) diff --git a/daq/gcp.py b/daq/gcp.py index 17051870a1..0dea14d14f 100644 --- a/daq/gcp.py +++ b/daq/gcp.py @@ -24,6 +24,7 @@ # pylint: disable=no-member DESCENDING = firestore.Query.DESCENDING + def get_timestamp(): """"Get a JSON-compatible formatted timestamp""" return to_timestamp(datetime.datetime.now(datetime.timezone.utc)) @@ -49,7 +50,7 @@ def __init__(self, 
config, callback_handler): self._callback_handler = callback_handler cred_file = self.config.get('gcp_cred') if not cred_file: - LOGGER.info('No gcp_cred filr specified in config, disabling gcp use.') + LOGGER.info('No gcp_cred file specified in config, disabling gcp use.') self._pubber = None self._storage = None self._firestore = None @@ -263,19 +264,25 @@ def _get_json_report(self, runid): blob = self._bucket.blob(report_blob) return json.loads(str(blob.download_as_string(), 'utf-8')) - def get_reports_from_date_range(self, device: str, start=None, end=None, count=None): - """Combine test results from reports within a date range""" + # pylint: disable=too-many-arguments + def get_reports(self, device: str, start=None, end=None, count=None, daq_run_id=None): + """Get filtered list of reports""" if not self._firestore: LOGGER.error('Firestore not initialized.') return - LOGGER.info('Looking for reports...') + LOGGER.info('Looking for reports from GCP...') limit_count = count if count else DEFAULT_LIMIT origin = self._firestore.collection(u'origin').document(self._client_name).get() query = origin.reference.collection('runid').where('deviceId', '==', device) if start: + LOGGER.info('Limiting to start time %s', to_timestamp(start)) query = query.where('updated', '>=', to_timestamp(start)) if end: + LOGGER.info('Limiting to end time %s', to_timestamp(end)) query = query.where('updated', '<=', to_timestamp(end)) + if daq_run_id: + LOGGER.info('Limiting to DAQ run id %s', daq_run_id) + query = query.where('daq_run_id', '==', daq_run_id) runids = query.order_by(u'updated', direction=DESCENDING).limit(limit_count).stream() for runid in runids: json_report = self._get_json_report(runid) diff --git a/daq/host.py b/daq/host.py index dc13f9b659..c600cb3835 100644 --- a/daq/host.py +++ b/daq/host.py @@ -5,17 +5,20 @@ import shutil import time from datetime import timedelta, datetime +import grpc from clib import tcpdump_helper + from report import ResultType, ReportGenerator 
+from proto import usi_pb2 as usi +from proto import usi_pb2_grpc as usi_service import configurator import docker_test import gcp +import ipaddr_test import logger -LOGGER = logger.get_logger('host') - class _STATE: """Host state enum for testing cycle""" @@ -31,6 +34,7 @@ class _STATE: TERM = 'Host terminated' + class MODE: """Test module modes for state reporting.""" INIT = 'init' @@ -46,21 +50,29 @@ class MODE: LONG = 'long' MERR = 'merr' + def pre_states(): """Return pre-test states for basic operation""" - return ['startup', 'sanity', 'ipaddr', 'base', 'monitor'] + return ['startup', 'sanity', 'acquire', 'base', 'monitor'] def post_states(): """Return post-test states for recording finalization""" return ['finish', 'info', 'timer'] +def get_test_config(config, test): + """Get a single test module's config""" + return config["modules"].get(test) + + class ConnectedHost: """Class managing a device-under-test""" _STARTUP_MIN_TIME_SEC = 5 + _RPC_TIMEOUT_SEC = 10 _INST_DIR = "inst/" _DEVICE_PATH = "device/%s" + _NETWORK_DIR = "inst/network" _MODULE_CONFIG = "module_config.json" _CONTROL_PATH = "control/port-%s" _CORE_TESTS = ['pass', 'fail', 'ping', 'hold'] @@ -68,19 +80,22 @@ class ConnectedHost: _CONFIG_DIR = "config/" _TIMEOUT_EXCEPTION = TimeoutError('Timeout expired') - def __init__(self, runner, gateway, target, config): + # pylint: disable=too-many-statements + def __init__(self, runner, device, config): self.configurator = configurator.Configurator() self.runner = runner self._gcp = runner.gcp - self.gateway = gateway + self.gateway = device.gateway self.config = config self.switch_setup = self.config.get('switch_setup', {}) - self.target_port = target['port'] - self.target_mac = target['mac'] - self.fake_target = target['fake'] + self.device = device + self.target_mac = device.mac + self.target_port = device.port.port_no + self.fake_target = self.gateway.fake_target self.devdir = self._init_devdir() self.run_id = self.make_runid() self.scan_base = 
os.path.abspath(os.path.join(self.devdir, 'scans')) + self.logger = logger.get_logger('host') self._port_base = self._get_port_base() self._device_base = self._get_device_base() self.state = None @@ -95,7 +110,8 @@ def __init__(self, runner, gateway, target, config): self._monitor_scan_sec = int(config.get('monitor_scan_sec', 0)) _default_timeout_sec = int(config.get('default_timeout_sec', 0)) self._default_timeout_sec = _default_timeout_sec if _default_timeout_sec else None - self._finish_hook_script = config.get('finish_hook') + self._usi_config = config.get('usi_setup', {}) + self._topology_hook_script = config.get('topology_hook') self._mirror_intf_name = None self._monitor_ref = None self._monitor_start = None @@ -106,7 +122,8 @@ def __init__(self, runner, gateway, target, config): assert self._loaded_config, 'config was not loaded' self._write_module_config(self._loaded_config, self._device_aux_path()) self.remaining_tests = self._get_enabled_tests() - LOGGER.info('Host %s running with enabled tests %s', self.target_port, self.remaining_tests) + self.logger.info('Host %s running with enabled tests %s', self.target_mac, + self.remaining_tests) self._report = ReportGenerator(config, self._INST_DIR, self.target_mac, self._loaded_config) self.record_result('startup', state=MODE.PREP) @@ -115,6 +132,7 @@ def __init__(self, runner, gateway, target, config): self._startup_file = None self.timeout_handler = self._aux_module_timeout_handler self._all_ips = [] + self._ip_listener = None @staticmethod def make_runid(): @@ -122,20 +140,20 @@ def make_runid(): return '%06x' % int(time.time()) def _init_devdir(self): - devdir = os.path.join(self._INST_DIR, 'run-port-%02d' % self.target_port) + devdir = os.path.join(self._INST_DIR, 'run-%s' % self.target_mac.replace(':', '')) shutil.rmtree(devdir, ignore_errors=True) os.makedirs(devdir) return devdir def _get_port_base(self): test_config = self.config.get('test_config') - if not test_config: - return None - conf_base = 
os.path.abspath(os.path.join(test_config, 'port-%02d' % self.target_port)) - if not os.path.isdir(conf_base): - LOGGER.warning('Test config directory not found: %s', conf_base) - return None - return conf_base + if test_config and self.target_port: + conf_base = os.path.abspath(os.path.join(test_config, 'port-%02d' % self.target_port)) + if not os.path.isdir(conf_base): + self.logger.warning('Test config directory not found: %s', conf_base) + return None + return conf_base + return None def _make_config_bundle(self, config=None): return { @@ -148,22 +166,25 @@ def _make_control_bundle(self): 'paused': self.state == _STATE.READY } + def _get_test_config(self, test): + return get_test_config(self._loaded_config, test) + def _test_enabled(self, test): fallback_config = {'enabled': test in self._CORE_TESTS} - test_config = self._loaded_config['modules'].get(test, fallback_config) + test_config = self._get_test_config(test) or fallback_config return test_config.get('enabled', True) def _get_test_timeout(self, test): - test_module = self._loaded_config['modules'].get(test) if test == 'hold': return None + test_module = self._get_test_config(test) if not test_module: return self._default_timeout_sec return test_module.get('timeout_sec', self._default_timeout_sec) def get_port_flap_timeout(self, test): """Get port toggle timeout configuration that's specific to each test module""" - test_module = self._loaded_config['modules'].get(test) + test_module = self._get_test_config(test) if not test_module: return None return test_module.get('port_flap_timeout_sec') @@ -193,18 +214,20 @@ def _get_unique_upload_path(self, file_name): partial = os.path.join('tests', self.test_name, base) if self.test_name else base return os.path.join('run_id', self.run_id, partial) - def _load_config(self, config, path): + def _load_config(self, name, config, path): + if name: + self.logger.info('Loading %s module config from %s', name, path) return self.configurator.load_and_merge(config, path, 
self._MODULE_CONFIG, optional=True) def _write_module_config(self, config, path): self.configurator.write_config(config, path, self._MODULE_CONFIG) def _type_path(self): - dev_config = self._load_config({}, self._device_base) + dev_config = self._load_config(None, {}, self._device_base) device_type = dev_config.get('device_type') if not device_type: return None - LOGGER.info('Configuring device %s as type %s', self.target_mac, device_type) + self.logger.info('Configuring device %s as type %s', self.device, device_type) site_path = self.config.get('site_path') type_path = os.path.abspath(os.path.join(site_path, 'device_types', device_type)) return type_path @@ -215,20 +238,20 @@ def _type_aux_path(self): return None aux_path = os.path.join(type_path, self._AUX_DIR) if not os.path.exists(aux_path): - LOGGER.info('Skipping missing type dir %s', aux_path) + self.logger.info('Skipping missing type dir %s', aux_path) return None return aux_path def _create_device_dir(self, path): - LOGGER.warning('Creating new device dir: %s', path) + self.logger.warning('Creating new device dir: %s', path) os.makedirs(path) template_dir = self.config.get('device_template') if not template_dir: - LOGGER.warning('Skipping defaults since no device_template found') + self.logger.warning('Skipping defaults since no device_template found') return - LOGGER.info('Copying template files from %s to %s', template_dir, path) + self.logger.info('Copying template files from %s to %s', template_dir, path) for file in os.listdir(template_dir): - LOGGER.info('Copying %s...', file) + self.logger.info('Copying %s...', file) shutil.copy(os.path.join(template_dir, file), path) def _upload_file(self, path): @@ -237,14 +260,16 @@ def _upload_file(self, path): def initialize(self): """Fully initialize a new host set""" - LOGGER.info('Target port %d initializing...', self.target_port) + self.logger.info('Target device %s initializing...', self) # There is a race condition here with ovs assigning ports, so wait 
a bit. time.sleep(2) shutil.rmtree(self.devdir, ignore_errors=True) os.makedirs(self.scan_base) self._initialize_config() network = self.runner.network - self._mirror_intf_name = network.create_mirror_interface(self.target_port) + if self.target_port: + self._mirror_intf_name = network.create_mirror_interface(self.target_port) + self._topology_hook() if self.config['test_list']: self._start_run() else: @@ -268,9 +293,27 @@ def _state_transition(self, target, expected=None): message = 'state was %s expected %s' % (self.state, expected) assert self.state == expected, message assert self.state != _STATE.TERM, 'host already terminated' - LOGGER.debug('Target port %d state: %s -> %s', self.target_port, self.state, target) + self.logger.debug('Target device %s state: %s -> %s', self, self.state, target) self.state = target + def _build_switch_info(self) -> usi.SwitchInfo: + switch_config = self._get_switch_config() + model_str = switch_config['model'] + if model_str == 'FAUX_SWITCH' or not self.target_port: + return None + if model_str: + switch_model = usi.SwitchModel.Value(model_str) + else: + switch_model = usi.SwitchModel.OVS_SWITCH + params = { + "ip_addr": switch_config["ip"], + "device_port": self.target_port, + "model": switch_model, + "username": switch_config["username"], + "password": switch_config["password"] + } + return usi.SwitchInfo(**params) + def is_running(self): """Return True if this host is running active test.""" return self.state != _STATE.ERROR and self.state != _STATE.DONE @@ -285,29 +328,50 @@ def notify_activate(self): self._record_result('startup', state=MODE.HOLD) return self.state == _STATE.WAITING + def connect_port(self, connect): + """Connects/Disconnects port for this host""" + switch_info = self._build_switch_info() + if not switch_info: + self.logger.info('No switch model found, skipping port connect') + return False + try: + with grpc.insecure_channel(self._usi_config.get('url')) as channel: + timeout = 
self._usi_config.get('rpc_timeout_sec', self._RPC_TIMEOUT_SEC) + stub = usi_service.USIServiceStub(channel) + if connect: + res = stub.connect(switch_info, timeout=timeout) + else: + res = stub.disconnect(switch_info, timeout=timeout) + self.logger.info('Target port %s %s successful? %s', self.target_port, "connect" + if connect else "disconnect", res.success) + except Exception as e: + self.logger.error(e) + raise e + return True + def _prepare(self): - LOGGER.info('Target port %d waiting for ip as %s', self.target_port, self.target_mac) + self.logger.info('Target device %s waiting for ip', self) self._state_transition(_STATE.WAITING, _STATE.INIT) self.record_result('sanity', state=MODE.DONE) - self.record_result('ipaddr', state=MODE.EXEC) + self.record_result('acquire', state=MODE.EXEC) static_ip = self._get_static_ip() if static_ip: - LOGGER.info('Target port %d using static ip', self.target_port) + self.logger.info('Target device %s using static ip', self) time.sleep(self._STARTUP_MIN_TIME_SEC) self.runner.ip_notify(MODE.NOPE, { 'mac': self.target_mac, 'ip': static_ip, 'delta': -1 - }, self.gateway.port_set) + }, self.gateway) else: dhcp_mode = self._get_dhcp_mode() # enables dhcp response for this device wait_time = self.runner.config.get("long_dhcp_response_sec") \ if dhcp_mode == 'long_response' else 0 - LOGGER.info('Target port %d using %s DHCP mode, wait %s', - self.target_port, dhcp_mode, wait_time) - self.gateway.execute_script('change_dhcp_response_time', self.target_mac, wait_time) - _ = [listener(self) for listener in self._dhcp_listeners] + self.logger.info('Target device %s using %s DHCP mode, wait %s', + self, dhcp_mode, wait_time) + self.gateway.change_dhcp_response_time(self.target_mac, wait_time) + _ = [listener(self.device) for listener in self._dhcp_listeners] def _aux_module_timeout_handler(self): # clean up tcp monitor that could be open @@ -315,7 +379,7 @@ def _aux_module_timeout_handler(self): def _main_module_timeout_handler(self): 
self.test_host.terminate() - self._docker_callback(exception=self._TIMEOUT_EXCEPTION) + self._module_callback(exception=self._TIMEOUT_EXCEPTION) def heartbeat(self): """Checks module run time for each event loop""" @@ -326,7 +390,8 @@ def heartbeat(self): nowtime = gcp.parse_timestamp(gcp.get_timestamp()) if nowtime >= timeout: if self.timeout_handler: - LOGGER.error('Monitoring timeout for %s after %ds', self.test_name, timeout_sec) + self.logger.error('Monitoring timeout for %s after %ds', self.test_name, + timeout_sec) # ensure it's called once handler, self.timeout_handler = self.timeout_handler, None handler() @@ -340,19 +405,20 @@ def _finalize_report(self): report_paths = self._report.finalize() if self._trigger_path: report_paths.update({'trigger_path': self._trigger_path}) - LOGGER.info('Finalized with reports %s', list(report_paths.keys())) + self.logger.info('Finalized with reports %s', list(report_paths.keys())) report_blobs = {name: self._upload_file(path) for name, path in report_paths.items()} self.record_result('terminate', state=MODE.TERM, **report_blobs) self._report = None def terminate(self, reason, trigger=True): """Terminate this host""" - LOGGER.info('Target port %d terminate, running %s, trigger %s: %s', self.target_port, - self._host_name(), trigger, reason) + self.logger.info('Target device %s terminate, running %s, trigger %s: %s', self, + self._host_name(), trigger, reason) self._state_transition(_STATE.TERM) self._release_config() self._monitor_cleanup() - self.runner.network.delete_mirror_interface(self.target_port) + if self.target_port: + self.runner.network.delete_mirror_interface(self.target_port) self._finalize_report() if self.test_host: try: @@ -360,12 +426,12 @@ def terminate(self, reason, trigger=True): self.test_host = None self.timeout_handler = None except Exception as e: - LOGGER.error('Target port %d terminating test: %s', self.target_port, e) - LOGGER.exception(e) + self.logger.error('Target device %s terminating test: 
%s', self, self.test_name) + self.logger.exception(e) if trigger: - self.runner.target_set_complete(self.target_port, - 'Target port %d termination: %s' % ( - self.target_port, self.test_host)) + self.runner.target_set_complete(self.device, + 'Target device %s termination: %s' % ( + self, self.test_host)) def idle_handler(self): """Trigger events from idle state""" @@ -380,8 +446,11 @@ def ip_notify(self, target_ip, state=MODE.DONE, delta_sec=-1): with open(self._trigger_path, 'a') as output_stream: output_stream.write('%s %s %d\n' % (target_ip, state, delta_sec)) self._all_ips.append({"ip": target_ip, "timestamp": time.time()}) - if self._get_dhcp_mode() == "ip_change" and len(self._all_ips) == 1: - self.gateway.request_new_ip(self.target_mac) + # Update ip directly if it's already triggered. + if self.target_ip: + self.target_ip = target_ip + if self.test_host: + self.test_host.ip_listener(target_ip) def trigger_ready(self): """Check if this host is ready to be triggered""" @@ -397,41 +466,42 @@ def trigger_ready(self): def trigger(self, state=MODE.DONE, target_ip=None, exception=None, delta_sec=-1): """Handle device trigger""" if not self.target_ip and not self.trigger_ready(): - LOGGER.warn('Target port %d ignoring premature trigger', self.target_port) + self.logger.warn('Target device %s ignoring premature trigger', self) return False if self.target_ip: - LOGGER.debug('Target port %d already triggered', self.target_port) + self.logger.debug('Target device %s already triggered', self) assert self.target_ip == target_ip, "target_ip mismatch" return True self.target_ip = target_ip self._record_result('info', state='%s/%s' % (self.target_mac, target_ip)) - self.record_result('ipaddr', ip=target_ip, state=state, exception=exception) + self.record_result('acquire', ip=target_ip, state=state, exception=exception) if exception: self._state_transition(_STATE.ERROR) - self.runner.target_set_error(self.target_port, exception) + self.runner.target_set_error(self.device, 
exception) else: - LOGGER.info('Target port %d triggered as %s', self.target_port, target_ip) + self.logger.info('Target device %s triggered as %s', self, target_ip) self._state_transition(_STATE.BASE, _STATE.WAITING) return True def _ping_test(self, src, dst, src_addr=None): if not src or not dst: - LOGGER.error('Invalid ping test params, src=%s, dst=%s', src, dst) + self.logger.error('Invalid ping test params, src=%s, dst=%s', src, dst) return False return self.runner.ping_test(src, dst, src_addr=src_addr) def _startup_scan(self): self._startup_file = os.path.join(self.scan_base, 'startup.pcap') self._startup_time = datetime.now() - LOGGER.info('Target port %d startup pcap capture', self.target_port) + self.logger.info('Target device %s startup pcap capture', self) self._monitor_scan(self._startup_file) def _monitor_scan(self, output_file, timeout=None): assert not self._monitor_ref, 'tcp_monitor already active' network = self.runner.network tcp_filter = '' - LOGGER.info('Target port %d pcap intf %s for %ss output in %s', - self.target_port, self._mirror_intf_name, timeout, output_file) + self.logger.info('Target device %s pcap intf %s for %s seconds output in %s', + self, self._mirror_intf_name, timeout if timeout else 'infinite', + output_file) helper = tcpdump_helper.TcpdumpHelper(network.pri, tcp_filter, packets=None, intf_name=self._mirror_intf_name, timeout=timeout, pcap_out=output_file, @@ -447,10 +517,10 @@ def _base_start(self): success = self._base_tests() self._monitor_cleanup() if not success: - LOGGER.warning('Target port %d base tests failed', self.target_port) + self.logger.warning('Target device %s base tests failed', self) self._state_transition(_STATE.ERROR) return - LOGGER.info('Target port %d done with base.', self.target_port) + self.logger.info('Target device %s done with base.', self) self._background_scan() except Exception as e: self._monitor_cleanup() @@ -458,7 +528,7 @@ def _base_start(self): def _monitor_cleanup(self, forget=True): if 
self._monitor_ref: - LOGGER.info('Target port %d network pcap complete', self.target_port) + self.logger.info('Target device %s network pcap complete', self) active = self._monitor_ref.stream() and not self._monitor_ref.stream().closed assert active == forget, 'forget and active mismatch' self._upload_file(self._startup_file) @@ -468,22 +538,22 @@ def _monitor_cleanup(self, forget=True): self._monitor_ref = None def _monitor_error(self, exception, forget=False): - LOGGER.error('Target port %d monitor error: %s', self.target_port, exception) + self.logger.error('Target device %s monitor error: %s', self, exception) self._monitor_cleanup(forget=forget) self.record_result(self.test_name, exception=exception) self._state_transition(_STATE.ERROR) - self.runner.target_set_error(self.target_port, exception) + self.runner.target_set_error(self.device, exception) def _background_scan(self): self._state_transition(_STATE.MONITOR, _STATE.BASE) if not self._monitor_scan_sec: - LOGGER.info('Target port %d skipping background pcap', self.target_port) + self.logger.info('Target device %s skipping background pcap', self) self._monitor_continue() return self.record_result('monitor', time=self._monitor_scan_sec, state=MODE.EXEC) monitor_file = os.path.join(self.scan_base, 'monitor.pcap') - LOGGER.info('Target port %d background pcap for %ds', - self.target_port, self._monitor_scan_sec) + self.logger.info('Target device %s background pcap for %ds', + self, self._monitor_scan_sec) self._monitor_scan(monitor_file, timeout=self._monitor_scan_sec) def _monitor_timeout(self, timeout): @@ -494,19 +564,20 @@ def _monitor_timeout(self, timeout): self._monitor_complete() def _monitor_complete(self): - LOGGER.info('Target port %d pcap complete', self.target_port) + self.logger.info('Target device %s pcap complete', self) self._monitor_cleanup(forget=False) self.record_result('monitor', state=MODE.DONE) self._monitor_continue() def _monitor_continue(self): self._state_transition(_STATE.NEXT, 
_STATE.MONITOR) + self.test_name = None self._run_next_test() def _base_tests(self): self.record_result('base', state=MODE.EXEC) if not self._ping_test(self.gateway.host, self.target_ip): - LOGGER.debug('Target port %d warmup ping failed', self.target_port) + self.logger.debug('Target device %s warmup ping failed', self) try: success1 = self._ping_test(self.gateway.host, self.target_ip), 'simple ping failed' success2 = self._ping_test(self.gateway.host, self.target_ip, @@ -521,22 +592,21 @@ def _base_tests(self): return True def _run_next_test(self): + assert not self.test_name, 'test_name defined: %s' % self.test_name try: if self.remaining_tests: - LOGGER.debug('Target port %d executing tests %s', - self.target_port, self.remaining_tests) - self.timeout_handler = self._main_module_timeout_handler - self._docker_test(self.remaining_tests.pop(0)) + self.logger.debug('Target device %s executing tests %s', + self, self.remaining_tests) + self._run_test(self.remaining_tests.pop(0)) else: - LOGGER.info('Target port %d no more tests remaining', self.target_port) + self.logger.info('Target device %s no more tests remaining', self) self.timeout_handler = self._aux_module_timeout_handler self._state_transition(_STATE.DONE, _STATE.NEXT) - self.test_name = None self.record_result('finish', state=MODE.FINE) except Exception as e: - LOGGER.error('Target port %d start error: %s', self.target_port, e) + self.logger.error('Target device %s start error: %s', self, e) self._state_transition(_STATE.ERROR) - self.runner.target_set_error(self.target_port, e) + self.runner.target_set_error(self.device, e) def _inst_config_path(self): return os.path.abspath(os.path.join(self._INST_DIR, self._CONFIG_DIR)) @@ -547,36 +617,61 @@ def _device_aux_path(self): os.makedirs(path) return path - def _docker_test(self, test_name): - self.test_name = test_name - self.test_start = gcp.get_timestamp() - self.test_host = docker_test.DockerTest(self.runner, self.target_port, - self.devdir, test_name) - 
LOGGER.debug('test_host start %s/%s', test_name, self._host_name()) + def _new_test(self, test_name): + clazz = ipaddr_test.IpAddrTest if test_name == 'ipaddr' else docker_test.DockerTest + return clazz(self, self.devdir, test_name, self._loaded_config) + + def _run_test(self, test_name): + self.timeout_handler = self._main_module_timeout_handler + self.test_host = self._new_test(test_name) + + self.logger.info('Target device %s start %s', self, self._host_name()) try: - self.test_port = self.runner.allocate_test_port(self.target_port) + self.test_port = self.gateway.allocate_test_port() except Exception as e: self.test_host = None raise e try: - self._start_test_host() + self._start_test(test_name) + params = self._get_module_params() + self.test_host.start(self.test_port, params, self._module_callback, self._finish_hook) except Exception as e: self.test_host = None - self.runner.release_test_port(self.target_port, self.test_port) + self.gateway.release_test_port(self.test_port) self.test_port = None self._monitor_cleanup() raise e - def _start_test_host(self): - params = self._get_module_params() + def _start_test(self, test_name): + self.test_name = test_name + self.test_start = gcp.get_timestamp() self._write_module_config(self._loaded_config, self._host_tmp_path()) self._record_result(self.test_name, config=self._loaded_config, state=MODE.CONF) self.record_result(self.test_name, state=MODE.EXEC) self._monitor_scan(os.path.join(self.scan_base, 'test_%s.pcap' % self.test_name)) self._state_transition(_STATE.TESTING, _STATE.NEXT) - self.test_host.start(self.test_port, params, self._docker_callback, self._finish_hook) + + def _end_test(self, state=MODE.DONE, return_code=None, exception=None): + self._monitor_cleanup() + self._state_transition(_STATE.NEXT, _STATE.TESTING) + report_path = os.path.join(self._host_tmp_path(), 'report.txt') + activation_log_path = os.path.join(self._host_dir_path(), 'activate.log') + module_config_path = 
os.path.join(self._host_tmp_path(), self._MODULE_CONFIG) + remote_paths = {} + for result_type, path in ((ResultType.REPORT_PATH, report_path), + (ResultType.ACTIVATION_LOG_PATH, activation_log_path), + (ResultType.MODULE_CONFIG_PATH, module_config_path)): + if os.path.isfile(path): + self._report.accumulate(self.test_name, {result_type: path}) + remote_paths[result_type.value] = self._upload_file(path) + self.record_result(self.test_name, state=state, code=return_code, exception=exception, + **remote_paths) + self.test_name = None + self.test_host = None + self.timeout_handler = None + self._run_next_test() def _get_module_params(self): switch_setup = self.switch_setup if 'mods_addr' in self.switch_setup else None @@ -585,7 +680,7 @@ def _get_module_params(self): 'local_ip': ext_loip, 'target_ip': self.target_ip, 'target_mac': self.target_mac, - 'target_port': str(self.target_port), + 'target_port': str(self.target_port) if self.target_port else None, 'gateway_ip': self.gateway.host.IP(), 'gateway_mac': self.gateway.host.MAC(), 'inst_base': self._inst_config_path(), @@ -616,46 +711,39 @@ def _host_tmp_path(self): return os.path.join(self._host_dir_path(), 'tmp') def _finish_hook(self): - if self._finish_hook_script: + script = self.config.get('finish_hook') + if script: finish_dir = os.path.join(self.devdir, 'finish', self._host_name()) shutil.rmtree(finish_dir, ignore_errors=True) os.makedirs(finish_dir) - LOGGER.info('Executing finish_hook: %s %s', self._finish_hook_script, finish_dir) - os.system('%s %s 2>&1 > %s/finish.out' % - (self._finish_hook_script, finish_dir, finish_dir)) - - def _docker_callback(self, return_code=None, exception=None): + self.logger.info('Executing finish_hook: %s %s', script, finish_dir) + os.system('%s %s 2>&1 > %s/finish.out' % (script, finish_dir, finish_dir)) + + def _topology_hook(self): + if self._topology_hook_script: + update_dir = self._NETWORK_DIR + self.logger.info('Executing topology_hook: %s %s', + 
self._topology_hook_script, update_dir) + os.system('%s %s 2>&1 > %s/update.out' % + (self._topology_hook_script, update_dir, update_dir)) + + def _module_callback(self, return_code=None, exception=None): host_name = self._host_name() - LOGGER.info('Host callback %s/%s was %s with %s', - self.test_name, host_name, return_code, exception) - self._monitor_cleanup() + self.logger.info('Host callback %s/%s was %s with %s', + self.test_name, host_name, return_code, exception) failed = return_code or exception state = MODE.MERR if failed else MODE.DONE - report_path = os.path.join(self._host_tmp_path(), 'report.txt') - activation_log_path = os.path.join(self._host_dir_path(), 'activate.log') - module_config_path = os.path.join(self._host_tmp_path(), self._MODULE_CONFIG) - remote_paths = {} - for result_type, path in ((ResultType.REPORT_PATH, report_path), - (ResultType.ACTIVATION_LOG_PATH, activation_log_path), - (ResultType.MODULE_CONFIG_PATH, module_config_path)): - if os.path.isfile(path): - self._report.accumulate(self.test_name, {result_type: path}) - remote_paths[result_type.value] = self._upload_file(path) - self.record_result(self.test_name, state=state, code=return_code, exception=exception, - **remote_paths) - self.runner.release_test_port(self.target_port, self.test_port) - self._state_transition(_STATE.NEXT, _STATE.TESTING) - assert self.test_host, '_docker_callback with no test_host defined' - self.test_host = None - self.timeout_handler = None - self._run_next_test() + self.gateway.release_test_port(self.test_port) + assert self.test_host, '_module_callback with no test_host defined' + self._end_test(state=state, return_code=return_code, exception=exception) def _merge_run_info(self, config): config['run_info'] = { 'run_id': self.run_id, 'mac_addr': self.target_mac, 'started': gcp.get_timestamp(), - 'switch': self._get_switch_config() + 'switch': self._get_switch_config(), + 'usi': self._usi_config } config['run_info'].update(self.runner.get_run_info()) @@ 
-663,17 +751,17 @@ def _load_module_config(self, run_info=True): config = self.runner.get_base_config() if run_info: self._merge_run_info(config) - self._load_config(config, self._type_path()) - self._load_config(config, self._device_base) - self._load_config(config, self._port_base) + self._load_config('type', config, self._type_path()) + self._load_config('device', config, self._device_base) + self._load_config('port', config, self._port_base) return config def record_result(self, name, **kwargs): """Record a named result for this test""" current = gcp.get_timestamp() if name != self.test_name: - LOGGER.debug('Target port %d report %s start %s', - self.target_port, name, current) + self.logger.debug('Target device %s report %s start %s', + self, name, current) self.test_name = name self.test_start = current if name: @@ -688,6 +776,7 @@ def _record_result(self, name, run_info=True, current=None, **kwargs): result = { 'name': name, 'runid': (self.run_id if run_info else None), + 'daq_run_id': self.runner.daq_run_id, 'device_id': self.target_mac, 'started': self.test_start, 'timestamp': current if current else gcp.get_timestamp(), @@ -709,12 +798,12 @@ def _exception_message(self, exception): return str(exception) def _control_updated(self, control_config): - LOGGER.info('Updated control config: %s %s', self.target_mac, control_config) + self.logger.info('Updated control config: %s %s', self, control_config) paused = control_config.get('paused') if not paused and self.is_ready(): self._start_run() elif paused and not self.is_ready(): - LOGGER.warning('Inconsistent control state for update of %s', self.target_mac) + self.logger.warning('Inconsistent control state for update of %s', self) def reload_config(self): """Trigger a config reload due to an external config change.""" @@ -723,24 +812,29 @@ def reload_config(self): if device_ready: self._loaded_config = new_config config_bundle = self._make_config_bundle(new_config) - LOGGER.info('Device config reloaded: %s 
%s', device_ready, self.target_mac) + self.logger.info('Device config reloaded: %s %s', device_ready, self) self._record_result(None, run_info=device_ready, config=config_bundle) return new_config def _dev_config_updated(self, dev_config): - LOGGER.info('Device config update: %s %s', self.target_mac, dev_config) + self.logger.info('Device config update: %s %s', self, dev_config) self._write_module_config(dev_config, self._device_base) self.reload_config() def _initialize_config(self): - dev_config = self._load_config({}, self._device_base) + dev_config = self._load_config('base', {}, self._device_base) self._gcp.register_config(self._DEVICE_PATH % self.target_mac, dev_config, self._dev_config_updated) - self._gcp.register_config(self._CONTROL_PATH % self.target_port, - self._make_control_bundle(), - self._control_updated, immediate=True) + if self.target_port: + self._gcp.register_config(self._CONTROL_PATH % self.target_port, + self._make_control_bundle(), + self._control_updated, immediate=True) self._record_result(None, config=self._make_config_bundle()) def _release_config(self): self._gcp.release_config(self._DEVICE_PATH % self.target_mac) - self._gcp.release_config(self._CONTROL_PATH % self.target_port) + if self.target_port: + self._gcp.release_config(self._CONTROL_PATH % self.target_port) + + def __repr__(self): + return str(self.device) + (" on port %d" % self.target_port if self.target_port else "") diff --git a/daq/ipaddr_test.py b/daq/ipaddr_test.py new file mode 100644 index 0000000000..f701404d90 --- /dev/null +++ b/daq/ipaddr_test.py @@ -0,0 +1,88 @@ +"""Test module encapsulating ip-address tests (including DHCP)""" + +from __future__ import absolute_import +import time +import os +import copy +import logger + + +from base_module import HostModule + +LOGGER = logger.get_logger('ipaddr') + + +class IpAddrTest(HostModule): + """Module for inline ipaddr tests""" + + def __init__(self, host, tmpdir, test_name, module_config): + super().__init__(host, 
tmpdir, test_name, module_config) + self.test_dhcp_ranges = copy.copy(self.test_config.get('dhcp_ranges', [])) + self.log_path = os.path.join(self.tmpdir, 'nodes', self.host_name, 'activate.log') + self.log_file = None + self._ip_callback = None + self.tests = [ + ('dhcp port_toggle test', self._dhcp_port_toggle_test), + ('dhcp multi subnet test', self._multi_subnet_test), + ('ip change test', self._ip_change_test), + ('finalize', self._finalize) + ] + + def start(self, port, params, callback, finish_hook): + """Start the ip-addr tests""" + super().start(port, params, callback, finish_hook) + LOGGER.debug('Target device %s starting ipaddr test %s', self.device, self.test_name) + self.log_file = open(self.log_path, 'w') + self._next_test() + + def _next_test(self): + try: + name, func = self.tests.pop(0) + self.log('Running ' + name) + func() + except Exception as e: + self.log(str(e)) + self._finalize(exception=e) + + def log(self, message): + """Log an activation message""" + LOGGER.info(message) + self.log_file.write(message + '\n') + + def _dhcp_port_toggle_test(self): + if not self.host.connect_port(False): + self.log('disconnect port not enabled') + return + time.sleep(self.host.config.get("port_debounce_sec", 0) + 1) + self.host.connect_port(True) + self._ip_callback = self._next_test + + def _multi_subnet_test(self): + if not self.test_dhcp_ranges: + self._next_test() + return + dhcp_range = self.test_dhcp_ranges.pop(0) + self.log('Testing dhcp range: ' + str(dhcp_range)) + args = (dhcp_range["start"], dhcp_range["end"], dhcp_range["prefix_length"]) + self.host.gateway.change_dhcp_range(*args) + self._ip_callback = self._multi_subnet_test if self.test_dhcp_ranges else self._next_test + + def _ip_change_test(self): + self.host.gateway.request_new_ip(self.host.target_mac) + self._ip_callback = self._next_test + + def _finalize(self, exception=None): + self.terminate() + self.callback(exception=exception) + + def terminate(self): + """Terminate this set of 
tests""" + self.log('Module terminating') + self.log_file.close() + self.log_file = None + + def ip_listener(self, target_ip): + """Respond to a ip notification event""" + self.log('ip notification %s' % target_ip) + if self._ip_callback: + self._ip_callback() diff --git a/daq/network.py b/daq/network.py index b622205926..255a6bd259 100644 --- a/daq/network.py +++ b/daq/network.py @@ -136,7 +136,7 @@ def _attach_sec_device_links(self): def is_system_port(self, dpid, port): """Check if the dpid/port combo is the system trunk port""" - return dpid == self.topology.PRI_DPID and port == self.topology.PRI_STACK_PORT + return dpid == self.topology.PRI_DPID and port == self.topology.PRI_TRUNK_PORT def is_device_port(self, dpid, port): """Check if the dpid/port combo is for a valid device""" diff --git a/daq/report.py b/daq/report.py index 311c8fe89e..d3ee8e5046 100644 --- a/daq/report.py +++ b/daq/report.py @@ -16,7 +16,6 @@ import gcp import logger - LOGGER = logger.get_logger('report') class ResultType(Enum): @@ -306,6 +305,7 @@ def _get_test_info(self, test_name): return self._module_config.get('tests', {}).get(test_name, {}) def _write_repitems(self): + from host import get_test_config # Deferring import for (test_name, result_dict) in self._repitems.items(): # To not write a module header if there is nothing to report def writeln(line, test_name=test_name): @@ -318,7 +318,8 @@ def writeln(line, test_name=test_name): writeln(self._TEST_SUBHEADER % "Report") self._append_file(result_dict[ResultType.REPORT_PATH]) if ResultType.MODULE_CONFIG in result_dict: - config = result_dict[ResultType.MODULE_CONFIG].get("modules", {}).get(test_name) + module_configs = result_dict[ResultType.MODULE_CONFIG] + config = get_test_config(module_configs, test_name) if config and len(config) > 0: writeln(self._TEST_SUBHEADER % "Module Config") table = MdTable(["Attribute", "Value"]) diff --git a/daq/runner.py b/daq/runner.py index 631aa43526..26b5ab3c54 100644 --- a/daq/runner.py +++ 
b/daq/runner.py @@ -25,18 +25,102 @@ class PortInfo: """Simple container for device port info""" active = False - flapping_start = 0 - mac = None - host = None - gateway = None - + flapping_start = None + port_no = None + + +class IpInfo: + """Simple container for device ip info""" + ip_addr = None + state = None + delta_sec = None + + +class Device: + """Simple container for device info""" + def __init__(self): + self.mac = None + self.host = None + self.gateway = None + self.group = None + self.port = None + self.dhcp_ready = False + self.ip_info = IpInfo() + self.set_id = None + + def __repr__(self): + return self.mac.replace(":", "") + + +class Devices: + """Container for all devices""" + def __init__(self): + self._devices = {} + self._set_ids = set() + + def new_device(self, mac, port_info=None): + """Adding a new device""" + assert mac not in self._devices, "Device with mac: %s is already added." % mac + device = Device() + device.mac = mac + self._devices[mac] = device + device.port = port_info if port_info else PortInfo() + port_no = device.port.port_no + set_id = port_no if port_no else self._allocate_set_id() + assert set_id not in self._set_ids, "Duplicate device set id %d" % set_id + self._set_ids.add(set_id) + device.set_id = set_id + return device + + def _allocate_set_id(self): + set_id = 1 + while set_id in self._set_ids: + set_id += 1 + return set_id + + def remove(self, device): + """Removing a device""" + assert self.contains(device), "Device %s not found." 
% device + del self._devices[device.mac] + self._set_ids.remove(device.set_id) + + def get(self, device_mac): + """Get a device using its mac address""" + return self._devices.get(device_mac) + + def get_by_port_info(self, port): + """Get a device using its port info object""" + for device in self._devices.values(): + if device.port == port: + return device + return None + + def get_by_gateway(self, gateway): + """Get devices under specified gateway""" + return [device for device in self._devices.values() if device.gateway == gateway] + + def get_by_group(self, group_name): + """Get devices under a group name""" + return [device for device in self._devices.values() if device.group == group_name] + + def get_all_devices(self): + """Get all devices""" + return list(self._devices.values()) + + def get_triggered_devices(self): + """Get devices with hosts""" + return [device for device in self._devices.values() if device.host] + + def contains(self, device): + """Returns true if the device is expected""" + return self._devices.get(device.mac) == device class DAQRunner: """Main runner class controlling DAQ. Primarily mediates between faucet events, connected hosts (to test), and gcp for logging. 
This class owns the main event loop and shards out work to subclasses.""" - MAX_GATEWAYS = 10 + MAX_GATEWAYS = 9 _DEFAULT_RETENTION_DAYS = 30 _MODULE_CONFIG = 'module_config.json' _RUNNER_CONFIG_PATH = 'runner/setup' @@ -45,13 +129,11 @@ class owns the main event loop and shards out work to subclasses.""" def __init__(self, config): self.configurator = configurator.Configurator() + self.gateway_sets = set(range(1, self.MAX_GATEWAYS+1)) self.config = config - self._port_info = {} self._result_sets = {} - self._mac_port_map = {} - self._device_groups = {} - self._gateway_sets = {} - self._target_mac_ip = {} + self._devices = Devices() + self._ports = {} self._callback_queue = [] self._callback_lock = threading.Lock() self.gcp = gcp.GcpManager(self.config, self._queue_callback) @@ -65,8 +147,8 @@ def __init__(self, config): self._linger_exit = 0 self.faucet_events = None self.single_shot = config.get('single_shot', False) - self.event_trigger = config.get('event_trigger', False) self.fail_mode = config.get('fail_mode', False) + self.run_trigger_type = config.get('run_trigger_type', 'PORT') self.run_tests = True self.stream_monitor = None self.exception = None @@ -75,30 +157,22 @@ def __init__(self, config): self._default_port_flap_timeout = int(config.get('port_flap_timeout_sec', 0)) self.result_log = self._open_result_log() self._system_active = False - self._dhcp_ready = set() - self._ip_info = {} logging_client = self.gcp.get_logging_client() - self._daq_run_id = uuid.uuid4() + self.daq_run_id = self._init_daq_run_id() if logging_client: logger.set_stackdriver_client(logging_client, - labels={"daq_run_id": str(self._daq_run_id)}) + labels={"daq_run_id": self.daq_run_id}) test_list = self._get_test_list(config.get('host_tests', self._DEFAULT_TESTS_FILE), []) if self.config.get('keep_hold'): LOGGER.info('Appending test_hold to master test list') test_list.append('hold') config['test_list'] = test_list - LOGGER.info('DAQ RUN id: %s' % self._daq_run_id) + 
LOGGER.info('DAQ RUN id: %s' % self.daq_run_id) LOGGER.info('Configured with tests %s' % ', '.join(config['test_list'])) LOGGER.info('DAQ version %s' % self._daq_version) LOGGER.info('LSB release %s' % self._lsb_release) LOGGER.info('system uname %s' % self._sys_uname) - def _flush_faucet_events(self): - LOGGER.info('Flushing faucet event queue...') - if self.faucet_events: - while self.faucet_events.next_event(): - pass - def _open_result_log(self): return open(self._RESULT_LOG_FILE, 'w') @@ -106,6 +180,12 @@ def _get_states(self): states = connected_host.pre_states() + self.config['test_list'] return states + connected_host.post_states() + def _init_daq_run_id(self): + daq_run_id = str(uuid.uuid4()) + with open('inst/daq_run_id.txt', 'w') as output_stream: + output_stream.write(daq_run_id + '\n') + return daq_run_id + def _send_heartbeat(self): message = { 'name': 'status', @@ -123,7 +203,7 @@ def get_run_info(self): 'version': self._daq_version, 'lsb': self._lsb_release, 'uname': self._sys_uname, - 'daq_run_id': str(self._daq_run_id) + 'daq_run_id': self.daq_run_id } data_retention_days = self.config.get('run_data_retention_days', self._DEFAULT_RETENTION_DAYS) @@ -182,9 +262,12 @@ def _handle_faucet_events(self): LOGGER.debug('port_state: %s %s', dpid, port) self._handle_port_state(dpid, port, active) return - (dpid, port, target_mac) = self.faucet_events.as_port_learn(event) - if dpid and port: - self._handle_port_learn(dpid, port, target_mac) + (dpid, port, target_mac, vid) = self.faucet_events.as_port_learn(event) + if dpid and port and vid: + if self.run_trigger_type == "PORT": + self._handle_port_learn(dpid, port, vid, target_mac) + elif self.run_trigger_type == "VLAN" and self.network.is_system_port(dpid, port): + self._handle_device_learn(vid, target_mac) return (dpid, restart_type) = self.faucet_events.as_config_change(event) if dpid is not None: @@ -203,44 +286,57 @@ def _handle_port_state(self, dpid, port, active): LOGGER.debug('Unknown port %s on dpid 
%s is active %s', port, dpid, active) return - if port not in self._port_info: - self._port_info[port] = PortInfo() + if port not in self._ports: + self._ports[port] = PortInfo() + self._ports[port].port_no = port - if active != self._port_info[port].active: + if active != self._ports[port].active: LOGGER.info('Port %s dpid %s is now %s', port, dpid, "active" if active else "inactive") if active: self._activate_port(port) else: - port_info = self._port_info[port] - if port_info.host and not port_info.flapping_start: + device = self._devices.get_by_port_info(self._ports[port]) + port_info = self._ports[port] + if device and device.host and not port_info.flapping_start: port_info.flapping_start = time.time() if port_info.active: - if port_info.mac and not port_info.flapping_start: - self._direct_port_traffic(port_info.mac, port, None) + if device and not port_info.flapping_start: + self._direct_port_traffic(device.mac, port, None) self._deactivate_port(port) self._send_heartbeat() def _activate_port(self, port): - port_info = self._port_info[port] + port_info = self._ports[port] port_info.flapping_start = 0 port_info.active = True def _deactivate_port(self, port): - port_info = self._port_info[port] + port_info = self._ports[port] port_info.active = False def _direct_port_traffic(self, mac, port, target): self.network.direct_port_traffic(mac, port, target) - def _handle_port_learn(self, dpid, port, target_mac): + def _handle_port_learn(self, dpid, port, vid, target_mac): if self.network.is_device_port(dpid, port): LOGGER.info('Port %s dpid %s learned %s', port, dpid, target_mac) - self._mac_port_map[target_mac] = port - self._port_info[port].mac = target_mac - self._target_set_trigger(port) + if port not in self._ports: + self._ports[port] = PortInfo() + self._ports[port].port_no = port + if not self._devices.get(target_mac): + self._devices.new_device(target_mac, port_info=self._ports[port]) + self._target_set_trigger(self._devices.get(target_mac)) else: 
LOGGER.debug('Port %s dpid %s learned %s (ignored)', port, dpid, target_mac) + def _handle_device_learn(self, vid, target_mac): + LOGGER.info('%s learned on vid %s', target_mac, vid) + if not self._devices.get(target_mac): + device = self._devices.new_device(target_mac) + else: + device = self._devices.get(target_mac) + self._target_set_trigger(device) + def _queue_callback(self, callback): with self._callback_lock: LOGGER.debug('Register callback') @@ -259,21 +355,19 @@ def _handle_system_idle(self): # Some synthetic faucet events don't come in on the socket, so process them here. self._handle_faucet_events() all_idle = True - for target_port, target_host in self._get_port_hosts(): + for device in self._devices.get_triggered_devices(): try: - if target_host.is_running(): + if device.host.is_running(): all_idle = False - target_host.idle_handler() + device.host.idle_handler() else: - self.target_set_complete(target_port, 'target set not active') + self.target_set_complete(device, 'target set not active') except Exception as e: - self.target_set_error(target_host.target_port, e) - if not self.event_trigger: - for target_port, port_info in self._port_info.items(): - if port_info.active and port_info.mac: - self._target_set_trigger(target_port) - all_idle = False - if not self._get_running_ports() and not self.run_tests: + self.target_set_error(device, e) + for device in self._devices.get_all_devices(): + self._target_set_trigger(device) + all_idle = False + if not self._devices.get_triggered_devices() and not self.run_tests: if self.faucet_events and not self._linger_exit: self.shutdown() if self._linger_exit == 1: @@ -281,20 +375,19 @@ def _handle_system_idle(self): LOGGER.warning('Result linger on exit.') all_idle = False if all_idle: - LOGGER.debug('No active device ports, waiting for trigger event...') + LOGGER.debug('No active device, waiting for trigger event...') def _reap_stale_ports(self): - for port, port_info in copy.copy(self._port_info).items(): - if not 
port_info.flapping_start or not port_info.host: + for device in self._devices.get_triggered_devices(): + if not device.port.flapping_start: continue - host = port_info.host - timeout_sec = host.get_port_flap_timeout(host.test_name) + timeout_sec = device.host.get_port_flap_timeout(device.host.test_name) if timeout_sec is None: timeout_sec = self._default_port_flap_timeout - if (port_info.flapping_start + timeout_sec) <= time.time(): + if (device.port.flapping_start + timeout_sec) <= time.time(): exception = DaqException('port not active for %ds' % timeout_sec) - self.target_set_error(port, exception) - port_info.flapping_start = 0 + self.target_set_error(device, exception) + device.port.flapping_start = 0 def shutdown(self): """Shutdown this runner by closing all active components""" @@ -308,16 +401,16 @@ def shutdown(self): def _loop_hook(self): self._handle_queued_events() - states = {p: h.state for p, h in self._get_port_hosts()} + states = {device.mac: device.host.state for device in self._devices.get_triggered_devices()} LOGGER.debug('Active target sets/state: %s', states) def _terminate(self): - for target_port in self._get_running_ports(): - self.target_set_error(target_port, DaqException('terminated')) + for device in self._devices.get_triggered_devices(): + self.target_set_error(device, DaqException('terminated')) def _module_heartbeat(self): # Should probably be converted to a separate thread to timeout any blocking fn calls - _ = [host.heartbeat() for _, host in self._get_port_hosts()] + _ = [device.host.heartbeat() for device in self._devices.get_triggered_devices()] def main_loop(self): """Run main loop to execute tests""" @@ -329,8 +422,6 @@ def main_loop(self): self.stream_monitor = monitor self.monitor_stream('faucet', self.faucet_events.sock, self._handle_faucet_events, priority=10) - if self.event_trigger: - self._flush_faucet_events() LOGGER.info('Entering main event loop.') LOGGER.info('See docs/troubleshooting.md if this blocks for more than a 
few minutes.') while self.stream_monitor.event_loop(): @@ -351,64 +442,62 @@ def main_loop(self): self._terminate() - def _target_set_trigger(self, target_port): - target_active = target_port in self._port_info and self._port_info[target_port].active - assert target_active, 'Target port %d not active' % target_port - - target_mac = self._port_info[target_port].mac - assert target_mac, 'Target port %d triggered but not learned' % target_port + def _target_set_trigger(self, device): + assert self._devices.contains(device), 'Target device %s is not expected' % device.mac + port_trigger = device.port.port_no is not None + if port_trigger: + assert device.port.active, 'Target port %d is not active' % device.port.port_no if not self._system_active: - LOGGER.warning('Target port %d ignored, system not active', target_port) + LOGGER.warning('Target device %s ignored, system is not active', device.mac) return False - if self._port_info[target_port].host: - LOGGER.debug('Target port %d already triggered', target_port) + if device.host: + LOGGER.debug('Target device %s already triggered', device.mac) return False if not self.run_tests: - LOGGER.debug('Target port %d trigger suppressed', target_port) + LOGGER.debug('Target device %s trigger suppressed', device.mac) return False try: - group_name = self.network.device_group_for(target_mac) - gateway = self._activate_device_group(group_name, target_port) + group_name = self.network.device_group_for(device.mac) + device.group = group_name + gateway = self._activate_device_group(device) if gateway.activated: - LOGGER.debug('Target port %d trigger ignored b/c activated gateway', target_port) + LOGGER.debug('Target device %s trigger ignored b/c activated gateway', device.mac) return False except Exception as e: - LOGGER.error('Target port %d target trigger error %s', target_port, str(e)) + LOGGER.error('Target device %s target trigger error %s', device.mac, str(e)) if self.fail_mode: LOGGER.warning('Suppressing further tests due to 
failure.') self.run_tests = False return False - target = { - 'port': target_port, - 'group': group_name, - 'fake': gateway.fake_target, - 'port_set': gateway.port_set, - 'mac': target_mac - } - # Stops all DHCP response initially # Selectively enables dhcp response at ipaddr stage based on dhcp mode - gateway.execute_script('change_dhcp_response_time', target_mac, -1) - gateway.attach_target(target_port, target) - + gateway.stop_dhcp_response(device.mac) + gateway.attach_target(device) + device.gateway = gateway try: self.run_count += 1 - new_host = connected_host.ConnectedHost(self, gateway, target, self.config) - self._port_info[target_port].host = new_host - self._port_info[target_port].gateway = gateway - LOGGER.info('Target port %d registered %s', target_port, target_mac) + new_host = connected_host.ConnectedHost(self, device, self.config) + device.host = new_host new_host.register_dhcp_ready_listener(self._dhcp_ready_listener) new_host.initialize() - self._direct_port_traffic(target_mac, target_port, target) + if port_trigger: + target = { + 'port': device.port.port_no, + 'group': group_name, + 'fake': gateway.fake_target, + 'port_set': gateway.port_set, + 'mac': device.mac + } + self._direct_port_traffic(device.mac, device.port.port_no, target) return True except Exception as e: - self.target_set_error(target_port, e) + self.target_set_error(device, e) def _get_test_list(self, test_file, test_list): no_test = self.config.get('no_test', False) @@ -438,127 +527,101 @@ def _get_test_list(self, test_file, test_list): line = file.readline() return test_list - def allocate_test_port(self, target_port): - """Get the test port for the given target_port""" - gateway = self._port_info[target_port].gateway - return gateway.allocate_test_port() - - def release_test_port(self, target_port, test_port): - """Release the given test port""" - gateway = self._port_info[target_port].gateway - return gateway.release_test_port(test_port) - - def _activate_device_group(self, 
group_name, target_port): - if group_name in self._device_groups: - existing = self._device_groups[group_name] - LOGGER.debug('Gateway for existing device group %s is %s', group_name, existing.name) + def _activate_device_group(self, device): + group_name = device.group + group_devices = self._devices.get_by_group(group_name) + existing_gateways = {device.gateway for device in group_devices if device.gateway} + if existing_gateways: + existing = existing_gateways.pop() + LOGGER.info('Gateway for existing device group %s is %s', group_name, existing) return existing - set_num = self._find_gateway_set(target_port) + + set_num = self._find_gateway_set(device) LOGGER.info('Gateway for device group %s not found, initializing base %d...', - group_name, set_num) + device.group, set_num) gateway = gateway_manager.Gateway(self, group_name, set_num, self.network) - self._gateway_sets[set_num] = group_name - self._device_groups[group_name] = gateway try: gateway.initialize() except Exception: LOGGER.error('Cleaning up from failed gateway initialization') - LOGGER.debug('Clearing target %s gateway group %s for %s', - target_port, set_num, group_name) - del self._gateway_sets[set_num] - del self._device_groups[group_name] + LOGGER.debug('Clearing %s gateway group %s for %s', + device, set_num, group_name) + self.gateway_sets.add(set_num) raise return gateway - def ip_notify(self, state, target, gateway_set, exception=None): + def ip_notify(self, state, target, gateway, exception=None): """Handle a DHCP / Static IP notification""" if exception: assert not target, 'unexpected exception with target' - LOGGER.error('IP exception for gw%02d: %s', gateway_set, exception) + LOGGER.error('IP exception for %s: %s', gateway, exception) LOGGER.exception(exception) - self._terminate_gateway_set(gateway_set) + self._terminate_gateway_set(gateway) return target_mac, target_ip, delta_sec = target['mac'], target['ip'], target['delta'] - LOGGER.info('IP notify %s is %s on gw%02d (%s/%d)', 
target_mac, - target_ip, gateway_set, state, delta_sec) + LOGGER.info('IP notify %s is %s on %s (%s/%d)', target_mac, + target_ip, gateway, state, delta_sec) if not target_mac: LOGGER.warning('IP target mac missing') return - self._target_mac_ip[target_mac] = target_ip - host = self._get_host_from_mac(target_mac) - if host: - self._ip_info[host] = (state, target, gateway_set) - host.ip_notify(target_ip, state, delta_sec) - self._check_and_activate_gateway(host) - - def _get_host_from_mac(self, mac): - if mac not in self._mac_port_map: - return None - return self._port_info[self._mac_port_map[mac]].host - - def _get_port_hosts(self): - return list({p: i.host for p, i in self._port_info.items() if i.host}.items()) - - def _get_running_ports(self): - return [p for p, i in self._port_info.items() if i.host] + device = self._devices.get(target_mac) + device.ip_info.ip_addr = target_ip + device.ip_info.state = state + device.ip_info.delta_sec = delta_sec + if device and device.host: + device.host.ip_notify(target_ip, state, delta_sec) + self._check_and_activate_gateway(device) def _get_active_ports(self): - return [p for p, i in self._port_info.items() if i.active] + return [p.port_no for p in self._ports.values() if p.active] - def _check_and_activate_gateway(self, host): + def _check_and_activate_gateway(self, device): # Host ready to be activated and DHCP happened / Static IP - if host not in self._ip_info or host not in self._dhcp_ready: + ip_info = device.ip_info + if not ip_info.ip_addr or not device.dhcp_ready: return - state, target, gateway_set = self._ip_info[host] - target_mac, target_ip, delta_sec = target['mac'], target['ip'], target['delta'] - (gateway, ready_devices) = self._should_activate_target(target_mac, target_ip, gateway_set) + (gateway, ready_devices) = self._should_activate_target(device) if not ready_devices: return - if ready_devices is True: - self._get_host_from_mac(target_mac).trigger(state, target_ip=target_ip, - delta_sec=delta_sec) + 
device.host.trigger(ip_info.state, target_ip=ip_info.ip_addr, + delta_sec=ip_info.delta_sec) else: - self._activate_gateway(state, gateway, ready_devices, delta_sec) + self._activate_gateway(ip_info.state, gateway, ready_devices, ip_info.delta_sec) - def _dhcp_ready_listener(self, host): - self._dhcp_ready.add(host) - self._check_and_activate_gateway(host) + def _dhcp_ready_listener(self, device): + device.dhcp_ready = True + self._check_and_activate_gateway(device) def _activate_gateway(self, state, gateway, ready_devices, delta_sec): gateway.activate() if len(ready_devices) > 1: state = 'group' delta_sec = -1 - for ready_mac in ready_devices: - LOGGER.info('IP activating target %s', ready_mac) - ready_host = self._get_host_from_mac(ready_mac) - ready_ip = self._target_mac_ip[ready_mac] - triggered = ready_host.trigger(state, target_ip=ready_ip, delta_sec=delta_sec) - assert triggered, 'host %s not triggered' % ready_mac - - def _should_activate_target(self, target_mac, target_ip, gateway_set): - target_host = self._get_host_from_mac(target_mac) - if not target_host: - LOGGER.warning('DHCP targets missing %s', target_mac) + for device in ready_devices: + LOGGER.info('IP activating target %s', device) + target_ip, delta_sec = device.ip_info.ip_addr, device.ip_info.delta_sec + triggered = device.host.trigger(state, target_ip=target_ip, delta_sec=delta_sec) + assert triggered, 'Device %s not triggered' % device + + def _should_activate_target(self, device): + if not device.host: + LOGGER.warning('DHCP targets missing %s', device) return False, False - - group_name = self._gateway_sets[gateway_set] - gateway = self._device_groups[group_name] - + gateway, group_name = device.gateway, device.group if gateway.activated: LOGGER.info('DHCP activation group %s already activated', group_name) return gateway, True - if not target_host.notify_activate(): - LOGGER.info('DHCP device %s ignoring spurious notify', target_mac) + if not device.host.notify_activate(): + 
LOGGER.info('DHCP device %s ignoring spurious notify', device) return gateway, False - ready_devices = gateway.target_ready(target_mac) + ready_devices = gateway.target_ready(device) group_size = self.network.device_group_size(group_name) remaining = group_size - len(ready_devices) @@ -566,30 +629,27 @@ def _should_activate_target(self, target_mac, target_ip, gateway_set): LOGGER.info('DHCP waiting for %d additional members of group %s', remaining, group_name) return gateway, False - hosts = map(self._get_host_from_mac, ready_devices) - ready_trigger = all(map(lambda host: host.trigger_ready(), hosts)) + ready_trigger = all(map(lambda host: device.host.trigger_ready(), ready_devices)) if not ready_trigger: LOGGER.info('DHCP device group %s not ready to trigger', group_name) return gateway, False return gateway, ready_devices - def _terminate_gateway_set(self, gateway_set): - assert gateway_set in self._gateway_sets, 'Gateway set %s not found' - group_name = self._gateway_sets[gateway_set] - gateway = self._device_groups[group_name] - ports = [target['port'] for target in gateway.get_targets()] - LOGGER.info('Terminating gateway group %s set %s, ports %s', group_name, gateway_set, ports) - for target_port in ports: - self.target_set_error(target_port, DaqException('terminated')) - - def _find_gateway_set(self, target_port): - if target_port not in self._gateway_sets: - return target_port - for entry in range(1, self.MAX_GATEWAYS): - if entry not in self._gateway_sets: - return entry - raise Exception('Could not allocate open gateway set') + def _terminate_gateway_set(self, gateway): + gateway_devices = self._devices.get_by_gateway(gateway) + assert gateway_devices, '%s not found' % gateway + LOGGER.info('Terminating %s', gateway) + for device in gateway_devices: + self.target_set_error(device, DaqException('terminated')) + + def _find_gateway_set(self, device): + if not self.gateway_sets: + raise Exception('Could not allocate open gateway set') + if 
device.port.port_no in self.gateway_sets: + self.gateway_sets.remove(device.port.port_no) + return device.port.port_no + return self.gateway_sets.pop() @staticmethod def ping_test(src, dst, src_addr=None): @@ -608,36 +668,34 @@ def ping_test(src, dst, src_addr=None): LOGGER.info('Test ping failure: %s', e) return False - def target_set_error(self, target_port, exception): - """Handle an error in the target port set""" - running = bool(target_port in self._port_info and self._port_info[target_port].host) - LOGGER.error('Target port %d running %s exception: %s', target_port, running, exception) + def target_set_error(self, device, exception): + """Handle an error in the target set""" + running = bool(device.host) + LOGGER.error('Target device %s running %s exception: %s', device, running, exception) LOGGER.exception(exception) if running: - target_host = self._port_info[target_port].host - target_host.record_result(target_host.test_name, exception=exception) - self.target_set_complete(target_port, str(exception)) + device.host.record_result(device.host.test_name, exception=exception) + self.target_set_complete(device, str(exception)) else: stack = ''.join( traceback.format_exception(etype=type(exception), value=exception, tb=exception.__traceback__)) - self._target_set_finalize(target_port, + self._target_set_finalize(device, {'exception': {'exception': str(exception), 'traceback': stack}}, str(exception)) - self._detach_gateway(target_port) + self._detach_gateway(device) - def target_set_complete(self, target_port, reason): + def target_set_complete(self, device, reason): """Handle completion of a target_set""" - target_host = self._port_info[target_port].host - self._target_set_finalize(target_port, target_host.results, reason) - self._target_set_cancel(target_port) + self._target_set_finalize(device, device.host.results, reason) + self._target_set_cancel(device) - def _target_set_finalize(self, target_port, result_set, reason): - results = 
self._combine_result_set(target_port, result_set) - LOGGER.info('Target port %d finalize: %s (%s)', target_port, results, reason) + def _target_set_finalize(self, device, result_set, reason): + results = self._combine_result_set(device, result_set) + LOGGER.info('Target device %s finalize: %s (%s)', device, results, reason) if self.result_log: - self.result_log.write('%02d: %s\n' % (target_port, results)) + self.result_log.write('%s: %s\n' % (device, results)) self.result_log.flush() suppress_tests = self.fail_mode or self.result_linger @@ -646,49 +704,48 @@ def _target_set_finalize(self, target_port, result_set, reason): self.run_tests = False if self.result_linger: self._linger_exit = 1 - self._result_sets[target_port] = result_set + self._result_sets[device] = result_set - def _target_set_cancel(self, target_port): - target_host = self._port_info[target_port].host + def _target_set_cancel(self, device): + target_host = device.host if target_host: - self._port_info[target_port].host = None - target_mac = self._port_info[target_port].mac - del self._mac_port_map[target_mac] - target_gateway = self._port_info[target_port].gateway - LOGGER.info('Target port %d cancel %s (#%d/%s).', - target_port, target_mac, self.run_count, self.run_limit) - results = self._combine_result_set(target_port, self._result_sets.get(target_port)) + device.host = None + target_gateway = device.gateway + target_port = device.port.port_no + LOGGER.info('Target device %s cancel (#%d/%s).', device.mac, self.run_count, + self.run_limit) + + results = self._combine_result_set(device, self._result_sets.get(device)) this_result_linger = results and self.result_linger target_gateway_linger = target_gateway and target_gateway.result_linger if target_gateway_linger or this_result_linger: - LOGGER.warning('Target port %d result_linger: %s', target_port, results) - self._activate_port(target_port) + LOGGER.warning('Target device %s result_linger: %s', device.mac, results) + if target_port: + 
self._activate_port(target_port) target_gateway.result_linger = True else: - self._direct_port_traffic(target_mac, target_port, None) + if target_port: + self._direct_port_traffic(device.mac, target_port, None) target_host.terminate('_target_set_cancel', trigger=False) if target_gateway: - self._detach_gateway(target_port) + self._detach_gateway(device) if self.run_limit and self.run_count >= self.run_limit and self.run_tests: LOGGER.warning('Suppressing future tests because run limit reached.') self.run_tests = False if self.single_shot and self.run_tests: LOGGER.warning('Suppressing future tests because test done in single shot.') self.run_tests = False - LOGGER.info('Remaining target sets: %s', self._get_running_ports()) + self._devices.remove(device) + LOGGER.info('Remaining target sets: %s', self._devices.get_triggered_devices()) - def _detach_gateway(self, target_port): - target_gateway = self._port_info[target_port].gateway + def _detach_gateway(self, device): + target_gateway = device.gateway if not target_gateway: return - self._port_info[target_port].gateway = None - target_mac = self._port_info[target_port].mac - if not target_gateway.detach_target(target_port): - LOGGER.info('Retiring target gateway %s, %s, %s, %s', - target_port, target_mac, target_gateway.name, target_gateway.port_set) - group_name = self.network.device_group_for(target_mac) - del self._device_groups[group_name] - del self._gateway_sets[target_gateway.port_set] + device.gateway = None + if not target_gateway.detach_target(device): + LOGGER.info('Retiring %s. 
Last device: %s', target_gateway, device) + self.gateway_sets.add(target_gateway.port_set) target_gateway.terminate() def monitor_stream(self, *args, **kwargs): @@ -720,7 +777,7 @@ def _combine_result_set(self, set_key, result_sets): exp_msg = result.get('exception') status = exp_msg if exp_msg else code if name != 'fail' else not code if status != 0: - results.append('%02d:%s:%s' % (set_key, name, status)) + results.append('%s:%s:%s' % (set_key, name, status)) return results def finalize(self): @@ -742,12 +799,15 @@ def _base_config_changed(self, new_config): self._MODULE_CONFIG) self._base_config = self._load_base_config(register=False) self._publish_runner_config(self._base_config) - _ = [host.reload_config() for _, host in self._get_port_hosts()] + _ = [device.host.reload_config() for device in self._devices.get_triggered_devices()] def _load_base_config(self, register=True): - base = self.configurator.load_and_merge({}, self.config.get('base_conf')) - site_config = self.configurator.load_config(self.config.get('site_path'), - self._MODULE_CONFIG, optional=True) + base_conf = self.config.get('base_conf') + LOGGER.info('Loading base module config from %s', base_conf) + base = self.configurator.load_and_merge({}, base_conf) + site_path = self.config.get('site_path') + LOGGER.info('Loading site module config from %s', base_conf) + site_config = self.configurator.load_config(site_path, self._MODULE_CONFIG, optional=True) if register: self.gcp.register_config(self._RUNNER_CONFIG_PATH, site_config, self._base_config_changed) diff --git a/daq/topology.py b/daq/topology.py index 2438b49012..2cda43ebe4 100644 --- a/daq/topology.py +++ b/daq/topology.py @@ -31,13 +31,14 @@ class FaucetTopology: INCOMING_ACL_FORMAT = "dp_%s_incoming_acl" PORTSET_ACL_FORMAT = "dp_%s_portset_%d_acl" LOCAL_ACL_FORMAT = "dp_%s_local_acl" - _DEFAULT_STACK_PORT_NAME = "stack_sec" + _DEFAULT_SEC_TRUNK_NAME = "trunk_sec" _MIRROR_IFACE_FORMAT = "mirror-%d" _MIRROR_PORT_BASE = 1000 
_SWITCH_LOCAL_PORT = _MIRROR_PORT_BASE _VLAN_BASE = 1000 PRI_DPID = 1 - PRI_STACK_PORT = 1 + PRI_TRUNK_PORT = 1 + PRI_TRUNK_NAME = 'trunk_pri' _NO_VLAN = "0x0000/0x1000" def __init__(self, config): @@ -92,7 +93,7 @@ def get_sec_dpid(self): return self.sec_dpid def get_sec_port(self): - """Return the secondary stacking port""" + """Return the secondary trunk port""" return self.sec_port def get_device_intfs(self): @@ -173,20 +174,23 @@ def _update_port_vlan(self, port_no, port_set): def _port_set_vlan(self, port_set=None): return self._VLAN_BASE + (port_set if port_set else 0) - def _make_pri_stack_interface(self): + def _make_pri_trunk_interface(self): interface = {} interface['acl_in'] = self.INCOMING_ACL_FORMAT % self.pri_name - interface['stack'] = {'dp': self.sec_name, 'port': self.sec_port} - interface['name'] = 'stack_pri' + interface['tagged_vlans'] = self._vlan_tags() + interface['name'] = self.PRI_TRUNK_NAME return interface - def _make_sec_stack_interface(self): + def _make_sec_trunk_interface(self): interface = {} interface['acl_in'] = self.INCOMING_ACL_FORMAT % self.sec_name - interface['stack'] = {'dp': self.pri_name, 'port': self.PRI_STACK_PORT} - interface['name'] = self.get_ext_intf() or self._DEFAULT_STACK_PORT_NAME + interface['tagged_vlans'] = self._vlan_tags() + interface['name'] = self.get_ext_intf() or self._DEFAULT_SEC_TRUNK_NAME return interface + def _vlan_tags(self): + return list(range(self._VLAN_BASE, self._VLAN_BASE + self.sec_port)) + def _make_default_acl_rules(self): rules = [] if not self._append_acl_template(rules, 'raw'): @@ -201,7 +205,7 @@ def _make_sec_port_interface(self, port_no): def _make_pri_interfaces(self): interfaces = {} - interfaces[self.PRI_STACK_PORT] = self._make_pri_stack_interface() + interfaces[self.PRI_TRUNK_PORT] = self._make_pri_trunk_interface() for port_set in range(1, self.sec_port): for port in self._get_gw_ports(port_set): interfaces[port] = self._make_gw_interface(port_set) @@ -212,7 +216,7 @@ def 
_make_pri_interfaces(self): def _make_sec_interfaces(self): interfaces = {} - interfaces[self.sec_port] = self._make_sec_stack_interface() + interfaces[self.sec_port] = self._make_sec_trunk_interface() for port in range(1, self.sec_port): interfaces[port] = self._make_sec_port_interface(port) return interfaces @@ -227,23 +231,24 @@ def _make_acl_include(self): def _make_pri_topology(self): pri_dp = {} pri_dp['dp_id'] = self.PRI_DPID - pri_dp['name'] = self.pri_name - pri_dp['stack'] = {'priority':1} pri_dp['interfaces'] = self._make_pri_interfaces() return pri_dp def _make_sec_topology(self): sec_dp = {} sec_dp['dp_id'] = self.sec_dpid - sec_dp['name'] = self.sec_name sec_dp['interfaces'] = self._make_sec_interfaces() return sec_dp + def _has_sec_switch(self): + return self.sec_dpid and self.sec_port + def _make_base_network_topology(self): assert self.pri, 'pri dataplane not configured' dps = {} dps['pri'] = self._make_pri_topology() - dps['sec'] = self._make_sec_topology() + if self._has_sec_switch(): + dps['sec'] = self._make_sec_topology() topology = {} topology['dps'] = dps topology['vlans'] = self._make_vlan_description(10) diff --git a/daq/utils.py b/daq/utils.py index 1bc3a75c1d..6bb093d484 100644 --- a/daq/utils.py +++ b/daq/utils.py @@ -1,8 +1,7 @@ """Utility functions for DAQ""" -import yaml - from google.protobuf import json_format +import yaml def yaml_proto(file_name, proto_func): diff --git a/docker/include/bacnet/bacnetFaux/.idea/google-java-format.xml b/docker/include/bacnet/bacnetFaux/.idea/google-java-format.xml new file mode 100644 index 0000000000..8b57f4527a --- /dev/null +++ b/docker/include/bacnet/bacnetFaux/.idea/google-java-format.xml @@ -0,0 +1,6 @@ + + + + + \ No newline at end of file diff --git a/validator/.idea/gradle.xml b/docker/include/bacnet/bacnetFaux/.idea/gradle.xml similarity index 74% rename from validator/.idea/gradle.xml rename to docker/include/bacnet/bacnetFaux/.idea/gradle.xml index 854749173b..d50e06cdb1 100644 --- 
a/validator/.idea/gradle.xml +++ b/docker/include/bacnet/bacnetFaux/.idea/gradle.xml @@ -1,9 +1,9 @@ - - \ No newline at end of file + diff --git a/docker/include/bacnet/bacnetFaux/.idea/libraries/bacnet4j.xml b/docker/include/bacnet/bacnetFaux/.idea/libraries/bacnet4j.xml new file mode 100644 index 0000000000..fc50401e9e --- /dev/null +++ b/docker/include/bacnet/bacnetFaux/.idea/libraries/bacnet4j.xml @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/pubber/.idea/misc.xml b/docker/include/bacnet/bacnetFaux/.idea/misc.xml similarity index 59% rename from pubber/.idea/misc.xml rename to docker/include/bacnet/bacnetFaux/.idea/misc.xml index 012255a52d..bc8d0a3a63 100644 --- a/pubber/.idea/misc.xml +++ b/docker/include/bacnet/bacnetFaux/.idea/misc.xml @@ -1,6 +1,7 @@ - + + \ No newline at end of file diff --git a/docker/include/bacnet/bacnetFaux/build.gradle b/docker/include/bacnet/bacnetFaux/build.gradle new file mode 100644 index 0000000000..85657774fd --- /dev/null +++ b/docker/include/bacnet/bacnetFaux/build.gradle @@ -0,0 +1,41 @@ +plugins { + id 'java' +} + +group 'bacnetFaux' +version '1.0-SNAPSHOT' + +sourceCompatibility = 1.8 + +repositories { + mavenCentral() +} + +dependencies { + testCompile group: 'junit', name: 'junit', version: '4.13' + implementation fileTree(dir: 'libs', include: ['*.jar']) + implementation 'com.googlecode.json-simple:json-simple:1.1.1' +} + +jar { + manifest { + attributes 'Main-Class': 'Main' + } +} + +task fatJar(type: Jar) { + manifest.from jar.manifest + classifier = 'all' + from { + configurations.runtimeClasspath.collect { it.isDirectory() ? 
it : zipTree(it) } + } { + exclude "META-INF/*.SF" + exclude "META-INF/*.DSA" + exclude "META-INF/*.RSA" + } + with jar +} + +artifacts { + archives fatJar +} diff --git a/subset/security/security_passwords/gradle/wrapper/gradle-wrapper.jar b/docker/include/bacnet/bacnetFaux/gradle/wrapper/gradle-wrapper.jar old mode 100755 new mode 100644 similarity index 100% rename from subset/security/security_passwords/gradle/wrapper/gradle-wrapper.jar rename to docker/include/bacnet/bacnetFaux/gradle/wrapper/gradle-wrapper.jar diff --git a/validator/gradle/wrapper/gradle-wrapper.properties b/docker/include/bacnet/bacnetFaux/gradle/wrapper/gradle-wrapper.properties similarity index 92% rename from validator/gradle/wrapper/gradle-wrapper.properties rename to docker/include/bacnet/bacnetFaux/gradle/wrapper/gradle-wrapper.properties index 9e6fcc10e9..12d38de6a4 100644 --- a/validator/gradle/wrapper/gradle-wrapper.properties +++ b/docker/include/bacnet/bacnetFaux/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,5 @@ -distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-bin.zip distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -zipStorePath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-6.6.1-bin.zip zipStoreBase=GRADLE_USER_HOME +zipStorePath=wrapper/dists diff --git a/subset/connection/mac_oui/gradlew b/docker/include/bacnet/bacnetFaux/gradlew similarity index 100% rename from subset/connection/mac_oui/gradlew rename to docker/include/bacnet/bacnetFaux/gradlew diff --git a/subset/security/security_passwords/gradlew.bat b/docker/include/bacnet/bacnetFaux/gradlew.bat old mode 100755 new mode 100644 similarity index 100% rename from subset/security/security_passwords/gradlew.bat rename to docker/include/bacnet/bacnetFaux/gradlew.bat diff --git a/docker/include/bacnet/bacnetFaux/settings.gradle b/docker/include/bacnet/bacnetFaux/settings.gradle new file mode 100644 index 0000000000..600831e41d --- /dev/null +++ 
b/docker/include/bacnet/bacnetFaux/settings.gradle @@ -0,0 +1,2 @@ +rootProject.name = 'bacnet' + diff --git a/subset/bacnet/bacnetTests/src/main/java/FauxDeviceEngine/Analog.java b/docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/Analog.java similarity index 100% rename from subset/bacnet/bacnetTests/src/main/java/FauxDeviceEngine/Analog.java rename to docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/Analog.java diff --git a/subset/bacnet/bacnetTests/src/main/java/FauxDeviceEngine/Binary.java b/docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/Binary.java similarity index 100% rename from subset/bacnet/bacnetTests/src/main/java/FauxDeviceEngine/Binary.java rename to docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/Binary.java diff --git a/subset/bacnet/bacnetTests/src/main/java/FauxDeviceEngine/EntryPoint.java b/docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/EntryPoint.java similarity index 100% rename from subset/bacnet/bacnetTests/src/main/java/FauxDeviceEngine/EntryPoint.java rename to docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/EntryPoint.java diff --git a/subset/bacnet/bacnetTests/src/main/java/FauxDeviceEngine/JSON.java b/docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/JSON.java similarity index 100% rename from subset/bacnet/bacnetTests/src/main/java/FauxDeviceEngine/JSON.java rename to docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/JSON.java diff --git a/subset/bacnet/bacnetTests/src/main/java/FauxDeviceEngine/helper/Device.java b/docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/helper/Device.java similarity index 100% rename from subset/bacnet/bacnetTests/src/main/java/FauxDeviceEngine/helper/Device.java rename to docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/helper/Device.java diff --git a/docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/helper/FileManager.java 
b/docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/helper/FileManager.java new file mode 100644 index 0000000000..35ef1bc27c --- /dev/null +++ b/docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/helper/FileManager.java @@ -0,0 +1,74 @@ +package helper; + +import java.io.File; + +public class FileManager { + + private String filePath = ""; + private String csvName = "pics"; + private String csvExtension = ".csv"; + private boolean debug = false; + + /** + * Checks if pics.csv exists. + * @return if pics.csv exists + */ + public boolean checkDevicePicCSV() { + String csvFolder = getCSVPath(); + try { + File[] listFiles = new File(csvFolder).listFiles(); + for (int i = 0; i < listFiles.length; i++) { + if (listFiles[i].isFile()) { + String fileName = listFiles[i].getName(); + if (fileName.contains(csvName) + && fileName.endsWith(csvExtension)) { + System.out.println("pics.csv file found in " + csvFolder); + setFilePath(fileName); + return true; + } + } + } + String errorMessage = "pics.csv not found.\n"; + System.err.println(errorMessage); + } catch (Exception e) { + System.out.println("Error in reading " + csvName + csvExtension + " in " + csvFolder); + } + return false; + } + + private void setFilePath(String fileName) { + String absolutePath = getCSVPath(); + this.filePath = absolutePath + "/" + fileName; + } + + public String getFilePath() { + return this.filePath; + } + + /** + * Returns absolute path to the working directory. 
+ */ + public String getAbsolutePath() { + String absolutePath = ""; + String systemPath = System.getProperty("user.dir"); + System.out.println("system_path: " + systemPath); + String[] pathArr = systemPath.split("/"); + for (int count = 0; count < pathArr.length; count++) { + if (pathArr[count].equals("bacnetTests")) { + break; + } + absolutePath += pathArr[count] + "/"; + } + return absolutePath; + } + + /** + * Returns directory pics.csv is located within test container + */ + public String getCSVPath() { + if (debug) { + return "src/main/resources"; + } + return "/config/type"; + } +} diff --git a/docker/include/bacnet/bacnetFaux/src/main/resources/Faux-Device-Fail.json b/docker/include/bacnet/bacnetFaux/src/main/resources/Faux-Device-Fail.json new file mode 100644 index 0000000000..29324d53d3 --- /dev/null +++ b/docker/include/bacnet/bacnetFaux/src/main/resources/Faux-Device-Fail.json @@ -0,0 +1,184 @@ +[ + { + "DeviceID": "1234", + "AnalogInput01": { + "Object_Name": "device_run_command", + "Device_Type": "0to1Volts", + "Deadband": "21f", + "Resolution": "0.3f", + "Event_Enable": "true true false", + "Event_State": "0", + "Object_Type": "0", + "Time_Delay_Normal": "0", + "Low_Limit": "0", + "Limit_Enable": "false false", + "Cov_Increment": "1.0f", + "Status_Flags": "false false false false", + "Update_Interval": "1001", + "Acked_Transitions": "true true true", + "High_Limit": "0", + "Notify_Type": "0", + "Event_Detection_Enable": "false", + "Min_Pres_Value": "103.78f", + "Max_Pres_Value": "145.89f", + "Reliability": "4", + "Event_Message_Texts": "true", + "Notification_Class": "3", + "Description": "Faux-Device created to run inside DAQ", + "Event_Algorithm_Inhibit": "false", + "Units": "64", + "Profile_Name": "FD-01", + "Out_Of_Service": "false" + } + }, + { + "AnalogOutput01": { + "Event_State": "0", + "Time_Delay_Normal": "0", + "Reliability": "4", + "Resolution": "0.3f", + "Event_Algorithm_Inhibit": "false", + "Out_Of_Service": "false", + "Status_Flags": 
"false false false false", + "Object_Type": "1", + "Description": "Faux-Device created to run inside DAQ", + "Low_Limit": "0", + "Deadband": "22f", + "Cov_Increment": "1.0f", + "Limit_Enable": "false false", + "Object_Name": "fan_run_speed_percentage_command", + "Notification_Class": "3", + "Units": "62", + "Notify_Type": "0", + "Device_Type": "0to1Volts", + "Event_Enable": "true true false", + "Event_Detection_Enable": "false", + "Event_Message_Texts": "true", + "High_Limit": "0", + "Acked_Transitions": "true true true", + "Priority_Array": "true", + "Relinquish_Default": "0.1" + } + }, + { + "AnalogValue01": { + "Object_Name":"chilled_water_valve_percentage_setpoint", + "Present_Value":"5.1f", + "Deadband": "21.0f", + "Out_Of_Service": "false", + "Event_Message_Texts": "true", + "Event_State": "1", + "Time_Delay_Normal": "1", + "Low_Limit": "0", + "Object_Type": "2", + "Cov_Increment": "1.2f", + "Limit_Enable": "false false", + "Status_Flags": "false true false true", + "Acked_Transitions": "true false true", + "High_Limit": "0", + "Notify_Type": "0", + "Event_Detection_Enable": "false", + "Reliability": "4", + "Notification_Class": "2", + "Description": "Faux-Device created to run inside DAQ", + "Units": "62", + "Event_Algorithm_Inhibit": "false" + } + }, + { + "BinaryInput01": { + "Object_Name":"chiller_water_valve_percentage_command", + "Present_Value":"5", + "Out_Of_Service": "false", + "Active_Text": "TRUE", + "Time_Of_State_Count_Reset": "13/05/2019", + "Event_Enable": "true true false", + "Change_Of_State_Count": "0", + "Event_State": "0", + "Object_Type": "3", + "Time_Delay_Normal": "0", + "Inactive_Text": "FALSE", + "Alarm_Value": "0", + "Acked_Transitions": "true false true", + "Status_Flags": "false false false false", + "Change_Of_State_Time": "13/05/2019", + "Notify_Type": "0", + "Time_Of_Active_Time_Reset": "13/05/2019", + "Event_Detection_Enable": "false", + "Reliability": "4", + "Event_Message_Texts": "true", + "Elapsed_Active_Time": "0", + 
"Notification_Class": "3", + "Description": "Faux-Device created to run inside DAQ", + "Event_Algorithm_Inhibit": "false", + "Polarity": "0", + "Device_Type": "0to1Volts" + } + }, + { + "BinaryOutput01": { + "Object_Name":"fun_run_command", + "Present_Value":"5", + "Out_Of_Service": "false", + "Active_Text": "TRUE", + "Time_Of_State_Count_Reset": "13/05/2019", + "Event_Enable": "true true false", + "Change_Of_State_Count": "0", + "Event_State": "0", + "Object_Type": "4", + "Time_Delay_Normal": "0", + "Inactive_Text": "FALSE", + "Alarm_Value": "0", + "Acked_Transitions": "true false true", + "Status_Flags": "false false false false", + "Change_Of_State_Time": "13/05/2019", + "Notify_Type": "0", + "Time_Of_Active_Time_Reset": "13/05/2019", + "Event_Detection_Enable": "false", + "Reliability": "4", + "Event_Message_Texts": "true", + "Elapsed_Active_Time": "0", + "Notification_Class": "3", + "Description": "Faux-Device created to run inside DAQ", + "Event_Algorithm_Inhibit": "false", + "Minimum_On_Time": "0", + "Minimum_Off_Time": "0", + "Relinquish_Default": "0.0", + "Feedback_Value": "0", + "Polarity": "0", + "Device_Type": "0to1Volts" + } + }, + { + "BinaryValue01": { + "Object_Name":"device1_run_command", + "Present_Value":"5", + "Out_Of_Service": "false", + "Active_Text": "TRUE", + "Time_Of_State_Count_Reset": "13/05/2019", + "Event_Enable": "true true false", + "Change_Of_State_Count": "0", + "Event_State": "0", + "Object_Type": "4", + "Time_Delay_Normal": "0", + "Inactive_Text": "FALSE", + "Alarm_Value": "0", + "Acked_Transitions": "true false true", + "Status_Flags": "false false false false", + "Change_Of_State_Time": "13/05/2019", + "Notify_Type": "0", + "Time_Of_Active_Time_Reset": "13/05/2019", + "Event_Detection_Enable": "false", + "Reliability": "4", + "Event_Message_Texts": "true", + "Elapsed_Active_Time": "0", + "Notification_Class": "3", + "Description": "Faux-Device created to run inside DAQ", + "Event_Algorithm_Inhibit": "false", + "Minimum_On_Time": 
"0", + "Minimum_Off_Time": "0", + "Relinquish_Default": "0.0", + "Feedback_Value": "0" + } + } +] diff --git a/docker/include/bacnet/bacnetFaux/src/main/resources/Faux-Device-Pass.json b/docker/include/bacnet/bacnetFaux/src/main/resources/Faux-Device-Pass.json new file mode 100644 index 0000000000..678817d3ff --- /dev/null +++ b/docker/include/bacnet/bacnetFaux/src/main/resources/Faux-Device-Pass.json @@ -0,0 +1,185 @@ +[ + { + "DeviceID": "1234", + "AnalogInput01": { + "Present_Value": "5.1", + "Object_Name": "device_run_command", + "Device_Type": "0to1Volts", + "Deadband": "21f", + "Resolution": "0.3f", + "Event_Enable": "true true false", + "Event_State": "0", + "Object_Type": "0", + "Time_Delay_Normal": "0", + "Low_Limit": "0", + "Limit_Enable": "false false", + "Cov_Increment": "1.0f", + "Status_Flags": "false false false false", + "Update_Interval": "1001", + "Acked_Transitions": "true true true", + "High_Limit": "0", + "Notify_Type": "0", + "Event_Detection_Enable": "false", + "Min_Pres_Value": "103.78f", + "Max_Pres_Value": "145.89f", + "Reliability": "4", + "Event_Message_Texts": "true", + "Notification_Class": "3", + "Description": "Faux-Device created to run inside DAQ", + "Event_Algorithm_Inhibit": "false", + "Units": "64", + "Profile_Name": "FD-01", + "Out_Of_Service": "false" + } + }, + { + "AnalogOutput01": { + "Event_State": "0", + "Time_Delay_Normal": "0", + "Reliability": "4", + "Resolution": "0.3f", + "Event_Algorithm_Inhibit": "false", + "Out_Of_Service": "false", + "Status_Flags": "false false false false", + "Object_Type": "1", + "Description": "Faux-Device created to run inside DAQ", + "Low_Limit": "0", + "Deadband": "22f", + "Cov_Increment": "1.0f", + "Limit_Enable": "false false", + "Object_Name": "fan_run_speed_percentage_command", + "Notification_Class": "3", + "Units": "62", + "Notify_Type": "0", + "Device_Type": "0to1Volts", + "Event_Enable": "true true false", + "Event_Detection_Enable": "false", + "Event_Message_Texts": "true", + 
"High_Limit": "0", + "Acked_Transitions": "true true true", + "Priority_Array": "true", + "Relinquish_Default": "0.1" + } + }, + { + "AnalogValue01": { + "Object_Name":"chilled_water_valve_percentage_setpoint", + "Present_Value":"5.1f", + "Deadband": "21.0f", + "Out_Of_Service": "false", + "Event_Message_Texts": "true", + "Event_State": "1", + "Time_Delay_Normal": "1", + "Low_Limit": "0", + "Object_Type": "2", + "Cov_Increment": "1.2f", + "Limit_Enable": "false false", + "Status_Flags": "false true false true", + "Acked_Transitions": "true false true", + "High_Limit": "0", + "Notify_Type": "0", + "Event_Detection_Enable": "false", + "Reliability": "4", + "Notification_Class": "2", + "Description": "Faux-Device created to run inside DAQ", + "Units": "62", + "Event_Algorithm_Inhibit": "false" + } + }, + { + "BinaryInput01": { + "Object_Name":"chiller_water_valve_percentage_command", + "Present_Value":"5", + "Out_Of_Service": "false", + "Active_Text": "TRUE", + "Time_Of_State_Count_Reset": "13/05/2019", + "Event_Enable": "true true false", + "Change_Of_State_Count": "0", + "Event_State": "0", + "Object_Type": "3", + "Time_Delay_Normal": "0", + "Inactive_Text": "FALSE", + "Alarm_Value": "0", + "Acked_Transitions": "true false true", + "Status_Flags": "false false false false", + "Change_Of_State_Time": "13/05/2019", + "Notify_Type": "0", + "Time_Of_Active_Time_Reset": "13/05/2019", + "Event_Detection_Enable": "false", + "Reliability": "4", + "Event_Message_Texts": "true", + "Elapsed_Active_Time": "0", + "Notification_Class": "3", + "Description": "Faux-Device created to run inside DAQ", + "Event_Algorithm_Inhibit": "false", + "Polarity": "0", + "Device_Type": "0to1Volts" + } + }, + { + "BinaryOutput01": { + "Object_Name":"fun_run_command", + "Present_Value":"5", + "Out_Of_Service": "false", + "Active_Text": "TRUE", + "Time_Of_State_Count_Reset": "13/05/2019", + "Event_Enable": "true true false", + "Change_Of_State_Count": "0", + "Event_State": "0", + "Object_Type": 
"4", + "Time_Delay_Normal": "0", + "Inactive_Text": "FALSE", + "Alarm_Value": "0", + "Acked_Transitions": "true false true", + "Status_Flags": "false false false false", + "Change_Of_State_Time": "13/05/2019", + "Notify_Type": "0", + "Time_Of_Active_Time_Reset": "13/05/2019", + "Event_Detection_Enable": "false", + "Reliability": "4", + "Event_Message_Texts": "true", + "Elapsed_Active_Time": "0", + "Notification_Class": "3", + "Description": "Faux-Device created to run inside DAQ", + "Event_Algorithm_Inhibit": "false", + "Minimum_On_Time": "0", + "Minimum_Off_Time": "0", + "Relinquish_Default": "0.0", + "Feedback_Value": "0", + "Polarity": "0", + "Device_Type": "0to1Volts" + } + }, + { + "BinaryValue01": { + "Object_Name":"device1_run_command", + "Present_Value":"5", + "Out_Of_Service": "false", + "Active_Text": "TRUE", + "Time_Of_State_Count_Reset": "13/05/2019", + "Event_Enable": "true true false", + "Change_Of_State_Count": "0", + "Event_State": "0", + "Object_Type": "4", + "Time_Delay_Normal": "0", + "Inactive_Text": "FALSE", + "Alarm_Value": "0", + "Acked_Transitions": "true false true", + "Status_Flags": "false false false false", + "Change_Of_State_Time": "13/05/2019", + "Notify_Type": "0", + "Time_Of_Active_Time_Reset": "13/05/2019", + "Event_Detection_Enable": "false", + "Reliability": "4", + "Event_Message_Texts": "true", + "Elapsed_Active_Time": "0", + "Notification_Class": "3", + "Description": "Faux-Device created to run inside DAQ", + "Event_Algorithm_Inhibit": "false", + "Minimum_On_Time": "0", + "Minimum_Off_Time": "0", + "Relinquish_Default": "0.0", + "Feedback_Value": "0" + } + } +] diff --git a/docker/include/bacnet/bacnetFaux/src/main/resources/pics.csv b/docker/include/bacnet/bacnetFaux/src/main/resources/pics.csv new file mode 100644 index 0000000000..aab3985dec --- /dev/null +++ b/docker/include/bacnet/bacnetFaux/src/main/resources/pics.csv @@ -0,0 +1,486 @@ 
+Bacnet_Object_Type,Bacnet_Object_Property,Property_Datatype,Conformance_Code,Supported,
+Bacnet_Analogue_Input,Object_Identifier,BACnetObjectIdentifier,R,TRUE,
+Bacnet_Analogue_Input,Object_Name,CharacterString,W,TRUE,
+Bacnet_Analogue_Input,Object_Type,BACnetObjectType,R,TRUE,
+Bacnet_Analogue_Input,Present_Value,REAL,R,TRUE,
+Bacnet_Analogue_Input,Description,CharacterString,O,TRUE,
+Bacnet_Analogue_Input,Device_Type,,O,TRUE,
+Bacnet_Analogue_Input,Status_Flags,BACnetStatusFlags,R,TRUE,
+Bacnet_Analogue_Input,Event_State,BACnetEventState,R,TRUE,
+Bacnet_Analogue_Input,Reliability,BACnetReliability,O,TRUE,
+Bacnet_Analogue_Input,Out_Of_Service,BOOLEAN,W,TRUE,
+Bacnet_Analogue_Input,Update_Interval,,O,TRUE,
+Bacnet_Analogue_Input,Units,,R,TRUE,
+Bacnet_Analogue_Input,Min_Pres_Value,REAL,O,TRUE,
+Bacnet_Analogue_Input,Max_Pres_Value,REAL,O,TRUE,
+Bacnet_Analogue_Input,Resolution,,O,TRUE,
+Bacnet_Analogue_Input,COV_Increment,REAL,O,TRUE,
+Bacnet_Analogue_Input,COV_Period,,O,TRUE,
+Bacnet_Analogue_Input,COV_Min_Send_Time,,O,TRUE,
+Bacnet_Analogue_Input,Time_Delay,,O,TRUE,
+Bacnet_Analogue_Input,Notification_Class,Unsigned,O,TRUE,
+Bacnet_Analogue_Input,High_Limit,,O,TRUE,
+Bacnet_Analogue_Input,Low_Limit,,O,TRUE,
+Bacnet_Analogue_Input,Deadband,,O,TRUE,
+Bacnet_Analogue_Input,Limit_Enable,,O,TRUE,
+Bacnet_Analogue_Input,Event_Enable,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Analogue_Input,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Analogue_Input,Notify_Type,BACnetNotifyType,O,TRUE,
+Bacnet_Analogue_Input,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
+Bacnet_Analogue_Input,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Analogue_Input,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Analogue_Input,Event_Detection_Enable,BOOLEAN,O,TRUE,
+Bacnet_Analogue_Input,Time_Delay_Normal,,O,TRUE,
+Bacnet_Analogue_Input,Event_Algorithm_Inhibit,,O,TRUE,
+Bacnet_Analogue_Input,Event_Algorithm_Inhibit_Ref,,O,TRUE,
+Bacnet_Analogue_Input,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE, +Bacnet_Analogue_Output,Object_Identifier,BACnetObjectIdentifier,R,TRUE, +Bacnet_Analogue_Output,Object_Name,CharacterString,W,TRUE, +Bacnet_Analogue_Output,Object_Type,BACnetObjectType,R,TRUE, +Bacnet_Analogue_Output,Present_Value,REAL,W,TRUE, +Bacnet_Analogue_Output,Description,CharacterString,O,TRUE, +Bacnet_Analogue_Output,Device_Type,,O,TRUE, +Bacnet_Analogue_Output,Status_Flags,BACnetStatusFlags,R,TRUE, +Bacnet_Analogue_Output,Event_State,BACnetEventState,R,TRUE, +Bacnet_Analogue_Output,Reliability,BACnetReliability,O,TRUE, +Bacnet_Analogue_Output,Out_Of_Service,BOOLEAN,W,TRUE, +Bacnet_Analogue_Output,Units,,R,TRUE, +Bacnet_Analogue_Output,Min_Present_Value,,O,TRUE, +Bacnet_Analogue_Output,Max_Present_Value,,O,TRUE, +Bacnet_Analogue_Output,Resolution,,O,TRUE, +Bacnet_Analogue_Output,Priority_Array,,R,TRUE, +Bacnet_Analogue_Output,Relinquish_Default,,W,TRUE, +Bacnet_Analogue_Output,COV_Increment,REAL,O,TRUE, +Bacnet_Analogue_Output,COV_Period,,O,TRUE, +Bacnet_Analogue_Output,COV_Min_Send_Time,,O,TRUE, +Bacnet_Analogue_Output,Time_Delay,,O,TRUE, +Bacnet_Analogue_Output,Notification_Class,Unsigned,O,TRUE, +Bacnet_Analogue_Output,High_Limit,,O,TRUE, +Bacnet_Analogue_Output,Low_Limit,,O,TRUE, +Bacnet_Analogue_Output,Deadband,,O,TRUE, +Bacnet_Analogue_Output,Limit_Enable,,O,TRUE, +Bacnet_Analogue_Output,Event_Enable,BACnetEventTransitionBits,O,TRUE, +Bacnet_Analogue_Output,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, +Bacnet_Analogue_Output,Notify_Type,BACnetNotifyType,O,TRUE, +Bacnet_Analogue_Output,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, +Bacnet_Analogue_Output,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Analogue_Output,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Analogue_Output,Event_Detection_Enable,BOOLEAN,O,TRUE, +Bacnet_Analogue_Output,Time_Delay_Normal,,O,TRUE, 
+Bacnet_Analogue_Output,Event_Algorithm_Inhibit,,O,TRUE, +Bacnet_Analogue_Output,Event_Algorithm_Inhibit_Ref,,O,TRUE, +Bacnet_Analogue_Output,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE, +Bacnet_Analogue_Value,Object_Identifier,BACnetObjectIdentifier,O,TRUE, +Bacnet_Analogue_Value,Object_Name,CharacterString,O,TRUE, +Bacnet_Analogue_Value,Object_Type,BACnetObjectType,O,TRUE, +Bacnet_Analogue_Value,Present_Value,REAL,O,TRUE, +Bacnet_Analogue_Value,Description,CharacterString,O,TRUE, +Bacnet_Analogue_Value,Status_Flags,BACnetStatusFlags,O,TRUE, +Bacnet_Analogue_Value,Event_State,BACnetEventState,O,TRUE, +Bacnet_Analogue_Value,Reliability,BACnetReliability,O,TRUE, +Bacnet_Analogue_Value,Out_Of_Service,BOOLEAN,O,TRUE, +Bacnet_Analogue_Value,Units,,O,TRUE, +Bacnet_Analogue_Value,Priority_Array,,O,TRUE, +Bacnet_Analogue_Value,Relinquish_Default,,O,TRUE, +Bacnet_Analogue_Value,Min_Present_Value,,O,TRUE, +Bacnet_Analogue_Value,Max_Present_Value,,O,TRUE, +Bacnet_Analogue_Value,COV_Increment,REAL,O,TRUE, +Bacnet_Analogue_Value,COV_Period,,O,TRUE, +Bacnet_Analogue_Value,COV_Min_Send_Time,,O,TRUE, +Bacnet_Analogue_Value,Time_Delay,,O,TRUE, +Bacnet_Analogue_Value,Notification_Class,Unsigned,O,TRUE, +Bacnet_Analogue_Value,High_Limit,,O,TRUE, +Bacnet_Analogue_Value,Low_Limit,,O,TRUE, +Bacnet_Analogue_Value,Deadband,,O,TRUE, +Bacnet_Analogue_Value,Limit_Enable,,O,TRUE, +Bacnet_Analogue_Value,Event_Enable,BACnetEventTransitionBits,O,TRUE, +Bacnet_Analogue_Value,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, +Bacnet_Analogue_Value,Notify_Type,BACnetNotifyType,O,TRUE, +Bacnet_Analogue_Value,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, +Bacnet_Analogue_Value,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Analogue_Value,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Analogue_Value,Event_Detection_Enable,BOOLEAN,O,TRUE, +Bacnet_Analogue_Value,Time_Delay_Normal,,O,TRUE, 
+Bacnet_Analogue_Value,Event_Algorithm_Inhibit,,O,TRUE, +Bacnet_Analogue_Value,Event_Algorithm_Inhibit_Ref,,O,TRUE, +Bacnet_Analogue_Value,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE, +Bacnet_Binary_Input,Object_Identifier,BACnetObjectIdentifier,R,TRUE, +Bacnet_Binary_Input,Object_Name,CharacterString,W,TRUE, +Bacnet_Binary_Input,Object_Type,BACnetObjectType,R,TRUE, +Bacnet_Binary_Input,Present_Value,REAL,R,TRUE, +Bacnet_Binary_Input,Description,CharacterString,O,TRUE, +Bacnet_Binary_Input,Device_Type,,O,TRUE, +Bacnet_Binary_Input,Status_Flags,BACnetStatusFlags,R,TRUE, +Bacnet_Binary_Input,Event_State,BACnetEventState,R,TRUE, +Bacnet_Binary_Input,Reliability,BACnetReliability,O,TRUE, +Bacnet_Binary_Input,Out_Of_Service,BOOLEAN,W,TRUE, +Bacnet_Binary_Input,Polarity,,R,TRUE, +Bacnet_Binary_Input,Inactive_Text,,O,TRUE, +Bacnet_Binary_Input,Active_Text,,O,TRUE, +Bacnet_Binary_Input,Change_Of_State_Time,,O,TRUE, +Bacnet_Binary_Input,Change_Of_State_Count,,O,TRUE, +Bacnet_Binary_Input,Time_Of_State_Count_Reset,,O,TRUE, +Bacnet_Binary_Input,Elapsed_Active_Time,,O,TRUE, +Bacnet_Binary_Input,Time_Of_Active_Time_Reset,,O,TRUE, +Bacnet_Binary_Input,COV_Period,,O,TRUE, +Bacnet_Binary_Input,COV_Min_Send_Time,,O,TRUE, +Bacnet_Binary_Input,Time_Delay,,O,TRUE, +Bacnet_Binary_Input,Notification_Class,Unsigned,O,TRUE, +Bacnet_Binary_Input,Alarm_Value,,O,TRUE, +Bacnet_Binary_Input,Event_Enable,BACnetEventTransitionBits,O,TRUE, +Bacnet_Binary_Input,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, +Bacnet_Binary_Input,Notify_Type,BACnetNotifyType,O,TRUE, +Bacnet_Binary_Input,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, +Bacnet_Binary_Input,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Binary_Input,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Binary_Input,Event_Detection_Enable,BOOLEAN,O,TRUE, +Bacnet_Binary_Input,Time_Delay_Normal,,O,TRUE, +Bacnet_Binary_Input,Event_Algorithm_Inhibit,,O,TRUE, 
+Bacnet_Binary_Input,Event_Algorithm_Inhibit_Ref,,O,TRUE, +Bacnet_Binary_Input,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE, +Bacnet_Binary_Output,Object_Identifier,BACnetObjectIdentifier,O,TRUE, +Bacnet_Binary_Output,Object_Name,CharacterString,O,TRUE, +Bacnet_Binary_Output,Object_Type,BACnetObjectType,O,TRUE, +Bacnet_Binary_Output,Present_Value,REAL,O,TRUE, +Bacnet_Binary_Output,Description,CharacterString,O,TRUE, +Bacnet_Binary_Output,Device_Type,,O,TRUE, +Bacnet_Binary_Output,Status_Flags,BACnetStatusFlags,O,TRUE, +Bacnet_Binary_Output,Event_State,BACnetEventState,O,TRUE, +Bacnet_Binary_Output,Reliability,BACnetReliability,O,TRUE, +Bacnet_Binary_Output,Out_Of_Service,BOOLEAN,O,TRUE, +Bacnet_Binary_Output,Polarity,,O,TRUE, +Bacnet_Binary_Output,Inactive_Text,,O,TRUE, +Bacnet_Binary_Output,Active_Text,,O,TRUE, +Bacnet_Binary_Output,Change_Of_State_Time,,O,TRUE, +Bacnet_Binary_Output,Change_Of_State_Count,,O,TRUE, +Bacnet_Binary_Output,Time_Of_State_Count_Reset,,O,TRUE, +Bacnet_Binary_Output,Elapsed_Active_Time,,O,TRUE, +Bacnet_Binary_Output,Time_Of_Active_Time_Reset,,O,TRUE, +Bacnet_Binary_Output,Minimum_Off_Time,,O,TRUE, +Bacnet_Binary_Output,Minimum_On_Time,,O,TRUE, +Bacnet_Binary_Output,Priority_Array,,O,TRUE, +Bacnet_Binary_Output,Relinquish_Default,,O,TRUE, +Bacnet_Binary_Output,COV_Period,,O,TRUE, +Bacnet_Binary_Output,COV_Min_Send_Time,,O,TRUE, +Bacnet_Binary_Output,Time_Delay,,O,TRUE, +Bacnet_Binary_Output,Notification_Class,Unsigned,O,TRUE, +Bacnet_Binary_Output,Feedback_Value,,O,TRUE, +Bacnet_Binary_Output,Event_Enable,BACnetEventTransitionBits,O,TRUE, +Bacnet_Binary_Output,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, +Bacnet_Binary_Output,Notify_Type,BACnetNotifyType,O,TRUE, +Bacnet_Binary_Output,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, +Bacnet_Binary_Output,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Binary_Output,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, 
+Bacnet_Binary_Output,Event_Detection_Enable,BOOLEAN,O,TRUE, +Bacnet_Binary_Output,Time_Delay_Normal,,O,TRUE, +Bacnet_Binary_Output,Event_Algorithm_Inhibit,,O,TRUE, +Bacnet_Binary_Output,Event_Algorithm_Inhibit_Ref,,O,TRUE, +Bacnet_Binary_Output,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, +Bacnet_Binary_Value,Object_Identifier,BACnetObjectIdentifier,O,TRUE, +Bacnet_Binary_Value,Object_Name,CharacterString,O,TRUE, +Bacnet_Binary_Value,Object_Type,BACnetObjectType,O,TRUE, +Bacnet_Binary_Value,Present_Value,REAL,O,TRUE, +Bacnet_Binary_Value,Description,CharacterString,O,TRUE, +Bacnet_Binary_Value,Status_Flags,BACnetStatusFlags,O,TRUE, +Bacnet_Binary_Value,Event_State,BACnetEventState,O,TRUE, +Bacnet_Binary_Value,Reliability,BACnetReliability,O,TRUE, +Bacnet_Binary_Value,Out_Of_Service,BOOLEAN,O,TRUE, +Bacnet_Binary_Value,Inactive_Text,,O,TRUE, +Bacnet_Binary_Value,Active_Text,,O,TRUE, +Bacnet_Binary_Value,Change_Of_State_Time,,O,TRUE, +Bacnet_Binary_Value,Change_Of_State_Count,,O,TRUE, +Bacnet_Binary_Value,Time_Of_State_Count_Reset,,O,TRUE, +Bacnet_Binary_Value,Elapsed_Active_Time,,O,TRUE, +Bacnet_Binary_Value,Time_Of_Active_Time_Reset,,O,TRUE, +Bacnet_Binary_Value,Minimum_Off_Time,,O,TRUE, +Bacnet_Binary_Value,Minimum_On_Time,,O,TRUE, +Bacnet_Binary_Value,Priority_Array,,O,TRUE, +Bacnet_Binary_Value,Relinquish_Default,,O,TRUE, +Bacnet_Binary_Value,COV_Period,,O,TRUE, +Bacnet_Binary_Value,COV_Min_Send_Time,,O,TRUE, +Bacnet_Binary_Value,Time_Delay,,O,TRUE, +Bacnet_Binary_Value,Notification_Class,Unsigned,O,TRUE, +Bacnet_Binary_Value,Alarm_Value,,O,TRUE, +Bacnet_Binary_Value,Event_Enable,BACnetEventTransitionBits,O,TRUE, +Bacnet_Binary_Value,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, +Bacnet_Binary_Value,Notify_Type,BACnetNotifyType,O,TRUE, +Bacnet_Binary_Value,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, +Bacnet_Binary_Value,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, 
+Bacnet_Binary_Value,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Binary_Value,Event_Detection_Enable,BOOLEAN,O,TRUE, +Bacnet_Binary_Value,Time_Delay_Normal,,O,TRUE, +Bacnet_Binary_Value,Event_Algorithm_Inhibit,,O,TRUE, +Bacnet_Binary_Value,Event_Algorithm_Inhibit_Ref,,O,TRUE, +Bacnet_Binary_Value,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, +Bacnet_Calendar,Object_Identifier,BACnetObjectIdentifier,O,TRUE, +Bacnet_Calendar,Object_Name,CharacterString,O,TRUE, +Bacnet_Calendar,Object_Type,BACnetObjectType,O,TRUE, +Bacnet_Calendar,Present_Value,REAL,O,TRUE, +Bacnet_Calendar,Description,CharacterString,O,TRUE, +Bacnet_Calendar,Date_List,,O,TRUE, +Bacnet_Calendar,Time_To_Next_State,,O,TRUE, +Bacnet_Calendar,Next_State,,O,TRUE, +Bacnet_Calendar,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, +Device,Object_Identifier,BACnetObjectIdentifier,W,TRUE, +Device,Object_Name,CharacterString,W,TRUE, +Device,Object_Type,BACnetObjectType,R,TRUE, +Device,System_Status,,R,TRUE, +Device,Vendor_Name,,R,TRUE, +Device,Vendor_Identifier,,R,TRUE, +Device,Model_Name,,R,TRUE, +Device,Firmware_Revision,,R,TRUE, +Device,Application_Software_Version,,R,TRUE, +Device,Location,,O,TRUE, +Device,Description,CharacterString,O,TRUE, +Device,Protocol_Version,,R,TRUE, +Device,Protocol_Revision,,R,TRUE, +Device,Protocol_Services_Supported,,R,TRUE, +Device,Protocol_Object_Types_Supported,,R,TRUE, +Device,Object_List,,R,TRUE, +Device,Max_APDU_Length_Accepted,,R,TRUE, +Device,Segmentation_Supported,,R,TRUE, +Device,Max_Segments_Accepted,,O,TRUE, +Device,Local_Date,,O,TRUE, +Device,Local_Time,,O,TRUE, +Device,UTC_Offset,,O,TRUE, +Device,Daylight_Savings_Status,,O,TRUE, +Device,Apdu_Segment_Timeout,,O,TRUE, +Device,APDU_Timeout,,W,TRUE, +Device,Number_Of_APDU_Retries,,W,TRUE, +Device,Time_Synchronization_Recipients,,O,TRUE, +Device,Device_Address_Binding,,R,TRUE, +Device,Database_Revision,,R,TRUE, +Device,Configuration_Files,,O,TRUE, 
+Device,Last_Restore_Time,,O,TRUE, +Device,Backup_Failure_Timeout,,O,TRUE, +Device,Backup_Preparation_Time,,O,TRUE, +Device,Restore_Preparation_Time,,O,TRUE, +Device,Restore_Completion_Time,,O,TRUE, +Device,Backup_And_Restore_State,,O,TRUE, +Device,Active_COV_Subscriptions,,O,TRUE, +Device,Last_Restart_Reason,,O,TRUE, +Device,Time_Of_Device_Restart,,O,TRUE, +Device,Restart_Notification_Recipients,,O,TRUE, +Device,Utc_Time_Synchronization_Recipients,,O,TRUE, +Device,Max_Master,,O,TRUE, +Device,Max_Info_Frames,,O,TRUE, +Device,Time_Synchronization_Interval,,O,TRUE, +Device,Align_Intervals,,O,TRUE, +Device,Interval_Offset,,O,TRUE, +Device,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE, +Event_Enrollment,Object_Identifier,BACnetObjectIdentifier,O,TRUE, +Event_Enrollment,Object_Name,CharacterString,O,TRUE, +Event_Enrollment,Object_Type,BACnetObjectType,O,TRUE, +Event_Enrollment,Description,CharacterString,O,TRUE, +Event_Enrollment,Event_Type,,O,TRUE, +Event_Enrollment,Notify_Type,BACnetNotifyType,O,TRUE, +Event_Enrollment,Event_Parameters,,O,TRUE, +Event_Enrollment,Object_Property_Reference,,O,TRUE, +Event_Enrollment,Event_State,BACnetEventState,O,TRUE, +Event_Enrollment,Event_Enable,BACnetEventTransitionBits,O,TRUE, +Event_Enrollment,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, +Event_Enrollment,Notification_Class,Unsigned,O,TRUE, +Event_Enrollment,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, +Event_Enrollment,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, +Event_Enrollment,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, +Event_Enrollment,Event_Detection_Enable,BOOLEAN,O,TRUE, +Event_Enrollment,Time_Delay_Normal,,O,TRUE, +Event_Enrollment,Status_Flags,BACnetStatusFlags,O,TRUE, +Event_Enrollment,Reliability,BACnetReliability,O,TRUE, +Event_Enrollment,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, +Bacnet_File,Object_Identifier,BACnetObjectIdentifier,O,TRUE, 
+Bacnet_File,Object_Name,CharacterString,O,TRUE, +Bacnet_File,Object_Type,BACnetObjectType,O,TRUE, +Bacnet_File,Description,CharacterString,O,TRUE, +Bacnet_File,File_Type,,O,TRUE, +Bacnet_File,File_Size,,O,TRUE, +Bacnet_File,Modification_Date,,O,TRUE, +Bacnet_File,Archive,,O,TRUE, +Bacnet_File,Read_Only,,O,TRUE, +Bacnet_File,File_Access_Method,,O,TRUE, +Bacnet_File,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, +Bacnet_Loop,Object_Identifier,BACnetObjectIdentifier,O,TRUE, +Bacnet_Loop,Object_Name,CharacterString,O,TRUE, +Bacnet_Loop,Object_Type,BACnetObjectType,O,TRUE, +Bacnet_Loop,Present_Value,REAL,O,TRUE, +Bacnet_Loop,Description,CharacterString,O,TRUE, +Bacnet_Loop,Status_Flags,BACnetStatusFlags,O,TRUE, +Bacnet_Loop,Event_State,BACnetEventState,O,TRUE, +Bacnet_Loop,Reliability,BACnetReliability,O,TRUE, +Bacnet_Loop,Out_Of_Service,BOOLEAN,O,TRUE, +Bacnet_Loop,Update_Interval,,O,TRUE, +Bacnet_Loop,Output_Units,,O,TRUE, +Bacnet_Loop,Manipulated_Variable_Reference,,O,TRUE, +Bacnet_Loop,Controlled_Variable_Reference,,O,TRUE, +Bacnet_Loop,Controlled_Variable_Value,,O,TRUE, +Bacnet_Loop,Controlled_Variable_Units,,O,TRUE, +Bacnet_Loop,Setpoint_Reference,,O,TRUE, +Bacnet_Loop,Setpoint,,O,TRUE, +Bacnet_Loop,Action,,O,TRUE, +Bacnet_Loop,Proportional_Constant,,O,TRUE, +Bacnet_Loop,Proportional_Constant_Units,,O,TRUE, +Bacnet_Loop,Integral_Constant,,O,TRUE, +Bacnet_Loop,Integral_Constant_Units,,O,TRUE, +Bacnet_Loop,Derivative_Constant,,O,TRUE, +Bacnet_Loop,Derivative_Constant_Units,,O,TRUE, +Bacnet_Loop,Bias,,O,TRUE, +Bacnet_Loop,Maximum_Output,,O,TRUE, +Bacnet_Loop,Minimum_Output,,O,TRUE, +Bacnet_Loop,Priority_For_Writing,Unsigned(1..16),O,TRUE, +Bacnet_Loop,LoopDeadband,,O,TRUE, +Bacnet_Loop,Saturation_Time,,O,TRUE, +Bacnet_Loop,COV_Increment,REAL,O,TRUE, +Bacnet_Loop,COV_Period,,O,TRUE, +Bacnet_Loop,COV_Min_Send_Time,,O,TRUE, +Bacnet_Loop,Ramp_Time,,O,TRUE, +Bacnet_Loop,Saturation_Time_Low_Limit_Enable,,O,TRUE, 
+Bacnet_Loop,Saturation_Time_High_Limit_Enable,,O,TRUE, +Bacnet_Loop,Time_Delay,,O,TRUE, +Bacnet_Loop,Notification_Class,Unsigned,O,TRUE, +Bacnet_Loop,Error_Limit,,O,TRUE, +Bacnet_Loop,Deadband,,O,TRUE, +Bacnet_Loop,Event_Enable,BACnetEventTransitionBits,O,TRUE, +Bacnet_Loop,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, +Bacnet_Loop,Notify_Type,BACnetNotifyType,O,TRUE, +Bacnet_Loop,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, +Bacnet_Loop,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Loop,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Loop,Event_Detection_Enable,BOOLEAN,O,TRUE, +Bacnet_Loop,Time_Delay_Normal,,O,TRUE, +Bacnet_Loop,Event_Algorithm_Inhibit,,O,TRUE, +Bacnet_Loop,Event_Algorithm_Inhibit_Ref,,O,TRUE, +Bacnet_Loop,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, +Bacnet_Multi-state_Input,Object_Identifier,BACnetObjectIdentifier,O,TRUE, +Bacnet_Multi-state_Input,Object_Name,CharacterString,O,TRUE, +Bacnet_Multi-state_Input,Object_Type,BACnetObjectType,O,TRUE, +Bacnet_Multi-state_Input,Present_Value,REAL,O,TRUE, +Bacnet_Multi-state_Input,Description,CharacterString,O,TRUE, +Bacnet_Multi-state_Input,Device_Type,,O,TRUE, +Bacnet_Multi-state_Input,Status_Flags,BACnetStatusFlags,O,TRUE, +Bacnet_Multi-state_Input,Event_State,BACnetEventState,O,TRUE, +Bacnet_Multi-state_Input,Reliability,BACnetReliability,O,TRUE, +Bacnet_Multi-state_Input,Out_Of_Service,BOOLEAN,O,TRUE, +Bacnet_Multi-state_Input,Number_of_States,,O,TRUE, +Bacnet_Multi-state_Input,State_Text,,O,TRUE, +Bacnet_Multi-state_Input,COV_Period,,O,TRUE, +Bacnet_Multi-state_Input,COV_Min_Send_Time,,O,TRUE, +Bacnet_Multi-state_Input,Time_Delay,,O,TRUE, +Bacnet_Multi-state_Input,Notification_Class,Unsigned,O,TRUE, +Bacnet_Multi-state_Input,Alarm_Values,,O,TRUE, +Bacnet_Multi-state_Input,Fault_Values,,O,TRUE, +Bacnet_Multi-state_Input,Event_Enable,BACnetEventTransitionBits,O,TRUE, 
+Bacnet_Multi-state_Input,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, +Bacnet_Multi-state_Input,Notify_Type,BACnetNotifyType,O,TRUE, +Bacnet_Multi-state_Input,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, +Bacnet_Multi-state_Input,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Multi-state_Input,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Multi-state_Input,Event_Detection_Enable,BOOLEAN,O,TRUE, +Bacnet_Multi-state_Input,Time_Delay_Normal,,O,TRUE, +Bacnet_Multi-state_Input,Event_Algorithm_Inhibit,,O,TRUE, +Bacnet_Multi-state_Input,Event_Algorithm_Inhibit_Ref,,O,TRUE, +Bacnet_Multi-state_Input,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, +Bacnet_Multi-state_Value,Object_Identifier,BACnetObjectIdentifier,O,TRUE, +Bacnet_Multi-state_Value,Object_Name,CharacterString,O,TRUE, +Bacnet_Multi-state_Value,Object_Type,BACnetObjectType,O,TRUE, +Bacnet_Multi-state_Value,Present_Value,REAL,O,TRUE, +Bacnet_Multi-state_Value,Description,CharacterString,O,TRUE, +Bacnet_Multi-state_Value,Status_Flags,BACnetStatusFlags,O,TRUE, +Bacnet_Multi-state_Value,Event_State,BACnetEventState,O,TRUE, +Bacnet_Multi-state_Value,Reliability,BACnetReliability,O,TRUE, +Bacnet_Multi-state_Value,Out_Of_Service,BOOLEAN,O,TRUE, +Bacnet_Multi-state_Value,Number_of_States,,O,TRUE, +Bacnet_Multi-state_Value,State_Text,,O,TRUE, +Bacnet_Multi-state_Value,Priority_Array,,O,TRUE, +Bacnet_Multi-state_Value,Relinquish_Default,,O,TRUE, +Bacnet_Multi-state_Value,COV_Period,,O,TRUE, +Bacnet_Multi-state_Value,COV_Min_Send_Time,,O,TRUE, +Bacnet_Multi-state_Value,Time_Delay,,O,TRUE, +Bacnet_Multi-state_Value,Notification_Class,Unsigned,O,TRUE, +Bacnet_Multi-state_Value,Alarm_Values,,O,TRUE, +Bacnet_Multi-state_Value,Fault_Values,,O,TRUE, +Bacnet_Multi-state_Value,Event_Enable,BACnetEventTransitionBits,O,TRUE, +Bacnet_Multi-state_Value,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, 
+Bacnet_Multi-state_Value,Notify_Type,BACnetNotifyType,O,TRUE, +Bacnet_Multi-state_Value,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, +Bacnet_Multi-state_Value,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Multi-state_Value,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Multi-state_Value,Event_Detection_Enable,BOOLEAN,O,TRUE, +Bacnet_Multi-state_Value,Time_Delay_Normal,,O,TRUE, +Bacnet_Multi-state_Value,Event_Algorithm_Inhibit,,O,TRUE, +Bacnet_Multi-state_Value,Event_Algorithm_Inhibit_Ref,,O,TRUE, +Bacnet_Multi-state_Value,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, +Bacnet_Program,Object_Identifier,BACnetObjectIdentifier,O,TRUE, +Bacnet_Program,Object_Name,CharacterString,O,TRUE, +Bacnet_Program,Object_Type,BACnetObjectType,O,TRUE, +Bacnet_Program,Description,CharacterString,O,TRUE, +Bacnet_Program,Program_State,,O,TRUE, +Bacnet_Program,Program_Change,,O,TRUE, +Bacnet_Program,Description_Of_Halt,,O,TRUE, +Bacnet_Program,Reason_For_Halt,,O,TRUE, +Bacnet_Program,Status_Flags,BACnetStatusFlags,O,TRUE, +Bacnet_Program,Reliability,BACnetReliability,O,TRUE, +Bacnet_Program,Out_Of_Service,BOOLEAN,O,TRUE, +Bacnet_Program,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, +Bacnet_Notification,Object_Identifier,BACnetObjectIdentifier,O,TRUE, +Bacnet_Notification,Object_Name,CharacterString,O,TRUE, +Bacnet_Notification,Object_Type,BACnetObjectType,O,TRUE, +Bacnet_Notification,Description,CharacterString,O,TRUE, +Bacnet_Notification,Notification_Class,Unsigned,O,TRUE, +Bacnet_Notification,Priority,,O,TRUE, +Bacnet_Notification,Ack_Required,,O,TRUE, +Bacnet_Notification,Recipient_List,,O,TRUE, +Bacnet_Notification,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, +Bacnet_Schedule,Object_Identifier,BACnetObjectIdentifier,O,TRUE, +Bacnet_Schedule,Object_Name,CharacterString,O,TRUE, +Bacnet_Schedule,Object_Type,BACnetObjectType,O,TRUE, 
+Bacnet_Schedule,Description,CharacterString,O,TRUE, +Bacnet_Schedule,Present_Value,REAL,O,TRUE, +Bacnet_Schedule,Effective_Period,,O,TRUE, +Bacnet_Schedule,Weekly_Schedule,,O,TRUE, +Bacnet_Schedule,Exception_Schedule,,O,TRUE, +Bacnet_Schedule,Schedule_Default,,O,TRUE, +Bacnet_Schedule,List_Of_Object_Property_References,,O,TRUE, +Bacnet_Schedule,Priority_For_Writing,Unsigned(1..16),O,TRUE, +Bacnet_Schedule,Status_Flags,BACnetStatusFlags,O,TRUE, +Bacnet_Schedule,Reliability,BACnetReliability,O,TRUE, +Bacnet_Schedule,Out_Of_Service,BOOLEAN,O,TRUE, +Bacnet_Schedule,Time_To_Next_State,,O,TRUE, +Bacnet_Schedule,Next_State,,O,TRUE, +Bacnet_Schedule,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, +Bacnet_Trend_Log,Object_Identifier,BACnetObjectIdentifier,O,TRUE, +Bacnet_Trend_Log,Object_Name,CharacterString,O,TRUE, +Bacnet_Trend_Log,Object_Type,BACnetObjectType,O,TRUE, +Bacnet_Trend_Log,Description,CharacterString,O,TRUE, +Bacnet_Trend_Log,Enable,,O,TRUE, +Bacnet_Trend_Log,Start_Time,,O,TRUE, +Bacnet_Trend_Log,Stop_Time,,O,TRUE, +Bacnet_Trend_Log,Log_Device_Object_Property,,O,TRUE, +Bacnet_Trend_Log,Log_Interval,,O,TRUE, +Bacnet_Trend_Log,Cov_Resubscription_Interval,,O,TRUE, +Bacnet_Trend_Log,Client_Cov_Increment,,O,TRUE, +Bacnet_Trend_Log,Stop_When_Full,,O,TRUE, +Bacnet_Trend_Log,Buffer_Size,,O,TRUE, +Bacnet_Trend_Log,Log_Buffer,,O,TRUE, +Bacnet_Trend_Log,Record_Count,,O,TRUE, +Bacnet_Trend_Log,Total_Record_Count,,O,TRUE, +Bacnet_Trend_Log,Logging_Type,,O,TRUE, +Bacnet_Trend_Log,Align_Intervals,,O,TRUE, +Bacnet_Trend_Log,Interval_Offset,,O,TRUE, +Bacnet_Trend_Log,Trigger,,O,TRUE, +Bacnet_Trend_Log,Status_Flags,BACnetStatusFlags,O,TRUE, +Bacnet_Trend_Log,Reliability,BACnetReliability,O,TRUE, +Bacnet_Trend_Log,Notification_Threshold,,O,TRUE, +Bacnet_Trend_Log,Records_Since_Notification,,O,TRUE, +Bacnet_Trend_Log,Last_Notify_Record,,O,TRUE, +Bacnet_Trend_Log,Event_State,BACnetEventState,O,TRUE, +Bacnet_Trend_Log,Notification_Class,Unsigned,O,TRUE, 
+Bacnet_Trend_Log,Event_Enable,BACnetEventTransitionBits,O,TRUE, +Bacnet_Trend_Log,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, +Bacnet_Trend_Log,Notify_Type,BACnetNotifyType,O,TRUE, +Bacnet_Trend_Log,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, +Bacnet_Trend_Log,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Trend_Log,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Trend_Log,Event_Detection_Enable,BOOLEAN,O,TRUE, +Bacnet_Trend_Log,Event_Algorithm_Inhibit,,O,TRUE, +Bacnet_Trend_Log,Event_Algorithm_Inhibit_Ref,,O,TRUE, +Bacnet_Trend_Log,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE, \ No newline at end of file diff --git a/docker/include/bin/start_faux b/docker/include/bin/start_faux index 94c9711ab3..016a60bb7a 100755 --- a/docker/include/bin/start_faux +++ b/docker/include/bin/start_faux @@ -68,6 +68,13 @@ elif [ -n "${options[passwordfail]}" ]; then service ssh start fi +# security.nmap.http faux device setup +if [ -n "${options[ohttp]}" ]; then + cp /root/nginx/nginxpass.conf /etc/nginx/nginx.conf + sed -i 's/listen 80;/listen 12345; listen 54321;/' /etc/nginx/nginx.conf + service nginx start +fi + # To capture all the data in/out of the faux device for debugging, uncomment # the following lines. The pcap file will end up in inst/faux/{hostname}.pcap # on the DAQ controller. @@ -87,7 +94,11 @@ if [ -n "${options[xdhcp]}" ]; then fi ip addr show $intf_name - (while true; do ping -c 1 10.20.0.1; sleep 5; done) & + if [ -n "${options[opendns]}" ]; then + dhcp_dns=8.8.8.8 + echo nameserver ${dhcp_dns#DNS=} > /etc/resolv.conf + route add default gw 10.20.255.254 $intf_name # fixed IP in start_networking + fi else echo Running dhclient... 
dhclient -v @@ -126,19 +137,36 @@ if [ -n "${options[telnet]}" ]; then (while true; do echo Telnet `hostname`; nc -nvlt -p 23 -e `which hostname`; done) & fi +if [ -n "${options[ssh]}" ]; then + echo Starting SSH server + /usr/local/sbin/sshd +elif [ -n "${options[sshv1]}" ]; then + echo Starting SSHv1 server + echo 'Protocol 1' >> /usr/local/etc/sshd_config + /usr/local/sbin/sshd +fi + if [ -n "${options[bacnet]}" ]; then echo Starting bacnet loop device. - java -cp bacnetTests/build/libs/bacnet-1.0-SNAPSHOT-all.jar \ + java -cp bacnetFaux/build/libs/bacnet-1.0-SNAPSHOT-all.jar \ FauxDeviceEngine.EntryPoint $local_ip $broadcast_ip "Faux-Device-Pass.json" & elif [ -n "${options[bacnetfail]}" ]; then echo Starting bacnet loop device. - java -cp bacnetTests/build/libs/bacnet-1.0-SNAPSHOT-all.jar \ + java -cp bacnetFaux/build/libs/bacnet-1.0-SNAPSHOT-all.jar \ FauxDeviceEngine.EntryPoint $local_ip $broadcast_ip "Faux-Device-Fail.json" & fi -if [ -n "${options[ntp_client]}" ]; then - echo Starting ntp client. - java -jar NTPClient/build/libs/NTPClient-1.0-SNAPSHOT.jar "time.google.com" "123" "3" & +# NTPv4 query to the NTP server learnt from DHCP. +# NTPv3 query to the IP of time.google.com (since resolv.conf is modified by other tests) +if [ -n "${options[ntpv4]}" ]; then + dhcp_ntp=$(fgrep NTPSERVERS= /run/ntpdate.dhcp) + ntp_server=`echo $dhcp_ntp | cut -d "'" -f 2` + echo Transmitting NTP query to $ntp_server using NTPv4 + java -jar NTPClient/build/libs/NTPClient-1.0-SNAPSHOT.jar $ntp_server 123 4 2 > ntp.log & +elif [ -n "${options[ntpv3]}" ]; then + STATIC_NTP_SERVER=216.239.35.8 + echo Transmitting NTP query to $STATIC_NTP_SERVER using NTPv3 + java -jar NTPClient/build/libs/NTPClient-1.0-SNAPSHOT.jar $STATIC_NTP_SERVER 123 3 2 > ntp.log & fi # ntp_pass queries the NTP server learnt from DHCP. 
ntp_fail sends to time.google.com @@ -155,16 +183,16 @@ if [ -n "${options[ntp_pass]}" -o -n "${options[ntp_fail]}" ]; then fi echo Transmitting NTP query to $ntp_server ntpdate -q -p 1 $ntp_server - sleep 10 + sleep 5 done) & fi if [ -n "${options[broadcast_client]}" ]; then - echo Starting broatcast client. - cip_port=41794 + echo Starting broadcast client. + port=41794 cycle_seconds=20 duration_seconds=360 - python TransportClient/client.py $broadcast_ip $cip_port broadcast $duration_seconds $cycle_seconds & + python TransportClient/client.py $broadcast_ip $port broadcast $duration_seconds $cycle_seconds & fi if [ -n "${options[discover]}" ]; then @@ -172,11 +200,6 @@ if [ -n "${options[discover]}" ]; then bin/bacnet_discover loop & fi -if [ -n "${options[curl]}" ]; then - echo Starting curl loop. - (while true; do curl -o - http://google.com; sleep 1; done) & -fi - if [ -n "${options[brute]}" ]; then echo Starting brute server. (python pentests/brute_server.py bad 10000; echo Brute done.) & @@ -201,13 +224,24 @@ fi if [ -n "${options[pubber]}" ]; then echo Running cloud pubber tool... (while date; do - pubber/bin/run - # Do https query in case pubber is not configured, for testing port 443 - curl -o /dev/null https://google.com - sleep 30 + pubber/bin/run local/pubber.json + # Do https query in case pubber is not configured, for testing port 443 + curl -o /dev/null https://google.com + sleep 30 done) & fi +# Periodically sends ARP packets +if [ -z "${options[xarp]}" ]; then + echo Starting arp send loop. + (while true; do arpsend -D -e 10.20.254.254 $intf_name; sleep 2; done) & +fi + +if [ -n "${options[curl]}" ]; then + echo Starting curl loop. + (while true; do curl -o - http://google.com; sleep 1; done) & +fi + conf_file=/config/start/start_faux.sh if [ -f $conf_file ]; then echo Loading $conf_file... 
diff --git a/docker/include/bin/test_ping b/docker/include/bin/test_ping index 99bc957d83..b66393a079 100755 --- a/docker/include/bin/test_ping +++ b/docker/include/bin/test_ping @@ -88,8 +88,8 @@ echo Done with basic connectivity tests | tee -a $MONO_LOG echo Checking startup NTP ntp_target=${TARGET_IP%.*}.2 -ntp_request=`tcpdump -env -c 1 -r /scans/startup.pcap dst port 123 | wc -l` -ntp_proper=`tcpdump -env -c 1 -r /scans/startup.pcap dst port 123 and dst host $ntp_target | wc -l` +ntp_request=`tcpdump -env -c 1 -r /scans/monitor.pcap dst port 123 | wc -l` +ntp_proper=`tcpdump -env -c 1 -r /scans/monitor.pcap dst port 123 and dst host $ntp_target | wc -l` if [ "$ntp_request" == 0 ]; then ntp_result=skip ntp_summary="No NTP traffic detected" diff --git a/validator/.idea/codeStyles/codeStyleConfig.xml b/docker/include/network/NTPClient/.idea/codeStyles/codeStyleConfig.xml similarity index 100% rename from validator/.idea/codeStyles/codeStyleConfig.xml rename to docker/include/network/NTPClient/.idea/codeStyles/codeStyleConfig.xml diff --git a/subset/network/NTPClient/.idea/gradle.xml b/docker/include/network/NTPClient/.idea/gradle.xml similarity index 100% rename from subset/network/NTPClient/.idea/gradle.xml rename to docker/include/network/NTPClient/.idea/gradle.xml diff --git a/subset/network/NTPClient/.idea/jarRepositories.xml b/docker/include/network/NTPClient/.idea/jarRepositories.xml similarity index 100% rename from subset/network/NTPClient/.idea/jarRepositories.xml rename to docker/include/network/NTPClient/.idea/jarRepositories.xml diff --git a/subset/network/NTPClient/.idea/misc.xml b/docker/include/network/NTPClient/.idea/misc.xml similarity index 100% rename from subset/network/NTPClient/.idea/misc.xml rename to docker/include/network/NTPClient/.idea/misc.xml diff --git a/subset/network/NTPClient/.idea/vcs.xml b/docker/include/network/NTPClient/.idea/vcs.xml similarity index 100% rename from subset/network/NTPClient/.idea/vcs.xml rename to 
docker/include/network/NTPClient/.idea/vcs.xml diff --git a/subset/network/NTPClient/build.gradle b/docker/include/network/NTPClient/build.gradle similarity index 100% rename from subset/network/NTPClient/build.gradle rename to docker/include/network/NTPClient/build.gradle diff --git a/subset/network/NTPClient/gradle/wrapper/gradle-wrapper.jar b/docker/include/network/NTPClient/gradle/wrapper/gradle-wrapper.jar similarity index 100% rename from subset/network/NTPClient/gradle/wrapper/gradle-wrapper.jar rename to docker/include/network/NTPClient/gradle/wrapper/gradle-wrapper.jar diff --git a/subset/connection/mac_oui/gradle/wrapper/gradle-wrapper.properties b/docker/include/network/NTPClient/gradle/wrapper/gradle-wrapper.properties similarity index 92% rename from subset/connection/mac_oui/gradle/wrapper/gradle-wrapper.properties rename to docker/include/network/NTPClient/gradle/wrapper/gradle-wrapper.properties index 38c1d48d19..12d38de6a4 100644 --- a/subset/connection/mac_oui/gradle/wrapper/gradle-wrapper.properties +++ b/docker/include/network/NTPClient/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,5 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-6.6.1-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-bin.zip diff --git a/subset/network/NTPClient/gradlew b/docker/include/network/NTPClient/gradlew similarity index 100% rename from subset/network/NTPClient/gradlew rename to docker/include/network/NTPClient/gradlew diff --git a/subset/network/NTPClient/gradlew.bat b/docker/include/network/NTPClient/gradlew.bat similarity index 100% rename from subset/network/NTPClient/gradlew.bat rename to docker/include/network/NTPClient/gradlew.bat diff --git a/subset/network/NTPClient/settings.gradle b/docker/include/network/NTPClient/settings.gradle similarity index 100% rename from 
subset/network/NTPClient/settings.gradle rename to docker/include/network/NTPClient/settings.gradle diff --git a/subset/network/NTPClient/src/main/java/META-INF/MANIFEST.MF b/docker/include/network/NTPClient/src/main/java/META-INF/MANIFEST.MF similarity index 100% rename from subset/network/NTPClient/src/main/java/META-INF/MANIFEST.MF rename to docker/include/network/NTPClient/src/main/java/META-INF/MANIFEST.MF diff --git a/docker/include/network/NTPClient/src/main/java/Main.java b/docker/include/network/NTPClient/src/main/java/Main.java new file mode 100644 index 0000000000..0bfe635d31 --- /dev/null +++ b/docker/include/network/NTPClient/src/main/java/Main.java @@ -0,0 +1,92 @@ +import java.io.IOException; +import java.net.DatagramPacket; +import java.net.DatagramSocket; +import java.net.InetAddress; +import java.text.DecimalFormat; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; + +public class Main { + + static final double SECONDS_FROM_01_01_1900_TO_01_01_1970 = 2208988800.0; + static String serverName = "time.google.com"; + static byte version = 3; + static int port = 123; + static int timerPeriod = 10; + static byte leapIndicator = 3; + + /** + * Constructs and sends NTP packets to target NTP server. 
+ */ + + public static void main(String[] args) { + if (args.length < 2) { + throw new IllegalArgumentException("Usage: server_name port version timerPeriod"); + } + serverName = args[0]; + port = Integer.parseInt(args[1]); + version = Byte.parseByte(args[2]); + timerPeriod = Integer.parseInt(args[3]); + + Runnable senderRunnable = new Runnable() { + @Override + public void run() { + try { + sendRequest(); + } catch (IOException e) { + System.out.println(e.getMessage()); + } + } + }; + ScheduledExecutorService executor = Executors.newScheduledThreadPool(1); + executor.scheduleAtFixedRate(senderRunnable, 0, timerPeriod, TimeUnit.SECONDS); + } + + private static void sendRequest() throws IOException { + DatagramSocket socket = new DatagramSocket(); + InetAddress address = InetAddress.getByName(serverName); + byte[] buf = new NtpMessage(SECONDS_FROM_01_01_1900_TO_01_01_1970, leapIndicator, version).toByteArray(); + DatagramPacket packet = new DatagramPacket(buf, buf.length, address, port); + + // Set the transmit timestamp *just* before sending the packet + NtpMessage.encodeTimestamp(packet.getData(), 40, + (System.currentTimeMillis() / 1000.0) + SECONDS_FROM_01_01_1900_TO_01_01_1970); + sendPacket(socket, packet, buf); + } + + private static void sendPacket(DatagramSocket socket, DatagramPacket packet, byte[] buf) throws IOException { + socket.send(packet); + + // Get response + System.out.println("NTP request sent, waiting for response...\n"); + packet = new DatagramPacket(buf, buf.length); + socket.receive(packet); + + // Display response + System.out.println("NTP server: " + serverName); + + // Process response + NtpMessage msg = new NtpMessage(packet.getData()); + + // Immediately record the incoming timestamp + double destinationTimestamp = + (System.currentTimeMillis() / 1000.0) + SECONDS_FROM_01_01_1900_TO_01_01_1970; + System.out.println(msg.toString()); + System.out.println("Dest. 
timestamp: " + + NtpMessage.timestampToString(destinationTimestamp)); + + double roundTripDelay = (destinationTimestamp - msg.originateTimestamp) + - (msg.transmitTimestamp - msg.receiveTimestamp); + System.out.println("Round-trip delay: " + + new DecimalFormat("0.00").format(roundTripDelay * 1000) + " ms"); + double localClockOffset = + ((msg.receiveTimestamp - msg.originateTimestamp) + + (msg.transmitTimestamp - destinationTimestamp)) / 2; + System.out.println("Local clock offset: " + + new DecimalFormat("0.00").format(localClockOffset * 1000) + " ms"); + if (localClockOffset * 1000 < 128) { + leapIndicator = 0; + } + } +} diff --git a/docker/include/network/NTPClient/src/main/java/NtpMessage.java b/docker/include/network/NTPClient/src/main/java/NtpMessage.java new file mode 100644 index 0000000000..a441b14959 --- /dev/null +++ b/docker/include/network/NTPClient/src/main/java/NtpMessage.java @@ -0,0 +1,203 @@ +import java.text.DecimalFormat; +import java.text.SimpleDateFormat; +import java.util.Date; + +public class NtpMessage { + public byte leapIndicator = 3; + public byte version = 0; + public byte mode = 0; + public short stratum = 0; + public byte pollInterval = 0; + public byte precision = 0; + public double rootDelay = 0; + public double rootDispersion = 0; + public byte[] referenceIdentifier = {0, 0, 0, 0}; + public double referenceTimestamp = 0; + public double originateTimestamp = 0; + public double receiveTimestamp = 0; + public double transmitTimestamp = 0; + + /** + * Constructs a new NtpMessage from an array of bytes. 
+ */ + public NtpMessage(byte[] array) { + leapIndicator = (byte)((array[0] >> 6) & 0x3); + version = (byte)((array[0] >> 3) & 0x7); + mode = (byte)(array[0] & 0x7); + stratum = unsignedByteToShort(array[1]); + pollInterval = array[2]; + precision = array[3]; + + rootDelay = (array[4] * 256.0) + + unsignedByteToShort(array[5]) + + (unsignedByteToShort(array[6]) / 256.0) + + (unsignedByteToShort(array[7]) / 65536.0); + + rootDispersion = (unsignedByteToShort(array[8]) * 256.0) + + unsignedByteToShort(array[9]) + + (unsignedByteToShort(array[10]) / 256.0) + + (unsignedByteToShort(array[11]) / 65536.0); + + referenceIdentifier[0] = array[12]; + referenceIdentifier[1] = array[13]; + referenceIdentifier[2] = array[14]; + referenceIdentifier[3] = array[15]; + + referenceTimestamp = decodeTimestamp(array, 16); + originateTimestamp = decodeTimestamp(array, 24); + receiveTimestamp = decodeTimestamp(array, 32); + transmitTimestamp = decodeTimestamp(array, 40); + } + + /** + * Constructs a new NtpMessage in client -> server mode, and sets the + * transmit timestamp to the current time. + */ + public NtpMessage(double secondsDiff, byte leapIndicator, byte version) { + this.mode = 3; + this.leapIndicator = leapIndicator; + this.version = version; + this.transmitTimestamp = (System.currentTimeMillis() / 1000.0) + secondsDiff; + } + + /** + * This method constructs the data bytes of a raw NTP packet. 
+ */ + public byte[] toByteArray() { + byte[] p = new byte[48]; + + p[0] = (byte)(leapIndicator << 6 | version << 3 | mode); + p[1] = (byte) stratum; + p[2] = (byte) pollInterval; + p[3] = (byte) precision; + + // root delay is a signed 16.16-bit FP, in Java an int is 32-bits + int l = (int)(rootDelay * 65536.0); + p[4] = (byte)((l >> 24) & 0xFF); + p[5] = (byte)((l >> 16) & 0xFF); + p[6] = (byte)((l >> 8) & 0xFF); + p[7] = (byte)(l & 0xFF); + + // root dispersion is an unsigned 16.16-bit FP, in Java there are no + // unsigned primitive types, so we use a long which is 64-bits + long ul = (long)(rootDispersion * 65536.0); + p[8] = (byte)((ul >> 24) & 0xFF); + p[9] = (byte)((ul >> 16) & 0xFF); + p[10] = (byte)((ul >> 8) & 0xFF); + p[11] = (byte)(ul & 0xFF); + + p[12] = referenceIdentifier[0]; + p[13] = referenceIdentifier[1]; + p[14] = referenceIdentifier[2]; + p[15] = referenceIdentifier[3]; + + encodeTimestamp(p, 16, referenceTimestamp); + encodeTimestamp(p, 24, originateTimestamp); + encodeTimestamp(p, 32, receiveTimestamp); + encodeTimestamp(p, 40, transmitTimestamp); + + return p; + } + + /** + * Returns a string representation of a NtpMessage. 
+ */ + public String toString() { + String precisionStr = + new DecimalFormat("0.#E0").format(Math.pow(2, precision)); + + return "Leap indicator: " + leapIndicator + "\n" + + "Version: " + version + "\n" + + "Mode: " + mode + "\n" + + "Stratum: " + stratum + "\n" + + "Poll: " + pollInterval + "\n" + + "Precision: " + precision + " (" + precisionStr + " seconds)\n" + + "Root delay: " + new DecimalFormat("0.00").format(rootDelay * 1000) + " ms\n" + + "Root dispersion: " + new DecimalFormat("0.00").format(rootDispersion * 1000) + " ms\n" + + "Reference identifier: " + referenceIdentifierToString(referenceIdentifier, stratum, version) + "\n" + + "Reference timestamp: " + timestampToString(referenceTimestamp) + "\n" + + "Originate timestamp: " + timestampToString(originateTimestamp) + "\n" + + "Receive timestamp: " + timestampToString(receiveTimestamp) + "\n" + + "Transmit timestamp: " + timestampToString(transmitTimestamp); + } + + /** + * Converts an unsigned byte to a short. By default, Java assumes that + * a byte is signed. + */ + public static short unsignedByteToShort(byte b) { + if ((b & 0x80) == 0x80) { + return (short)(128 + (b & 0x7f)); + } else { + return (short) b; + } + } + + /** + * Will read 8 bytes of a message beginning at pointer + * and return it as a double, according to the NTP 64-bit timestamp + * format. + */ + public static double decodeTimestamp(byte[] array, int pointer) { + double r = 0.0; + + for (int i = 0; i < 8; i++) { + r += unsignedByteToShort(array[pointer + i]) * Math.pow(2, (3 - i) * 8); + } + + return r; + } + + /** + * Encodes a timestamp in the specified position in the message. + */ + public static void encodeTimestamp(byte[] array, int pointer, double timestamp) { + // Converts a double into a 64-bit fixed point + for (int i = 0; i < 8; i++) { + // 2^24, 2^16, 2^8, .. 
2^-32 + double base = Math.pow(2, (3 - i) * 8); + // Capture byte value + array[pointer + i] = (byte)(timestamp / base); + // Subtract captured value from remaining total + timestamp = timestamp - (double)(unsignedByteToShort(array[pointer + i]) * base); + } + array[7] = (byte)(Math.random() * 255.0); + } + + /** + * Returns a timestamp (number of seconds since 00:00 1-Jan-1900) as a + * formatted date/time string. + */ + public static String timestampToString(double timestamp) { + if (timestamp == 0) { + return "0"; + } + double utc = timestamp - (2208988800.0); + long ms = (long)(utc * 1000.0); + String date = new SimpleDateFormat("dd-MMM-yyyy HH:mm:ss").format(new Date(ms)); + double fraction = timestamp - ((long) timestamp); + String fractionSting = new DecimalFormat(".000000").format(fraction); + return date + fractionSting; + } + + /** + * Returns a string representation of a reference identifier according + * to the rules set out in RFC 2030. + */ + public static String referenceIdentifierToString(byte[] ref, short stratum, byte version) { + if (stratum == 0 || stratum == 1) { + return new String(ref); + } else if (version == 3) { + return unsignedByteToShort(ref[0]) + "." + + unsignedByteToShort(ref[1]) + "." + + unsignedByteToShort(ref[2]) + "." 
+ + unsignedByteToShort(ref[3]); + } else if (version == 4) { + return "" + ((unsignedByteToShort(ref[0]) / 256.0) + + (unsignedByteToShort(ref[1]) / 65536.0) + + (unsignedByteToShort(ref[2]) / 16777216.0) + + (unsignedByteToShort(ref[3]) / 4294967296.0)); + } + return ""; + } +} diff --git a/subset/network/TransportClient/client.py b/docker/include/network/TransportClient/client.py similarity index 59% rename from subset/network/TransportClient/client.py rename to docker/include/network/TransportClient/client.py index e72b83ec2f..15a01d951c 100644 --- a/subset/network/TransportClient/client.py +++ b/docker/include/network/TransportClient/client.py @@ -1,3 +1,10 @@ +""" + Used within the faux device to start a client which will send out broadcast packets. + + Usage: + python TransportClient/client.py $broadcast_ip $port broadcast $duration_seconds $cycle_seconds +""" + import socket, sys, time arguments = sys.argv @@ -7,19 +14,25 @@ transport_type = str(arguments[3]) duration_seconds = int(arguments[4]) cycle_seconds = int(arguments[5]) -message = "Fried lizards taste like chicken" +message = "Fried lizards taste like chicken!" 
-def broadcast_setup_socket(): + +def get_broadcast_socket(): client = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) client.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) return client + def send_message(message, transport_type): + client = None + if transport_type == 'broadcast': - client = broadcast_setup_socket() - sent = client.sendto(message, (udp_ip_address, udp_port)) + client = get_broadcast_socket() + + client.sendto(message, (udp_ip_address, udp_port)) + -while(duration_seconds > 0): +while duration_seconds > 0: print('{t} to {a}'.format(t=transport_type, a=udp_ip_address)) send_message(message, transport_type) time.sleep(cycle_seconds) diff --git a/docker/include/networking_scripts/autorestart_dnsmasq b/docker/include/network/scripts/autorestart_dnsmasq similarity index 100% rename from docker/include/networking_scripts/autorestart_dnsmasq rename to docker/include/network/scripts/autorestart_dnsmasq diff --git a/docker/include/network/scripts/change_dhcp_range b/docker/include/network/scripts/change_dhcp_range new file mode 100755 index 0000000000..0c45cd6bae --- /dev/null +++ b/docker/include/network/scripts/change_dhcp_range @@ -0,0 +1,24 @@ +#!/bin/bash -e +# +# Dynamically change DHCP lease range, requires killing and restarting +# dnsmasq as per documentation (SIGHUP does not reload configuration file). 
+LOCAL_IF=${LOCAL_IF:-$HOSTNAME-eth0} + +range_start=$1 +range_end=$2 +prefix_len=$3 +if [ -z $range_start -o -z $range_end -o -z $prefix_len ]; then + echo "Usage: change_dhcp_range range_start range_end prefix_len" + exit 1 +fi +while [ $(cat /etc/dnsmasq.conf | egrep "^dhcp-range=" | wc -l) == 0 ]; do + sleep 1 +done +ip addr add $range_start/$prefix_len dev $LOCAL_IF || true +original=$(cat /etc/dnsmasq.conf | egrep "^dhcp-range=" | head -1) +lease=$(echo $original | cut -d',' -f 3) +if [ -n "lease" ]; then + lease=",$lease" +fi +new="dhcp-range=$range_start,$range_end$lease" +flock /etc/dnsmasq.conf sed -i s/$original/$new/ /etc/dnsmasq.conf \ No newline at end of file diff --git a/docker/include/networking_scripts/change_dhcp_response_time b/docker/include/network/scripts/change_dhcp_response_time similarity index 100% rename from docker/include/networking_scripts/change_dhcp_response_time rename to docker/include/network/scripts/change_dhcp_response_time diff --git a/docker/include/networking_scripts/change_lease_time b/docker/include/network/scripts/change_lease_time similarity index 72% rename from docker/include/networking_scripts/change_lease_time rename to docker/include/network/scripts/change_lease_time index 306e985604..0cb8986c8a 100755 --- a/docker/include/networking_scripts/change_lease_time +++ b/docker/include/network/scripts/change_lease_time @@ -7,10 +7,10 @@ if [ -z $lease ]; then echo "Lease time not defined." 
exit 1 fi -while [ $(cat /etc/dnsmasq.conf | grep dhcp-range=10.20 | wc -l) == 0 ]; do +while [ $(cat /etc/dnsmasq.conf | grep "^dhcp-range=" | wc -l) == 0 ]; do sleep 1 done -original=$(cat /etc/dnsmasq.conf | grep dhcp-range=10.20 | head -1) +original=$(cat /etc/dnsmasq.conf | grep "^dhcp-range=" | head -1) new="$(echo $original | cut -d',' -f 1,2),$lease" flock /etc/dnsmasq.conf sed -i s/$original/$new/ /etc/dnsmasq.conf diff --git a/docker/include/networking_scripts/new_ip b/docker/include/network/scripts/new_ip similarity index 100% rename from docker/include/networking_scripts/new_ip rename to docker/include/network/scripts/new_ip diff --git a/docker/include/networking_scripts/start_networking b/docker/include/network/scripts/start_networking similarity index 94% rename from docker/include/networking_scripts/start_networking rename to docker/include/network/scripts/start_networking index a55e24b052..331585c2fe 100755 --- a/docker/include/networking_scripts/start_networking +++ b/docker/include/network/scripts/start_networking @@ -40,6 +40,7 @@ if ! 
ip addr show dev $LOCAL_IF | fgrep -q 'inet '; then ip addr add 10.20.$subnet.1/16 dev $LOCAL_IF fi +ip addr add 10.20.255.254/16 dev $LOCAL_IF #For static ip devices' default gateway IP echo dhcp-host=*,ignore >> /etc/dnsmasq.conf # Start the NTP server diff --git a/subset/pentests/brute_server.py b/docker/include/pentests/brute_server.py similarity index 100% rename from subset/pentests/brute_server.py rename to docker/include/pentests/brute_server.py diff --git a/subset/security/nginx-site/html/index.html b/docker/include/security/nginx-site/html/index.html similarity index 100% rename from subset/security/nginx-site/html/index.html rename to docker/include/security/nginx-site/html/index.html diff --git a/subset/security/nginxfail.conf b/docker/include/security/nginxfail.conf similarity index 100% rename from subset/security/nginxfail.conf rename to docker/include/security/nginxfail.conf diff --git a/subset/security/nginxpass.conf b/docker/include/security/nginxpass.conf similarity index 100% rename from subset/security/nginxpass.conf rename to docker/include/security/nginxpass.conf diff --git a/docker/include/security/sshfaux/ssh_build.sh b/docker/include/security/sshfaux/ssh_build.sh new file mode 100644 index 0000000000..f870555f1d --- /dev/null +++ b/docker/include/security/sshfaux/ssh_build.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# +# Build older versions OpenSSL 1.0.2 and OpenSSH 7.2 +# Used for testing in faux devices only +# +# To run SSHD use /usr/local/sbin/sshd +# SSH components, e.g. ssh-keygen are found in /usr/local/bin +# SSH configuration and keys found in /usr/local/etc + +# Build OpenSSL 1.0.2 +wget https://www.openssl.org/source/openssl-1.0.2g.tar.gz +tar -xzf openssl-1.0.2g.tar.gz +cd openssl-1.0.2g +./config --prefix=/usr/local/openssl --openssldir=/usr/local/openssl +make -s +make -s install +cd .. 
+ +# Prepare privellage seperation for SSHD +source ssh_privsep.sh + +# Build OpenSSH 7.2 +wget https://mirrors.mit.edu/pub/OpenBSD/OpenSSH/portable/openssh-7.2p1.tar.gz +tar -xzf openssh-7.2p1.tar.gz +cd openssh-7.2p1 +./configure --with-ssl-dir=/usr/local/openssl --with-ssh1 +make -s +make -s install diff --git a/docker/include/security/sshfaux/ssh_privsep.sh b/docker/include/security/sshfaux/ssh_privsep.sh new file mode 100644 index 0000000000..668d825f9e --- /dev/null +++ b/docker/include/security/sshfaux/ssh_privsep.sh @@ -0,0 +1,11 @@ +#!/bin/bash +# +# Prepare environment for running SSHD with privilege separation +# https://github.com/openssh/openssh-portable/blob/master/README.privsep + +mkdir /etc/ssh +mkdir /var/empty +chown root:sys /var/empty +chmod 755 /var/empty +groupadd sshd +useradd -g sshd -c 'sshd privsep' -d /var/empty -s /bin/false sshd diff --git a/subset/security/tlsfaux/absolute_filepath.py b/docker/include/security/tlsfaux/absolute_filepath.py similarity index 100% rename from subset/security/tlsfaux/absolute_filepath.py rename to docker/include/security/tlsfaux/absolute_filepath.py diff --git a/subset/security/tlsfaux/certs/server.crt b/docker/include/security/tlsfaux/certs/server.crt similarity index 100% rename from subset/security/tlsfaux/certs/server.crt rename to docker/include/security/tlsfaux/certs/server.crt diff --git a/subset/security/tlsfaux/certs/server.csr b/docker/include/security/tlsfaux/certs/server.csr similarity index 100% rename from subset/security/tlsfaux/certs/server.csr rename to docker/include/security/tlsfaux/certs/server.csr diff --git a/subset/security/tlsfaux/certs/server.key b/docker/include/security/tlsfaux/certs/server.key similarity index 100% rename from subset/security/tlsfaux/certs/server.key rename to docker/include/security/tlsfaux/certs/server.key diff --git a/subset/security/tlsfaux/expcerts/server.crt b/docker/include/security/tlsfaux/expcerts/server.crt similarity index 100% rename from 
subset/security/tlsfaux/expcerts/server.crt rename to docker/include/security/tlsfaux/expcerts/server.crt diff --git a/subset/security/tlsfaux/expcerts/server.csr b/docker/include/security/tlsfaux/expcerts/server.csr similarity index 100% rename from subset/security/tlsfaux/expcerts/server.csr rename to docker/include/security/tlsfaux/expcerts/server.csr diff --git a/subset/security/tlsfaux/expcerts/server.key b/docker/include/security/tlsfaux/expcerts/server.key similarity index 100% rename from subset/security/tlsfaux/expcerts/server.key rename to docker/include/security/tlsfaux/expcerts/server.key diff --git a/subset/security/tlsfaux/generate_certs.py b/docker/include/security/tlsfaux/generate_certs.py similarity index 100% rename from subset/security/tlsfaux/generate_certs.py rename to docker/include/security/tlsfaux/generate_certs.py diff --git a/subset/security/tlsfaux/server.py b/docker/include/security/tlsfaux/server.py similarity index 100% rename from subset/security/tlsfaux/server.py rename to docker/include/security/tlsfaux/server.py diff --git a/docker/modules/Dockerfile.faux1 b/docker/modules/Dockerfile.faux1 index 7c8e9c0be0..14e18e428e 100644 --- a/docker/modules/Dockerfile.faux1 +++ b/docker/modules/Dockerfile.faux1 @@ -15,23 +15,33 @@ ENV BACHASH=94a794a756ee0d37c6a2e53e08747ee021415aa8 RUN bin/retry_cmd git clone https://github.com/grafnu/bacnet4j.git --single-branch \ && cd bacnet4j && git reset --hard $BACHASH && ../bin/retry_cmd ./gradlew shadow -COPY pubber/ pubber/ +COPY udmi/pubber/ pubber/ RUN pubber/bin/build +# Seperate stage to build older version of SSH and SSL +FROM daqf/aardvark:latest as ssh_build + +RUN $AG update && $AG install wget make build-essential gcc libz-dev ca-certificates + +# Build SSH, OpenSSL from source and configure + +COPY docker/include/security/sshfaux/*.sh ./ +RUN sh ssh_build.sh + FROM daqf/aardvark:latest # Run this separately so it can be shared with other builds. 
RUN $AG update && $AG install openjdk-8-jre RUN $AG update && $AG install openjdk-8-jdk git RUN $AG update && $AG install isc-dhcp-client ethtool network-manager netcat curl\ - python ifupdown openssl ssh nano apache2-utils ntpdate + python ifupdown openssl nano apache2-utils ntpdate vzctl # Additional OS dependencies RUN $AG update && $AG install -y telnetd && $AG install xinetd nginx -COPY subset/network/NTPClient NTPClient +COPY docker/include/network/NTPClient NTPClient RUN cd NTPClient && ./gradlew build -COPY subset/network/TransportClient TransportClient +COPY docker/include/network/TransportClient TransportClient # Prefetch resolvconf to dynamically install at runtime in start_faux. RUN $AG update && cd /tmp && ln -s ~/bin bin && $AG download resolvconf && mv resolvconf_*.deb ~ @@ -42,27 +52,29 @@ COPY --from=java_build /root/bacnet4j/*.jar bacnet4j/ COPY docker/include/bin/bacnet_discover bin/ COPY --from=java_build /root/pubber/build/libs/*.jar pubber/build/libs/ -COPY pubber/bin/run pubber/bin/ +COPY udmi/pubber/bin/run pubber/bin/ -COPY subset/pentests/brute_server.py pentests/ -COPY subset/security/tlsfaux tlsfaux/ +COPY docker/include/pentests/brute_server.py pentests/ +COPY docker/include/security/tlsfaux tlsfaux/ -COPY subset/bacnet/bacnetTests/ bacnetTests -COPY subset/bacnet/bacnetTests/src/main/resources/Faux*.json tmp/ -COPY --from=java_build /root/bacnet4j/bacnet4j-1.0-SNAPSHOT-all.jar bacnetTests/libs/ -RUN cd bacnetTests && ./gradlew build - -# SSH dependency -COPY subset/security/ssh_additions.config ssh_additions.config -RUN cat ssh_additions.config >> /etc/ssh/sshd_config +COPY docker/include/bacnet/bacnetFaux/ bacnetFaux +COPY docker/include/bacnet/bacnetFaux/src/main/resources/Faux*.json tmp/ +COPY --from=java_build /root/bacnet4j/bacnet4j-1.0-SNAPSHOT-all.jar bacnetFaux/libs/ +RUN cd bacnetFaux && ./gradlew build # HTTP/HTTPS dependency -COPY subset/security/nginxpass.conf /root/nginx/ -COPY subset/security/nginxfail.conf /root/nginx/ 
-COPY subset/security/nginx-site /var/www/nginx-site - -# SSH login fix. Otherwise user is kicked off after login -RUN sed 's@session\s*required\s*pam_loginuid.so@session optional pam_loginuid.so@g' -i /etc/pam.d/sshd +COPY docker/include/security/nginxpass.conf /root/nginx/ +COPY docker/include/security/nginxfail.conf /root/nginx/ +COPY docker/include/security/nginx-site /var/www/nginx-site + +COPY --from=ssh_build /usr/local/openssl/* /usr/local/openssl/ +COPY --from=ssh_build /usr/local/sbin/* /usr/local/sbin/ +COPY --from=ssh_build /usr/local/bin/* /usr/local/bin/ +COPY --from=ssh_build /usr/local/etc/* /usr/local/etc/ + +COPY docker/include/security/sshfaux/ssh_privsep.sh ssh_privsep.sh +RUN sh ssh_privsep.sh +RUN /usr/local/bin/ssh-keygen -A # Weird workaround for problem running tcdump in a privlidged container. RUN mv /usr/sbin/tcpdump /usr/bin/tcpdump diff --git a/docker/modules/Dockerfile.faux2 b/docker/modules/Dockerfile.faux2 index 11ea5020bf..e179f0b9a7 100644 --- a/docker/modules/Dockerfile.faux2 +++ b/docker/modules/Dockerfile.faux2 @@ -26,9 +26,9 @@ COPY docker/include/bin/start_faux docker/include/bin/failing bin/ # Weird workaround for problem running tcdump in a privlidged container. 
RUN mv /usr/sbin/tcpdump /usr/bin/tcpdump -COPY subset/security/tlsfaux tlsfaux/ -COPY subset/security/nginxpass.conf /root/nginx/ -COPY subset/security/nginxfail.conf /root/nginx/ -COPY subset/security/nginx-site /var/www/nginx-site +COPY docker/include/security/tlsfaux tlsfaux/ +COPY docker/include/security/nginxpass.conf /root/nginx/ +COPY docker/include/security/nginxfail.conf /root/nginx/ +COPY docker/include/security/nginx-site /var/www/nginx-site ENTRYPOINT ["bin/start_faux"] diff --git a/docker/modules/Dockerfile.networking b/docker/modules/Dockerfile.networking index 2001cf1f03..86ae77238b 100644 --- a/docker/modules/Dockerfile.networking +++ b/docker/modules/Dockerfile.networking @@ -5,9 +5,9 @@ FROM daqf/aardvark:latest -RUN $AG update && $AG install dnsmasq ethtool iptables netcat ntp python +RUN $AG update && $AG install dnsmasq ethtool iptables netcat ntp python curl -COPY docker/include/networking_scripts/* ./ +COPY docker/include/network/scripts/* ./ RUN mkdir -p /etc COPY docker/include/etc/ntp.conf /etc diff --git a/docs/add_test.md b/docs/add_test.md index f279359570..79592f0d24 100644 --- a/docs/add_test.md +++ b/docs/add_test.md @@ -34,7 +34,7 @@ A setup for the `pass` test, as an example, woud be configured as follows * `echo host_tests=local/local_tests.conf >> local/system.conf` -- Set tests configuration. This, of course, only works for local development when using the `local_tests.conf` config. To -formalize a test and include it in the overal system build it should be included in +formalize a test and include it in the overall system build it should be included in `config/modules/all.conf`. ## Component Build @@ -112,7 +112,7 @@ However, with great flexibility comes great responsibility. 
Tests should: - Test _one_ thing well -- Include an integration test for Travis CI +- Include an integration test for [Github actions](https://github.com/faucetsdn/daq/actions) - Adhere to the Google style guide of your chosen language: https://google.github.io/styleguide/ - Have the smallest amount of code possible for the greatest utility for the framework. Keep docker images lean! - Not add things like the following to the repository: @@ -124,9 +124,9 @@ Tests should: - Include the test name and a description of the test in the report output - Include an informative line in the summary table -Integration tests don't need to be tedious and, if you're developing one test and seeing a consistent failure on Travis CI, isolate your problem and run _just that part_ of the integration test both locally and on Travis CI. +Integration tests don't need to be tedious and, if you're developing one test and seeing a consistent failure on Github actions, isolate your problem and run _just that part_ of the integration test both locally and on Github actions. -The pass/fail state of an integration test corresponds to the result of a `diff` between expected and actual device report output. You can follow the steps in the _Integration Testing Workflow_ section below to mimic the exact process that Travis CI follows. Or, if your local machine builds Docker images slowly, simply modify the test_*.out by hand, amending it to what your report should look like. Then, see if Travis CI agrees. +The pass/fail state of an integration test corresponds to the result of a `diff` between expected and actual device report output. You can follow the steps in the _Integration Testing Workflow_ section below to mimic the exact process that Github actions follows. Or, if your local machine builds Docker images slowly, simply modify the test_*.out by hand, amending it to what your report should look like. Then, see if Github actions agrees. 
Similarly, if you're writing one test and running it within DAQ locally, run _only the test you're developing_. Try not to bloat your precious development hours by waiting for tests to run that you don't care about. Building unnecessary tests is a very efficient time sink. @@ -145,7 +145,7 @@ All of the commands in these steps are run as the root user by typing "sudo -i" 6. Run `testing/test_x.sh` 7. Copy `out/test_x.out` to `testing/test_x.out` 8. Run `testing/test_x.sh` to check the integration tests now execute successfully in the local machine. -9. Commit to GitHub to sync local codebase with remote codebase and to trigger the final Travis CI tests -10. Test should now pass the Travis CI integration tests. +9. Commit to GitHub to sync local codebase with remote codebase and to trigger the final [Github actions](https://github.com/faucetsdn/daq/actions) tests +10. Test should now pass the [Github actions](https://github.com/faucetsdn/daq/actions) integration tests. TODO: write note about hold_tests diff --git a/docs/build.md b/docs/build.md index 40e40aa20e..6ff2287518 100644 --- a/docs/build.md +++ b/docs/build.md @@ -22,23 +22,11 @@ different dependencies. See `cmd/build help` for more details on different image ## Tests, Tests, and More Tests In a whirlwind of flagrant appropriateness, the baseline for DAQ development is... testing. Specifically, -there is a suite of continuous integration tests that run on [Travis CI](https://travis-ci.com/faucetsdn/daq) +there is a suite of continuous integration tests that run using [Github actions](https://github.com/faucetsdn/daq/actions) that puts the system through a barrage of tests to make sure all is good. Any PR submission will -require that these tests pass. It's recommended (but not required) that you set up Travis CI on -your personal development branch to test your commits in the full Travis CI environment before relying -on the automatic PR test mechanism. 
- -The `.travis.yml` file contains the information for the tests themselves, primarily listed under the `matrix:` -subsection that shows all the various tested configurations. Note that this assumes a fully installed environment -(as setup with `bin/setup_daq`). From there, individual tests can be run locally by -appending `bin/test_daq` to a `sudo` line of shell environment settings, e.g. as taken from one matrix entry: -
-~/daq$ sudo DAQ_TEST=base bin/test_daq
-…
-or directly with:
-~/daq$ sudo testing/test_base.sh
-…
-
+require that these tests pass. + +For more information, please see [developing docs](https://github.com/faucetsdn/daq/blob/master/docs/developing.md). ## Incremental Builds diff --git a/docs/changelog.md b/docs/changelog.md index a67a7cfd32..bedd5c2366 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -1,4 +1,55 @@ # Changelog +* 1.9.5 + * Increase nmap module timeout (#611) + * Improvements for test development debugging (#609) + * Add module config system logging (#607) +* 1.9.4 + * Feature/convert switchtests (#601) + * Do not infinite spawn ntp (#598) + * security.nmap.http test (#563) + * Update registrar tool for latest UDMI version (#596) + * Feature/vlan trigger (#588) + * fix gcp combine report test (#587) + * Adding default dns for static ip faux devices (#576) + * Add perodic tests (#575) + * security.admin.password changes (#461) +* 1.9.0 + * Test infrastructure cleanup (#572) + * Remove faux dependencies from subset directory (#567) + * Github actions (#558) + * misc updates to docs (#568) + * Incorporate manual test comments (#499) + * NTP Update (#525) + * Automatic build script (#557) +* 1.8.2 + * GRPC timeouts + usi first command wait fix. (#555) + * Numerous renovate bot updates. 
+* 1.8.0 + * add security.ssh.version test (#523) + * Refactor UDMI to external repo (#544) + * Additional DHCP test part 3: IP change test (#543) + * Additional DHCP test part 2: Multisubnet test (#539) + * Additional DHCP test part 1 (#532) + * Support for alternate sec switch (not managed by DAQ) (#531) + * Add troubleshooting script (#529) + * Using usi in daq (#520) + * Use trunk rather than stack between switches (#526) + * NTPv4 support (#487) + * Feature/usi OVS switch (#521) +* 1.7.0 + * Add DAQ version to origin summary (#522) + * Add check for git version tag in Travis (#519) + * Minor UDMI updates for pubber keygen + * Update Minimum Send Test (#498) + * Universal Switch Interface (USI) (#496) +* 1.6.1 + * fix image pull in cmd/build (#503) +* 1.6.0 + * cloud test setup documentation (#495) + * Baseline for NTP tests (#494) + * Baseline for DNS test (#492) + * Add manual test summary to test report (#481) + * UDMI logentry schema update (#391) * 1.5.1 * Fix for local-port-as-string issue (#477) * 1.5.0 diff --git a/docs/cloud_tests.md b/docs/cloud_tests.md new file mode 100644 index 0000000000..ff982de80f --- /dev/null +++ b/docs/cloud_tests.md @@ -0,0 +1,97 @@ +# Cloud Connection Testing + +A number of additional setup steps are required for enabling testing against "smart devices" +that communicate with the cloud. The tests themselves are part of the `subset/cloud/test_udmi` +module included in the standard DAQ distro. The same basic device-to-cloud validation test +pipeline can be done manually and automatically (through DAQ); it's instructive to fully +understand the manual test pipeline before engaging with the automated setup. + +## Manual Test Pipeline + +The overall device-to-cloud pipeline looks something like the following: + +* Device sends data to the cloud. 
There's two kinds of devices: + * A faux _reference design_ device called [pubber](https://github.com/faucetsdn/udmi/blob/master/docs/pubber.md), + which is a completely contained software device. + * An actual physical device. The setup and configuration of that device will be manufacturer + dependent and so is out of scope for this (DAQ) documentation. +* A configured GCP IoT Core project, registry, and device entry. The +[GCP docs for IoT Core](https://cloud.google.com/iot/docs/how-tos/devices) describe the basics. The +key part is the _authentication key_ (hahaha) that needs to be setup between the local device and +cloud device entry. +* The IoT Core registry is configured with a _PubSub topic_ (not to be confused with an _MQTT topic_), +that provides the bridge between incoming data and consumers of that data. See the GCP documentation +on PubSub for more details. +* (optional) Some devices might need a gateway that communicates with IoT Core + on their behalf. In this case the Gateway should be added to the IoT Core as + well and the devices bound to it. +* (optional) The `gcloud` command line can be used to validate that data is being sent from the +device to the cloud. Something like +`gcloud pubsub subscriptions pull --auto-ack projects/{project}/subscriptions/{sub_id}`. +(Complete documentation for how to use `gcloud` commands is out of scope of this documentation.) +* The [validator tool](https://github.com/faucetsdn/udmi/blob/master/docs/validator.md) is what +programmatically validates a device data stream, and is what is ultimately used by `test_udmi` +to validate device-cloud communication. + +## Base Local Test Setup + +* The `udmi` module needs to be enabled in build. When running `cmd/build` there should be a line +like `subset/cloud/Dockerfile.test_udmi` in the startup logs. +This is enabled through the `host_tests` config parameter, +which can be set to `config/modules/all.conf` if necessary. 
On startup, there should be a log +message that includes `udmi`: +``` +Jun 22 08:32:52 runner INFO Configured with tests pass, fail, ping, bacnet, mudgee, nmap, discover, switch, macoui, bacext, tls, password, udmi, manual +``` +* A testing gcp service account `gcp_cred` needs to be setup as described in +[service account setup instructions](service.md). +* The system's default `module_config` needs to enable the `udmi` test, e.g. as per +`resources/setups/baseline/module_config.json`. This can be validated by (runtime) checking +`inst/run-port-01/nodes/udmi01/tmp/module_config.json` to see if it has something like the following: +``` + "udmi": { + "enabled": true + } +``` +* `site_path` config needs to point to a site definition directory, or defaults to `local/site`. +This contains all the site-specific information about devices needed for testing. +* `{site_path}/mac_addrs/{mac_addr}/module_config.json` needs to have a `device_id` defined, e.g. +as in `resources/test_site/mac_addrs/3c5ab41e8f0b/module_config.json`. +* The GCP IoT Core setup needs to have a proper registry and device configured. This can either +be done manually or using the [registrar +tool](https://github.com/faucetsdn/udmi/blob/master/docs/registrar.md). + +## Integration Testing + +If developing cloud-tests, then the CI build system also needs to have a service account configured +pointing at a suitable GCP project. To run cloud-based tests, setup the [Github Secrets](https://docs.github.com/en/actions/configuring-and-managing-workflows/creating-and-storing-encrypted-secrets) `GCP_BASE64_CRED` +env variable with a `base64` encoded service account key for your project. It's recommended to use a dedicated key with a nice name like `daq-ci`, but not required. Note that on linux `-w 0` option is required for proper parsing/formatting, as there can't be any newlines in the copied string. 
+ + +$ base64 -w 0 local/gcp_service_account.json +ewoICJ1eXBlIjogInNlcnZpY2VfYWNjb3VudCIsCiAgInByb2plY3RfaWQiOiAiYm9zLWRhcS10ZXN0aW5nIiwKICAicHJpd +… +iOiAiaHR0cHM6Ly93LWRhcS10ZXN0aW5nLmlhbS5nc2VydmljZWFjY291bnQuY29tIgp9Cg== + + +### Github Actions CI Testing + +* Run the [registrar tool](https://github.com/faucetsdn/udmi/blob/master/docs/registrar.md) to properly configure the cloud project. +* `gcp_topic` config to `local/system.conf` as described in this doc. +* Configure test subsystem with proper cloud endpoint in `{test_site}/cloud_iot_config.json`. +* Configure the DUT with the proper cloud device credentials (device specific). For _faux_ devices, this means copying +the associated `rsa_private.pkcs8` file to something like `inst/faux/daq-faux-2/local/` (exact path depends on which faux). +* Test with `udmi/bin/registrar`, `udmi/pubber/bin/run`, and `udmi/bin/validator` manually, before integrated testing through DAQ. + +### Is my Github Actions set up correctly? + +If Github Actions is set up correctly, you should see logs similar to the one below: +``` +Running test script testing/test_aux.sh +Writing test results to inst/test_aux.out and inst/test_aux.gcp +Decoding GCP_BASE64_CRED to inst/config/gcp_service_account.json +base64 wc: 1 1 3097 +GCP service account is "daq-ci@daq-testing.iam.gserviceaccount.com" +``` + +If the `3097` character count is wildly off, then likely something went wrong with the newlines. diff --git a/docs/developing.md b/docs/developing.md index 77848125ef..e4c025a9e5 100644 --- a/docs/developing.md +++ b/docs/developing.md @@ -28,16 +28,9 @@ faster than continually working with physical components (unless actively debugg problems that only manifest themselves physically). If there is a problem in the 'real' world, then the first step is typically to try and reproduce it virtually. -## Travis CI +## Github Actions CI -Travis is used as the primary CI testing point for DAQ. 
The -[facuetsdn/daq dashboard](https://travis-ci.com/faucetsdn/daq/branches) shows the -status of the current master branch. It is generally recommended to set up -Travis on your personal repos to test any branches you push/commit. PRs will -automatically be tested under the destination repo. - -Travis runs a suite of tests defined in the `.travis.yml` file. Each `DAQ_TEST` -entry triggers a separate run through the `bin/test_daq` script. E.g. `DAQ_TEST=many` +The `.github/workflows` folder contains information for the tests themselves. There are 2 workflows currently in place -- one for main DAQ integration tests and unit tests, and the other for USI related tests. Each workflow file is further broken down into jobs. In the case of tests.yml, there are the `integration_tests` and `unit_tests` jobs. Primarily listed under the `matrix:` subsection shows all the various tested configurations for the `integration_tests`. Each matrix entry triggers a separate run through the `bin/test_daq` script. E.g. `DAQ_TEST=many` ultimately runs `testing/test_many.sh`. The test output results are compared against the golden `.out` file (e.g. `testing/test_many.out`) and the tests pass if there is no difference. (Look in `bin/test_daq` to see exactly what it's doing.) @@ -45,25 +38,26 @@ is no difference. (Look in `bin/test_daq` to see exactly what it's doing.) If there are unexplained differences in the `.out` file, then the test output log itself should be checked to see what actually went wrong, since there's likely not enough information in the `.out` files to diagnose effectively. The complete -log output is avaliable from a Travis run (or locally when you run locally), and -the triggering line from the `.out` difference should be there as well (search for it!). 
+log output is available from a [Github actions](https://github.com/faucetsdn/daq/actions) run (or locally when you run locally), and the triggering line from the `.out` difference should be there as well (search for it!). + +Note all integration tests assume a fully installed environment (as setup with `bin/setup_daq`). ## Local Integration Tests -Tests can be run locally with something like `sudo testing/test_aux.sh`, and the output -will be generated into, e.g., `out/test_aux.out`, that can be compared against the -corresponding golden `.out` file, e.g., `testing/test_aux.out`. Running tests locally is -not always 100% exactly the same as running things in a real (against physical devices +Individual integration tests can be run locally by +appending `bin/test_daq` to a `sudo` line of shell environment settings, e.g. as taken from one matrix entry: +
+~/daq$ sudo DAQ_TEST=base bin/test_daq
+…
+or directly with:
+~/daq$ sudo testing/test_base.sh
+…
+
+ +Running tests locally is not always 100% exactly the same as running things in a real (against physical devices on a physical switch) or CI environment, but in most cases it provides a workable method. -It is recommended to start from a clear DAQ configuration by running `rm -rf local` -from the main DAQ folder before running the local integration tests. - -When developing a new test, the output should appear in the corresponding `.out` file, -which should be updated appropriatley. The easiest way to migrate in new changes is to -just copy the `out/` file to `testing/`, but care must be taken that only expected -changes are included with a new PR. Ultimately the Travis CI tests must pass, not the -local tests, to guard against any local filesystem changes. +When developing a new test, the output should appear in the corresponding `.out` file, which should be updated appropriately. The easiest way to migrate in new changes is to just copy the `out/` file to `testing/`, but care must be taken that only expected changes are included with a new PR. Ultimately the [Github actions](https://github.com/faucetsdn/daq/actions) tests must pass, not the local tests, to guard against any local filesystem changes. ## Aux Golden Device Report @@ -85,7 +79,9 @@ as the new golden file (i.e., copy it from `out/report_9a02571e8f01_???.md` to ## Lint Checks -Lint checks are performed as part of the `testing/test_aux.sh` script. They are extra -tricky because they are typically very sensitive to the exact version of every package -installed, so they're somewhat unreliable except when run through a pristine environment -on Travis. +To make sure changes to DAQ adhere to the existing code checkstyle, a pre-commit hook can be set up to run [bin/check_style](https://github.com/faucetsdn/daq/blob/master/bin/check_style) before a commit. To enable this, simply run the following line under your daq root directory. +
+~/daq$ echo "bin/check_style" > .git/hooks/pre-commit && chmod +x .git/hooks/pre-commit
+
+ +Lint checks are performed as part of the unit_test job on [Github actions](https://github.com/faucetsdn/daq/actions) as well as on [stickler-ci](https://stickler-ci.com/repositories/51649-faucetsdn-daq) when for every PR. diff --git a/docs/device_report.md b/docs/device_report.md index 2ab3ab3320..83c4ff8fa4 100644 --- a/docs/device_report.md +++ b/docs/device_report.md @@ -48,15 +48,15 @@ Overall device result FAIL |Category|Result| |---|---| -|Security|PASS| +|Security|1/2| |Other|1/2| |Connectivity|n/a| -|Expectation|pass|fail|skip|gone| -|---|---|---|---|---| -|Required|1|0|0|0| -|Recommended|2|0|0|0| -|Other|1|2|22|2| +|Expectation|pass|fail|skip|info|gone| +|---|---|---|---|---|---| +|Required|1|0|0|0|0| +|Recommended|1|0|0|0|1| +|Other|6|2|21|1|2| |Result|Test|Category|Expectation|Notes| |---|---|---|---|---| @@ -66,22 +66,27 @@ Overall device result FAIL |skip|cloud.udmi.pointset|Other|Other|No device id| |skip|cloud.udmi.state|Other|Other|No device id| |skip|cloud.udmi.system|Other|Other|No device id| +|info|communication.type.broadcast|Other|Other|Broadcast packets received. Unicast packets received.| +|skip|connection.dns.hostname_connect|Other|Other|Device did not send any DNS requests| |fail|connection.mac_oui|Other|Other|Manufacturer prefix not found!| +|pass|connection.min_send|Other|Other|ARP packets received. 
Data packets were sent at a frequency of less than 5 minutes| +|pass|connection.network.ntp_support|Other|Other|Using NTPv4.| +|pass|connection.network.ntp_update|Other|Other|Device clock synchronized.| |skip|connection.port_duplex|Other|Other|No local IP has been set, check system config| |skip|connection.port_link|Other|Other|No local IP has been set, check system config| |skip|connection.port_speed|Other|Other|No local IP has been set, check system config| -|pass|manual.test.travis|Security|Recommended|Manual test - for testing| -|skip|poe.negotiation|Other|Other|No local IP has been set, check system config| -|skip|poe.power|Other|Other|No local IP has been set, check system config| -|skip|poe.support|Other|Other|No local IP has been set, check system config| +|pass|manual.test.name|Security|Recommended|Manual test - for testing| +|skip|poe.switch.power|Other|Other|No local IP has been set, check system config| |fail|protocol.bacnet.pic|Other|Other|PICS file defined however a BACnet device was not found.| |skip|protocol.bacnet.version|Other|Other|Bacnet device not found.| |skip|security.firmware|Other|Other|Could not retrieve a firmware version with nmap. 
Check bacnet port.| -|skip|security.passwords.http|Other|Other|Port 80 is not open on target device.| -|skip|security.passwords.https|Other|Other|Port 443 is not open on target device.| -|skip|security.passwords.ssh|Other|Other|Port 22 is not open on target device.| -|skip|security.passwords.telnet|Other|Other|Port 23 is not open on target device.| -|pass|security.ports.nmap|Security|Recommended|Only allowed ports found open.| +|pass|security.nmap.http|Other|Other|No running http servers have been found.| +|pass|security.nmap.ports|Other|Other|Only allowed ports found open.| +|skip|security.passwords.http|Other|Other|Port 80 not open on target device.| +|skip|security.passwords.https|Other|Other|Port 443 not open on target device.| +|skip|security.passwords.ssh|Other|Other|Port 22 not open on target device.| +|skip|security.passwords.telnet|Other|Other|Port 23 not open on target device.| +|gone|security.ports.nmap|Security|Recommended|| |skip|security.tls.v1|Other|Other|IOException unable to connect to server| |skip|security.tls.v1.x509|Other|Other|IOException unable to connect to server| |skip|security.tls.v1_2|Other|Other|IOException unable to connect to server| @@ -92,15 +97,6 @@ Overall device result FAIL |gone|unknown.fake.monkey|Other|Other|| -## Module ipaddr - - -#### Module Config - -|Attribute|Value| -|---|---| -|timeout_sec|300| - ## Module pass @@ -159,18 +155,32 @@ RESULT pass base.target.ping target reached ``` -------------------- -security.ports.nmap +security.nmap.ports -------------------- Automatic TCP/UDP port scan using nmap -------------------- # Nmap 7.60 scan initiated XXX as: nmap -v -n -T5 -sT -sU --host-timeout=4m --open -pU:47808,T:23,443,80, -oG /tmp/nmap.log X.X.X.X # Ports scanned: TCP(3;23,80,443) UDP(1;47808) SCTP(0;) PROTOCOLS(0;) Host: X.X.X.X () Status: Up -Host: X.X.X.X () Ports: 47808/closed/udp//bacnet/// Ignored State: closed (3) +Host: X.X.X.X () Ports: 47808/closed/udp//bacnet/// # Nmap done at XXX -- 1 IP address (1 host 
up) scanned in XXX No invalid ports found. -------------------- -RESULT pass security.ports.nmap Only allowed ports found open. +RESULT pass security.nmap.ports Only allowed ports found open. + +-------------------- +security.nmap.http +-------------------- +Check that the device does not have open ports exposing an unencrypted web interface using HTTP +-------------------- +# Nmap 7.60 scan initiated XXX as: nmap -v -n -T5 -A --script http-methods --host-timeout=4m --open -p- -oG /tmp/http.log X.X.X.X +# Ports scanned: TCP(65535;1-65535) UDP(0;) SCTP(0;) PROTOCOLS(0;) +Host: X.X.X.X () Status: Up +Host: X.X.X.X () Ports: 10000/open/tcp//snet-sensor-mgmt?/// +# Nmap done at XXX -- 1 IP address (1 host up) scanned in XXX +No running http servers have been found. +-------------------- +RESULT pass security.nmap.http No running http servers have been found. ``` @@ -178,6 +188,7 @@ RESULT pass security.ports.nmap Only allowed ports found open. |Attribute|Value| |---|---| +|timeout_sec|600| |enabled|True| ## Module discover @@ -239,31 +250,13 @@ LOCAL_IP not configured, assuming no network switch. RESULT skip connection.port_duplex No local IP has been set, check system config -------------------- -poe.power +poe.switch.power -------------------- Verify that the device draws less than the maximum power allocated by the port. This is 15.4W for 802.3af and 30W for 802.3at -------------------- LOCAL_IP not configured, assuming no network switch. -------------------- -RESULT skip poe.power No local IP has been set, check system config - --------------------- -poe.negotiation --------------------- -Verify the device autonegotiates power requirements --------------------- -LOCAL_IP not configured, assuming no network switch. 
--------------------- -RESULT skip poe.negotiation No local IP has been set, check system config - --------------------- -poe.support --------------------- -Verify if the device supports PoE --------------------- -LOCAL_IP not configured, assuming no network switch. --------------------- -RESULT skip poe.support No local IP has been set, check system config +RESULT skip poe.switch.power No local IP has been set, check system config ``` @@ -274,30 +267,6 @@ RESULT skip poe.support No local IP has been set, check system config |enabled|True| |poe|{'enabled': True}| -## Module macoui - - -#### Report - -``` --------------------- -connection.mac_oui --------------------- -Check Physical device address OUI against IEEE registration and verify it is registered with the correct manufacturer --------------------- -Using the host hardware address 9a:02:57:1e:8f:01 -Mac OUI Test --------------------- -RESULT fail connection.mac_oui Manufacturer prefix not found! - -``` - -#### Module Config - -|Attribute|Value| -|---|---| -|enabled|True| - ## Module bacext @@ -408,92 +377,80 @@ RESULT skip security.tls.v1_3.x509 IOException unable to connect to server ``` -------------------- -security.passwords.http +security.admin.password.http -------------------- -Verify all default passwords are updated and new Google provided passwords are set. +Verify all device manufacturer default passwords are changed for protocol: http, and new passwords are set. -------------------- -[STARTING WITH IP:X.X.X.X, MAC:9a:02:57:1e:8f:01, PROTOCOL: http] -Starting NMAP check... Starting Nmap 7.60 ( https://nmap.org ) at XXX Nmap scan report for daq-faux-1 (X.X.X.X) Host is up (XXX). -PORT STATE SERVICE -10000/tcp open snet-sensor-mgmt +PORT STATE SERVICE +80/tcp closed http MAC Address: 9A:02:57:1E:8F:01 (Unknown) Nmap done: 1 IP address (1 host up) scanned in XXX -nmap X.X.X.X -Done. +Could not connect to specified port on host. 
-------------------- -RESULT skip security.passwords.http Port 80 is not open on target device. +RESULT skip security.passwords.http Port 80 not open on target device. -------------------- -security.passwords.https +security.admin.password.https -------------------- -Verify all default passwords are updated and new Google provided passwords are set. +Verify all device manufacturer default passwords are changed for protocol: https, and new passwords are set. -------------------- -[STARTING WITH IP:X.X.X.X, MAC:9a:02:57:1e:8f:01, PROTOCOL: https] -Starting NMAP check... Starting Nmap 7.60 ( https://nmap.org ) at XXX Nmap scan report for daq-faux-1 (X.X.X.X) Host is up (XXX). -PORT STATE SERVICE -10000/tcp open snet-sensor-mgmt +PORT STATE SERVICE +443/tcp closed https MAC Address: 9A:02:57:1E:8F:01 (Unknown) Nmap done: 1 IP address (1 host up) scanned in XXX -nmap X.X.X.X -Done. +Could not connect to specified port on host. -------------------- -RESULT skip security.passwords.https Port 443 is not open on target device. +RESULT skip security.passwords.https Port 443 not open on target device. -------------------- -security.passwords.telnet +security.admin.password.ssh -------------------- -Verify all default passwords are updated and new Google provided passwords are set. +Verify all device manufacturer default passwords are changed for protocol: ssh, and new passwords are set. -------------------- -[STARTING WITH IP:X.X.X.X, MAC:9a:02:57:1e:8f:01, PROTOCOL: telnet] -Starting NMAP check... Starting Nmap 7.60 ( https://nmap.org ) at XXX Nmap scan report for daq-faux-1 (X.X.X.X) Host is up (XXX). -PORT STATE SERVICE -10000/tcp open snet-sensor-mgmt +PORT STATE SERVICE +22/tcp closed ssh MAC Address: 9A:02:57:1E:8F:01 (Unknown) Nmap done: 1 IP address (1 host up) scanned in XXX -nmap X.X.X.X -Done. +Could not connect to specified port on host. -------------------- -RESULT skip security.passwords.telnet Port 23 is not open on target device. 
+RESULT skip security.passwords.ssh Port 22 not open on target device. -------------------- -security.passwords.ssh +security.admin.password.telnet -------------------- -Verify all default passwords are updated and new Google provided passwords are set. +Verify all device manufacturer default passwords are changed for protocol: telnet, and new passwords are set. -------------------- -[STARTING WITH IP:X.X.X.X, MAC:9a:02:57:1e:8f:01, PROTOCOL: ssh] -Starting NMAP check... Starting Nmap 7.60 ( https://nmap.org ) at XXX Nmap scan report for daq-faux-1 (X.X.X.X) Host is up (XXX). -PORT STATE SERVICE -10000/tcp open snet-sensor-mgmt +PORT STATE SERVICE +23/tcp closed telnet MAC Address: 9A:02:57:1E:8F:01 (Unknown) Nmap done: 1 IP address (1 host up) scanned in XXX -nmap X.X.X.X -Done. +Could not connect to specified port on host. -------------------- -RESULT skip security.passwords.ssh Port 22 is not open on target device. +RESULT skip security.passwords.telnet Port 23 not open on target device. ``` @@ -501,6 +458,7 @@ RESULT skip security.passwords.ssh Port 22 is not open on target device. |Attribute|Value| |---|---| +|dictionary_dir|resources/faux| |enabled|True| ## Module udmi @@ -551,13 +509,13 @@ RESULT skip cloud.udmi.system No device id ``` -------------------- -manual.test.travis +manual.test.name -------------------- -------------------- No additional information provided -------------------- -RESULT pass manual.test.travis Manual test - for testing +RESULT pass manual.test.name Manual test - for testing ``` @@ -567,5 +525,72 @@ RESULT pass manual.test.travis Manual test - for testing |---|---| |enabled|True| +## Module network + + +#### Report + +``` +-------------------- +connection.min_send +-------------------- +Device sends data at a frequency of less than 5 minutes. +-------------------- + + + + + + + + + + + +RESULT pass connection.min_send ARP packets received. 
Data packets were sent at a frequency of less than 5 minutes +-------------------- +communication.type.broadcast +-------------------- +Device sends unicast or broadcast packets. +-------------------- + + +RESULT info communication.type.broadcast Broadcast packets received. Unicast packets received. +-------------------- +connection.network.ntp_support +-------------------- +Device supports NTP version 4. +-------------------- +RESULT pass connection.network.ntp_support Using NTPv4. +-------------------- +connection.network.ntp_update +-------------------- +Device synchronizes its time to the NTP server. +-------------------- +RESULT pass connection.network.ntp_update Device clock synchronized. +-------------------- +connection.mac_oui +-------------------- +Check Physical device address OUI against IEEE registration and verify it is registered with the correct manufacturer +-------------------- +Using the host hardware address 9a:02:57:1e:8f:01 +Mac OUI Test +-------------------- +RESULT fail connection.mac_oui Manufacturer prefix not found! + +-------------------- +connection.dns.hostname_connect +-------------------- +Check device uses the DNS server from DHCP and resolves hostnames +-------------------- +RESULT skip connection.dns.hostname_connect Device did not send any DNS requests +``` + +#### Module Config + +|Attribute|Value| +|---|---| +|enabled|True| + ## Report complete diff --git a/docs/integration_testing.md b/docs/integration_testing.md deleted file mode 100644 index 1c6435a9b0..0000000000 --- a/docs/integration_testing.md +++ /dev/null @@ -1,83 +0,0 @@ -# Integration Testing - -DAQ currently uses Travis CI for integration testing: https://travis-ci.org/ - -## Configuration - -The `test_udmi` test module uses the Registrar and Validator to check that a device is -properly communicating through Cloud IoT, automated through DAQ. 
- -### GCP Credential - -To run cloud-based tests, setup the Travis `GCP_BASE64_CRED` env variable with a `base64` encoded -service account key for your project. It's recommended to use a dedicated key with a nice name -like `daq-travis`, but not required. Encode the key value as per below, and cut/paste the -resulting string into a -[Travis environment variable](https://docs.travis-ci.com/user/environment-variables/#defining-variables-in-repository-settings) -for a `GCP_BASE64_CRED` varaible. Note the `-w 0` option is required for proper parsing/formatting, -as there can't be any newlines in the copied string. - - -$ base64 -w 0 local/gcp_service_account.json -ewoICJ1eXBlIjogInNlcnZpY2VfYWNjb3VudCIsCiAgInByb2plY3RfaWQiOiAiYm9zLWRhcS10ZXN0aW5nIiwKICAicHJpd -… -iOiAiaHR0cHM6Ly93LWRhcS10ZXN0aW5nLmlhbS5nc2VydmljZWFjY291bnQuY29tIgp9Cg== - - -## Travis CI Testing - -* Run the [registrar tool](registrar.md) to properly configure the cloud project. -* `gcp_topic` config to `local/system.conf` as described in this doc. -* Configure test subsystem with proper cloud endpoint in `{test_site}/cloud_iot_config.json`. -* Configure the DUT with the proper cloud device credentials (device specific). For _faux_ devices, this means copying -the assocatied `rsa_private.pkcs8` file to someting like `inst/faux/daq-faux-2/local/` (exact path depends on which faux). -* Test with `bin/registrar`, `pubber/bin/run`, and `bin/validate` manually, before integrated testing through DAQ. - -### Is my Travis set up correctly? 
- -If Travis is set up correctly, you should see messages at the beginning of the log file: -``` -Setting environment variables from repository settings -$ export DOCKER_USERNAME=[secure] -$ export DOCKER_PASSWORD=[secure] -$ export GCP_BASE64_CRED=[secure] -``` - -Further down there would be more details about the cred itself: -``` -Running test script testing/test_aux.sh -Writing test results to inst/test_aux.out and inst/test_aux.gcp -Decoding GCP_BASE64_CRED to inst/config/gcp_service_account.json -base64 wc: 1 1 3097 -GCP service account is "daq-travis@daq-testing.iam.gserviceaccount.com" -``` - -If the `3097` character count is wildly off, then likely something went wrong with the newlines. - -### Travis Build For "External" Pull Requests - -Travis will not use encrypted environment variables when testing against pull requests -from foreign github repositories, even if you've forked from another repository that you -have full control of via Github. Travis authorization != Github authorization, even if -you sign into Travis using Github! This is as it should be b/c security. see the following -for more info: - -- https://docs.travis-ci.com/user/environment-variables/#defining-variables-in-repository-settings -- https://docs.travis-ci.com/user/pull-requests/#pull-requests-and-security-restrictions - -If your test is failing from a PR, you'll see something like in a similar log location: - -``` -Encrypted environment variables have been removed for security reasons. -See https://docs.travis-ci.com/user/pull-requests/#pull-requests-and-security-restrictions -Setting environment variables from .travis.yml -$ export DOCKER_STARTUP_TIMEOUT_MS=60000 -$ export DAQ_TEST=aux -``` - -### Other Travis Caveats - -Take note the URL in your browser's address bar when running Travis. You might be on either -travis-ci.com or travis-ci.org. Any particular setup -may end up across both sites for undertermined reasons. 
Please consult with your browser's -exact URL for more clarity. diff --git a/docs/module_test.md b/docs/module_test.md index d337e45749..ce600c8885 100644 --- a/docs/module_test.md +++ b/docs/module_test.md @@ -80,7 +80,7 @@ RESULT fail security.x509 ## Continous Testing Continuous testing of module-specific builds is handled through the `testing/test_modules.sh` -script (as invoked by Travis). Execution results are compared against the +script (as invoked by [Github actions](https://github.com/faucetsdn/daq/actions)). Execution results are compared against the `testing/test_modules.out` file. To add a new test, add a few lines to the top of the test script and expected results to the output file. Every test module is required to be continously tested somewhere, either as part of `test_modules.sh` or elsewhere. diff --git a/docs/orchestration.md b/docs/orchestration.md index 4804ec356f..703c5235a0 100644 --- a/docs/orchestration.md +++ b/docs/orchestration.md @@ -11,7 +11,7 @@ to change. ## Data Rouces -The overal orchestration capability relies on several simple data sources: +The overall orchestration capability relies on several simple data sources: 1. [Overall network topology](topologies.md), which indicates how the network hardware is configured. 2. [Device MUD files](../mud_files), which provide an [IETF Standard MUD descriptor](https://datatracker.ietf.org/doc/draft-ietf-opsawg-mud/) that describes diff --git a/docs/pubber.md b/docs/pubber.md deleted file mode 100644 index 588396d1ac..0000000000 --- a/docs/pubber.md +++ /dev/null @@ -1,81 +0,0 @@ -# Pubber Reference Client - -The _Pubber_ reference client is a sample implementation of a client-side 'device' that implements -the [UDMI Schema](../schemas/udmi/README.md). It's not intended to be any sort of production-worthy -code or library, rather just a proof-of-concept of what needs to happen. - -## Build Pubber - -
-~/daq$ pubber/bin/build
-Running in /home/peringknife/daq/pubber
-
-> Task :compileJava
-…
-
-BUILD SUCCESSFUL in 2s
-2 actionable tasks: 1 executed, 1 up-to-date
-
- -## Key Generation - -
-~/daq$ pubber/bin/keygen
-Generating a 2048 bit RSA private key
-............+++
-......................................+++
-writing new private key to 'local/rsa_private.pem'
------
-~/daq$ ls -l local/rsa_*
--rw-r--r-- 1 user primarygroup 1094 Nov 19 18:56 local/rsa_cert.pem
--rw------- 1 user primarygroup 1704 Nov 19 18:56 local/rsa_private.pem
--rw-r--r-- 1 user primarygroup 1216 Nov 19 18:56 local/rsa_private.pkcs8
-
- -After generating the key pair, you'll have to upload/associate the `pubber_cert.pem` public certificate -with the device entry in the cloud console as an _RS256_cert_. (This can be done when the device is -created, or anytime after.) - -## Configuration - -The `local/pubber.json` file configures the key cloud parameters needed for operation -(the actual values in the file shold match your GCP setup): -
-~/daq$ cat local/pubber.json
-{
-  "projectId": "gcp-account",
-  "cloudRegion": "us-central1",
-  "registryId": "sensor_hub",
-  "deviceId": "AHU-1"
-}
-
- -## Operation - -
-~/daq$ pubber/bin/run
-[main] INFO daq.pubber.Pubber - Reading configuration from /home/user/daq/local/pubber.json
-[main] INFO daq.pubber.Pubber - Starting instance for registry sensor_hub
-[main] INFO daq.pubber.MqttPublisher - Creating new publisher-client for GAT-001
-[main] INFO daq.pubber.MqttPublisher - Attempting connection to sensor_hub:GAT-001
-[MQTT Call: projects/gcp-account/locations/us-central1/registries/sensor_hub/devices/GAT-001] INFO daq.pubber.Pubber - Received new config daq.udmi.Message$Config@209307c7
-[MQTT Call: projects/gcp-account/locations/us-central1/registries/sensor_hub/devices/GAT-001] INFO daq.pubber.Pubber - Starting executor with send message delay 2000
-[main] INFO daq.pubber.Pubber - synchronized start config result true
-[MQTT Call: projects/gcp-account/locations/us-central1/registries/sensor_hub/devices/GAT-001] INFO daq.pubber.Pubber - Sending state message for device GAT-001
-…
-[pool-1-thread-1] INFO daq.pubber.Pubber - Sending test message for sensor_hub/GAT-001
-[pool-1-thread-1] INFO daq.pubber.Pubber - Sending test message for sensor_hub/GAT-001
-
- - -## Cloud Setup - -To use Pubber, there needs to be a cloud-side device entry configured in a GCP project configured to -use [Cloud IoT](https://cloud.google.com/iot/docs/). The -[Creating or Editing a Device](https://cloud.google.com/iot/docs/how-tos/devices#creating_or_editing_a_device) -section of the documentation describe how to create a simple device and key-pair (see next section for -a helper script). You can/should substitute the relevant values in the configuration below for your -specific setup. The relevant bits of configuration are the information in the local/pubber.json -file (see above), and the generated public key (also see above). - -Alternatively, you can use the [registrar tool](registrar.md) to automate device registration. diff --git a/docs/pubsub.md b/docs/pubsub.md deleted file mode 100644 index 8333453577..0000000000 --- a/docs/pubsub.md +++ /dev/null @@ -1,127 +0,0 @@ -# PubSub Setup Documentation - -This document describes the [GCP PubSub in Cloud IoT](https://cloud.google.com/iot-core/) mechanism for -processing device messages. There are three major message types employed by the system: -* Config: Messages sent from cloud-to-device that _configure_ the device (idempotent). -* State: Messags sent from device-to-cloud reporting _state_ form the device (idempotent). -* Events: Messages sent from device-to-cloud for streaming _events_ (non-idempotent). - -The exact semantic meaning of theses is determined by the underlying schema used. E.g., the -[UDMI Schema](../schemas/udmi/README.md) specifies one set of conventions for managing IoT devices. - -## Validator Configuration - -Streaming validation validates a stream of messages pulled from a GCP PubSub topic. There are three values -in the `local/system.conf` file required to make it work: -* `gcp_cred`: The service account credentials, as per the general [DAQ Firebase setup](firebase.md). -* `gcp_topic`: The _PubSub_ (not MQTT) topic name. 
-* `schema_path`: Indicates which schema to validate against. - -You will need to add full Project Editor permissions for the service account. -E.g., to validate messages against the UDMI schema on the `projects/gcp-account/topics/target` topic, -there should be something like: - -
-~/daq$ fgrep gcp_ local/system.conf
-gcp_cred=local/gcp-account-de56aa4b1e47.json
-gcp_topic=target
-schema_path=schemas/udmi
-
- -## Message/Schema Mapping - -When using the -[GCP Cloud IoT Core MQTT Bridge](https://cloud.google.com/iot/docs/how-tos/mqtt-bridge#publishing_telemetry_events) -there are multiple ways the subschema used during validation is chosen. -* An `events` message is validated against the sub-schema indicated by the MQTT topic `subFolder`. E.g., the MQTT -topic `/devices/{device-id}/events/pointset` will be validated against `.../pointset.json`. -* [Device state messages](https://cloud.google.com/iot/docs/how-tos/config/getting-state#reporting_device_state) -are validated against the `.../state.json` schema. -* All messages have their attributes validated against the `.../attributes.json` schema. These attributes are -automatically defined by the MQTT Client ID and Topic, so are not explicitly included in any message payload. -* The `config` messages are artifically injected into the `target` PubSub topic by the configuration script -(below) so they can be easily checked by the validation engine. - -The simple `state_shunt` function in `daq/functions/state_shunt` will automatically send state update messages -to the `target` PubSub topic. Install this function to enable validation of state updates. (Also make sure to -configure the Cloud IoT project to send state message to the state topic!) - -## Pubber Reference Client - -The [Pubber Reference Client](pubber.md) is a complete reference client that can be used to test out streaming -validation in absence of a real known-working device. The basic setup and documentation listed on the Pubber -page are assumed to be "running in the background" for the other examples in this section. - -## Streaming Validation - -Running the `bin/validate` script will will parse the configuration file and automatically start -verifying PubSub messages against the indicated schema. Using the `pubber` client, the output -should look something like: -
-~/daq$ bin/validate
-Loading config from local/system.conf
-
-BUILD SUCCESSFUL in 3s
-2 actionable tasks: 2 executed
-Using credentials from /home/user/daq/local/gcp-account-de56aa4b1e47.json
-Executing validator /home/user/daq/schemas/udmi pubsub:target...
-Running schema . in /home/user/daq/schemas/udmi
-Ignoring subfolders []
-Results will be uploaded to https://console.cloud.google.com/firestore/data/registries/?project=gcp-account
-Also found in such directories as /home/user/daq/schemas/udmi/out
-Connecting to pubsub topic target
-Entering pubsub message loop on projects/gcp-account/subscriptions/daq-validator
-Success validating out/state_GAT-001.json
-Success validating out/state_GAT-001.json
-Success validating out/state_GAT-001.json
-Success validating out/pointset_GAT-001.json
-Success validating out/state_GAT-001.json
-Success validating out/pointset_GAT-001.json
-Success validating out/pointset_GAT-001.json
-…
-
- -If there are no _state_ validation messages (but there are _pointset_ ones), then the `state_shunt` -function described above is not installed properly. - -## Injecting Configuration - -The `validator/bin/config` script can be used to inject a configuration message to a device: -
-~/daq$ validator/bin/config GAT-001 schemas/udmi/config.tests/gateway.json
-Configuring gcp-account:us-central1:sensor_hub:GAT-001 from schemas/udmi/config.tests/gateway.json
-messageIds:
-- '301010492284043'
-Updated configuration for device [GAT-001].
-
- -If using the `pubber` client, there should be a corresponding flury of activity: -
-…
-[pool-1-thread-1] INFO daq.pubber.Pubber - Sending test message for sensor_hub/GAT-001
-[pool-1-thread-1] INFO daq.pubber.Pubber - Sending test message for sensor_hub/GAT-001
-[MQTT Call: projects/gcp-account/locations/us-central1/registries/sensor_hub/devices/GAT-001] INFO daq.pubber.Pubber - Received new config daq.udmi.Message$Config@3666b3a5
-[MQTT Call: projects/gcp-account/locations/us-central1/registries/sensor_hub/devices/GAT-001] INFO daq.pubber.Pubber - Starting executor with send message delay 2000
-[MQTT Call: projects/gcp-account/locations/us-central1/registries/sensor_hub/devices/GAT-001] INFO daq.pubber.Pubber - Sending state message for device GAT-001
-[MQTT Call: projects/gcp-account/locations/us-central1/registries/sensor_hub/devices/GAT-001] INFO daq.pubber.Pubber - Sending state message for device GAT-001
-[pool-1-thread-1] INFO daq.pubber.Pubber - Sending test message for sensor_hub/GAT-001
-[pool-1-thread-1] INFO daq.pubber.Pubber - Sending test message for sensor_hub/GAT-001
-…
-
- -And an associated bit of activity in the validation output: -
-…
-Success validating out/pointset_GAT-001.json
-Success validating out/pointset_GAT-001.json
-Success validating out/config_GAT-001.json
-Success validating out/pointset_GAT-001.json
-Success validating out/state_GAT-001.json
-Success validating out/state_GAT-001.json
-Success validating out/state_GAT-001.json
-Success validating out/pointset_GAT-001.json
-Success validating out/state_GAT-001.json
-Success validating out/pointset_GAT-001.json
-Success validating out/pointset_GAT-001.json
-…
-
diff --git a/docs/registrar.md b/docs/registrar.md deleted file mode 100644 index f63dfc43c4..0000000000 --- a/docs/registrar.md +++ /dev/null @@ -1,182 +0,0 @@ -# Registrar Overview - -The `registrar` is a utility program that registers and updates devies in Cloud IoT. -Running `bin/registrar` will pull the necessary configuraiton values from `local/system.conf`, -build the executable, and register/update devices. - -## Configuration - -The `local/system.conf` file should have the following parameters (in `x=y` syntax): -* `gcp_cred`: Defines the target project and [service account](service.md) to use for configuration. -* `site_path`: [Site-specific configuration](site_path.md) for the devices that need to be registered. -* `schema_path`: Path to metadata schema (see the [DAQ PubSub documentation](pubsub.md) for more details/examples). - -The target `gcp_cred` service account will need the _Cloud IoT Provisioner_ and _Pub/Sub Publisher_ roles. -There also needs to be an existing `registrar` topic (or as configured in `cloud_iot_config.json`, below). - -## Theory Of Operation - -* The target set of _expected_ devices is determined from directory entries in -_{site_path}_/devices/. -* Existing devices that are not listed in the site config are blocked (as per -Cloud IoT device setting). -* If a device directory does not have an appropriate key, one will be automaticaly generated. -* Devices not found in the target registry are automatically created. -* Existing device registy entries are unblocked and updated with the appropriate keys. - -## Device Settings - -When registering or updating a device, the Registrar manipulates a few key pieces of device -information: -* Auth keys: Public authentiation keys for the device. -* Metadata: Various information about a device (e.g. site-code, location in the building). 
- -This information is sourced from a few key files: - -* `{site_dir}/cloud_iot_config.json`: -Cloud project configuration parameters (`registry_id`, `cloud_region`, etc...). -* `{site_dir}/devices/{device_id}/metadata.json`: -Device metadata (e.g. location, key type). -* `{site_dir}/devices/{device_id}/rsa_private.pem`: -Generated private key for device (used on-device). - -## Sample Output - -The produced `registration_summary.json` document provides an overview of the analyzed files, -clearly any errors that should be addressed for full spec compliance. Additionaly, an -`errors.json` - -
-user@machine:~/daq$ cat local/site/cloud_iot_config.json 
-{
-  "cloud_region": "us-central1",
-  "site_name": "SG-MBC2-B80",
-  "registry_id": "iotRegistry",
-  "registrar_topic": "registrar"
-}
-user@machine:~/daq$ bin/registrar daq-testing
-Activating venv
-Flattening config from local/system.yaml into inst/config/system.conf
-Note: Some input files use or override a deprecated API.
-Note: Recompile with -Xlint:deprecation for details.
-Running tools version 1.5.1-16-g9ed5861
-Using cloud project bos-daq-testing
-Using site config dir local/site
-Using schema root dir schemas/udmi
-Using device filter
-Reading Cloud IoT config from /home/user/daq/local/site/cloud_iot_config.json
-Initializing with default credentials...
-Jun 12, 2020 1:24:37 PM com.google.auth.oauth2.DefaultCredentialsProvider warnAboutProblematicCredentials
-WARNING: Your application has authenticated using end user credentials from Google Cloud SDK. We recommend that most server applications use service accounts instead. If your application continues to use end user credentials from Cloud SDK, you might receive a "quota exceeded" or "API not enabled" error. For more information about service accounts, see https://cloud.google.com/docs/authentication/.
-Created service for project bos-daq-testing
-Working with project bos-daq-testing registry iotRegistry
-Loading local device AHU-1-1
-Loading local device AHU-1-2
-Fetching remote registry iotRegistry
-Updated device entry AHU-1-1
-Sending metadata message for AHU-1-1
-WARNING: An illegal reflective access operation has occurred
-WARNING: Illegal reflective access by com.google.protobuf.UnsafeUtil (file:/home/user/daq/validator/build/libs/validator-1.0-SNAPSHOT-all.jar) to field java.nio.Buffer.address
-WARNING: Please consider reporting this to the maintainers of com.google.protobuf.UnsafeUtil
-WARNING: Use --illegal-access=warn to enable warnings of further illegal reflective access operations
-WARNING: All illegal access operations will be denied in a future release
-Updated device entry AHU-1-2
-Sending metadata message for AHU-1-2
-Processed 2 devices
-Updating local/site/devices/AHU-1-1/errors.json
-Updating local/site/devices/AHU-1-2/errors.json
-
-Summary:
-  Device Envelope: 2
-  Device Key: 1
-  Device Validating: 2
-Out of 2 total.
-Done with PubSubPusher
-Registrar complete, exit 0
-user@machine:~/daq$ cat local/site/registration_summary.json 
-{
-  "Envelope" : {
-    "AHU-1-1" : "java.lang.IllegalStateException: Validating envelope AHU-1-1",
-    "AHU-1-2" : "java.lang.IllegalStateException: Validating envelope AHU-1-2"
-  },
-  "Key" : {
-    "AHU-1-2" : "java.lang.RuntimeException: Duplicate credentials found for AHU-1-1 & AHU-1-2"
-  },
-  "Validating" : {
-    "AHU-1-1" : "org.everit.json.schema.ValidationException: #: 43 schema violations found",
-    "AHU-1-2" : "org.everit.json.schema.ValidationException: #: 43 schema violations found"
-  }
-}
-user@machine:~/daq$ head local/site/devices/AHU-1-1/errors.json 
-Exceptions for AHU-1-1
-  Validating envelope AHU-1-1
-    #/deviceId: string [AHU-1-1] does not match pattern ^[A-Z]{2,6}-[1-9][0-9]{0,2}$
-  #: 43 schema violations found
-    #/pointset/points: 40 schema violations found
-      #/pointset/points/chilled_return_water_temperature_sensor/units: °C is not a valid enum value
-      #/pointset/points/chilled_supply_water_temperature_sensor/units: °C is not a valid enum value
-      #/pointset/points/chilled_water_valve_percentage_command/units: % is not a valid enum value
-
- -## Sequence Diagram - -Expected workflow to configure a registry using Registrar: - -* `Device`: Target IoT Device -* `Local`: Local clone of site configuration repo -* `Registrar`: This utility program -* `Registry`: Target Cloud IoT Core registry -* `Repo`: Remote site configuration repo - -All operations are manaul except those involving the `Registrar` tool. - -
-+---------+                +-------+                 +-----------+                 +-----------+ +-------+
-| Device  |                | Local |                 | Registrar |                 | Registry  | | Repo  |
-+---------+                +-------+                 +-----------+                 +-----------+ +-------+
-     |                         |                           |                             |           |
-     |                         |                           |                       Pull repo locally |
-     |                         |<--------------------------------------------------------------------|
-     |                         |    ---------------------\ |                             |           |
-     |                         |    | Run Registrar tool |-|                             |           |
-     |                         |    |--------------------| |                             |           |
-     |                         |                           |                             |           |
-     |                         | Read device configs       |                             |           |
-     |                         |-------------------------->|                             |           |
-     |                         |                           |                             |           |
-     |                         |                           |            Read device list |           |
-     |                         |                           |<----------------------------|           |
-     |                         |                           |                             |           |
-     |                         |           Write auth keys |                             |           |
-     |                         |<--------------------------|                             |           |
-     |                         |                           |                             |           |
-     |                         |                           | Update device entries       |           |
-     |                         |                           |---------------------------->|           |
-     |                         |   ----------------------\ |                             |           |
-     |                         |   | Registrar tool done |-|                             |           |
-     |                         |   |---------------------| |                             |           |
-     |                         |                           |                             |           |
-     |     Install private key |                           |                             |           |
-     |<------------------------|                           |                             |           |
-     |                         |                           |                             |           |
-     |                         | Push changes              |                             |           |
-     |                         |-------------------------------------------------------------------->|
-     |                         |                           |                             |           |
-
- -### Source - -Use with [ASCII Sequence Diagram Creator](https://textart.io/sequence#) - -
-object Device Local Registrar Registry Repo
-Repo -> Local: Pull repo locally
-note left of Registrar: Run Registrar tool
-Local -> Registrar: Read device configs
-Registry -> Registrar: Read device list
-Registrar -> Local: Write auth keys
-Registrar -> Registry: Update device entries
-note left of Registrar: Registrar tool done
-Local -> Device: Install private key
-Local -> Repo: Push changes
-
diff --git a/docs/service.md b/docs/service.md index 02bf501c79..4bace1de28 100644 --- a/docs/service.md +++ b/docs/service.md @@ -4,7 +4,7 @@ Many functions of DAQ require a standard GCP service account, rather than person Once created, there's a limited set of permissions that can be granted to enable various bits and pieces of functionality. -Each individual install of DAQ should have it's own service account. The accound name is +Each individual install of DAQ should have it's own service account. The account name is assumed to be unique, and having multiple installs with the same account will cause confusion and unpredictable results. diff --git a/docs/soak_report.md b/docs/soak_report.md index 7e99b37e7b..748bfeb93c 100644 --- a/docs/soak_report.md +++ b/docs/soak_report.md @@ -9,11 +9,12 @@ Source: local |base.startup.dhcp|2|0| |base.switch.ping|0|2| |base.target.ping|2|0| -|security.ports.nmap|2|0| +|security.nmap.ports|2|0| +|security.nmap.http|2|0| |categories|pass|skip| |---|---|---| -|Other|6|2| +|Other|8|2| |missing tests|count| |---|---| diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md index 4a8fa1ba09..05aca2c9d6 100644 --- a/docs/troubleshooting.md +++ b/docs/troubleshooting.md @@ -9,7 +9,7 @@ mailing list, and use it as the primary source of troubleshooting. email somebody directly, but will likely result in a slower response time. * The `inst/cmdrun.log` file contains a copy of the console output from DAQ. * This file should be attached to communications about resolving DAQ issues. - * It's not necessary to include any assocaited `local/system.yaml` file, since the + * It's not necessary to include any associated `local/system.yaml` file, since the contents of that are already included. * Make sure everything is running properly using the internal simulation setup before tackling anything to do with external switches or physical devices. @@ -29,12 +29,17 @@ a summary of all test results. * The determination of _PASS_ vs. 
_FAIL_ is one of policy, not a technical consideration. If the question is "Is it OK if this tests fails or not?" then you need to contact whomever is responsible for policy, not DAQ-proper. - * The reports are _optionally_ available trough the _optionally_ configured + * The reports are _optionally_ available through the _optionally_ configured GCP instance, but that's only relevant after the basics are working. -* Capturing a complete zip of the `inst/` directory should encompass all the -state neesary to diagnose/debug problems, so simply captuing that and sending -it along would be sufficient in most cases. Be wary of file size, as `inst/` -can collect cruft over time and occasionally need to be cleaned. +* Running `bin/techsupport.sh` will create a zipped techsupport file that + contains all configuration, packet captures and runtime logs of a run. + Sending that file is sufficient in most cases. Be wary of file + size, as `inst/` might have large pcap files or older files that can be + trimmed to get more manageable file sizes for email attachments. +* Unless you are developing for DAQ and want the latest code, ensure that you + are on the latest stable software version tracked by the git tag `release_stable`. +* If a test run blocks or errors out, try running `bin/troubleshoot` to detect + some common misconfiguration and setup related issues. ## Test-Specific @@ -82,4 +87,4 @@ directory. * Filter results for the device's MAC address with something like: tcpdump -en -r testing.pacp ether host de:vi:ce:ma:ca:dr. * There is no one-size-fits-all guidance here, because what is expected is - extremeley test-specific. + extremely test-specific. 
diff --git a/docs/validator.md b/docs/validator.md deleted file mode 100644 index 80ab8c4b24..0000000000 --- a/docs/validator.md +++ /dev/null @@ -1,152 +0,0 @@ -# Validator Setup - -The `validator` is a sub-component of DAQ that can be used to validate JSON files or stream against a schema -defined by the standard [JSON Schema](https://json-schema.org/) format. The validator does not itself specify -any policy, i.e. which schema to use when, rather just a mechanism to test and validate. - -The "schema set" is a configurable variable, and the system maps various events to different sub-schemas within -that set. Direct file-based validations run against an explicitly specified sub-schema, while the dynamic PubSub -validator dynamically chooses the sub-schema based off of message parameters. There's currently two schemas -available, defined in the `schemas/` subdirectory: -* `simple`, which is really just there to make sure the system works. -* [`UDMI`](../schemas/udmi/README.md), which is a building-oriented schema for data collection. - -## Validation Mechanisms - -There are several different ways to run the validator depending on the specific objective: -* Local File Validation -* Integration Testing -* PubSub Stream Validation - -### Local File Validation - -Local file validation runs the code against a set of local schemas and inputs. The example below shows -validating one schema file against one specific test input. -Specifying a directory, rather than a specific schema or input, will run against the entire set. -An output file is generated that has details about the schema validation result. - -
-~/daq$ validator/bin/validate schemas/simple/simple.json schemas/simple/simple.tests/example.json
-Executing validator schemas/simple/simple.json schemas/simple/simple.tests/example.json...
-Running schema simple.json in /home/user/daq/schemas/simple
-Validating example.json against simple.json
-Validation complete, exit 0
-~/daq$
-
- -### Integration Testing - -The `validator/bin/test` script runs a regression suite of all schemas against all tests. -This must pass before any PR can be approved. If there is any failure, a bunch of diagnostic -information will be included about what exactly went wrong. - -
-~/daq/validator$ bin/test
-
-BUILD SUCCESSFUL in 3s
-2 actionable tasks: 2 executed
-
-BUILD SUCCESSFUL in 3s
-2 actionable tasks: 2 executed
-Validating empty.json against config.json
-Validating errors.json against config.json
-
-Validating example.json against state.json
-Validating error.json against simple.json
-Validating example.json against simple.json
-
-Done with validation.
-
- -### PubSub Stream Validation - -Validating a live PubSub stream requires more setup, but ultimately most closely reflects what an -actual system would be doing during operation. The [DAQ PubSub Documentation](pubsub.md) details -how to set this up. It uses the same underlying schema files as the techniques above, but routes -it though a live stream in the cloud. - -Streaming validation validates a stream of messages pulled from a GCP PubSub topic. -There are three configuration values required in the `local/system.yaml` file to make it work: -* `gcp_cred`: The service account credentials, as per the general [DAQ Firebase setup](firebase.md). -* `gcp_topic`: The _PubSub_ (not MQTT) topic name. -* `schema_path`: Indicates which schema to validate against. - -You will need to add full Project Editor permissions for the service account. -E.g., to validate messages on the `projects/gcp-account/topics/telemetry` topic, -there should be something like: - -
-~/daq$ fgrep gcp_ local/system.conf
-gcp_cred=local/gcp-project-ce6716521378.json
-gcp_topic=telemetry
-schema_path=schemas/abacab/
-
- -Running `bin/validate` will parse the configuration file and automatically start -verifying PubSub messages against the indicated schema. -The execution output has a link to a location in the Firestore setup -where schema results will be stored, along with a local directory of results. - -
-~/daq$ bin/validate
-Using credentials from /home/user/daq/local/gcp-project-ce6716521378.json
-
-BUILD SUCCESSFUL in 3s
-2 actionable tasks: 2 executed
-Executing validator /home/user/daq/validator/schemas/abacab/ pubsub:telemetry_topic...
-Running schema . in /home/user/daq/validator/schemas/abacab
-Ignoring subfolders []
-Results will be uploaded to https://console.cloud.google.com/firestore/data/registries/?project=gcp-project
-Also found in such directories as /home/user/daq/validator/schemas/abacab/out
-Connecting to pubsub topic telemetry
-Entering pubsub message loop on projects/gcp-project/subscriptions/daq-validator
-Success validating out/pointset_FCU_09_INT_NE_07.json
-Success validating out/pointset_FCU_07_EXT_SW_06.json
-Error validating out/logentry_TCE01_01_NE_Controls.json: DeviceId TCE01_01_NE_Controls must match pattern ^([a-z][_a-z0-9-]*[a-z0-9]|[A-Z][_A-Z0-9-]*[A-Z0-9])$
-Success validating out/logentry_FCU_01_NE_08.json
-Error validating out/pointset_TCE01_01_NE_Controls.json: DeviceId TCE01_01_NE_Controls must match pattern ^([a-z][_a-z0-9-]*[a-z0-9]|[A-Z][_A-Z0-9-]*[A-Z0-9])$
-Success validating out/logentry_FCU_01_SE_04.json
-
-
- -## Site Validation - -Following on from individual-device validation, it is possible to validate against an entire building model -This is a WIP provisional feature. But, roughly speaking, it looks like this: - -
-~/daq$ export GOOGLE_APPLICATION_CREDENTIALS=local/essential-monkey.json
-~/daq$ validator/bin/validate schemas/udmi pubsub:topic dev site_model/
-
- -* `schemas/udmi` is the schema to validate against. -* `pubsub:topic` points to the pub-sub topic stream to validate. -* `dev` is an arbitrary designator for running different clients against the same project. -* `site_model/` is a directory containing the requisite building model. - -Output from a site validation run will be in `validations/metadata_report.json`. - -### Types and Topics - -When using the -[GCP Cloud IoT Core MQTT Bridge](https://cloud.google.com/iot/docs/how-tos/mqtt-bridge#publishing_telemetry_events) -there are multiple ways the subschema used during validation is chosen. -* All messages have their attributes validated against the `.../attributes.json` schema. These attributes are -automatically defined server-side by the MQTT Client ID and Topic, and are not explicitly included in any message payload. -* A [device event message](https://cloud.google.com/iot/docs/how-tos/mqtt-bridge#publishing_telemetry_events) -is validated against the sub-schema indicated by the MQTT topic `subFolder`. E.g., the MQTT -topic `/devices/{device-id}/events/pointset` will be validated against `.../pointset.json`. -* [Device state messages](https://cloud.google.com/iot/docs/how-tos/config/getting-state#reporting_device_state) -are validated against the `.../state.json` schema on `/devices/{device-id}/state` MQTT topic. -* (There currently is no stream validation of -[device config messages](https://cloud.google.com/iot/docs/how-tos/config/configuring-devices#mqtt), which are sent on the -`/devices/{device-id}/config` topic.) 
- -See this handy-dandy table: - -| Type | Category | subFolder | MQTT Topic | Schema File | -|----------|----------|-----------|----------------------------------------|---------------| -| state | state | _n/a_ | `/devices/{device_id}/state` | state.json | -| config | config | _n/a_ | `/devices/{device-id}/config` | config.json | -| pointset | event | pointset | `/devices/{device-id}/events/pointset` | pointset.json | -| logentry | event | logentry | `/devices/{device-id}/events/logentry` | logentry.json | diff --git a/etc/MININET_VERSION b/etc/MININET_VERSION new file mode 100644 index 0000000000..2357edf889 --- /dev/null +++ b/etc/MININET_VERSION @@ -0,0 +1 @@ +2.3.0d6 diff --git a/etc/UDMI_VERSION b/etc/UDMI_VERSION new file mode 100644 index 0000000000..9084fa2f71 --- /dev/null +++ b/etc/UDMI_VERSION @@ -0,0 +1 @@ +1.1.0 diff --git a/etc/docker_images.txt b/etc/docker_images.txt index 193e733424..f8a184f0ab 100644 --- a/etc/docker_images.txt +++ b/etc/docker_images.txt @@ -1,23 +1,28 @@ -daqf/aardvark 34718b2f3fd5 -daqf/default 3ac95db36ee4 -daqf/faucet 45c13344a8ed -daqf/faux1 ecff07f12534 -daqf/faux2 39914ae11741 -daqf/gauge 1431053cf25e -daqf/networking af56b0732100 -daqf/switch 67954aca8dce -daqf/test_bacext 363b6d476ac8 -daqf/test_bacnet 073a0eb5529f -daqf/test_brute 700d986d5e83 -daqf/test_discover ad34b17b41e6 -daqf/test_fail c9a7e6b43bd0 -daqf/test_hold cb120980c658 -daqf/test_macoui a828288c855b -daqf/test_mudgee d4ed15ef1dfc -daqf/test_nmap 78aa5def41e5 -daqf/test_pass 74167ef0df55 -daqf/test_password 471bd1290918 -daqf/test_ping 5618e0243643 -daqf/test_switch 47585fc0876e -daqf/test_tls 9c5f28b74fed -daqf/test_udmi fc13d4c80b0d +daqf/aardvark 6fb0f6c52222 +daqf/default f8652a12fdd8 +daqf/faucet 1ec12d632685 +daqf/faux1 3d8f075bf6de +daqf/faux2 58b756b90505 +daqf/gauge ace0ffe33b8f +daqf/networking 4f25f942b538 +daqf/switch b2113d0aa5d9 +daqf/test_bacext daa0a06c718e +daqf/test_bacnet 6a3c93c4decc +daqf/test_brute aa76b01d5eed +daqf/test_discover 
0ca76d766349 +daqf/test_fail 8ef4103069a5 +daqf/test_hold 5c923cd1a464 +daqf/test_macoui a605473e0f8d +daqf/test_manual 8026fdd99a5b +daqf/test_mudgee 189aa0b635fd +daqf/test_network 557df3ae19f9 +daqf/test_nmap 8ceb63e71c79 +daqf/test_ntp a5b21e0039e6 +daqf/test_pass 62dd10381336 +daqf/test_password d318555d2d3e +daqf/test_ping fe8e4dd5ddc2 +daqf/test_ssh 054efbf1b3c3 +daqf/test_switch e5bb16e85362 +daqf/test_tls 17de1ebf13ce +daqf/test_udmi b6d5381f32f0 +daqf/usi 348d54ddbb7c diff --git a/etc/docker_images.ver b/etc/docker_images.ver index 26ca594609..158c747293 100644 --- a/etc/docker_images.ver +++ b/etc/docker_images.ver @@ -1 +1 @@ -1.5.1 +1.9.5 diff --git a/firebase/functions/index.js b/firebase/functions/index.js index f17a3d2c6c..4db0fd5e53 100644 --- a/firebase/functions/index.js +++ b/firebase/functions/index.js @@ -123,7 +123,7 @@ function handleTestResult(origin, siteName, message) { const deviceDoc = originDoc.collection('device').doc(message.device_id); const updates = [ - originDoc.set({ 'updated': timestamp }), + originDoc.set({ 'updated': timestamp }, { merge: true }), siteDoc.set({ 'updated': timestamp }), portDoc.set({ 'updated': timestamp }), deviceDoc.set({ 'updated': timestamp }) @@ -147,7 +147,7 @@ function handleTestResult(origin, siteName, message) { } console.log('Test Result: ', timestamp, origin, siteName, message.port, - message.runid, message.name, message.device_id, message.state); + message.runid, message.daq_run_id, message.name, message.device_id, message.state); const runDoc = originDoc.collection('runid').doc(message.runid); const lastDoc = originDoc.collection('last').doc(message.name); const resultDoc = runDoc.collection('test').doc(message.name); @@ -168,6 +168,7 @@ function handleTestResult(origin, siteName, message) { } return Promise.all([ runDoc.set({ 'updated': timestamp, + 'daq_run_id': message.daq_run_id, 'last_name': message.name }, { merge: true }), resultDoc.set(message), @@ -193,17 +194,22 @@ function 
handleTestResult(origin, siteName, message) { function handleHeartbeat(origin, message) { const timestamp = new Date().toJSON(); const originDoc = db.collection('origin').doc(origin); - console.log('heartbeat', timestamp, origin) + console.log('heartbeat', timestamp, origin, message) const heartbeatDoc = originDoc.collection('runner').doc('heartbeat'); return Promise.all([ - originDoc.set({ 'updated': timestamp }), + originDoc.set({ + 'updated': timestamp, + 'version': message.version + }), heartbeatDoc.get().then((result) => { const current = result.data(); - if (!current || !current.message || current.message.timestamp < message.timestamp) + const defined = current && current.message && current.message.timestamp; + if (!defined || current.message.timestamp < message.timestamp) { return heartbeatDoc.set({ 'updated': timestamp, message }); + } }) ]); } diff --git a/firebase/functions/package-lock.json b/firebase/functions/package-lock.json index 3d7970d0d1..d39919d4fe 100644 --- a/firebase/functions/package-lock.json +++ b/firebase/functions/package-lock.json @@ -14,79 +14,163 @@ "integrity": "sha512-88h74TMQ6wXChPA6h9Q3E1Jg6TkTHep2+k63OWg3s0ozyGVMeY+TTOti7PFPzq5RhszQPQOoCi59es4MaRvgCw==" }, "@firebase/component": { - "version": "0.1.13", - "resolved": "https://registry.npmjs.org/@firebase/component/-/component-0.1.13.tgz", - "integrity": "sha512-DuSIM96NQkE3Yo77IOa5BWw8VBdvCR5cbMLNiFT4X3dTU15Dm0zHjncQHt/6rQpABGNYWAfOCJmSU1v6vc3DFA==", + "version": "0.1.18", + "resolved": "https://registry.npmjs.org/@firebase/component/-/component-0.1.18.tgz", + "integrity": "sha512-c8gd1k/e0sbBTR0xkLIYUN8nVkA0zWxcXGIvdfYtGEsNw6n7kh5HkcxKXOPB8S7bcPpqZkGgBIfvd94IyG2gaQ==", "requires": { - "@firebase/util": "0.2.48", - "tslib": "1.11.1" + "@firebase/util": "0.3.1", + "tslib": "^1.11.1" } }, "@firebase/database": { - "version": "0.6.4", - "resolved": "https://registry.npmjs.org/@firebase/database/-/database-0.6.4.tgz", - "integrity": 
"sha512-m3jaElEEXhr3a9D+M/kbDuRCQG5EmrnSqyEq7iNk3s5ankIrALid0AYm2RZF764F/DIeMFtAzng4EyyEqsaQlQ==", + "version": "0.6.11", + "resolved": "https://registry.npmjs.org/@firebase/database/-/database-0.6.11.tgz", + "integrity": "sha512-QOHhB7+CdjVhEXG9CyX0roA9ARJcEuwbozz0Bix+ULuZqjQ58KUFHMH1apW6EEiUP22d/mYD7dNXsUGshjL9PA==", "requires": { "@firebase/auth-interop-types": "0.1.5", - "@firebase/component": "0.1.13", - "@firebase/database-types": "0.5.1", - "@firebase/logger": "0.2.5", - "@firebase/util": "0.2.48", + "@firebase/component": "0.1.18", + "@firebase/database-types": "0.5.2", + "@firebase/logger": "0.2.6", + "@firebase/util": "0.3.1", "faye-websocket": "0.11.3", - "tslib": "1.11.1" + "tslib": "^1.11.1" } }, "@firebase/database-types": { - "version": "0.5.1", - "resolved": "https://registry.npmjs.org/@firebase/database-types/-/database-types-0.5.1.tgz", - "integrity": "sha512-onQxom1ZBYBJ648w/VNRzUewovEDAH7lvnrrpCd69ukkyrMk6rGEO/PQ9BcNEbhlNtukpsqRS0oNOFlHs0FaSA==", + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/@firebase/database-types/-/database-types-0.5.2.tgz", + "integrity": "sha512-ap2WQOS3LKmGuVFKUghFft7RxXTyZTDr0Xd8y2aqmWsbJVjgozi0huL/EUMgTjGFrATAjcf2A7aNs8AKKZ2a8g==", "requires": { "@firebase/app-types": "0.6.1" } }, "@firebase/logger": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/@firebase/logger/-/logger-0.2.5.tgz", - "integrity": "sha512-qqw3m0tWs/qrg7axTZG/QZq24DIMdSY6dGoWuBn08ddq7+GLF5HiqkRj71XznYeUUbfRq5W9C/PSFnN4JxX+WA==" + "version": "0.2.6", + "resolved": "https://registry.npmjs.org/@firebase/logger/-/logger-0.2.6.tgz", + "integrity": "sha512-KIxcUvW/cRGWlzK9Vd2KB864HlUnCfdTH0taHE0sXW5Xl7+W68suaeau1oKNEqmc3l45azkd4NzXTCWZRZdXrw==" }, "@firebase/util": { - "version": "0.2.48", - "resolved": "https://registry.npmjs.org/@firebase/util/-/util-0.2.48.tgz", - "integrity": "sha512-6Wzq6IBF//3mrMTmTQ+JmceM0PMQpxV2GVfXhZn/4sMMkkhB0MA908nPDnatoHwUKyWE3BMw+uTLkyBnkuTu5A==", + "version": "0.3.1", + "resolved": 
"https://registry.npmjs.org/@firebase/util/-/util-0.3.1.tgz", + "integrity": "sha512-zjVd9rfL08dRRdZILFn1RZTHb1euCcnD9N/9P56gdBcm2bvT5XsCC4G6t5toQBpE/H/jYe5h6MZMqfLu3EQLXw==", "requires": { - "tslib": "1.11.1" + "tslib": "^1.11.1" } }, "@google-cloud/common": { - "version": "2.4.0", - "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-2.4.0.tgz", - "integrity": "sha512-zWFjBS35eI9leAHhjfeOYlK5Plcuj/77EzstnrJIZbKgF/nkqjcQuGiMCpzCwOfPyUbz8ZaEOYgbHa759AKbjg==", + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.3.2.tgz", + "integrity": "sha512-W7JRLBEJWYtZQQuGQX06U6GBOSLrSrlvZxv6kGNwJtFrusu6AVgZltQ9Pajuz9Dh9aSXy9aTnBcyxn2/O0EGUw==", "optional": true, "requires": { - "@google-cloud/projectify": "^1.0.0", - "@google-cloud/promisify": "^1.0.0", - "arrify": "^2.0.0", - "duplexify": "^3.6.0", + "@google-cloud/projectify": "^2.0.0", + "@google-cloud/promisify": "^2.0.0", + "arrify": "^2.0.1", + "duplexify": "^4.1.1", "ent": "^2.2.0", "extend": "^3.0.2", - "google-auth-library": "^5.5.0", - "retry-request": "^4.0.0", - "teeny-request": "^6.0.0" - } - }, - "@google-cloud/firestore": { - "version": "3.8.4", - "resolved": "https://registry.npmjs.org/@google-cloud/firestore/-/firestore-3.8.4.tgz", - "integrity": "sha512-LCZeqB6goNKzD5G/wcoqWaQ2uf3FV/dtU5OSypqOWl+vHMTEVh1ap2H21JXaEydxq53lCayGfqjhDQzs0J3Qew==", - "optional": true, - "requires": { - "deep-equal": "^2.0.0", - "functional-red-black-tree": "^1.0.1", - "google-gax": "^1.13.0", - "readable-stream": "^3.4.0", - "through2": "^3.0.0" + "google-auth-library": "^6.0.0", + "retry-request": "^4.1.1", + "teeny-request": "^7.0.0" }, "dependencies": { + "bignumber.js": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.0.tgz", + "integrity": "sha512-t/OYhhJ2SD+YGBQcjY8GzzDHEk9f3nerxjtfa6tlMXfe7frs/WozhvCNoGvpM0P3bNf3Gq5ZRMlGr5f3r4/N8A==", + "optional": true + }, + "duplexify": { + "version": "4.1.1", + "resolved": 
"https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz", + "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==", + "optional": true, + "requires": { + "end-of-stream": "^1.4.1", + "inherits": "^2.0.3", + "readable-stream": "^3.1.1", + "stream-shift": "^1.0.0" + } + }, + "gaxios": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.1.0.tgz", + "integrity": "sha512-DDTn3KXVJJigtz+g0J3vhcfbDbKtAroSTxauWsdnP57sM5KZ3d2c/3D9RKFJ86s43hfw6WULg6TXYw/AYiBlpA==", + "optional": true, + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.3.0" + } + }, + "gcp-metadata": { + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.4.tgz", + "integrity": "sha512-5J/GIH0yWt/56R3dNaNWPGQ/zXsZOddYECfJaqxFWgrZ9HC2Kvc5vl9upOgUUHKzURjAVf2N+f6tEJiojqXUuA==", + "optional": true, + "requires": { + "gaxios": "^3.0.0", + "json-bigint": "^1.0.0" + } + }, + "google-auth-library": { + "version": "6.0.6", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.6.tgz", + "integrity": "sha512-fWYdRdg55HSJoRq9k568jJA1lrhg9i2xgfhVIMJbskUmbDpJGHsbv9l41DGhCDXM21F9Kn4kUwdysgxSYBYJUw==", + "optional": true, + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^3.0.0", + "gcp-metadata": "^4.1.0", + "gtoken": "^5.0.0", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + } + }, + "google-p12-pem": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.2.tgz", + "integrity": "sha512-tbjzndQvSIHGBLzHnhDs3cL4RBjLbLXc2pYvGH+imGVu5b4RMAttUTdnmW2UH0t11QeBTXZ7wlXPS7hrypO/tg==", + "optional": true, + "requires": { + "node-forge": "^0.9.0" + } + }, + "gtoken": { + "version": "5.0.3", + "resolved": 
"https://registry.npmjs.org/gtoken/-/gtoken-5.0.3.tgz", + "integrity": "sha512-Nyd1wZCMRc2dj/mAD0LlfQLcAO06uKdpKJXvK85SGrF5+5+Bpfil9u/2aw35ltvEHjvl0h5FMKN5knEU+9JrOg==", + "optional": true, + "requires": { + "gaxios": "^3.0.0", + "google-p12-pem": "^3.0.0", + "jws": "^4.0.0", + "mime": "^2.2.0" + } + }, + "json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "optional": true, + "requires": { + "bignumber.js": "^9.0.0" + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "optional": true, + "requires": { + "yallist": "^4.0.0" + } + }, "readable-stream": { "version": "3.6.0", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", @@ -97,6 +181,200 @@ "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "optional": true + } + } + }, + "@google-cloud/firestore": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/@google-cloud/firestore/-/firestore-4.2.0.tgz", + "integrity": "sha512-YCiKaTYCbXSoEvZ8cTmpgg4ebAvmFUOu3hj/aX+lHiOK7LsoFVi4jgNknogSqIiv04bxAysTBodpgn8XoZ4l5g==", + "optional": true, + "requires": { + "fast-deep-equal": "^3.1.1", + "functional-red-black-tree": "^1.0.1", + "google-gax": "^2.2.0" + }, + "dependencies": { + "@grpc/grpc-js": { + "version": "1.1.5", + "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.1.5.tgz", + "integrity": 
"sha512-2huf5z85TdZI4nLmJQ9Zdfd+6vmIyBDs7B4L71bTaHKA9pRsGKAH24XaktMk/xneKJIqAgeIZtg1cyivVZtvrg==", + "optional": true, + "requires": { + "@grpc/proto-loader": "^0.6.0-pre14", + "@types/node": "^12.12.47", + "google-auth-library": "^6.0.0", + "semver": "^6.2.0" + }, + "dependencies": { + "@grpc/proto-loader": { + "version": "0.6.0-pre9", + "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.6.0-pre9.tgz", + "integrity": "sha512-oM+LjpEjNzW5pNJjt4/hq1HYayNeQT+eGrOPABJnYHv7TyNPDNzkQ76rDYZF86X5swJOa4EujEMzQ9iiTdPgww==", + "optional": true, + "requires": { + "@types/long": "^4.0.1", + "lodash.camelcase": "^4.3.0", + "long": "^4.0.0", + "protobufjs": "^6.9.0", + "yargs": "^15.3.1" + } + } + } + }, + "@types/node": { + "version": "12.12.54", + "resolved": "https://registry.npmjs.org/@types/node/-/node-12.12.54.tgz", + "integrity": "sha512-ge4xZ3vSBornVYlDnk7yZ0gK6ChHf/CHB7Gl1I0Jhah8DDnEQqBzgohYG4FX4p81TNirSETOiSyn+y1r9/IR6w==", + "optional": true + }, + "bignumber.js": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.0.tgz", + "integrity": "sha512-t/OYhhJ2SD+YGBQcjY8GzzDHEk9f3nerxjtfa6tlMXfe7frs/WozhvCNoGvpM0P3bNf3Gq5ZRMlGr5f3r4/N8A==", + "optional": true + }, + "gaxios": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.1.0.tgz", + "integrity": "sha512-DDTn3KXVJJigtz+g0J3vhcfbDbKtAroSTxauWsdnP57sM5KZ3d2c/3D9RKFJ86s43hfw6WULg6TXYw/AYiBlpA==", + "optional": true, + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.3.0" + } + }, + "gcp-metadata": { + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.4.tgz", + "integrity": "sha512-5J/GIH0yWt/56R3dNaNWPGQ/zXsZOddYECfJaqxFWgrZ9HC2Kvc5vl9upOgUUHKzURjAVf2N+f6tEJiojqXUuA==", + "optional": true, + "requires": { + "gaxios": "^3.0.0", + "json-bigint": "^1.0.0" + } + }, + 
"google-auth-library": { + "version": "6.0.6", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.6.tgz", + "integrity": "sha512-fWYdRdg55HSJoRq9k568jJA1lrhg9i2xgfhVIMJbskUmbDpJGHsbv9l41DGhCDXM21F9Kn4kUwdysgxSYBYJUw==", + "optional": true, + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": "^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^3.0.0", + "gcp-metadata": "^4.1.0", + "gtoken": "^5.0.0", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + } + }, + "google-gax": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-2.7.0.tgz", + "integrity": "sha512-0dBATy8mMVlfOBrT85Q+NzBpZ4OJZUMrPI9wJULpiIDq2w1zlN30Duor+fQUcMEjanYEc72G58M4iUVve0jfXw==", + "optional": true, + "requires": { + "@grpc/grpc-js": "~1.1.1", + "@grpc/proto-loader": "^0.5.1", + "@types/long": "^4.0.0", + "abort-controller": "^3.0.0", + "duplexify": "^3.6.0", + "google-auth-library": "^6.0.0", + "is-stream-ended": "^0.1.4", + "lodash.at": "^4.6.0", + "lodash.has": "^4.5.2", + "node-fetch": "^2.6.0", + "protobufjs": "^6.9.0", + "retry-request": "^4.0.0", + "semver": "^6.0.0", + "walkdir": "^0.4.0" + } + }, + "google-p12-pem": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.2.tgz", + "integrity": "sha512-tbjzndQvSIHGBLzHnhDs3cL4RBjLbLXc2pYvGH+imGVu5b4RMAttUTdnmW2UH0t11QeBTXZ7wlXPS7hrypO/tg==", + "optional": true, + "requires": { + "node-forge": "^0.9.0" + } + }, + "gtoken": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.0.3.tgz", + "integrity": "sha512-Nyd1wZCMRc2dj/mAD0LlfQLcAO06uKdpKJXvK85SGrF5+5+Bpfil9u/2aw35ltvEHjvl0h5FMKN5knEU+9JrOg==", + "optional": true, + "requires": { + "gaxios": "^3.0.0", + "google-p12-pem": "^3.0.0", + "jws": "^4.0.0", + "mime": "^2.2.0" + } + }, + "json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + 
"integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "optional": true, + "requires": { + "bignumber.js": "^9.0.0" + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "optional": true, + "requires": { + "yallist": "^4.0.0" + } + }, + "protobufjs": { + "version": "6.10.1", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.1.tgz", + "integrity": "sha512-pb8kTchL+1Ceg4lFd5XUpK8PdWacbvV5SK2ULH2ebrYtl4GjJmS24m6CKME67jzV53tbJxHlnNOSqQHbTsR9JQ==", + "optional": true, + "requires": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/long": "^4.0.1", + "@types/node": "^13.7.0", + "long": "^4.0.0" + }, + "dependencies": { + "@types/node": { + "version": "13.13.15", + "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.15.tgz", + "integrity": "sha512-kwbcs0jySLxzLsa2nWUAGOd/s21WU1jebrEdtzhsj1D4Yps1EOuyI1Qcu+FD56dL7NRNIJtDDjcqIG22NwkgLw==", + "optional": true + } + } + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "optional": true } } }, @@ -109,85 +387,68 @@ } }, "@google-cloud/paginator": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-2.0.3.tgz", - "integrity": "sha512-kp/pkb2p/p0d8/SKUu4mOq8+HGwF8NPzHWkj+VKrIPQPyMRw8deZtrO/OcSiy9C/7bpfU5Txah5ltUNfPkgEXg==", - 
"optional": true, + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-3.0.4.tgz", + "integrity": "sha512-fKI+jYQdV1F9jtG6tSRro3ilNSeBWVmTzxc8Z0kiPRXcj8eshh9fiF8TtxfDefyUKgTdWgHpzGBwLbZ/OGikJg==", "requires": { "arrify": "^2.0.0", "extend": "^3.0.2" } }, "@google-cloud/precise-date": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@google-cloud/precise-date/-/precise-date-2.0.1.tgz", - "integrity": "sha512-uXrLK/1rYx6pWNHL5U8NurHwmqLX7CwDFuJtRoaZe9lhe8RU7AJS67CMsMvHB0OziCcBAiKdAFzHm9zljI2nKQ==" + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@google-cloud/precise-date/-/precise-date-2.0.3.tgz", + "integrity": "sha512-+SDJ3ZvGkF7hzo6BGa8ZqeK3F6Z4+S+KviC9oOK+XCs3tfMyJCh/4j93XIWINgMMDIh9BgEvlw4306VxlXIlYA==" }, "@google-cloud/projectify": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-1.0.4.tgz", - "integrity": "sha512-ZdzQUN02eRsmTKfBj9FDL0KNDIFNjBn/d6tHQmA/+FImH5DO6ZV8E7FzxMgAUiVAUq41RFAkb25p1oHOZ8psfg==", - "optional": true + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz", + "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ==" }, "@google-cloud/promisify": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-1.0.4.tgz", - "integrity": "sha512-VccZDcOql77obTnFh0TbNED/6ZbbmHDf8UMNnzO1d5g9V0Htfm4k5cllY8P1tJsRKC3zWYGRLaViiupcgVjBoQ==", - "optional": true + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.2.tgz", + "integrity": "sha512-EvuabjzzZ9E2+OaYf+7P9OAiiwbTxKYL0oGLnREQd+Su2NTQBpomkdlkBowFvyWsaV0d1sSGxrKpSNcrhPqbxg==" }, "@google-cloud/pubsub": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/@google-cloud/pubsub/-/pubsub-2.1.0.tgz", - "integrity": 
"sha512-9k4ucPR4X9/BKu1ht9RfXAqGpQzLZOGYpGgoq9Cnxhp9SDjAXkgIKN02pYCXZDdoLng25Mf+xkMnc3AfzJimnA==", + "version": "2.5.0", + "resolved": "https://registry.npmjs.org/@google-cloud/pubsub/-/pubsub-2.5.0.tgz", + "integrity": "sha512-7bbbQqa+LSTopVjt20EZ8maO6rEpbO7v8EvDImHMsbRS30HJ5+kClbaQTRvhNzhc1qy221A1GbHPHMCQ/U5E3Q==", "requires": { "@google-cloud/paginator": "^3.0.0", "@google-cloud/precise-date": "^2.0.0", "@google-cloud/projectify": "^2.0.0", "@google-cloud/promisify": "^2.0.0", + "@opentelemetry/api": "^0.10.0", + "@opentelemetry/tracing": "^0.10.0", "@types/duplexify": "^3.6.0", "@types/long": "^4.0.0", "arrify": "^2.0.0", "extend": "^3.0.2", "google-auth-library": "^6.0.0", - "google-gax": "^2.1.0", + "google-gax": "^2.7.0", "is-stream-ended": "^0.1.4", "lodash.snakecase": "^4.1.1", - "p-defer": "^3.0.0", - "protobufjs": "^6.8.1" + "p-defer": "^3.0.0" }, "dependencies": { - "@google-cloud/paginator": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-3.0.0.tgz", - "integrity": "sha512-iPdxTujlZQlMGNLHPtYoVwRu8IuLFr6y0GJwsX9hKULMgqGXrP/z0MV4ROGpRAkNE1FIfa1aDfNlwZHfF2z4bQ==", - "requires": { - "arrify": "^2.0.0", - "extend": "^3.0.2" - } - }, - "@google-cloud/projectify": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.0.tgz", - "integrity": "sha512-7wZ+m4N3Imtb5afOPfqNFyj9cKrlfVQ+t5YRxLS7tUpn8Pn/i7QuVubZRTXllaWjO4T5t/gm/r2x7oy5ajjvFQ==" - }, - "@google-cloud/promisify": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.1.tgz", - "integrity": "sha512-82EQzwrNauw1fkbUSr3f+50Bcq7g4h0XvLOk8C5e9ABkXYHei7ZPi9tiMMD7Vh3SfcdH97d1ibJ3KBWp2o1J+w==" - }, "@grpc/grpc-js": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.0.4.tgz", - "integrity": "sha512-Qawt6HUrEmljQMPWnLnIXpcjelmtIAydi3M9awiG02WWJ1CmIvFEx4IOC1EsWUWUlabOGksRbpfvoIeZKFTNXw==", + "version": "1.1.3", + 
"resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.1.3.tgz", + "integrity": "sha512-HtOsk2YUofBcm1GkPqGzb6pwHhv+74eC2CUO229USIDKRtg30ycbZmqC+HdNtY3nHqoc9IgcRlntFgopyQoYCA==", "requires": { - "google-auth-library": "^6.0.0", "semver": "^6.2.0" } }, + "bignumber.js": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.0.tgz", + "integrity": "sha512-t/OYhhJ2SD+YGBQcjY8GzzDHEk9f3nerxjtfa6tlMXfe7frs/WozhvCNoGvpM0P3bNf3Gq5ZRMlGr5f3r4/N8A==" + }, "gaxios": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.0.3.tgz", - "integrity": "sha512-PkzQludeIFhd535/yucALT/Wxyj/y2zLyrMwPcJmnLHDugmV49NvAi/vb+VUq/eWztATZCNcb8ue+ywPG+oLuw==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.1.0.tgz", + "integrity": "sha512-DDTn3KXVJJigtz+g0J3vhcfbDbKtAroSTxauWsdnP57sM5KZ3d2c/3D9RKFJ86s43hfw6WULg6TXYw/AYiBlpA==", "requires": { "abort-controller": "^3.0.0", "extend": "^3.0.2", @@ -197,18 +458,18 @@ } }, "gcp-metadata": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.0.tgz", - "integrity": "sha512-r57SV28+olVsflPlKyVig3Muo/VDlcsObMtvDGOEtEJXj+DDE8bEl0coIkXh//hbkSDTvo+f5lbihZOndYXQQQ==", + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.4.tgz", + "integrity": "sha512-5J/GIH0yWt/56R3dNaNWPGQ/zXsZOddYECfJaqxFWgrZ9HC2Kvc5vl9upOgUUHKzURjAVf2N+f6tEJiojqXUuA==", "requires": { "gaxios": "^3.0.0", - "json-bigint": "^0.3.0" + "json-bigint": "^1.0.0" } }, "google-auth-library": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.1.tgz", - "integrity": "sha512-NWEM9W0o+fmUJMK/wEuJ1vAc8H/JAseOWB8tjOAAkz8yobU+5IDtO/rPCbbRwFF1obIOCe0lj1pkq9ld2OFZeg==", + "version": "6.0.6", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.6.tgz", + "integrity": 
"sha512-fWYdRdg55HSJoRq9k568jJA1lrhg9i2xgfhVIMJbskUmbDpJGHsbv9l41DGhCDXM21F9Kn4kUwdysgxSYBYJUw==", "requires": { "arrify": "^2.0.0", "base64-js": "^1.3.0", @@ -218,15 +479,15 @@ "gcp-metadata": "^4.1.0", "gtoken": "^5.0.0", "jws": "^4.0.0", - "lru-cache": "^5.0.0" + "lru-cache": "^6.0.0" } }, "google-gax": { - "version": "2.5.0", - "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-2.5.0.tgz", - "integrity": "sha512-Xqh+rinq93qSGOcs5aQdlrwBUR+/9AaFArLCvSGnx7Mye9p4u0dC98r2TO7wB4m1W138Swd6UPYGQyBg9BM/4g==", + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-2.7.0.tgz", + "integrity": "sha512-0dBATy8mMVlfOBrT85Q+NzBpZ4OJZUMrPI9wJULpiIDq2w1zlN30Duor+fQUcMEjanYEc72G58M4iUVve0jfXw==", "requires": { - "@grpc/grpc-js": "~1.0.0", + "@grpc/grpc-js": "~1.1.1", "@grpc/proto-loader": "^0.5.1", "@types/long": "^4.0.0", "abort-controller": "^3.0.0", @@ -240,85 +501,102 @@ "retry-request": "^4.0.0", "semver": "^6.0.0", "walkdir": "^0.4.0" - }, - "dependencies": { - "protobufjs": { - "version": "6.9.0", - "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.9.0.tgz", - "integrity": "sha512-LlGVfEWDXoI/STstRDdZZKb/qusoAWUnmLg9R8OLSO473mBLWHowx8clbX5/+mKDEI+v7GzjoK9tRPZMMcoTrg==", - "requires": { - "@protobufjs/aspromise": "^1.1.2", - "@protobufjs/base64": "^1.1.2", - "@protobufjs/codegen": "^2.0.4", - "@protobufjs/eventemitter": "^1.1.0", - "@protobufjs/fetch": "^1.1.0", - "@protobufjs/float": "^1.0.2", - "@protobufjs/inquire": "^1.1.0", - "@protobufjs/path": "^1.1.2", - "@protobufjs/pool": "^1.1.0", - "@protobufjs/utf8": "^1.1.0", - "@types/long": "^4.0.1", - "@types/node": "^13.7.0", - "long": "^4.0.0" - } - } } }, "google-p12-pem": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.1.tgz", - "integrity": "sha512-VlQgtozgNVVVcYTXS36eQz4PXPt9gIPqLOhHN0QiV6W6h4qSCNVKPtKC5INtJsaHHF2r7+nOIa26MJeJMTaZEQ==", + "version": "3.0.2", + "resolved": 
"https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.2.tgz", + "integrity": "sha512-tbjzndQvSIHGBLzHnhDs3cL4RBjLbLXc2pYvGH+imGVu5b4RMAttUTdnmW2UH0t11QeBTXZ7wlXPS7hrypO/tg==", "requires": { "node-forge": "^0.9.0" } }, "gtoken": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.0.1.tgz", - "integrity": "sha512-33w4FNDkUcyIOq/TqyC+drnKdI4PdXmWp9lZzssyEQKuvu9ZFN3KttaSnDKo52U3E51oujVGop93mKxmqO8HHg==", + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.0.3.tgz", + "integrity": "sha512-Nyd1wZCMRc2dj/mAD0LlfQLcAO06uKdpKJXvK85SGrF5+5+Bpfil9u/2aw35ltvEHjvl0h5FMKN5knEU+9JrOg==", "requires": { "gaxios": "^3.0.0", "google-p12-pem": "^3.0.0", "jws": "^4.0.0", "mime": "^2.2.0" } + }, + "json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "requires": { + "bignumber.js": "^9.0.0" + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + }, + "protobufjs": { + "version": "6.10.1", + "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.1.tgz", + "integrity": "sha512-pb8kTchL+1Ceg4lFd5XUpK8PdWacbvV5SK2ULH2ebrYtl4GjJmS24m6CKME67jzV53tbJxHlnNOSqQHbTsR9JQ==", + "requires": { + "@protobufjs/aspromise": "^1.1.2", + "@protobufjs/base64": "^1.1.2", + "@protobufjs/codegen": "^2.0.4", + "@protobufjs/eventemitter": "^1.1.0", + "@protobufjs/fetch": "^1.1.0", + "@protobufjs/float": "^1.0.2", + "@protobufjs/inquire": "^1.1.0", + "@protobufjs/path": "^1.1.2", + "@protobufjs/pool": "^1.1.0", + "@protobufjs/utf8": "^1.1.0", + "@types/long": "^4.0.1", + "@types/node": "^13.7.0", + "long": "^4.0.0" + } + }, + 
"yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" } } }, "@google-cloud/storage": { - "version": "4.7.0", - "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-4.7.0.tgz", - "integrity": "sha512-f0guAlbeg7Z0m3gKjCfBCu7FG9qS3M3oL5OQQxlvGoPtK7/qg3+W+KQV73O2/sbuS54n0Kh2mvT5K2FWzF5vVQ==", + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-5.2.0.tgz", + "integrity": "sha512-zxHXZajtVA0Qx9IOnDUDb76mtKn5M20LKV/phmnVos7foozG9YZ6yYod90pRC/GgP3eOgxNYdt6KQcapssPsFw==", "optional": true, "requires": { - "@google-cloud/common": "^2.1.1", - "@google-cloud/paginator": "^2.0.0", - "@google-cloud/promisify": "^1.0.0", + "@google-cloud/common": "^3.3.0", + "@google-cloud/paginator": "^3.0.0", + "@google-cloud/promisify": "^2.0.0", "arrify": "^2.0.0", "compressible": "^2.0.12", "concat-stream": "^2.0.0", - "date-and-time": "^0.13.0", + "date-and-time": "^0.14.0", "duplexify": "^3.5.0", "extend": "^3.0.2", "gaxios": "^3.0.0", - "gcs-resumable-upload": "^2.2.4", + "gcs-resumable-upload": "^3.1.0", "hash-stream-validation": "^0.2.2", "mime": "^2.2.0", "mime-types": "^2.0.8", "onetime": "^5.1.0", - "p-limit": "^2.2.0", + "p-limit": "^3.0.1", "pumpify": "^2.0.0", - "readable-stream": "^3.4.0", "snakeize": "^0.1.0", "stream-events": "^1.0.1", - "through2": "^3.0.0", "xdg-basedir": "^4.0.0" }, "dependencies": { "gaxios": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.0.3.tgz", - "integrity": "sha512-PkzQludeIFhd535/yucALT/Wxyj/y2zLyrMwPcJmnLHDugmV49NvAi/vb+VUq/eWztATZCNcb8ue+ywPG+oLuw==", + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.1.0.tgz", + "integrity": "sha512-DDTn3KXVJJigtz+g0J3vhcfbDbKtAroSTxauWsdnP57sM5KZ3d2c/3D9RKFJ86s43hfw6WULg6TXYw/AYiBlpA==", "optional": true, "requires": { 
"abort-controller": "^3.0.0", @@ -328,15 +606,13 @@ "node-fetch": "^2.3.0" } }, - "readable-stream": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz", - "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==", + "p-limit": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.0.2.tgz", + "integrity": "sha512-iwqZSOoWIW+Ew4kAGUlN16J4M7OB3ysMLSZtnhmqx7njIHFPlxWBX8xo3lVTyFVq6mI/lL9qt2IsN1sHwaxJkg==", "optional": true, "requires": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" + "p-try": "^2.0.0" } } } @@ -358,6 +634,57 @@ "protobufjs": "^6.8.6" } }, + "@opentelemetry/api": { + "version": "0.10.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-0.10.2.tgz", + "integrity": "sha512-GtpMGd6vkzDMYcpu2t9LlhEgMy/SzBwRnz48EejlRArYqZzqSzAsKmegUK7zHgl+EOIaK9mKHhnRaQu3qw20cA==", + "requires": { + "@opentelemetry/context-base": "^0.10.2" + } + }, + "@opentelemetry/context-base": { + "version": "0.10.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/context-base/-/context-base-0.10.2.tgz", + "integrity": "sha512-hZNKjKOYsckoOEgBziGMnBcX0M7EtstnCmwz5jZUOUYwlZ+/xxX6z3jPu1XVO2Jivk0eLfuP9GP+vFD49CMetw==" + }, + "@opentelemetry/core": { + "version": "0.10.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-0.10.2.tgz", + "integrity": "sha512-DhkiTp5eje2zTGd+HAIKWpGE6IR6lq7tUpYt4nnkhOi6Hq9WQAANVDCWEZEbYOw57LkdXbE50FZ/kMvHDm450Q==", + "requires": { + "@opentelemetry/api": "^0.10.2", + "@opentelemetry/context-base": "^0.10.2", + "semver": "^7.1.3" + }, + "dependencies": { + "semver": { + "version": "7.3.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.2.tgz", + "integrity": "sha512-OrOb32TeeambH6UrhtShmF7CRDqhL6/5XpPNp2DuRH6+9QLw/orhp72j87v8Qa1ScDkvrrBNpZcDejAirJmfXQ==" + } + } + }, + "@opentelemetry/resources": { + "version": "0.10.2", 
+ "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-0.10.2.tgz", + "integrity": "sha512-5JGC2TPSAIHth615IURt+sSsTljY43zTfJD0JE9PHC6ipZPiQ0dpQDZOrLn8NAMfOHY1jeWwpIuLASjqbXUfuw==", + "requires": { + "@opentelemetry/api": "^0.10.2", + "@opentelemetry/core": "^0.10.2", + "gcp-metadata": "^3.5.0" + } + }, + "@opentelemetry/tracing": { + "version": "0.10.2", + "resolved": "https://registry.npmjs.org/@opentelemetry/tracing/-/tracing-0.10.2.tgz", + "integrity": "sha512-mNAhARn4dEdOjTa9OdysjI4fRHMbvr4YSbPuH7jhkyPzgoa+DnvnbY3GGpEay6kpuYJsrW8Ef9OIKAV/GndhbQ==", + "requires": { + "@opentelemetry/api": "^0.10.2", + "@opentelemetry/context-base": "^0.10.2", + "@opentelemetry/core": "^0.10.2", + "@opentelemetry/resources": "^0.10.2" + } + }, "@protobufjs/aspromise": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz", @@ -427,6 +754,12 @@ "@types/node": "*" } }, + "@types/color-name": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@types/color-name/-/color-name-1.1.1.tgz", + "integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==", + "optional": true + }, "@types/connect": { "version": "3.4.33", "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.33.tgz", @@ -454,9 +787,9 @@ } }, "@types/express-serve-static-core": { - "version": "4.17.7", - "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.7.tgz", - "integrity": "sha512-EMgTj/DF9qpgLXyc+Btimg+XoH7A2liE8uKul8qSmMTHCeNYzydDKFdsJskDvw42UsesCnhO63dO0Grbj8J4Dw==", + "version": "4.17.9", + "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.9.tgz", + "integrity": "sha512-DG0BYg6yO+ePW+XoDENYz8zhNGC3jDDEpComMYn7WJc4mY1Us8Rw9ax2YhJXxpyk2SF47PQAoQ0YyVT1a0bEkA==", "requires": { "@types/node": "*", "@types/qs": "*", @@ -477,9 +810,9 @@ "integrity": 
"sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w==" }, "@types/mime": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/@types/mime/-/mime-2.0.2.tgz", - "integrity": "sha512-4kPlzbljFcsttWEq6aBW0OZe6BDajAmyvr2xknBG92tejQnvdGtT9+kXSZ580DqpxY9qG2xeQVF9Dq0ymUTo5Q==" + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/@types/mime/-/mime-2.0.3.tgz", + "integrity": "sha512-Jus9s4CDbqwocc5pOAnh8ShfrnMcPHuJYzVcSUU7lrh8Ni5HuIqX3oilL86p3dlTrk0LzHRCgA/GQ7uNCw6l2Q==" }, "@types/node": { "version": "13.9.5", @@ -487,9 +820,9 @@ "integrity": "sha512-hkzMMD3xu6BrJpGVLeQ3htQQNAcOrJjX7WFmtK8zWQpz2UJf13LCFF2ALA7c9OVdvc2vQJeDdjfR35M0sBCxvw==" }, "@types/qs": { - "version": "6.9.3", - "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.3.tgz", - "integrity": "sha512-7s9EQWupR1fTc2pSMtXRQ9w9gLOcrJn+h7HOXw4evxyvVqMi4f+q7d2tnFe3ng3SNHjtK+0EzGMGFUQX4/AQRA==" + "version": "6.9.4", + "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.4.tgz", + "integrity": "sha512-+wYo+L6ZF6BMoEjtf8zB2esQsqdV6WsjRK/GP9WOgLPrq87PbNWgIxS76dS5uvl/QXtHGakZmwTznIfcPXcKlQ==" }, "@types/range-parser": { "version": "1.2.3", @@ -497,9 +830,9 @@ "integrity": "sha512-ewFXqrQHlFsgc09MK5jP5iR7vumV/BYayNC6PgJO2LPe8vrnNFyjQjSppfEngITi0qvfKtzFvgKymGheFM9UOA==" }, "@types/serve-static": { - "version": "1.13.4", - "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.13.4.tgz", - "integrity": "sha512-jTDt0o/YbpNwZbQmE/+2e+lfjJEJJR0I3OFaKQKPWkASkCoW3i6fsUnqudSMcNAfbtmADGu8f4MV4q+GqULmug==", + "version": "1.13.5", + "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.13.5.tgz", + "integrity": "sha512-6M64P58N+OXjU432WoLLBQxbA0LRGBCRm7aAGQJ+SMC1IMl0dgRVi9EFfoDcS2a7Xogygk/eGN94CfwU9UF7UQ==", "requires": { "@types/express-serve-static-core": "*", "@types/mime": "*" @@ -530,12 +863,22 @@ "debug": "4" } }, - "array-filter": { - "version": "1.0.0", - "resolved": 
"https://registry.npmjs.org/array-filter/-/array-filter-1.0.0.tgz", - "integrity": "sha1-uveeYubvTCpMC4MSMtr/7CUfnYM=", + "ansi-regex": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", "optional": true }, + "ansi-styles": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz", + "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==", + "optional": true, + "requires": { + "@types/color-name": "^1.1.1", + "color-convert": "^2.0.1" + } + }, "array-flatten": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz", @@ -543,17 +886,8 @@ }, "arrify": { "version": "2.0.1", - "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", - "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==" - }, - "available-typed-arrays": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.2.tgz", - "integrity": "sha512-XWX3OX8Onv97LMk/ftVyBibpGwY5a8SmuxZPzeOxqmuEqUCOM9ZE+uIaD1VNJ5QnvU2UQusvmKbuM1FR8QWGfQ==", - "optional": true, - "requires": { - "array-filter": "^1.0.0" - } + "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz", + "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==" }, "base64-js": { "version": "1.3.1", @@ -613,6 +947,38 @@ "resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz", "integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg==" }, + "camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": 
"sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "optional": true + }, + "cliui": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz", + "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==", + "optional": true, + "requires": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^6.2.0" + } + }, + "color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "optional": true, + "requires": { + "color-name": "~1.1.4" + } + }, + "color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "optional": true + }, "compressible": { "version": "2.0.18", "resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz", @@ -712,9 +1078,9 @@ "optional": true }, "date-and-time": { - "version": "0.13.1", - "resolved": "https://registry.npmjs.org/date-and-time/-/date-and-time-0.13.1.tgz", - "integrity": "sha512-/Uge9DJAT+s+oAcDxtBhyR8+sKjUnZbYmyhbmWjTHNtX7B7oWD8YyYdeXcBRbwSj6hVvj+IQegJam7m7czhbFw==", + "version": "0.14.0", + "resolved": "https://registry.npmjs.org/date-and-time/-/date-and-time-0.14.0.tgz", + "integrity": "sha512-0wY8b90XjQkRxv3XGT8k1ffyDQOf4+T+2hiWp7rwYgoEn8OyYDsHZdnVrPlzxbwjLUY66mVBXr59eKOwpSV7lw==", "optional": true }, "debug": { @@ -725,43 +1091,11 @@ "ms": "^2.1.1" } }, - "deep-equal": { - "version": "2.0.3", - "resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-2.0.3.tgz", - "integrity": "sha512-Spqdl4H+ky45I9ByyJtXteOm9CaIrPmnIPmOhrkKGNYWeDgCvJ8jNYVCTjChxW4FqGuZnLHADc8EKRMX6+CgvA==", - "optional": true, - "requires": { - 
"es-abstract": "^1.17.5", - "es-get-iterator": "^1.1.0", - "is-arguments": "^1.0.4", - "is-date-object": "^1.0.2", - "is-regex": "^1.0.5", - "isarray": "^2.0.5", - "object-is": "^1.1.2", - "object-keys": "^1.1.1", - "object.assign": "^4.1.0", - "regexp.prototype.flags": "^1.3.0", - "side-channel": "^1.0.2", - "which-boxed-primitive": "^1.0.1", - "which-collection": "^1.0.1", - "which-typed-array": "^1.1.2" - }, - "dependencies": { - "isarray": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", - "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", - "optional": true - } - } - }, - "define-properties": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz", - "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==", - "requires": { - "object-keys": "^1.0.12" - } + "decamelize": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", + "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=", + "optional": true }, "depd": { "version": "1.1.2", @@ -814,6 +1148,12 @@ "resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz", "integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=" }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "optional": true + }, "encodeurl": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz", @@ -833,57 +1173,6 @@ "integrity": "sha1-6WQhkyWiHQX0RGai9obtbOX13R0=", "optional": true }, - "es-abstract": { - "version": "1.17.5", - "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.5.tgz", - "integrity": 
"sha512-BR9auzDbySxOcfog0tLECW8l28eRGpDpU3Dm3Hp4q/N+VtLTmyj4EUN088XZWQDW/hzj6sYRDXeOFsaAODKvpg==", - "requires": { - "es-to-primitive": "^1.2.1", - "function-bind": "^1.1.1", - "has": "^1.0.3", - "has-symbols": "^1.0.1", - "is-callable": "^1.1.5", - "is-regex": "^1.0.5", - "object-inspect": "^1.7.0", - "object-keys": "^1.1.1", - "object.assign": "^4.1.0", - "string.prototype.trimleft": "^2.1.1", - "string.prototype.trimright": "^2.1.1" - } - }, - "es-get-iterator": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/es-get-iterator/-/es-get-iterator-1.1.0.tgz", - "integrity": "sha512-UfrmHuWQlNMTs35e1ypnvikg6jCz3SK8v8ImvmDsh36fCVUR1MqoFDiyn0/k52C8NqO3YsO8Oe0azeesNuqSsQ==", - "optional": true, - "requires": { - "es-abstract": "^1.17.4", - "has-symbols": "^1.0.1", - "is-arguments": "^1.0.4", - "is-map": "^2.0.1", - "is-set": "^2.0.1", - "is-string": "^1.0.5", - "isarray": "^2.0.5" - }, - "dependencies": { - "isarray": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz", - "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==", - "optional": true - } - } - }, - "es-to-primitive": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz", - "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==", - "requires": { - "is-callable": "^1.1.4", - "is-date-object": "^1.0.1", - "is-symbol": "^1.0.2" - } - }, "escape-html": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz", @@ -961,6 +1250,12 @@ "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" }, + "fast-deep-equal": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz", + 
"integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", + "optional": true + }, "fast-text-encoding": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.1.tgz", @@ -1003,36 +1298,42 @@ } } }, + "find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "optional": true, + "requires": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + } + }, "firebase-admin": { - "version": "8.12.1", - "resolved": "https://registry.npmjs.org/firebase-admin/-/firebase-admin-8.12.1.tgz", - "integrity": "sha512-DZ4Q7QQJYaO2BhnhZLrhL+mGRTCLS5WrxjbJtuKGmbKRBepwMhx++EQA5yhnGnIXgDHnp5SrZnVKygNdXtH8BQ==", - "requires": { - "@firebase/database": "^0.6.0", - "@google-cloud/firestore": "^3.0.0", - "@google-cloud/storage": "^4.1.2", - "@types/node": "^8.10.59", + "version": "9.1.1", + "resolved": "https://registry.npmjs.org/firebase-admin/-/firebase-admin-9.1.1.tgz", + "integrity": "sha512-HkzY9yN/kOe1EQgjheURAQ4pFBerI54TBL0+nj1fwzKnAnGCpcI73Bbwx99Pk3u2x4rj6bDcsZfz9bA8y7DWtQ==", + "requires": { + "@firebase/database": "^0.6.10", + "@firebase/database-types": "^0.5.2", + "@google-cloud/firestore": "^4.0.0", + "@google-cloud/storage": "^5.0.0", + "@types/node": "^10.10.0", "dicer": "^0.3.0", - "jsonwebtoken": "8.1.0", - "node-forge": "0.7.4" + "jsonwebtoken": "^8.5.1", + "node-forge": "^0.9.1" }, "dependencies": { "@types/node": { - "version": "8.10.61", - "resolved": "https://registry.npmjs.org/@types/node/-/node-8.10.61.tgz", - "integrity": "sha512-l+zSbvT8TPRaCxL1l9cwHCb0tSqGAGcjPJFItGGYat5oCTiq1uQQKYg5m7AF1mgnEBzFXGLJ2LRmNjtreRX76Q==" - }, - "node-forge": { - "version": "0.7.4", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.7.4.tgz", - "integrity": 
"sha512-8Df0906+tq/omxuCZD6PqhPaQDYuyJ1d+VITgxoIA8zvQd1ru+nMJcDChHH324MWitIgbVkAkQoGEEVJNpn/PA==" + "version": "10.17.28", + "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.28.tgz", + "integrity": "sha512-dzjES1Egb4c1a89C7lKwQh8pwjYmlOAG9dW1pBgxEk57tMrLnssOfEthz8kdkNaBd7lIqQx7APm5+mZ619IiCQ==" } } }, "firebase-functions": { - "version": "3.7.0", - "resolved": "https://registry.npmjs.org/firebase-functions/-/firebase-functions-3.7.0.tgz", - "integrity": "sha512-+ROj2Gs2/KyM+T8jYo7AKaHynFsN49sXbgZMll3zuGa9/8oiDsXp9e1Iy2JMkFmSZg67jeYw5Ue2OSpz0XiqFQ==", + "version": "3.11.0", + "resolved": "https://registry.npmjs.org/firebase-functions/-/firebase-functions-3.11.0.tgz", + "integrity": "sha512-i1uMhZ/M6i5SCI3ulKo7EWX0/LD+I5o6N+sk0HbOWfzyWfOl0iJTvQkR3BVDcjrlhPVC4xG1bDTLxd+DTkLqaw==", "requires": { "@types/express": "4.17.3", "cors": "^2.8.5", @@ -1040,12 +1341,6 @@ "lodash": "^4.17.14" } }, - "foreach": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/foreach/-/foreach-2.0.5.tgz", - "integrity": "sha1-C+4AUBiusmDQo6865ljdATbsG5k=", - "optional": true - }, "forwarded": { "version": "0.1.2", "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz", @@ -1056,11 +1351,6 @@ "resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz", "integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac=" }, - "function-bind": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz", - "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==" - }, "functional-red-black-tree": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", @@ -1089,19 +1379,119 @@ } }, "gcs-resumable-upload": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/gcs-resumable-upload/-/gcs-resumable-upload-2.3.3.tgz", - "integrity": 
"sha512-sf896I5CC/1AxeaGfSFg3vKMjUq/r+A3bscmVzZm10CElyRanN0XwPu/MxeIO4LSP+9uF6yKzXvNsaTsMXUG6Q==", + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/gcs-resumable-upload/-/gcs-resumable-upload-3.1.1.tgz", + "integrity": "sha512-RS1osvAicj9+MjCc6jAcVL1Pt3tg7NK2C2gXM5nqD1Gs0klF2kj5nnAFSBy97JrtslMIQzpb7iSuxaG8rFWd2A==", "optional": true, "requires": { "abort-controller": "^3.0.0", "configstore": "^5.0.0", - "gaxios": "^2.0.0", - "google-auth-library": "^5.0.0", + "extend": "^3.0.2", + "gaxios": "^3.0.0", + "google-auth-library": "^6.0.0", "pumpify": "^2.0.0", "stream-events": "^1.0.4" + }, + "dependencies": { + "bignumber.js": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.0.tgz", + "integrity": "sha512-t/OYhhJ2SD+YGBQcjY8GzzDHEk9f3nerxjtfa6tlMXfe7frs/WozhvCNoGvpM0P3bNf3Gq5ZRMlGr5f3r4/N8A==", + "optional": true + }, + "gaxios": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.1.0.tgz", + "integrity": "sha512-DDTn3KXVJJigtz+g0J3vhcfbDbKtAroSTxauWsdnP57sM5KZ3d2c/3D9RKFJ86s43hfw6WULg6TXYw/AYiBlpA==", + "optional": true, + "requires": { + "abort-controller": "^3.0.0", + "extend": "^3.0.2", + "https-proxy-agent": "^5.0.0", + "is-stream": "^2.0.0", + "node-fetch": "^2.3.0" + } + }, + "gcp-metadata": { + "version": "4.1.4", + "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.4.tgz", + "integrity": "sha512-5J/GIH0yWt/56R3dNaNWPGQ/zXsZOddYECfJaqxFWgrZ9HC2Kvc5vl9upOgUUHKzURjAVf2N+f6tEJiojqXUuA==", + "optional": true, + "requires": { + "gaxios": "^3.0.0", + "json-bigint": "^1.0.0" + } + }, + "google-auth-library": { + "version": "6.0.6", + "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.6.tgz", + "integrity": "sha512-fWYdRdg55HSJoRq9k568jJA1lrhg9i2xgfhVIMJbskUmbDpJGHsbv9l41DGhCDXM21F9Kn4kUwdysgxSYBYJUw==", + "optional": true, + "requires": { + "arrify": "^2.0.0", + "base64-js": "^1.3.0", + "ecdsa-sig-formatter": 
"^1.0.11", + "fast-text-encoding": "^1.0.0", + "gaxios": "^3.0.0", + "gcp-metadata": "^4.1.0", + "gtoken": "^5.0.0", + "jws": "^4.0.0", + "lru-cache": "^6.0.0" + } + }, + "google-p12-pem": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.2.tgz", + "integrity": "sha512-tbjzndQvSIHGBLzHnhDs3cL4RBjLbLXc2pYvGH+imGVu5b4RMAttUTdnmW2UH0t11QeBTXZ7wlXPS7hrypO/tg==", + "optional": true, + "requires": { + "node-forge": "^0.9.0" + } + }, + "gtoken": { + "version": "5.0.3", + "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.0.3.tgz", + "integrity": "sha512-Nyd1wZCMRc2dj/mAD0LlfQLcAO06uKdpKJXvK85SGrF5+5+Bpfil9u/2aw35ltvEHjvl0h5FMKN5knEU+9JrOg==", + "optional": true, + "requires": { + "gaxios": "^3.0.0", + "google-p12-pem": "^3.0.0", + "jws": "^4.0.0", + "mime": "^2.2.0" + } + }, + "json-bigint": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz", + "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==", + "optional": true, + "requires": { + "bignumber.js": "^9.0.0" + } + }, + "lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "optional": true, + "requires": { + "yallist": "^4.0.0" + } + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "optional": true + } } }, + "get-caller-file": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", + "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==", + "optional": true + }, "google-auth-library": { "version": 
"5.10.1", "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz", @@ -1165,19 +1555,6 @@ "mime": "^2.2.0" } }, - "has": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz", - "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==", - "requires": { - "function-bind": "^1.1.1" - } - }, - "has-symbols": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz", - "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==" - }, "hash-stream-validation": { "version": "0.2.3", "resolved": "https://registry.npmjs.org/hash-stream-validation/-/hash-stream-validation-0.2.3.tgz", @@ -1267,44 +1644,10 @@ "resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz", "integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==" }, - "is-arguments": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.0.4.tgz", - "integrity": "sha512-xPh0Rmt8NE65sNzvyUmWgI1tz3mKq74lGA0mL8LYZcoIzKOzDh6HmrYm3d18k60nHerC8A9Km8kYu87zfSFnLA==", - "optional": true - }, - "is-bigint": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.0.tgz", - "integrity": "sha512-t5mGUXC/xRheCK431ylNiSkGGpBp8bHENBcENTkDT6ppwPzEVxNGZRvgvmOEfbWkFhA7D2GEuE2mmQTr78sl2g==", - "optional": true - }, - "is-boolean-object": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.0.1.tgz", - "integrity": "sha512-TqZuVwa/sppcrhUCAYkGBk7w0yxfQQnxq28fjkO53tnK9FQXmdwz2JS5+GjsWQ6RByES1K40nI+yDic5c9/aAQ==", - "optional": true - }, - "is-callable": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.0.tgz", - "integrity": 
"sha512-pyVD9AaGLxtg6srb2Ng6ynWJqkHU9bEM087AKck0w8QwDarTfNcpIYoU8x8Hv2Icm8u6kFJM18Dag8lyqGkviw==" - }, - "is-date-object": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz", - "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g==" - }, - "is-map": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.1.tgz", - "integrity": "sha512-T/S49scO8plUiAOA2DBTBG3JHpn1yiw0kRp6dgiZ0v2/6twi5eiB0rHtHFH9ZIrvlWc6+4O+m4zg5+Z833aXgw==", - "optional": true - }, - "is-number-object": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.4.tgz", - "integrity": "sha512-zohwelOAur+5uXtk8O3GPQ1eAcu4ZX3UwxQhUlfFFMNpUd83gXgjbhJh6HmB6LUNV/ieOLQuDwJO3dWJosUeMw==", + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "optional": true }, "is-obj": { @@ -1313,20 +1656,6 @@ "integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==", "optional": true }, - "is-regex": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.0.tgz", - "integrity": "sha512-iI97M8KTWID2la5uYXlkbSDQIg4F6o1sYboZKKTDpnDQMLtUL86zxhgDet3Q2SriaYsyGqZ6Mn2SjbRKeLHdqw==", - "requires": { - "has-symbols": "^1.0.1" - } - }, - "is-set": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.1.tgz", - "integrity": "sha512-eJEzOtVyenDs1TMzSQ3kU3K+E0GUS9sno+F0OBT97xsgcJsF9nXMBtkT9/kut5JEpM7oL7X/0qxR17K3mcwIAA==", - "optional": true - }, "is-stream": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz", @@ -1337,50 +1666,12 @@ "resolved": 
"https://registry.npmjs.org/is-stream-ended/-/is-stream-ended-0.1.4.tgz", "integrity": "sha512-xj0XPvmr7bQFTvirqnFr50o0hQIh6ZItDqloxt5aJrR4NQsYeSsyFQERYGCAzfindAcnKjINnwEEgLx4IqVzQw==" }, - "is-string": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz", - "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==", - "optional": true - }, - "is-symbol": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz", - "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==", - "requires": { - "has-symbols": "^1.0.1" - } - }, - "is-typed-array": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.3.tgz", - "integrity": "sha512-BSYUBOK/HJibQ30wWkWold5txYwMUXQct9YHAQJr8fSwvZoiglcqB0pd7vEN23+Tsi9IUEjztdOSzl4qLVYGTQ==", - "optional": true, - "requires": { - "available-typed-arrays": "^1.0.0", - "es-abstract": "^1.17.4", - "foreach": "^2.0.5", - "has-symbols": "^1.0.1" - } - }, "is-typedarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=", "optional": true }, - "is-weakmap": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.1.tgz", - "integrity": "sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA==", - "optional": true - }, - "is-weakset": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.1.tgz", - "integrity": "sha512-pi4vhbhVHGLxohUw7PhGsueT4vRGFoXhP7+RGN0jKIv9+8PWYCQTqtADngrxOm2g46hoH0+g8uZZBzMrvVGDmw==", - "optional": true - }, "isarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", @@ -1395,11 +1686,11 @@ } }, "jsonwebtoken": { - "version": "8.1.0", 
- "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-8.1.0.tgz", - "integrity": "sha1-xjl80uX9WD1lwAeoPce7eOaYK4M=", + "version": "8.5.1", + "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz", + "integrity": "sha512-XjwVfRS6jTMsqYs0EsuJ4LGxXV14zQybNd4L2r0UvbVnSF9Af8x7p5MzbJ90Ioz/9TI41/hTCvznF/loiSzn8w==", "requires": { - "jws": "^3.1.4", + "jws": "^3.2.2", "lodash.includes": "^4.3.0", "lodash.isboolean": "^3.0.3", "lodash.isinteger": "^4.0.4", @@ -1407,8 +1698,8 @@ "lodash.isplainobject": "^4.0.6", "lodash.isstring": "^4.0.1", "lodash.once": "^4.0.0", - "ms": "^2.0.0", - "xtend": "^4.0.1" + "ms": "^2.1.1", + "semver": "^5.6.0" }, "dependencies": { "jwa": { @@ -1429,6 +1720,11 @@ "jwa": "^1.4.1", "safe-buffer": "^5.0.1" } + }, + "semver": { + "version": "5.7.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz", + "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==" } } }, @@ -1451,10 +1747,19 @@ "safe-buffer": "^5.0.1" } }, + "locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "optional": true, + "requires": { + "p-locate": "^4.1.0" + } + }, "lodash": { - "version": "4.17.15", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", - "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==" + "version": "4.17.20", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.20.tgz", + "integrity": "sha512-PlhdFcillOINfeV7Ni6oF1TAEayyZBoZ8bcshTHqOYJYlrqzRK5hagpagky5o4HfCzzd1TRkXPMFq6cKk9rGmA==" }, "lodash.at": { "version": "4.6.0", @@ -1597,37 +1902,6 @@ "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", "integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM=" }, - 
"object-inspect": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz", - "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw==" - }, - "object-is": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/object-is/-/object-is-1.1.2.tgz", - "integrity": "sha512-5lHCz+0uufF6wZ7CRFWJN3hp8Jqblpgve06U5CMQ3f//6iDjPr2PEo9MWCjEssDsa+UZEL4PkFpr+BMop6aKzQ==", - "optional": true, - "requires": { - "define-properties": "^1.1.3", - "es-abstract": "^1.17.5" - } - }, - "object-keys": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz", - "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA==" - }, - "object.assign": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.0.tgz", - "integrity": "sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w==", - "requires": { - "define-properties": "^1.1.2", - "function-bind": "^1.1.1", - "has-symbols": "^1.0.0", - "object-keys": "^1.0.11" - } - }, "on-finished": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz", @@ -1645,9 +1919,9 @@ } }, "onetime": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.0.tgz", - "integrity": "sha512-5NcSkPHhwTVFIQN+TUqXoS5+dlElHXdpAWu9I0HP20YOtIi+aZ0Ct82jdlILDxjLEAWwvm+qj1m6aEtsDVmm6Q==", + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", + "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", "optional": true, "requires": { "mimic-fn": "^2.1.0" @@ -1667,6 +1941,15 @@ "p-try": "^2.0.0" } }, + "p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": 
"sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "optional": true, + "requires": { + "p-limit": "^2.2.0" + } + }, "p-try": { "version": "2.2.0", "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", @@ -1678,6 +1961,12 @@ "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", "integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==" }, + "path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "optional": true + }, "path-to-regexp": { "version": "0.1.7", "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", @@ -1812,15 +2101,17 @@ } } }, - "regexp.prototype.flags": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.3.0.tgz", - "integrity": "sha512-2+Q0C5g951OlYlJz6yu5/M33IcsESLlLfsyIaLJaG4FA2r4yP8MvVMJUUP/fVBkSpbbbZlS5gynbEWLipiiXiQ==", - "optional": true, - "requires": { - "define-properties": "^1.1.3", - "es-abstract": "^1.17.0-next.1" - } + "require-directory": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", + "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=", + "optional": true + }, + "require-main-filename": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz", + "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==", + "optional": true }, "retry-request": { "version": "4.1.1", @@ -1904,21 +2195,17 @@ "send": "0.17.1" } }, + "set-blocking": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", + "integrity": 
"sha1-BF+XgtARrppoA93TgrJDkrPYkPc=", + "optional": true + }, "setprototypeof": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz", "integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw==" }, - "side-channel": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.2.tgz", - "integrity": "sha512-7rL9YlPHg7Ancea1S96Pa8/QWb4BtXL/TZvS6B8XFetGBeuhAsfmUspK6DokBeZ64+Kj9TCNRD/30pVz1BvQNA==", - "optional": true, - "requires": { - "es-abstract": "^1.17.0-next.1", - "object-inspect": "^1.7.0" - } - }, "signal-exit": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz", @@ -1955,42 +2242,15 @@ "resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-0.1.2.tgz", "integrity": "sha1-gIudDlb8Jz2Am6VzOOkpkZoanxo=" }, - "string.prototype.trimend": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.1.tgz", - "integrity": "sha512-LRPxFUaTtpqYsTeNKaFOw3R4bxIzWOnbQ837QfBylo8jIxtcbK/A/sMV7Q+OAV/vWo+7s25pOE10KYSjaSO06g==", - "requires": { - "define-properties": "^1.1.3", - "es-abstract": "^1.17.5" - } - }, - "string.prototype.trimleft": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.2.tgz", - "integrity": "sha512-gCA0tza1JBvqr3bfAIFJGqfdRTyPae82+KTnm3coDXkZN9wnuW3HjGgN386D7hfv5CHQYCI022/rJPVlqXyHSw==", - "requires": { - "define-properties": "^1.1.3", - "es-abstract": "^1.17.5", - "string.prototype.trimstart": "^1.0.0" - } - }, - "string.prototype.trimright": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.2.tgz", - "integrity": "sha512-ZNRQ7sY3KroTaYjRS6EbNiiHrOkjihL9aQE/8gfQ4DtAC/aEBRHFJa44OmoWxGGqXuJlfKkZW4WcXErGr+9ZFg==", - "requires": { - "define-properties": 
"^1.1.3", - "es-abstract": "^1.17.5", - "string.prototype.trimend": "^1.0.0" - } - }, - "string.prototype.trimstart": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.1.tgz", - "integrity": "sha512-XxZn+QpvrBI1FOcg6dIpxUPgWCPuNXvMD72aaRaUQv1eD4e/Qy8i/hFTe0BUmD60p/QA6bh1avmuPTfNjqVWRw==", + "string-width": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", + "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", + "optional": true, "requires": { - "define-properties": "^1.1.3", - "es-abstract": "^1.17.5" + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.0" } }, "string_decoder": { @@ -2008,6 +2268,15 @@ } } }, + "strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "optional": true, + "requires": { + "ansi-regex": "^5.0.0" + } + }, "stubs": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz", @@ -2015,16 +2284,16 @@ "optional": true }, "teeny-request": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-6.0.3.tgz", - "integrity": "sha512-TZG/dfd2r6yeji19es1cUIwAlVD8y+/svB1kAC2Y0bjEyysrfbO8EZvJBRwIE6WkwmUoB7uvWLwTIhJbMXZ1Dw==", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.0.tgz", + "integrity": "sha512-kWD3sdGmIix6w7c8ZdVKxWq+3YwVPGWz+Mq0wRZXayEKY/YHb63b8uphfBzcFDmyq8frD9+UTc3wLyOhltRbtg==", "optional": true, "requires": { "http-proxy-agent": "^4.0.0", "https-proxy-agent": "^5.0.0", "node-fetch": "^2.2.0", "stream-events": "^1.0.5", - "uuid": "^7.0.0" + "uuid": "^8.0.0" } }, "through2": { @@ -2041,9 +2310,9 @@ "integrity": 
"sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw==" }, "tslib": { - "version": "1.11.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.11.1.tgz", - "integrity": "sha512-aZW88SY8kQbU7gpV19lN24LtXh/yD4ZZg6qieAJDDg+YBsJcSmLGK9QpnUjAKVG/xefmvJGd1WUmfpT/g6AJGA==" + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.13.0.tgz", + "integrity": "sha512-i/6DQjL8Xf3be4K/E6Wgpekn5Qasl1usyw++dAA35Ue5orEn65VIxOA+YvNNl9HV3qv70T7CNwjODHZrLwvd1Q==" }, "type-is": { "version": "1.6.18", @@ -2094,9 +2363,9 @@ "integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM=" }, "uuid": { - "version": "7.0.3", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-7.0.3.tgz", - "integrity": "sha512-DPSke0pXhTZgoF/d+WSt2QaKMCFSfx7QegxEWT+JOuHF5aWrKEn0G+ztjuJg/gG8/ItK+rbPCD/yNv8yyih6Cg==", + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.0.tgz", + "integrity": "sha512-fX6Z5o4m6XsXBdli9g7DtWgAx+osMsRRZFKma1mIUsLCz6vRvv+pz5VNbyu9UEDzpMWulZfvpgb/cmDXVulYFQ==", "optional": true }, "vary": { @@ -2124,43 +2393,21 @@ "resolved": "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.4.tgz", "integrity": "sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg==" }, - "which-boxed-primitive": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.1.tgz", - "integrity": "sha512-7BT4TwISdDGBgaemWU0N0OU7FeAEJ9Oo2P1PHRm/FCWoEi2VLWC9b6xvxAA3C/NMpxg3HXVgi0sMmGbNUbNepQ==", - "optional": true, - "requires": { - "is-bigint": "^1.0.0", - "is-boolean-object": "^1.0.0", - "is-number-object": "^1.0.3", - "is-string": "^1.0.4", - "is-symbol": "^1.0.2" - } - }, - "which-collection": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.1.tgz", - "integrity": 
"sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A==", - "optional": true, - "requires": { - "is-map": "^2.0.1", - "is-set": "^2.0.1", - "is-weakmap": "^2.0.1", - "is-weakset": "^2.0.1" - } + "which-module": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", + "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=", + "optional": true }, - "which-typed-array": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.2.tgz", - "integrity": "sha512-KT6okrd1tE6JdZAy3o2VhMoYPh3+J6EMZLyrxBQsZflI1QCZIxMrIYLkosd8Twf+YfknVIHmYQPgJt238p8dnQ==", + "wrap-ansi": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz", + "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==", "optional": true, "requires": { - "available-typed-arrays": "^1.0.2", - "es-abstract": "^1.17.5", - "foreach": "^2.0.5", - "function-bind": "^1.1.1", - "has-symbols": "^1.0.1", - "is-typed-array": "^1.1.3" + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" } }, "wrappy": { @@ -2189,12 +2436,48 @@ "xtend": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", - "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" + "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", + "optional": true + }, + "y18n": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", + "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==", + "optional": true }, "yallist": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", "integrity": 
"sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" + }, + "yargs": { + "version": "15.4.1", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz", + "integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==", + "optional": true, + "requires": { + "cliui": "^6.0.0", + "decamelize": "^1.2.0", + "find-up": "^4.1.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^4.2.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^18.1.2" + } + }, + "yargs-parser": { + "version": "18.1.3", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz", + "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==", + "optional": true, + "requires": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + } } } } diff --git a/firebase/functions/package.json b/firebase/functions/package.json index b4eebe0523..3e506ecf77 100644 --- a/firebase/functions/package.json +++ b/firebase/functions/package.json @@ -9,13 +9,13 @@ "logs": "firebase functions:log" }, "engines": { - "node": "8" + "node": "10" }, "dependencies": { - "@google-cloud/pubsub": "2.1.0", + "@google-cloud/pubsub": "2.5.0", "@google-cloud/iot": "1.8.0", - "firebase-admin": "8.12.1", - "firebase-functions": "3.7.0", + "firebase-admin": "9.1.1", + "firebase-functions": "3.11.0", "extend": "3.0.2" }, "private": true diff --git a/firebase/public/config.html b/firebase/public/config.html index f94216c57f..438013214b 100644 --- a/firebase/public/config.html +++ b/firebase/public/config.html @@ -11,8 +11,8 @@ - - + +
diff --git a/firebase/public/index.html b/firebase/public/index.html index d9ce546c59..c4928cd090 100644 --- a/firebase/public/index.html +++ b/firebase/public/index.html @@ -31,14 +31,14 @@

Filters:

-
- - -
+
+ + +
@@ -52,10 +52,10 @@

Filters:

-

Sites

-

Origins

+

Sites

+

Users

@@ -84,4 +84,4 @@

Users

if (typeof daq_deploy_version !== 'undefined') { document.getElementById('deploy-version').innerHTML = daq_deploy_version; } - \ No newline at end of file + diff --git a/firebase/public/main.js b/firebase/public/main.js index 09f4237a5e..a6eeee087f 100644 --- a/firebase/public/main.js +++ b/firebase/public/main.js @@ -8,11 +8,6 @@ const display_columns = []; const display_rows = []; const row_timestamps = {}; -const data_state = {}; - -let last_result_time_sec = 0; -let heartbeatTimestamp = 0; - const origin_id = getQueryParam('origin'); const site_name = getQueryParam('site'); const port_id = getQueryParam('port'); @@ -21,8 +16,13 @@ const device_id = getQueryParam('device'); const run_id = getQueryParam('runid'); const from = getQueryParam('from'); const to = getQueryParam('to'); + +const data_state = {}; +let last_result_time_sec = 0; +let heartbeatTimestamp = 0; var db; -var activePorts = []; +var activePorts = new Set(); + document.addEventListener('DOMContentLoaded', () => { db = firebase.firestore(); const settings = { @@ -289,7 +289,7 @@ function watcherAdd(ref, collection, limit, handler) { }, (e) => console.error(e)); } -function listSites(db) { +function listSites() { const linkGroup = document.querySelector('#listings .sites'); db.collection('site').get().then((snapshot) => { snapshot.forEach((site_doc) => { @@ -303,21 +303,31 @@ function listSites(db) { }).catch((e) => statusUpdate('registry list error', e)); } -function listOrigins(db) { - const linkGroup = document.querySelector('#listings .origins'); +function addOrigin(originId) { + db.collection('origin').doc(originId).get().then((result) => { + const linkGroup = document.querySelector('#listings .origins'); + const originLink = document.createElement('a'); + originLink.setAttribute('href', '/?origin=' + originId); + originLink.innerHTML = originId; + linkGroup.appendChild(originLink); + const originInfo = document.createElement('span'); + const version = result.data() && result.data().version; + 
const updated = result.data() && result.data().updated; + originInfo.innerHTML = ` ${version}, ${updated}`; + linkGroup.appendChild(originInfo); + linkGroup.appendChild(document.createElement('p')); + }); +} + +function listOrigins() { db.collection('origin').get().then((snapshot) => { snapshot.forEach((originDoc) => { - const origin = originDoc.id; - const originLink = document.createElement('a'); - originLink.setAttribute('href', '/?origin=' + origin); - originLink.innerHTML = origin; - linkGroup.appendChild(originLink); - linkGroup.appendChild(document.createElement('p')); + addOrigin(originDoc.id); }); }).catch((e) => statusUpdate('origin list error', e)); } -function listUsers(db) { +function listUsers() { const link_group = document.querySelector('#listings .users'); db.collection('users').get().then((snapshot) => { snapshot.forEach((user_doc) => { @@ -354,9 +364,9 @@ function dashboardSetup() { triggerOrigin(db, origin_id); } else { document.getElementById('listings').classList.add('active'); - listSites(db); - listOrigins(db); - listUsers(db); + listOrigins(); + listSites(); + listUsers(); } return origin_id; diff --git a/firebase/public/protos.hash b/firebase/public/protos.hash index bab39e76c2..4ddf9485f8 100644 --- a/firebase/public/protos.hash +++ b/firebase/public/protos.hash @@ -1 +1 @@ -b7a56a30dafe26576d6bdef00dfb57dc07a016ac proto/system_config.proto +f76de649c75ed722febfc0750c53672f22af5ab1 proto/system_config.proto diff --git a/firebase/public/protos.html b/firebase/public/protos.html index d24bed4958..f4cf44b2e7 100644 --- a/firebase/public/protos.html +++ b/firebase/public/protos.html @@ -198,6 +198,14 @@

Table of Contents

MSwitchSetup +
  • + MUSISetup +
  • + + +
  • + ERunTriggerType +
  • @@ -430,10 +438,10 @@

    DaqConfig

    - fail_hook + topology_hook string -

    Hook for failure diagnostics.

    +

    Hook for device topology updates.

    @@ -478,6 +486,27 @@

    DaqConfig

    Set time between port disconnect and host tests shutdown

    + + usi_setup + USISetup + +

    USI url

    + + + + run_trigger_type + RunTriggerType + +

    Configures events that trigger a DAQ run

    + + + + debug_mode + bool + +

    verbose output

    + + @@ -613,7 +642,14 @@

    SwitchSetup

    lo_port int32 -

    Local port of open flow controller

    +

    Local port of DAQ OpenFlow controller

    + + + + alt_port + int32 + +

    Local port for an alternate OpenFlow controller

    @@ -679,7 +715,61 @@

    SwitchSetup

    +

    USISetup

    +

    USI paramters

    + + + + + + + + + + + + + + + + + + + + + + +
    FieldTypeLabelDescription
    urlstring

    rpc_timeout_secint32

    + + + + + + + +

    RunTriggerType

    +

    + + + + + + + + + + + + + + + + + + + +
    NameNumberDescription
    PORT0

    VLAN1

    diff --git a/libs/proto/system_config_pb2.py b/libs/proto/system_config_pb2.py index e4746e687f..f2784ab726 100644 --- a/libs/proto/system_config_pb2.py +++ b/libs/proto/system_config_pb2.py @@ -1,7 +1,9 @@ -# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: daq/proto/system_config.proto +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) +from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection @@ -18,9 +20,34 @@ package='', syntax='proto3', serialized_options=None, - serialized_pb=b'\n\x1d\x64\x61q/proto/system_config.proto\"\xfb\x07\n\tDaqConfig\x12\x18\n\x10site_description\x18\x01 \x01(\t\x12\x18\n\x10monitor_scan_sec\x18\x02 \x01(\x05\x12\x1b\n\x13\x64\x65\x66\x61ult_timeout_sec\x18\x03 \x01(\x05\x12\x12\n\nsettle_sec\x18& \x01(\x05\x12\x11\n\tbase_conf\x18\x04 \x01(\t\x12\x11\n\tsite_path\x18\x05 \x01(\t\x12\x1f\n\x17initial_dhcp_lease_time\x18\x06 \x01(\t\x12\x17\n\x0f\x64hcp_lease_time\x18\x07 \x01(\t\x12\x19\n\x11\x64hcp_response_sec\x18\' \x01(\x05\x12\x1e\n\x16long_dhcp_response_sec\x18\x08 \x01(\x05\x12\"\n\x0cswitch_setup\x18\t \x01(\x0b\x32\x0c.SwitchSetup\x12\x12\n\nhost_tests\x18\x10 \x01(\t\x12\x13\n\x0b\x62uild_tests\x18$ \x01(\x08\x12\x11\n\trun_limit\x18\x11 \x01(\x05\x12\x11\n\tfail_mode\x18\x12 \x01(\x08\x12\x13\n\x0bsingle_shot\x18\" \x01(\x08\x12\x15\n\rresult_linger\x18\x13 \x01(\x08\x12\x0f\n\x07no_test\x18\x14 \x01(\x08\x12\x11\n\tkeep_hold\x18( \x01(\x08\x12\x14\n\x0c\x64\x61q_loglevel\x18\x15 \x01(\t\x12\x18\n\x10mininet_loglevel\x18\x16 \x01(\t\x12\x13\n\x0b\x66inish_hook\x18# \x01(\t\x12\x10\n\x08gcp_cred\x18\x17 \x01(\t\x12\x11\n\tgcp_topic\x18\x18 \x01(\t\x12\x13\n\x0bschema_path\x18\x19 \x01(\t\x12\x11\n\tmud_files\x18\x1a \x01(\t\x12\x14\n\x0c\x64\x65vice_specs\x18\x1b 
\x01(\t\x12\x13\n\x0btest_config\x18\x1c \x01(\t\x12\x19\n\x11port_debounce_sec\x18\x1d \x01(\x05\x12\x11\n\tfail_hook\x18\x1e \x01(\t\x12\x17\n\x0f\x64\x65vice_template\x18\x1f \x01(\t\x12\x14\n\x0csite_reports\x18 \x01(\t\x12\x1f\n\x17run_data_retention_days\x18! \x01(\x02\x12.\n\ninterfaces\x18% \x03(\x0b\x32\x1a.DaqConfig.InterfacesEntry\x12/\n\x0b\x66\x61il_module\x18/ \x03(\x0b\x32\x1a.DaqConfig.FailModuleEntry\x12\x1d\n\x15port_flap_timeout_sec\x18\x30 \x01(\x05\x1a=\n\x0fInterfacesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x19\n\x05value\x18\x02 \x01(\x0b\x32\n.Interface:\x02\x38\x01\x1a\x31\n\x0f\x46\x61ilModuleEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xe2\x01\n\x0bSwitchSetup\x12\x11\n\tctrl_intf\x18\t \x01(\t\x12\x0f\n\x07ip_addr\x18\x0b \x01(\t\x12\x13\n\x0buplink_port\x18\r \x01(\x05\x12\x0f\n\x07lo_port\x18\x0e \x01(\x05\x12\x0f\n\x07lo_addr\x18\x0f \x01(\t\x12\x11\n\tmods_addr\x18\x10 \x01(\t\x12\x0f\n\x07of_dpid\x18) \x01(\t\x12\x11\n\tdata_intf\x18* \x01(\t\x12\x0e\n\x06\x65xt_br\x18+ \x01(\t\x12\r\n\x05model\x18, \x01(\t\x12\x10\n\x08username\x18- \x01(\t\x12\x10\n\x08password\x18. 
\x01(\t\"\'\n\tInterface\x12\x0c\n\x04opts\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\x05\x62\x06proto3' + serialized_pb=_b('\n\x1d\x64\x61q/proto/system_config.proto\"\xdc\x08\n\tDaqConfig\x12\x18\n\x10site_description\x18\x01 \x01(\t\x12\x18\n\x10monitor_scan_sec\x18\x02 \x01(\x05\x12\x1b\n\x13\x64\x65\x66\x61ult_timeout_sec\x18\x03 \x01(\x05\x12\x12\n\nsettle_sec\x18& \x01(\x05\x12\x11\n\tbase_conf\x18\x04 \x01(\t\x12\x11\n\tsite_path\x18\x05 \x01(\t\x12\x1f\n\x17initial_dhcp_lease_time\x18\x06 \x01(\t\x12\x17\n\x0f\x64hcp_lease_time\x18\x07 \x01(\t\x12\x19\n\x11\x64hcp_response_sec\x18\' \x01(\x05\x12\x1e\n\x16long_dhcp_response_sec\x18\x08 \x01(\x05\x12\"\n\x0cswitch_setup\x18\t \x01(\x0b\x32\x0c.SwitchSetup\x12\x12\n\nhost_tests\x18\x10 \x01(\t\x12\x13\n\x0b\x62uild_tests\x18$ \x01(\x08\x12\x11\n\trun_limit\x18\x11 \x01(\x05\x12\x11\n\tfail_mode\x18\x12 \x01(\x08\x12\x13\n\x0bsingle_shot\x18\" \x01(\x08\x12\x15\n\rresult_linger\x18\x13 \x01(\x08\x12\x0f\n\x07no_test\x18\x14 \x01(\x08\x12\x11\n\tkeep_hold\x18( \x01(\x08\x12\x14\n\x0c\x64\x61q_loglevel\x18\x15 \x01(\t\x12\x18\n\x10mininet_loglevel\x18\x16 \x01(\t\x12\x13\n\x0b\x66inish_hook\x18# \x01(\t\x12\x10\n\x08gcp_cred\x18\x17 \x01(\t\x12\x11\n\tgcp_topic\x18\x18 \x01(\t\x12\x13\n\x0bschema_path\x18\x19 \x01(\t\x12\x11\n\tmud_files\x18\x1a \x01(\t\x12\x14\n\x0c\x64\x65vice_specs\x18\x1b \x01(\t\x12\x13\n\x0btest_config\x18\x1c \x01(\t\x12\x19\n\x11port_debounce_sec\x18\x1d \x01(\x05\x12\x15\n\rtopology_hook\x18\x1e \x01(\t\x12\x17\n\x0f\x64\x65vice_template\x18\x1f \x01(\t\x12\x14\n\x0csite_reports\x18 \x01(\t\x12\x1f\n\x17run_data_retention_days\x18! 
\x01(\x02\x12.\n\ninterfaces\x18% \x03(\x0b\x32\x1a.DaqConfig.InterfacesEntry\x12/\n\x0b\x66\x61il_module\x18/ \x03(\x0b\x32\x1a.DaqConfig.FailModuleEntry\x12\x1d\n\x15port_flap_timeout_sec\x18\x30 \x01(\x05\x12\x1c\n\tusi_setup\x18\x31 \x01(\x0b\x32\t.USISetup\x12)\n\x10run_trigger_type\x18\x32 \x01(\x0e\x32\x0f.RunTriggerType\x12\x12\n\ndebug_mode\x18\x33 \x01(\x08\x1a=\n\x0fInterfacesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x19\n\x05value\x18\x02 \x01(\x0b\x32\n.Interface:\x02\x38\x01\x1a\x31\n\x0f\x46\x61ilModuleEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"0\n\x08USISetup\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\x17\n\x0frpc_timeout_sec\x18\x02 \x01(\x05\"\xf4\x01\n\x0bSwitchSetup\x12\x11\n\tctrl_intf\x18\t \x01(\t\x12\x0f\n\x07ip_addr\x18\x0b \x01(\t\x12\x13\n\x0buplink_port\x18\r \x01(\x05\x12\x0f\n\x07lo_port\x18\x0e \x01(\x05\x12\x10\n\x08\x61lt_port\x18\x10 \x01(\x05\x12\x0f\n\x07lo_addr\x18\x12 \x01(\t\x12\x11\n\tmods_addr\x18\x14 \x01(\t\x12\x0f\n\x07of_dpid\x18) \x01(\t\x12\x11\n\tdata_intf\x18* \x01(\t\x12\x0e\n\x06\x65xt_br\x18+ \x01(\t\x12\r\n\x05model\x18, \x01(\t\x12\x10\n\x08username\x18- \x01(\t\x12\x10\n\x08password\x18. 
\x01(\t\"\'\n\tInterface\x12\x0c\n\x04opts\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\x05*$\n\x0eRunTriggerType\x12\x08\n\x04PORT\x10\x00\x12\x08\n\x04VLAN\x10\x01\x62\x06proto3') ) +_RUNTRIGGERTYPE = _descriptor.EnumDescriptor( + name='RunTriggerType', + full_name='RunTriggerType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='PORT', index=0, number=0, + serialized_options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='VLAN', index=1, number=1, + serialized_options=None, + type=None), + ], + containing_type=None, + serialized_options=None, + serialized_start=1490, + serialized_end=1526, +) +_sym_db.RegisterEnumDescriptor(_RUNTRIGGERTYPE) + +RunTriggerType = enum_type_wrapper.EnumTypeWrapper(_RUNTRIGGERTYPE) +PORT = 0 +VLAN = 1 @@ -34,7 +61,7 @@ _descriptor.FieldDescriptor( name='key', full_name='DaqConfig.InterfacesEntry.key', index=0, number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), @@ -51,14 +78,14 @@ nested_types=[], enum_types=[ ], - serialized_options=b'8\001', + serialized_options=_b('8\001'), is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], - serialized_start=941, - serialized_end=1002, + serialized_start=1038, + serialized_end=1099, ) _DAQCONFIG_FAILMODULEENTRY = _descriptor.Descriptor( @@ -71,14 +98,14 @@ _descriptor.FieldDescriptor( name='key', full_name='DaqConfig.FailModuleEntry.key', index=0, number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), 
_descriptor.FieldDescriptor( name='value', full_name='DaqConfig.FailModuleEntry.value', index=1, number=2, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), @@ -88,14 +115,14 @@ nested_types=[], enum_types=[ ], - serialized_options=b'8\001', + serialized_options=_b('8\001'), is_extendable=False, syntax='proto3', extension_ranges=[], oneofs=[ ], - serialized_start=1004, - serialized_end=1053, + serialized_start=1101, + serialized_end=1150, ) _DAQCONFIG = _descriptor.Descriptor( @@ -108,7 +135,7 @@ _descriptor.FieldDescriptor( name='site_description', full_name='DaqConfig.site_description', index=0, number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), @@ -136,28 +163,28 @@ _descriptor.FieldDescriptor( name='base_conf', full_name='DaqConfig.base_conf', index=4, number=4, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='site_path', full_name='DaqConfig.site_path', index=5, number=5, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), 
_descriptor.FieldDescriptor( name='initial_dhcp_lease_time', full_name='DaqConfig.initial_dhcp_lease_time', index=6, number=6, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='dhcp_lease_time', full_name='DaqConfig.dhcp_lease_time', index=7, number=7, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), @@ -185,7 +212,7 @@ _descriptor.FieldDescriptor( name='host_tests', full_name='DaqConfig.host_tests', index=11, number=16, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), @@ -241,63 +268,63 @@ _descriptor.FieldDescriptor( name='daq_loglevel', full_name='DaqConfig.daq_loglevel', index=19, number=21, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='mininet_loglevel', full_name='DaqConfig.mininet_loglevel', index=20, number=22, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, 
containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='finish_hook', full_name='DaqConfig.finish_hook', index=21, number=35, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='gcp_cred', full_name='DaqConfig.gcp_cred', index=22, number=23, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='gcp_topic', full_name='DaqConfig.gcp_topic', index=23, number=24, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='schema_path', full_name='DaqConfig.schema_path', index=24, number=25, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='mud_files', full_name='DaqConfig.mud_files', index=25, number=26, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, 
enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='device_specs', full_name='DaqConfig.device_specs', index=26, number=27, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='test_config', full_name='DaqConfig.test_config', index=27, number=28, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), @@ -309,23 +336,23 @@ is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='fail_hook', full_name='DaqConfig.fail_hook', index=29, + name='topology_hook', full_name='DaqConfig.topology_hook', index=29, number=30, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='device_template', full_name='DaqConfig.device_template', index=30, number=31, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='site_reports', 
full_name='DaqConfig.site_reports', index=31, number=32, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), @@ -357,6 +384,27 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='usi_setup', full_name='DaqConfig.usi_setup', index=36, + number=49, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='run_trigger_type', full_name='DaqConfig.run_trigger_type', index=37, + number=50, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='debug_mode', full_name='DaqConfig.debug_mode', index=38, + number=51, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), ], extensions=[ ], @@ -370,7 +418,45 @@ oneofs=[ ], serialized_start=34, - serialized_end=1053, + serialized_end=1150, +) + + +_USISETUP = _descriptor.Descriptor( + name='USISetup', + full_name='USISetup', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='url', full_name='USISetup.url', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='rpc_timeout_sec', full_name='USISetup.rpc_timeout_sec', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=1152, + serialized_end=1200, ) @@ -384,14 +470,14 @@ _descriptor.FieldDescriptor( name='ctrl_intf', full_name='SwitchSetup.ctrl_intf', index=0, number=9, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( name='ip_addr', full_name='SwitchSetup.ip_addr', index=1, number=11, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), @@ -410,58 +496,65 @@ is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='lo_addr', full_name='SwitchSetup.lo_addr', index=4, - number=15, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + name='alt_port', full_name='SwitchSetup.alt_port', index=4, + number=16, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, message_type=None, 
enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='mods_addr', full_name='SwitchSetup.mods_addr', index=5, - number=16, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + name='lo_addr', full_name='SwitchSetup.lo_addr', index=5, + number=18, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='of_dpid', full_name='SwitchSetup.of_dpid', index=6, + name='mods_addr', full_name='SwitchSetup.mods_addr', index=6, + number=20, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=_b("").decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR), + _descriptor.FieldDescriptor( + name='of_dpid', full_name='SwitchSetup.of_dpid', index=7, number=41, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='data_intf', full_name='SwitchSetup.data_intf', index=7, + name='data_intf', full_name='SwitchSetup.data_intf', index=8, number=42, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='ext_br', full_name='SwitchSetup.ext_br', index=8, + 
name='ext_br', full_name='SwitchSetup.ext_br', index=9, number=43, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='model', full_name='SwitchSetup.model', index=9, + name='model', full_name='SwitchSetup.model', index=10, number=44, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='username', full_name='SwitchSetup.username', index=10, + name='username', full_name='SwitchSetup.username', index=11, number=45, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), _descriptor.FieldDescriptor( - name='password', full_name='SwitchSetup.password', index=11, + name='password', full_name='SwitchSetup.password', index=12, number=46, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), @@ -477,8 +570,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1056, - serialized_end=1282, + serialized_start=1203, + serialized_end=1447, ) @@ -492,7 +585,7 @@ _descriptor.FieldDescriptor( name='opts', full_name='Interface.opts', index=0, 
number=1, type=9, cpp_type=9, label=1, - has_default_value=False, default_value=b"".decode('utf-8'), + has_default_value=False, default_value=_b("").decode('utf-8'), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR), @@ -515,8 +608,8 @@ extension_ranges=[], oneofs=[ ], - serialized_start=1284, - serialized_end=1323, + serialized_start=1449, + serialized_end=1488, ) _DAQCONFIG_INTERFACESENTRY.fields_by_name['value'].message_type = _INTERFACE @@ -525,46 +618,57 @@ _DAQCONFIG.fields_by_name['switch_setup'].message_type = _SWITCHSETUP _DAQCONFIG.fields_by_name['interfaces'].message_type = _DAQCONFIG_INTERFACESENTRY _DAQCONFIG.fields_by_name['fail_module'].message_type = _DAQCONFIG_FAILMODULEENTRY +_DAQCONFIG.fields_by_name['usi_setup'].message_type = _USISETUP +_DAQCONFIG.fields_by_name['run_trigger_type'].enum_type = _RUNTRIGGERTYPE DESCRIPTOR.message_types_by_name['DaqConfig'] = _DAQCONFIG +DESCRIPTOR.message_types_by_name['USISetup'] = _USISETUP DESCRIPTOR.message_types_by_name['SwitchSetup'] = _SWITCHSETUP DESCRIPTOR.message_types_by_name['Interface'] = _INTERFACE +DESCRIPTOR.enum_types_by_name['RunTriggerType'] = _RUNTRIGGERTYPE _sym_db.RegisterFileDescriptor(DESCRIPTOR) -DaqConfig = _reflection.GeneratedProtocolMessageType('DaqConfig', (_message.Message,), { +DaqConfig = _reflection.GeneratedProtocolMessageType('DaqConfig', (_message.Message,), dict( - 'InterfacesEntry' : _reflection.GeneratedProtocolMessageType('InterfacesEntry', (_message.Message,), { - 'DESCRIPTOR' : _DAQCONFIG_INTERFACESENTRY, - '__module__' : 'daq.proto.system_config_pb2' + InterfacesEntry = _reflection.GeneratedProtocolMessageType('InterfacesEntry', (_message.Message,), dict( + DESCRIPTOR = _DAQCONFIG_INTERFACESENTRY, + __module__ = 'daq.proto.system_config_pb2' # @@protoc_insertion_point(class_scope:DaqConfig.InterfacesEntry) - }) + )) , - 'FailModuleEntry' : 
_reflection.GeneratedProtocolMessageType('FailModuleEntry', (_message.Message,), { - 'DESCRIPTOR' : _DAQCONFIG_FAILMODULEENTRY, - '__module__' : 'daq.proto.system_config_pb2' + FailModuleEntry = _reflection.GeneratedProtocolMessageType('FailModuleEntry', (_message.Message,), dict( + DESCRIPTOR = _DAQCONFIG_FAILMODULEENTRY, + __module__ = 'daq.proto.system_config_pb2' # @@protoc_insertion_point(class_scope:DaqConfig.FailModuleEntry) - }) + )) , - 'DESCRIPTOR' : _DAQCONFIG, - '__module__' : 'daq.proto.system_config_pb2' + DESCRIPTOR = _DAQCONFIG, + __module__ = 'daq.proto.system_config_pb2' # @@protoc_insertion_point(class_scope:DaqConfig) - }) + )) _sym_db.RegisterMessage(DaqConfig) _sym_db.RegisterMessage(DaqConfig.InterfacesEntry) _sym_db.RegisterMessage(DaqConfig.FailModuleEntry) -SwitchSetup = _reflection.GeneratedProtocolMessageType('SwitchSetup', (_message.Message,), { - 'DESCRIPTOR' : _SWITCHSETUP, - '__module__' : 'daq.proto.system_config_pb2' +USISetup = _reflection.GeneratedProtocolMessageType('USISetup', (_message.Message,), dict( + DESCRIPTOR = _USISETUP, + __module__ = 'daq.proto.system_config_pb2' + # @@protoc_insertion_point(class_scope:USISetup) + )) +_sym_db.RegisterMessage(USISetup) + +SwitchSetup = _reflection.GeneratedProtocolMessageType('SwitchSetup', (_message.Message,), dict( + DESCRIPTOR = _SWITCHSETUP, + __module__ = 'daq.proto.system_config_pb2' # @@protoc_insertion_point(class_scope:SwitchSetup) - }) + )) _sym_db.RegisterMessage(SwitchSetup) -Interface = _reflection.GeneratedProtocolMessageType('Interface', (_message.Message,), { - 'DESCRIPTOR' : _INTERFACE, - '__module__' : 'daq.proto.system_config_pb2' +Interface = _reflection.GeneratedProtocolMessageType('Interface', (_message.Message,), dict( + DESCRIPTOR = _INTERFACE, + __module__ = 'daq.proto.system_config_pb2' # @@protoc_insertion_point(class_scope:Interface) - }) + )) _sym_db.RegisterMessage(Interface) diff --git a/libs/proto/usi_pb2.py b/libs/proto/usi_pb2.py new file mode 100644 
index 0000000000..780ac2e1c2 --- /dev/null +++ b/libs/proto/usi_pb2.py @@ -0,0 +1,486 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: usi.proto + +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='usi.proto', + package='usi', + syntax='proto3', + serialized_options=b'\n\004grpcB\010USIProtoP\001', + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n\tusi.proto\x12\x03usi\"\'\n\x14SwitchActionResponse\x12\x0f\n\x07success\x18\x01 \x01(\x08\"\xc9\x01\n\rPowerResponse\x12!\n\x19\x63urrent_power_consumption\x18\x01 \x01(\x02\x12\x1d\n\x15max_power_consumption\x18\x02 \x01(\x02\x12$\n\x0bpoe_support\x18\x03 \x01(\x0e\x32\x0f.usi.POESupport\x12\"\n\npoe_status\x18\x04 \x01(\x0e\x32\x0e.usi.POEStatus\x12,\n\x0fpoe_negotiation\x18\x05 \x01(\x0e\x32\x13.usi.POENegotiation\"]\n\x11InterfaceResponse\x12$\n\x0blink_status\x18\x01 \x01(\x0e\x32\x0f.usi.LinkStatus\x12\x12\n\nlink_speed\x18\x02 \x01(\x05\x12\x0e\n\x06\x64uplex\x18\x03 \x01(\t\"w\n\nSwitchInfo\x12\x0f\n\x07ip_addr\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65vice_port\x18\x03 \x01(\x05\x12\x1f\n\x05model\x18\x04 \x01(\x0e\x32\x10.usi.SwitchModel\x12\x10\n\x08username\x18\x05 \x01(\t\x12\x10\n\x08password\x18\x06 
\x01(\t*F\n\x0bSwitchModel\x12\x17\n\x13\x41LLIED_TELESIS_X230\x10\x00\x12\x0e\n\nCISCO_9300\x10\x01\x12\x0e\n\nOVS_SWITCH\x10\x02*\x1e\n\nLinkStatus\x12\x06\n\x02UP\x10\x00\x12\x08\n\x04\x44OWN\x10\x01*\'\n\nPOESupport\x12\x0b\n\x07\x45NABLED\x10\x00\x12\x0c\n\x08\x44ISABLED\x10\x01*1\n\tPOEStatus\x12\x06\n\x02ON\x10\x00\x12\x07\n\x03OFF\x10\x01\x12\t\n\x05\x46\x41ULT\x10\x02\x12\x08\n\x04\x44\x45NY\x10\x03*C\n\x0ePOENegotiation\x12\x17\n\x13NEGOTIATION_ENABLED\x10\x00\x12\x18\n\x14NEGOTIATION_DISABLED\x10\x01\x32\xef\x01\n\nUSIService\x12\x31\n\x08GetPower\x12\x0f.usi.SwitchInfo\x1a\x12.usi.PowerResponse\"\x00\x12\x39\n\x0cGetInterface\x12\x0f.usi.SwitchInfo\x1a\x16.usi.InterfaceResponse\"\x00\x12:\n\ndisconnect\x12\x0f.usi.SwitchInfo\x1a\x19.usi.SwitchActionResponse\"\x00\x12\x37\n\x07\x63onnect\x12\x0f.usi.SwitchInfo\x1a\x19.usi.SwitchActionResponse\"\x00\x42\x12\n\x04grpcB\x08USIProtoP\x01\x62\x06proto3' +) + +_SWITCHMODEL = _descriptor.EnumDescriptor( + name='SwitchModel', + full_name='usi.SwitchModel', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='ALLIED_TELESIS_X230', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='CISCO_9300', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='OVS_SWITCH', index=2, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + serialized_start=479, + serialized_end=549, +) +_sym_db.RegisterEnumDescriptor(_SWITCHMODEL) + +SwitchModel = enum_type_wrapper.EnumTypeWrapper(_SWITCHMODEL) +_LINKSTATUS = _descriptor.EnumDescriptor( + name='LinkStatus', + full_name='usi.LinkStatus', + filename=None, + file=DESCRIPTOR, + 
create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='UP', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='DOWN', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + serialized_start=551, + serialized_end=581, +) +_sym_db.RegisterEnumDescriptor(_LINKSTATUS) + +LinkStatus = enum_type_wrapper.EnumTypeWrapper(_LINKSTATUS) +_POESUPPORT = _descriptor.EnumDescriptor( + name='POESupport', + full_name='usi.POESupport', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='ENABLED', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='DISABLED', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + serialized_start=583, + serialized_end=622, +) +_sym_db.RegisterEnumDescriptor(_POESUPPORT) + +POESupport = enum_type_wrapper.EnumTypeWrapper(_POESUPPORT) +_POESTATUS = _descriptor.EnumDescriptor( + name='POEStatus', + full_name='usi.POEStatus', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='ON', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='OFF', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='FAULT', index=2, number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + 
name='DENY', index=3, number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + serialized_start=624, + serialized_end=673, +) +_sym_db.RegisterEnumDescriptor(_POESTATUS) + +POEStatus = enum_type_wrapper.EnumTypeWrapper(_POESTATUS) +_POENEGOTIATION = _descriptor.EnumDescriptor( + name='POENegotiation', + full_name='usi.POENegotiation', + filename=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + values=[ + _descriptor.EnumValueDescriptor( + name='NEGOTIATION_ENABLED', index=0, number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + _descriptor.EnumValueDescriptor( + name='NEGOTIATION_DISABLED', index=1, number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key), + ], + containing_type=None, + serialized_options=None, + serialized_start=675, + serialized_end=742, +) +_sym_db.RegisterEnumDescriptor(_POENEGOTIATION) + +POENegotiation = enum_type_wrapper.EnumTypeWrapper(_POENEGOTIATION) +ALLIED_TELESIS_X230 = 0 +CISCO_9300 = 1 +OVS_SWITCH = 2 +UP = 0 +DOWN = 1 +ENABLED = 0 +DISABLED = 1 +ON = 0 +OFF = 1 +FAULT = 2 +DENY = 3 +NEGOTIATION_ENABLED = 0 +NEGOTIATION_DISABLED = 1 + + + +_SWITCHACTIONRESPONSE = _descriptor.Descriptor( + name='SwitchActionResponse', + full_name='usi.SwitchActionResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='success', full_name='usi.SwitchActionResponse.success', index=0, + number=1, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + 
serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=18, + serialized_end=57, +) + + +_POWERRESPONSE = _descriptor.Descriptor( + name='PowerResponse', + full_name='usi.PowerResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='current_power_consumption', full_name='usi.PowerResponse.current_power_consumption', index=0, + number=1, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='max_power_consumption', full_name='usi.PowerResponse.max_power_consumption', index=1, + number=2, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=float(0), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='poe_support', full_name='usi.PowerResponse.poe_support', index=2, + number=3, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='poe_status', full_name='usi.PowerResponse.poe_status', index=3, + number=4, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( 
+ name='poe_negotiation', full_name='usi.PowerResponse.poe_negotiation', index=4, + number=5, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=60, + serialized_end=261, +) + + +_INTERFACERESPONSE = _descriptor.Descriptor( + name='InterfaceResponse', + full_name='usi.InterfaceResponse', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='link_status', full_name='usi.InterfaceResponse.link_status', index=0, + number=1, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='link_speed', full_name='usi.InterfaceResponse.link_speed', index=1, + number=2, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='duplex', full_name='usi.InterfaceResponse.duplex', index=2, + number=3, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + 
extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=263, + serialized_end=356, +) + + +_SWITCHINFO = _descriptor.Descriptor( + name='SwitchInfo', + full_name='usi.SwitchInfo', + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name='ip_addr', full_name='usi.SwitchInfo.ip_addr', index=0, + number=1, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='device_port', full_name='usi.SwitchInfo.device_port', index=1, + number=3, type=5, cpp_type=1, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='model', full_name='usi.SwitchInfo.model', index=2, + number=4, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + name='username', full_name='usi.SwitchInfo.username', index=3, + number=5, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + _descriptor.FieldDescriptor( + 
name='password', full_name='usi.SwitchInfo.password', index=4, + number=6, type=9, cpp_type=9, label=1, + has_default_value=False, default_value=b"".decode('utf-8'), + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + serialized_options=None, + is_extendable=False, + syntax='proto3', + extension_ranges=[], + oneofs=[ + ], + serialized_start=358, + serialized_end=477, +) + +_POWERRESPONSE.fields_by_name['poe_support'].enum_type = _POESUPPORT +_POWERRESPONSE.fields_by_name['poe_status'].enum_type = _POESTATUS +_POWERRESPONSE.fields_by_name['poe_negotiation'].enum_type = _POENEGOTIATION +_INTERFACERESPONSE.fields_by_name['link_status'].enum_type = _LINKSTATUS +_SWITCHINFO.fields_by_name['model'].enum_type = _SWITCHMODEL +DESCRIPTOR.message_types_by_name['SwitchActionResponse'] = _SWITCHACTIONRESPONSE +DESCRIPTOR.message_types_by_name['PowerResponse'] = _POWERRESPONSE +DESCRIPTOR.message_types_by_name['InterfaceResponse'] = _INTERFACERESPONSE +DESCRIPTOR.message_types_by_name['SwitchInfo'] = _SWITCHINFO +DESCRIPTOR.enum_types_by_name['SwitchModel'] = _SWITCHMODEL +DESCRIPTOR.enum_types_by_name['LinkStatus'] = _LINKSTATUS +DESCRIPTOR.enum_types_by_name['POESupport'] = _POESUPPORT +DESCRIPTOR.enum_types_by_name['POEStatus'] = _POESTATUS +DESCRIPTOR.enum_types_by_name['POENegotiation'] = _POENEGOTIATION +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +SwitchActionResponse = _reflection.GeneratedProtocolMessageType('SwitchActionResponse', (_message.Message,), { + 'DESCRIPTOR' : _SWITCHACTIONRESPONSE, + '__module__' : 'usi_pb2' + # @@protoc_insertion_point(class_scope:usi.SwitchActionResponse) + }) +_sym_db.RegisterMessage(SwitchActionResponse) + +PowerResponse = _reflection.GeneratedProtocolMessageType('PowerResponse', (_message.Message,), { + 'DESCRIPTOR' : 
_POWERRESPONSE, + '__module__' : 'usi_pb2' + # @@protoc_insertion_point(class_scope:usi.PowerResponse) + }) +_sym_db.RegisterMessage(PowerResponse) + +InterfaceResponse = _reflection.GeneratedProtocolMessageType('InterfaceResponse', (_message.Message,), { + 'DESCRIPTOR' : _INTERFACERESPONSE, + '__module__' : 'usi_pb2' + # @@protoc_insertion_point(class_scope:usi.InterfaceResponse) + }) +_sym_db.RegisterMessage(InterfaceResponse) + +SwitchInfo = _reflection.GeneratedProtocolMessageType('SwitchInfo', (_message.Message,), { + 'DESCRIPTOR' : _SWITCHINFO, + '__module__' : 'usi_pb2' + # @@protoc_insertion_point(class_scope:usi.SwitchInfo) + }) +_sym_db.RegisterMessage(SwitchInfo) + + +DESCRIPTOR._options = None + +_USISERVICE = _descriptor.ServiceDescriptor( + name='USIService', + full_name='usi.USIService', + file=DESCRIPTOR, + index=0, + serialized_options=None, + create_key=_descriptor._internal_create_key, + serialized_start=745, + serialized_end=984, + methods=[ + _descriptor.MethodDescriptor( + name='GetPower', + full_name='usi.USIService.GetPower', + index=0, + containing_service=None, + input_type=_SWITCHINFO, + output_type=_POWERRESPONSE, + serialized_options=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.MethodDescriptor( + name='GetInterface', + full_name='usi.USIService.GetInterface', + index=1, + containing_service=None, + input_type=_SWITCHINFO, + output_type=_INTERFACERESPONSE, + serialized_options=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.MethodDescriptor( + name='disconnect', + full_name='usi.USIService.disconnect', + index=2, + containing_service=None, + input_type=_SWITCHINFO, + output_type=_SWITCHACTIONRESPONSE, + serialized_options=None, + create_key=_descriptor._internal_create_key, + ), + _descriptor.MethodDescriptor( + name='connect', + full_name='usi.USIService.connect', + index=3, + containing_service=None, + input_type=_SWITCHINFO, + output_type=_SWITCHACTIONRESPONSE, + 
serialized_options=None, + create_key=_descriptor._internal_create_key, + ), +]) +_sym_db.RegisterServiceDescriptor(_USISERVICE) + +DESCRIPTOR.services_by_name['USIService'] = _USISERVICE + +# @@protoc_insertion_point(module_scope) diff --git a/libs/proto/usi_pb2_grpc.py b/libs/proto/usi_pb2_grpc.py new file mode 100644 index 0000000000..c8e57501c9 --- /dev/null +++ b/libs/proto/usi_pb2_grpc.py @@ -0,0 +1,161 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +import usi_pb2 as usi__pb2 + + +class USIServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.GetPower = channel.unary_unary( + '/usi.USIService/GetPower', + request_serializer=usi__pb2.SwitchInfo.SerializeToString, + response_deserializer=usi__pb2.PowerResponse.FromString, + ) + self.GetInterface = channel.unary_unary( + '/usi.USIService/GetInterface', + request_serializer=usi__pb2.SwitchInfo.SerializeToString, + response_deserializer=usi__pb2.InterfaceResponse.FromString, + ) + self.disconnect = channel.unary_unary( + '/usi.USIService/disconnect', + request_serializer=usi__pb2.SwitchInfo.SerializeToString, + response_deserializer=usi__pb2.SwitchActionResponse.FromString, + ) + self.connect = channel.unary_unary( + '/usi.USIService/connect', + request_serializer=usi__pb2.SwitchInfo.SerializeToString, + response_deserializer=usi__pb2.SwitchActionResponse.FromString, + ) + + +class USIServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def GetPower(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetInterface(self, request, 
context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def disconnect(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def connect(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_USIServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'GetPower': grpc.unary_unary_rpc_method_handler( + servicer.GetPower, + request_deserializer=usi__pb2.SwitchInfo.FromString, + response_serializer=usi__pb2.PowerResponse.SerializeToString, + ), + 'GetInterface': grpc.unary_unary_rpc_method_handler( + servicer.GetInterface, + request_deserializer=usi__pb2.SwitchInfo.FromString, + response_serializer=usi__pb2.InterfaceResponse.SerializeToString, + ), + 'disconnect': grpc.unary_unary_rpc_method_handler( + servicer.disconnect, + request_deserializer=usi__pb2.SwitchInfo.FromString, + response_serializer=usi__pb2.SwitchActionResponse.SerializeToString, + ), + 'connect': grpc.unary_unary_rpc_method_handler( + servicer.connect, + request_deserializer=usi__pb2.SwitchInfo.FromString, + response_serializer=usi__pb2.SwitchActionResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'usi.USIService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + + + # This class is part of an EXPERIMENTAL API. 
+class USIService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def GetPower(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/usi.USIService/GetPower', + usi__pb2.SwitchInfo.SerializeToString, + usi__pb2.PowerResponse.FromString, + options, channel_credentials, + call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def GetInterface(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/usi.USIService/GetInterface', + usi__pb2.SwitchInfo.SerializeToString, + usi__pb2.InterfaceResponse.FromString, + options, channel_credentials, + call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def disconnect(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/usi.USIService/disconnect', + usi__pb2.SwitchInfo.SerializeToString, + usi__pb2.SwitchActionResponse.FromString, + options, channel_credentials, + call_credentials, compression, wait_for_ready, timeout, metadata) + + @staticmethod + def connect(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary(request, target, '/usi.USIService/connect', + usi__pb2.SwitchInfo.SerializeToString, + usi__pb2.SwitchActionResponse.FromString, + options, channel_credentials, + call_credentials, compression, wait_for_ready, timeout, metadata) diff --git a/mudacl/build.gradle 
b/mudacl/build.gradle index 13f3fd4017..f3667604bb 100644 --- a/mudacl/build.gradle +++ b/mudacl/build.gradle @@ -5,12 +5,12 @@ buildscript { } } dependencies { - classpath "com.github.jengelman.gradle.plugins:shadow:5.2.0" + classpath "com.github.jengelman.gradle.plugins:shadow:6.0.0" } } plugins { - id 'com.github.johnrengelman.shadow' version '5.2.0' + id 'com.github.johnrengelman.shadow' version '6.0.0' id 'java' id 'maven' } @@ -32,7 +32,7 @@ repositories { } dependencies { - compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.11.0' - compile group: 'com.fasterxml.jackson.dataformat', name: 'jackson-dataformat-yaml', version: '2.11.0' + compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.11.1' + compile group: 'com.fasterxml.jackson.dataformat', name: 'jackson-dataformat-yaml', version: '2.11.1' testCompile group: 'junit', name: 'junit', version: '4.13' } diff --git a/mudacl/gradle/wrapper/gradle-wrapper.properties b/mudacl/gradle/wrapper/gradle-wrapper.properties index 16871c71a0..567aa53d89 100644 --- a/mudacl/gradle/wrapper/gradle-wrapper.properties +++ b/mudacl/gradle/wrapper/gradle-wrapper.properties @@ -3,4 +3,4 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-6.6.1-all.zip diff --git a/proto/system_config.proto b/proto/system_config.proto index a0cbfbda48..966a5b780a 100644 --- a/proto/system_config.proto +++ b/proto/system_config.proto @@ -94,8 +94,8 @@ message DaqConfig { // Set port-debounce for flaky connections. Zero to disable. int32 port_debounce_sec = 29; - // Hook for failure diagnostics. - string fail_hook = 30; + // Hook for device topology updates. + string topology_hook = 30; // Directory of defaults for new devices. 
string device_template = 31; @@ -114,8 +114,29 @@ message DaqConfig { // Set time between port disconnect and host tests shutdown int32 port_flap_timeout_sec = 48; + + // USI url + USISetup usi_setup = 49; + + // Configures events that trigger a DAQ run + RunTriggerType run_trigger_type = 50; + + // verbose output + bool debug_mode = 51; } +enum RunTriggerType { + PORT = 0; + VLAN = 1; +} + +/** + * USI paramters +**/ +message USISetup { + string url = 1; + int32 rpc_timeout_sec = 2; +} /* * System configuraiton of the access switch. This is used by the system @@ -131,14 +152,17 @@ message SwitchSetup { // Dataplane uplink port int32 uplink_port = 13; - // Local port of open flow controller + // Local port of DAQ OpenFlow controller int32 lo_port = 14; + // Local port for an alternate OpenFlow controller + int32 alt_port = 16; + // IP address and subnet for local control plane interface - string lo_addr = 15; + string lo_addr = 18; // IP address template and subnet for module ip addresses - string mods_addr = 16; + string mods_addr = 20; // Dataplane id of external OpenFlow switch string of_dpid = 41; diff --git a/pubber/.idea/codeStyles/codeStyleConfig.xml b/pubber/.idea/codeStyles/codeStyleConfig.xml deleted file mode 100644 index c79f34ced8..0000000000 --- a/pubber/.idea/codeStyles/codeStyleConfig.xml +++ /dev/null @@ -1,5 +0,0 @@ - - - - \ No newline at end of file diff --git a/pubber/.idea/dictionaries/peringknife.xml b/pubber/.idea/dictionaries/peringknife.xml deleted file mode 100644 index 1f2f3fc05a..0000000000 --- a/pubber/.idea/dictionaries/peringknife.xml +++ /dev/null @@ -1,7 +0,0 @@ - - - - pubber - - - \ No newline at end of file diff --git a/pubber/.idea/encodings.xml b/pubber/.idea/encodings.xml deleted file mode 100644 index 15a15b218a..0000000000 --- a/pubber/.idea/encodings.xml +++ /dev/null @@ -1,4 +0,0 @@ - - - - \ No newline at end of file diff --git a/pubber/.idea/gradle.xml b/pubber/.idea/gradle.xml deleted file mode 100644 index 
a931762ec9..0000000000 --- a/pubber/.idea/gradle.xml +++ /dev/null @@ -1,19 +0,0 @@ - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/jarRepositories.xml b/pubber/.idea/jarRepositories.xml deleted file mode 100644 index 6f70f42344..0000000000 --- a/pubber/.idea/jarRepositories.xml +++ /dev/null @@ -1,30 +0,0 @@ - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_bugsnag_bugsnag_3_6_1.xml b/pubber/.idea/libraries/Gradle__com_bugsnag_bugsnag_3_6_1.xml deleted file mode 100644 index a61dc7e59a..0000000000 --- a/pubber/.idea/libraries/Gradle__com_bugsnag_bugsnag_3_6_1.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_annotations_2_10_3.xml b/pubber/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_annotations_2_10_3.xml deleted file mode 100644 index 940abc9cd6..0000000000 --- a/pubber/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_annotations_2_10_3.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_core_2_10_3.xml b/pubber/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_core_2_10_3.xml deleted file mode 100644 index c39a1aad89..0000000000 --- a/pubber/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_core_2_10_3.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_databind_2_10_3.xml b/pubber/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_databind_2_10_3.xml deleted file mode 100644 index 401e4470cc..0000000000 --- a/pubber/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_databind_2_10_3.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git 
a/pubber/.idea/libraries/Gradle__com_google_api_api_common_1_1_0.xml b/pubber/.idea/libraries/Gradle__com_google_api_api_common_1_1_0.xml deleted file mode 100644 index 6a37163770..0000000000 --- a/pubber/.idea/libraries/Gradle__com_google_api_api_common_1_1_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_google_api_client_google_api_client_1_22_0.xml b/pubber/.idea/libraries/Gradle__com_google_api_client_google_api_client_1_22_0.xml deleted file mode 100644 index f7052b657f..0000000000 --- a/pubber/.idea/libraries/Gradle__com_google_api_client_google_api_client_1_22_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_google_api_gax_1_8_1.xml b/pubber/.idea/libraries/Gradle__com_google_api_gax_1_8_1.xml deleted file mode 100644 index 5afd4e53b5..0000000000 --- a/pubber/.idea/libraries/Gradle__com_google_api_gax_1_8_1.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_google_api_gax_grpc_0_25_1.xml b/pubber/.idea/libraries/Gradle__com_google_api_gax_grpc_0_25_1.xml deleted file mode 100644 index 7dd2f70770..0000000000 --- a/pubber/.idea/libraries/Gradle__com_google_api_gax_grpc_0_25_1.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_google_api_grpc_proto_google_cloud_logging_v2_0_1_20.xml b/pubber/.idea/libraries/Gradle__com_google_api_grpc_proto_google_cloud_logging_v2_0_1_20.xml deleted file mode 100644 index f653b75d6a..0000000000 --- a/pubber/.idea/libraries/Gradle__com_google_api_grpc_proto_google_cloud_logging_v2_0_1_20.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_google_api_grpc_proto_google_common_protos_0_1_20.xml 
b/pubber/.idea/libraries/Gradle__com_google_api_grpc_proto_google_common_protos_0_1_20.xml deleted file mode 100644 index 3ab192cf7d..0000000000 --- a/pubber/.idea/libraries/Gradle__com_google_api_grpc_proto_google_common_protos_0_1_20.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_google_api_grpc_proto_google_iam_v1_0_1_20.xml b/pubber/.idea/libraries/Gradle__com_google_api_grpc_proto_google_iam_v1_0_1_20.xml deleted file mode 100644 index 831f72e025..0000000000 --- a/pubber/.idea/libraries/Gradle__com_google_api_grpc_proto_google_iam_v1_0_1_20.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_google_apis_google_api_services_cloudiot_v1_rev20170922_1_22_0.xml b/pubber/.idea/libraries/Gradle__com_google_apis_google_api_services_cloudiot_v1_rev20170922_1_22_0.xml deleted file mode 100644 index 409ca08042..0000000000 --- a/pubber/.idea/libraries/Gradle__com_google_apis_google_api_services_cloudiot_v1_rev20170922_1_22_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_google_auth_google_auth_library_credentials_0_8_0.xml b/pubber/.idea/libraries/Gradle__com_google_auth_google_auth_library_credentials_0_8_0.xml deleted file mode 100644 index 19f8e9622f..0000000000 --- a/pubber/.idea/libraries/Gradle__com_google_auth_google_auth_library_credentials_0_8_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_google_auth_google_auth_library_oauth2_http_0_8_0.xml b/pubber/.idea/libraries/Gradle__com_google_auth_google_auth_library_oauth2_http_0_8_0.xml deleted file mode 100644 index a9aba07437..0000000000 --- a/pubber/.idea/libraries/Gradle__com_google_auth_google_auth_library_oauth2_http_0_8_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - 
- - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_google_auto_value_auto_value_1_2.xml b/pubber/.idea/libraries/Gradle__com_google_auto_value_auto_value_1_2.xml deleted file mode 100644 index aeea865ae3..0000000000 --- a/pubber/.idea/libraries/Gradle__com_google_auto_value_auto_value_1_2.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_google_cloud_google_cloud_core_1_7_0.xml b/pubber/.idea/libraries/Gradle__com_google_cloud_google_cloud_core_1_7_0.xml deleted file mode 100644 index 49ddf929e7..0000000000 --- a/pubber/.idea/libraries/Gradle__com_google_cloud_google_cloud_core_1_7_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_google_cloud_google_cloud_core_grpc_1_7_0.xml b/pubber/.idea/libraries/Gradle__com_google_cloud_google_cloud_core_grpc_1_7_0.xml deleted file mode 100644 index f09079becb..0000000000 --- a/pubber/.idea/libraries/Gradle__com_google_cloud_google_cloud_core_grpc_1_7_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_google_cloud_google_cloud_logging_1_7_0.xml b/pubber/.idea/libraries/Gradle__com_google_cloud_google_cloud_logging_1_7_0.xml deleted file mode 100644 index 42054ea1de..0000000000 --- a/pubber/.idea/libraries/Gradle__com_google_cloud_google_cloud_logging_1_7_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_google_code_findbugs_jsr305_3_0_0.xml b/pubber/.idea/libraries/Gradle__com_google_code_findbugs_jsr305_3_0_0.xml deleted file mode 100644 index c6616f41e4..0000000000 --- a/pubber/.idea/libraries/Gradle__com_google_code_findbugs_jsr305_3_0_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git 
a/pubber/.idea/libraries/Gradle__com_google_code_gson_gson_2_7.xml b/pubber/.idea/libraries/Gradle__com_google_code_gson_gson_2_7.xml deleted file mode 100644 index cbe1b3266b..0000000000 --- a/pubber/.idea/libraries/Gradle__com_google_code_gson_gson_2_7.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_google_errorprone_error_prone_annotations_2_0_19.xml b/pubber/.idea/libraries/Gradle__com_google_errorprone_error_prone_annotations_2_0_19.xml deleted file mode 100644 index b4cd21969d..0000000000 --- a/pubber/.idea/libraries/Gradle__com_google_errorprone_error_prone_annotations_2_0_19.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_google_guava_guava_22_0.xml b/pubber/.idea/libraries/Gradle__com_google_guava_guava_22_0.xml deleted file mode 100644 index 4c947ec6df..0000000000 --- a/pubber/.idea/libraries/Gradle__com_google_guava_guava_22_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_google_http_client_google_http_client_1_22_0.xml b/pubber/.idea/libraries/Gradle__com_google_http_client_google_http_client_1_22_0.xml deleted file mode 100644 index 6c259c21e1..0000000000 --- a/pubber/.idea/libraries/Gradle__com_google_http_client_google_http_client_1_22_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_google_http_client_google_http_client_jackson2_1_22_0.xml b/pubber/.idea/libraries/Gradle__com_google_http_client_google_http_client_jackson2_1_22_0.xml deleted file mode 100644 index b4ec53cbea..0000000000 --- a/pubber/.idea/libraries/Gradle__com_google_http_client_google_http_client_jackson2_1_22_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git 
a/pubber/.idea/libraries/Gradle__com_google_instrumentation_instrumentation_api_0_4_3.xml b/pubber/.idea/libraries/Gradle__com_google_instrumentation_instrumentation_api_0_4_3.xml deleted file mode 100644 index 07c6748fa9..0000000000 --- a/pubber/.idea/libraries/Gradle__com_google_instrumentation_instrumentation_api_0_4_3.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_google_j2objc_j2objc_annotations_1_1.xml b/pubber/.idea/libraries/Gradle__com_google_j2objc_j2objc_annotations_1_1.xml deleted file mode 100644 index ab45264c2d..0000000000 --- a/pubber/.idea/libraries/Gradle__com_google_j2objc_j2objc_annotations_1_1.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_google_oauth_client_google_oauth_client_1_22_0.xml b/pubber/.idea/libraries/Gradle__com_google_oauth_client_google_oauth_client_1_22_0.xml deleted file mode 100644 index 8549a6371c..0000000000 --- a/pubber/.idea/libraries/Gradle__com_google_oauth_client_google_oauth_client_1_22_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_google_protobuf_protobuf_java_3_3_1.xml b/pubber/.idea/libraries/Gradle__com_google_protobuf_protobuf_java_3_3_1.xml deleted file mode 100644 index e294c29fa5..0000000000 --- a/pubber/.idea/libraries/Gradle__com_google_protobuf_protobuf_java_3_3_1.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_google_protobuf_protobuf_java_util_3_3_1.xml b/pubber/.idea/libraries/Gradle__com_google_protobuf_protobuf_java_util_3_3_1.xml deleted file mode 100644 index 6866f74bf0..0000000000 --- a/pubber/.idea/libraries/Gradle__com_google_protobuf_protobuf_java_util_3_3_1.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git 
a/pubber/.idea/libraries/Gradle__com_hazelcast_hazelcast_3_5_4.xml b/pubber/.idea/libraries/Gradle__com_hazelcast_hazelcast_3_5_4.xml deleted file mode 100644 index 6b097056b4..0000000000 --- a/pubber/.idea/libraries/Gradle__com_hazelcast_hazelcast_3_5_4.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_librato_metrics_librato_java_2_1_0.xml b/pubber/.idea/libraries/Gradle__com_librato_metrics_librato_java_2_1_0.xml deleted file mode 100644 index d3ab2fa027..0000000000 --- a/pubber/.idea/libraries/Gradle__com_librato_metrics_librato_java_2_1_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_librato_metrics_metrics_librato_5_1_0.xml b/pubber/.idea/libraries/Gradle__com_librato_metrics_metrics_librato_5_1_0.xml deleted file mode 100644 index c588bf6754..0000000000 --- a/pubber/.idea/libraries/Gradle__com_librato_metrics_metrics_librato_5_1_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__com_sun_xml_bind_jaxb_impl_2_3_2.xml b/pubber/.idea/libraries/Gradle__com_sun_xml_bind_jaxb_impl_2_3_2.xml deleted file mode 100644 index 35a975da46..0000000000 --- a/pubber/.idea/libraries/Gradle__com_sun_xml_bind_jaxb_impl_2_3_2.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__commons_codec_commons_codec_1_10.xml b/pubber/.idea/libraries/Gradle__commons_codec_commons_codec_1_10.xml deleted file mode 100644 index c84796132f..0000000000 --- a/pubber/.idea/libraries/Gradle__commons_codec_commons_codec_1_10.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__commons_logging_commons_logging_1_1_1.xml b/pubber/.idea/libraries/Gradle__commons_logging_commons_logging_1_1_1.xml deleted file 
mode 100644 index b9fb75155c..0000000000 --- a/pubber/.idea/libraries/Gradle__commons_logging_commons_logging_1_1_1.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__io_dropwizard_metrics_metrics_core_3_2_2.xml b/pubber/.idea/libraries/Gradle__io_dropwizard_metrics_metrics_core_3_2_2.xml deleted file mode 100644 index f2223bcf6b..0000000000 --- a/pubber/.idea/libraries/Gradle__io_dropwizard_metrics_metrics_core_3_2_2.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__io_dropwizard_metrics_metrics_jvm_3_2_2.xml b/pubber/.idea/libraries/Gradle__io_dropwizard_metrics_metrics_jvm_3_2_2.xml deleted file mode 100644 index a4fb7c1897..0000000000 --- a/pubber/.idea/libraries/Gradle__io_dropwizard_metrics_metrics_jvm_3_2_2.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__io_grpc_grpc_auth_1_6_1.xml b/pubber/.idea/libraries/Gradle__io_grpc_grpc_auth_1_6_1.xml deleted file mode 100644 index 8308d1661d..0000000000 --- a/pubber/.idea/libraries/Gradle__io_grpc_grpc_auth_1_6_1.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__io_grpc_grpc_context_1_6_1.xml b/pubber/.idea/libraries/Gradle__io_grpc_grpc_context_1_6_1.xml deleted file mode 100644 index 42b3336f05..0000000000 --- a/pubber/.idea/libraries/Gradle__io_grpc_grpc_context_1_6_1.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__io_grpc_grpc_core_1_6_1.xml b/pubber/.idea/libraries/Gradle__io_grpc_grpc_core_1_6_1.xml deleted file mode 100644 index f4249256ab..0000000000 --- a/pubber/.idea/libraries/Gradle__io_grpc_grpc_core_1_6_1.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git 
a/pubber/.idea/libraries/Gradle__io_grpc_grpc_netty_1_6_1.xml b/pubber/.idea/libraries/Gradle__io_grpc_grpc_netty_1_6_1.xml deleted file mode 100644 index 3b8ead556e..0000000000 --- a/pubber/.idea/libraries/Gradle__io_grpc_grpc_netty_1_6_1.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__io_grpc_grpc_protobuf_1_6_1.xml b/pubber/.idea/libraries/Gradle__io_grpc_grpc_protobuf_1_6_1.xml deleted file mode 100644 index 95d433c45d..0000000000 --- a/pubber/.idea/libraries/Gradle__io_grpc_grpc_protobuf_1_6_1.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__io_grpc_grpc_protobuf_lite_1_6_1.xml b/pubber/.idea/libraries/Gradle__io_grpc_grpc_protobuf_lite_1_6_1.xml deleted file mode 100644 index 9a3dd1b89b..0000000000 --- a/pubber/.idea/libraries/Gradle__io_grpc_grpc_protobuf_lite_1_6_1.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__io_grpc_grpc_stub_1_6_1.xml b/pubber/.idea/libraries/Gradle__io_grpc_grpc_stub_1_6_1.xml deleted file mode 100644 index f282c5df26..0000000000 --- a/pubber/.idea/libraries/Gradle__io_grpc_grpc_stub_1_6_1.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__io_jsonwebtoken_jjwt_0_7_0.xml b/pubber/.idea/libraries/Gradle__io_jsonwebtoken_jjwt_0_7_0.xml deleted file mode 100644 index c255c18f27..0000000000 --- a/pubber/.idea/libraries/Gradle__io_jsonwebtoken_jjwt_0_7_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__io_moquette_moquette_broker_0_10.xml b/pubber/.idea/libraries/Gradle__io_moquette_moquette_broker_0_10.xml deleted file mode 100644 index 0751597837..0000000000 --- a/pubber/.idea/libraries/Gradle__io_moquette_moquette_broker_0_10.xml +++ /dev/null 
@@ -1,9 +0,0 @@ - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_buffer_4_1_14_Final.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_buffer_4_1_14_Final.xml deleted file mode 100644 index 30fb818235..0000000000 --- a/pubber/.idea/libraries/Gradle__io_netty_netty_buffer_4_1_14_Final.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_codec_4_1_14_Final.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_codec_4_1_14_Final.xml deleted file mode 100644 index 97cd8c9cb9..0000000000 --- a/pubber/.idea/libraries/Gradle__io_netty_netty_codec_4_1_14_Final.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_codec_http2_4_1_14_Final.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_codec_http2_4_1_14_Final.xml deleted file mode 100644 index 54abbbe3aa..0000000000 --- a/pubber/.idea/libraries/Gradle__io_netty_netty_codec_http2_4_1_14_Final.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_codec_http_4_1_14_Final.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_codec_http_4_1_14_Final.xml deleted file mode 100644 index 58ac06ba5b..0000000000 --- a/pubber/.idea/libraries/Gradle__io_netty_netty_codec_http_4_1_14_Final.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_codec_mqtt_4_1_12_Final.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_codec_mqtt_4_1_12_Final.xml deleted file mode 100644 index 68118cd002..0000000000 --- a/pubber/.idea/libraries/Gradle__io_netty_netty_codec_mqtt_4_1_12_Final.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git 
a/pubber/.idea/libraries/Gradle__io_netty_netty_codec_socks_4_1_14_Final.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_codec_socks_4_1_14_Final.xml deleted file mode 100644 index 614e913e87..0000000000 --- a/pubber/.idea/libraries/Gradle__io_netty_netty_codec_socks_4_1_14_Final.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_common_4_1_14_Final.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_common_4_1_14_Final.xml deleted file mode 100644 index 1a98e17a91..0000000000 --- a/pubber/.idea/libraries/Gradle__io_netty_netty_common_4_1_14_Final.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_handler_4_1_14_Final.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_handler_4_1_14_Final.xml deleted file mode 100644 index 9298ca6e14..0000000000 --- a/pubber/.idea/libraries/Gradle__io_netty_netty_handler_4_1_14_Final.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_handler_proxy_4_1_14_Final.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_handler_proxy_4_1_14_Final.xml deleted file mode 100644 index ca38677084..0000000000 --- a/pubber/.idea/libraries/Gradle__io_netty_netty_handler_proxy_4_1_14_Final.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_resolver_4_1_14_Final.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_resolver_4_1_14_Final.xml deleted file mode 100644 index b70942e76b..0000000000 --- a/pubber/.idea/libraries/Gradle__io_netty_netty_resolver_4_1_14_Final.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_tcnative_boringssl_static_2_0_3_Final.xml 
b/pubber/.idea/libraries/Gradle__io_netty_netty_tcnative_boringssl_static_2_0_3_Final.xml deleted file mode 100644 index e00975f2d7..0000000000 --- a/pubber/.idea/libraries/Gradle__io_netty_netty_tcnative_boringssl_static_2_0_3_Final.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_transport_4_1_14_Final.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_transport_4_1_14_Final.xml deleted file mode 100644 index f055031276..0000000000 --- a/pubber/.idea/libraries/Gradle__io_netty_netty_transport_4_1_14_Final.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_transport_native_epoll_4_1_12_Final_linux_x86_64.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_transport_native_epoll_4_1_12_Final_linux_x86_64.xml deleted file mode 100644 index 467269eee3..0000000000 --- a/pubber/.idea/libraries/Gradle__io_netty_netty_transport_native_epoll_4_1_12_Final_linux_x86_64.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_transport_native_unix_common_4_1_12_Final.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_transport_native_unix_common_4_1_12_Final.xml deleted file mode 100644 index a3b889ee62..0000000000 --- a/pubber/.idea/libraries/Gradle__io_netty_netty_transport_native_unix_common_4_1_12_Final.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__io_opencensus_opencensus_api_0_5_1.xml b/pubber/.idea/libraries/Gradle__io_opencensus_opencensus_api_0_5_1.xml deleted file mode 100644 index 5cdfe84133..0000000000 --- a/pubber/.idea/libraries/Gradle__io_opencensus_opencensus_api_0_5_1.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git 
a/pubber/.idea/libraries/Gradle__javax_activation_javax_activation_api_1_2_0.xml b/pubber/.idea/libraries/Gradle__javax_activation_javax_activation_api_1_2_0.xml deleted file mode 100644 index f480add6e2..0000000000 --- a/pubber/.idea/libraries/Gradle__javax_activation_javax_activation_api_1_2_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__javax_xml_bind_jaxb_api_2_3_1.xml b/pubber/.idea/libraries/Gradle__javax_xml_bind_jaxb_api_2_3_1.xml deleted file mode 100644 index 434a174d24..0000000000 --- a/pubber/.idea/libraries/Gradle__javax_xml_bind_jaxb_api_2_3_1.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__joda_time_joda_time_2_9_7.xml b/pubber/.idea/libraries/Gradle__joda_time_joda_time_2_9_7.xml deleted file mode 100644 index f45e0d77bd..0000000000 --- a/pubber/.idea/libraries/Gradle__joda_time_joda_time_2_9_7.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__junit_junit_4_13.xml b/pubber/.idea/libraries/Gradle__junit_junit_4_13.xml deleted file mode 100644 index 0cef6bc81e..0000000000 --- a/pubber/.idea/libraries/Gradle__junit_junit_4_13.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__org_apache_httpcomponents_httpclient_4_0_1.xml b/pubber/.idea/libraries/Gradle__org_apache_httpcomponents_httpclient_4_0_1.xml deleted file mode 100644 index 80d562c2fb..0000000000 --- a/pubber/.idea/libraries/Gradle__org_apache_httpcomponents_httpclient_4_0_1.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__org_apache_httpcomponents_httpcore_4_0_1.xml b/pubber/.idea/libraries/Gradle__org_apache_httpcomponents_httpcore_4_0_1.xml deleted file mode 100644 index e203c6b668..0000000000 --- 
a/pubber/.idea/libraries/Gradle__org_apache_httpcomponents_httpcore_4_0_1.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__org_codehaus_mojo_animal_sniffer_annotations_1_14.xml b/pubber/.idea/libraries/Gradle__org_codehaus_mojo_animal_sniffer_annotations_1_14.xml deleted file mode 100644 index 72ee118d97..0000000000 --- a/pubber/.idea/libraries/Gradle__org_codehaus_mojo_animal_sniffer_annotations_1_14.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__org_eclipse_paho_org_eclipse_paho_client_mqttv3_1_1_0.xml b/pubber/.idea/libraries/Gradle__org_eclipse_paho_org_eclipse_paho_client_mqttv3_1_1_0.xml deleted file mode 100644 index 01970270a7..0000000000 --- a/pubber/.idea/libraries/Gradle__org_eclipse_paho_org_eclipse_paho_client_mqttv3_1_1_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__org_hamcrest_hamcrest_core_1_3.xml b/pubber/.idea/libraries/Gradle__org_hamcrest_hamcrest_core_1_3.xml deleted file mode 100644 index 8262f729c2..0000000000 --- a/pubber/.idea/libraries/Gradle__org_hamcrest_hamcrest_core_1_3.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__org_json_json_20160810.xml b/pubber/.idea/libraries/Gradle__org_json_json_20160810.xml deleted file mode 100644 index 64dc62f3d4..0000000000 --- a/pubber/.idea/libraries/Gradle__org_json_json_20160810.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__org_mockito_mockito_core_1_10_19.xml b/pubber/.idea/libraries/Gradle__org_mockito_mockito_core_1_10_19.xml deleted file mode 100644 index 6aa377ef4f..0000000000 --- a/pubber/.idea/libraries/Gradle__org_mockito_mockito_core_1_10_19.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - 
- - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__org_objenesis_objenesis_2_1.xml b/pubber/.idea/libraries/Gradle__org_objenesis_objenesis_2_1.xml deleted file mode 100644 index e6b52ad30d..0000000000 --- a/pubber/.idea/libraries/Gradle__org_objenesis_objenesis_2_1.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__org_slf4j_slf4j_api_1_7_25.xml b/pubber/.idea/libraries/Gradle__org_slf4j_slf4j_api_1_7_25.xml deleted file mode 100644 index dd23f3e0c1..0000000000 --- a/pubber/.idea/libraries/Gradle__org_slf4j_slf4j_api_1_7_25.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__org_slf4j_slf4j_simple_1_7_5.xml b/pubber/.idea/libraries/Gradle__org_slf4j_slf4j_simple_1_7_5.xml deleted file mode 100644 index 586ac1e599..0000000000 --- a/pubber/.idea/libraries/Gradle__org_slf4j_slf4j_simple_1_7_5.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/libraries/Gradle__org_threeten_threetenbp_1_3_3.xml b/pubber/.idea/libraries/Gradle__org_threeten_threetenbp_1_3_3.xml deleted file mode 100644 index 0fcafe29d0..0000000000 --- a/pubber/.idea/libraries/Gradle__org_threeten_threetenbp_1_3_3.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/modules.xml b/pubber/.idea/modules.xml deleted file mode 100644 index f4ca1e7a1b..0000000000 --- a/pubber/.idea/modules.xml +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/modules/datafmt.iml b/pubber/.idea/modules/datafmt.iml deleted file mode 100644 index baed4f134d..0000000000 --- a/pubber/.idea/modules/datafmt.iml +++ /dev/null @@ -1,21 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/uiDesigner.xml b/pubber/.idea/uiDesigner.xml 
deleted file mode 100644 index e96534fb27..0000000000 --- a/pubber/.idea/uiDesigner.xml +++ /dev/null @@ -1,124 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/.idea/vcs.xml b/pubber/.idea/vcs.xml deleted file mode 100644 index 26b269dd99..0000000000 --- a/pubber/.idea/vcs.xml +++ /dev/null @@ -1,61 +0,0 @@ - - - - - - - - - - \ No newline at end of file diff --git a/pubber/bin/build b/pubber/bin/build deleted file mode 100755 index a2bd6be40a..0000000000 --- a/pubber/bin/build +++ /dev/null @@ -1,11 +0,0 @@ -#!/bin/bash -e - -rundir=$(dirname $0) -cd $rundir/.. - -echo Running in $PWD - -rm -rf build - -./gradlew build -./gradlew shadow diff --git a/pubber/bin/keygen b/pubber/bin/keygen deleted file mode 100755 index 1b3cf60d67..0000000000 --- a/pubber/bin/keygen +++ /dev/null @@ -1,26 +0,0 @@ -#!/bin/bash -e - -ROOT=$(realpath $(dirname $0)/../..) -cd $ROOT - -TARGET_PREFIX=local/rsa_ - -PUBLIC_CERT=${TARGET_PREFIX}cert.pem -PRIVATE_CERT=${TARGET_PREFIX}private.pem -PRIVATE_KEY=${TARGET_PREFIX}private.pkcs8 - -if [ -f $PUBLIC_CERT ]; then - echo $PUBLIC_CERT already exists, exiting. - false -fi -if [ -f $PRIVATE_CERT ]; then - echo $PRIVATE_CERT already exists, exiting. - false -fi -if [ -f $PRIVATE_KEY ]; then - echo $PRIVATE_KEY already exists, exiting. - false -fi - -openssl req -x509 -nodes -newkey rsa:2048 -keyout $PRIVATE_CERT -days 1000000 -out $PUBLIC_CERT -subj "/CN=unused" -openssl pkcs8 -topk8 -inform PEM -outform DER -in $PRIVATE_CERT -nocrypt > $PRIVATE_KEY diff --git a/pubber/bin/run b/pubber/bin/run deleted file mode 100755 index ee14f351aa..0000000000 --- a/pubber/bin/run +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/bash -e - -ROOT=$(realpath $(dirname $0)/../..) 
-cd $ROOT - -conf_file=local/pubber.json - -if [ ! -f $conf_file ]; then - echo Pubber config file not found: $(realpath $conf_file) - false -fi - -java -jar pubber/build/libs/pubber-1.0-SNAPSHOT-all.jar $conf_file diff --git a/pubber/build.gradle b/pubber/build.gradle deleted file mode 100644 index 1bdfb151a3..0000000000 --- a/pubber/build.gradle +++ /dev/null @@ -1,54 +0,0 @@ -buildscript { - repositories { - maven { - url "https://plugins.gradle.org/m2/" - } - } - dependencies { - classpath "com.github.jengelman.gradle.plugins:shadow:5.2.0" - } -} - -plugins { - id 'com.github.johnrengelman.shadow' version '5.2.0' - id 'java' - id 'maven' -} - -group 'daq-pubber' -version '1.0-SNAPSHOT' - -sourceCompatibility = 1.8 - -jar { - manifest { - attributes 'Main-Class': 'daq.pubber.Pubber' - } -} - -repositories { - mavenCentral() - mavenLocal() - jcenter() -} - -dependencies { - compile group: 'org.slf4j', name: 'slf4j-simple', version:'1.7.5' - compile 'io.jsonwebtoken:jjwt:0.7.0' - compile 'javax.xml.bind:jaxb-api:2.3.1' - compile 'com.sun.xml.bind:jaxb-impl:2.3.2' - compile 'com.google.guava:guava:22.0' - compile 'com.google.cloud:google-cloud-logging:1.7.0' - compile('com.google.api-client:google-api-client:1.22.0') { - exclude group: 'com.google.guava', module: 'guava-jdk5' - } - compile 'com.fasterxml.jackson.core:jackson-databind:2.11.0' - compile('com.google.apis:google-api-services-cloudiot:v1-rev20170922-1.22.0') { - exclude group: 'com.google.guava', module: 'guava-jdk5' - } - compile 'joda-time:joda-time:2.9.7' - compile 'org.eclipse.paho:org.eclipse.paho.client.mqttv3:1.1.0' - compile 'io.moquette:moquette-broker:0.10' - testCompile group: 'junit', name: 'junit', version: '4.13' - testCompile 'org.mockito:mockito-core:1.10.19' -} diff --git a/pubber/gradle/wrapper/gradle-wrapper.jar b/pubber/gradle/wrapper/gradle-wrapper.jar deleted file mode 100644 index 01b8bf6b1f..0000000000 Binary files a/pubber/gradle/wrapper/gradle-wrapper.jar and /dev/null differ 
diff --git a/pubber/gradle/wrapper/gradle-wrapper.properties b/pubber/gradle/wrapper/gradle-wrapper.properties deleted file mode 100644 index f07e1a85bc..0000000000 --- a/pubber/gradle/wrapper/gradle-wrapper.properties +++ /dev/null @@ -1,6 +0,0 @@ -#Tue Feb 11 09:15:14 PST 2020 -distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-all.zip -distributionBase=GRADLE_USER_HOME -distributionPath=wrapper/dists -zipStorePath=wrapper/dists -zipStoreBase=GRADLE_USER_HOME diff --git a/pubber/gradlew b/pubber/gradlew deleted file mode 100755 index 4453ccea33..0000000000 --- a/pubber/gradlew +++ /dev/null @@ -1,172 +0,0 @@ -#!/usr/bin/env sh - -############################################################################## -## -## Gradle start up script for UN*X -## -############################################################################## - -# Attempt to set APP_HOME -# Resolve links: $0 may be a link -PRG="$0" -# Need this for relative symlinks. -while [ -h "$PRG" ] ; do - ls=`ls -ld "$PRG"` - link=`expr "$ls" : '.*-> \(.*\)$'` - if expr "$link" : '/.*' > /dev/null; then - PRG="$link" - else - PRG=`dirname "$PRG"`"/$link" - fi -done -SAVED="`pwd`" -cd "`dirname \"$PRG\"`/" >/dev/null -APP_HOME="`pwd -P`" -cd "$SAVED" >/dev/null - -APP_NAME="Gradle" -APP_BASE_NAME=`basename "$0"` - -# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS="" - -# Use the maximum available, or set MAX_FD != -1 to use that value. -MAX_FD="maximum" - -warn ( ) { - echo "$*" -} - -die ( ) { - echo - echo "$*" - echo - exit 1 -} - -# OS specific support (must be 'true' or 'false'). -cygwin=false -msys=false -darwin=false -nonstop=false -case "`uname`" in - CYGWIN* ) - cygwin=true - ;; - Darwin* ) - darwin=true - ;; - MINGW* ) - msys=true - ;; - NONSTOP* ) - nonstop=true - ;; -esac - -CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar - -# Determine the Java command to use to start the JVM. 
-if [ -n "$JAVA_HOME" ] ; then - if [ -x "$JAVA_HOME/jre/sh/java" ] ; then - # IBM's JDK on AIX uses strange locations for the executables - JAVACMD="$JAVA_HOME/jre/sh/java" - else - JAVACMD="$JAVA_HOME/bin/java" - fi - if [ ! -x "$JAVACMD" ] ; then - die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME - -Please set the JAVA_HOME variable in your environment to match the -location of your Java installation." - fi -else - JAVACMD="java" - which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. - -Please set the JAVA_HOME variable in your environment to match the -location of your Java installation." -fi - -# Increase the maximum file descriptors if we can. -if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then - MAX_FD_LIMIT=`ulimit -H -n` - if [ $? -eq 0 ] ; then - if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then - MAX_FD="$MAX_FD_LIMIT" - fi - ulimit -n $MAX_FD - if [ $? -ne 0 ] ; then - warn "Could not set maximum file descriptor limit: $MAX_FD" - fi - else - warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" - fi -fi - -# For Darwin, add options to specify how the application appears in the dock -if $darwin; then - GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" -fi - -# For Cygwin, switch paths to Windows format before running java -if $cygwin ; then - APP_HOME=`cygpath --path --mixed "$APP_HOME"` - CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` - JAVACMD=`cygpath --unix "$JAVACMD"` - - # We build the pattern for arguments to be converted via cygpath - ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` - SEP="" - for dir in $ROOTDIRSRAW ; do - ROOTDIRS="$ROOTDIRS$SEP$dir" - SEP="|" - done - OURCYGPATTERN="(^($ROOTDIRS))" - # Add a user-defined pattern to the cygpath arguments - if [ "$GRADLE_CYGPATTERN" != "" ] ; then - OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" - 
fi - # Now convert the arguments - kludge to limit ourselves to /bin/sh - i=0 - for arg in "$@" ; do - CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` - CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option - - if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition - eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` - else - eval `echo args$i`="\"$arg\"" - fi - i=$((i+1)) - done - case $i in - (0) set -- ;; - (1) set -- "$args0" ;; - (2) set -- "$args0" "$args1" ;; - (3) set -- "$args0" "$args1" "$args2" ;; - (4) set -- "$args0" "$args1" "$args2" "$args3" ;; - (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; - (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; - (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; - (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; - (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; - esac -fi - -# Escape application args -save ( ) { - for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done - echo " " -} -APP_ARGS=$(save "$@") - -# Collect all arguments for the java command, following the shell quoting and substitution rules -eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" - -# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong -if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then - cd "$(dirname "$0")" -fi - -exec "$JAVACMD" "$@" diff --git a/pubber/local b/pubber/local deleted file mode 120000 index 0a4de1e828..0000000000 --- a/pubber/local +++ /dev/null @@ -1 +0,0 @@ -../local/ \ No newline at end of file diff --git a/pubber/pubber.iml b/pubber/pubber.iml deleted file mode 100644 index e0065ccd31..0000000000 --- a/pubber/pubber.iml +++ /dev/null @@ -1,92 +0,0 @@ - - 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/pubber/settings.gradle b/pubber/settings.gradle deleted file mode 100644 index 8b13789179..0000000000 --- a/pubber/settings.gradle +++ /dev/null @@ -1 +0,0 @@ - diff --git a/pubber/src/main/java/daq/pubber/AbstractPoint.java b/pubber/src/main/java/daq/pubber/AbstractPoint.java deleted file mode 100644 index 44bd581c68..0000000000 --- a/pubber/src/main/java/daq/pubber/AbstractPoint.java +++ /dev/null @@ -1,15 +0,0 @@ -package daq.pubber; - -import daq.udmi.Message.PointData; -import daq.udmi.Message.PointState; - -public interface AbstractPoint { - - String getName(); - - PointData getData(); - - void updateData(); - - PointState getState(); -} diff --git a/pubber/src/main/java/daq/pubber/Configuration.java b/pubber/src/main/java/daq/pubber/Configuration.java deleted file mode 100644 index 7c362781ef..0000000000 --- a/pubber/src/main/java/daq/pubber/Configuration.java +++ /dev/null @@ -1,17 +0,0 @@ -package daq.pubber; - -/** - */ -public class Configuration { - public String bridgeHostname = "mqtt.googleapis.com"; - public String bridgePort = "443"; - public String projectId; - public String cloudRegion; - public String registryId; - public String gatewayId; - public String deviceId; - public String keyFile = "local/rsa_private.pkcs8"; - public byte[] keyBytes; - public String algorithm = "RS256"; - public Object extraField; -} diff --git a/pubber/src/main/java/daq/pubber/GatewayError.java b/pubber/src/main/java/daq/pubber/GatewayError.java deleted file mode 100644 index fcbc4954c9..0000000000 --- a/pubber/src/main/java/daq/pubber/GatewayError.java +++ /dev/null @@ -1,14 +0,0 @@ -package daq.pubber; - -public class GatewayError { - public String error_type; - public String description; - public String device_id; - public MqttMessageInfo 
mqtt_message_info; - - public static class MqttMessageInfo { - public String message_type; - public String topic; - public String packet_id; - } -} diff --git a/pubber/src/main/java/daq/pubber/JwtAuthorization.java b/pubber/src/main/java/daq/pubber/JwtAuthorization.java deleted file mode 100644 index cd17be9ba7..0000000000 --- a/pubber/src/main/java/daq/pubber/JwtAuthorization.java +++ /dev/null @@ -1,11 +0,0 @@ -package daq.pubber; - -import java.util.Arrays; - -public class JwtAuthorization { - public String authorization; - - public JwtAuthorization(String jwtToken) { - authorization = jwtToken; - } -} diff --git a/pubber/src/main/java/daq/pubber/MqttPublisher.java b/pubber/src/main/java/daq/pubber/MqttPublisher.java deleted file mode 100644 index 9358ee3546..0000000000 --- a/pubber/src/main/java/daq/pubber/MqttPublisher.java +++ /dev/null @@ -1,364 +0,0 @@ -package daq.pubber; - -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.SerializationFeature; -import com.fasterxml.jackson.databind.util.ISO8601DateFormat; -import com.google.common.base.Preconditions; -import com.google.common.cache.CacheLoader; -import com.google.common.cache.RemovalNotification; -import io.jsonwebtoken.JwtBuilder; -import io.jsonwebtoken.Jwts; -import io.jsonwebtoken.SignatureAlgorithm; -import org.eclipse.paho.client.mqttv3.*; -import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence; -import org.joda.time.DateTime; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.nio.charset.StandardCharsets; -import java.security.KeyFactory; -import java.security.PrivateKey; -import java.security.spec.PKCS8EncodedKeySpec; -import java.util.Map; -import java.util.Set; -import java.util.concurrent.*; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.function.Consumer; - -import static com.google.common.base.Preconditions.checkNotNull; - -/** - * Handle 
publishing sensor data to a Cloud IoT MQTT endpoint. - */ -public class MqttPublisher { - - private static final Logger LOG = LoggerFactory.getLogger(MqttPublisher.class); - - private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper() - .disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS) - .setDateFormat(new ISO8601DateFormat()) - .setSerializationInclusion(JsonInclude.Include.NON_NULL); - - // Indicate if this message should be a MQTT 'retained' message. - private static final boolean SHOULD_RETAIN = false; - - private static final int MQTT_QOS = 1; - private static final String CONFIG_UPDATE_TOPIC_FMT = "/devices/%s/config"; - private static final String ERRORS_TOPIC_FMT = "/devices/%s/errors"; - private static final String UNUSED_ACCOUNT_NAME = "unused"; - private static final int INITIALIZE_TIME_MS = 20000; - - private static final String MESSAGE_TOPIC_FORMAT = "/devices/%s/%s"; - private static final String BROKER_URL_FORMAT = "ssl://%s:%s"; - private static final String CLIENT_ID_FORMAT = "projects/%s/locations/%s/registries/%s/devices/%s"; - private static final int PUBLISH_THREAD_COUNT = 10; - private static final String HANDLER_KEY_FORMAT = "%s/%s"; - - private final Semaphore connectionLock = new Semaphore(1); - - private final Map mqttClients = new ConcurrentHashMap<>(); - - private final ExecutorService publisherExecutor = - Executors.newFixedThreadPool(PUBLISH_THREAD_COUNT); - - private final Configuration configuration; - private final String registryId; - - private final AtomicInteger publishCounter = new AtomicInteger(0); - private final AtomicInteger errorCounter = new AtomicInteger(0); - private final AtomicInteger expiredCounter = new AtomicInteger(0); - private final Map> handlers = new ConcurrentHashMap<>(); - private final Map> handlersType = new ConcurrentHashMap<>(); - private final Consumer onError; - - MqttPublisher(Configuration configuration, Consumer onError) { - this.configuration = configuration; - this.registryId = 
configuration.registryId; - this.onError = onError; - validateCloudIoTOptions(); - } - - void publish(String deviceId, String topic, Object data) { - Preconditions.checkNotNull(deviceId, "publish deviceId"); - LOG.debug("Publishing in background " + registryId + "/" + deviceId); - publisherExecutor.submit(() -> publishCore(deviceId, topic, data)); - } - - private void publishCore(String deviceId, String topic, Object data) { - try { - String payload = OBJECT_MAPPER.writeValueAsString(data); - sendMessage(deviceId, getMessageTopic(deviceId, topic), payload.getBytes()); - LOG.debug("Publishing complete " + registryId + "/" + deviceId); - } catch (Exception e) { - errorCounter.incrementAndGet(); - LOG.warn(String.format("Publish failed for %s: %s", deviceId, e)); - if (configuration.gatewayId == null) { - closeDeviceClient(deviceId); - } else { - close(); - } - } - } - - private void closeDeviceClient(String deviceId) { - MqttClient removed = mqttClients.remove(deviceId); - if (removed != null) { - try { - removed.close(); - } catch (Exception e) { - LOG.error("Error closing MQTT client: " + e.toString()); - } - } - } - - void close() { - Set clients = mqttClients.keySet(); - for (String client : clients) { - closeDeviceClient(client); - } - } - - long clientCount() { - return mqttClients.size(); - } - - private void validateCloudIoTOptions() { - try { - checkNotNull(configuration.bridgeHostname, "bridgeHostname"); - checkNotNull(configuration.bridgePort, "bridgePort"); - checkNotNull(configuration.projectId, "projectId"); - checkNotNull(configuration.cloudRegion, "cloudRegion"); - checkNotNull(configuration.keyBytes, "keyBytes"); - checkNotNull(configuration.algorithm, "algorithm"); - } catch (Exception e) { - throw new IllegalStateException("Invalid Cloud IoT Options", e); - } - } - - private MqttClient newBoundClient(String deviceId) { - try { - String gatewayId = configuration.gatewayId; - LOG.debug("Connecting through gateway " + gatewayId); - MqttClient 
mqttClient = getConnectedClient(gatewayId); - String topic = String.format("/devices/%s/attach", deviceId); - String payload = ""; - LOG.info("Publishing attach message to topic " + topic); - mqttClient.publish(topic, payload.getBytes(StandardCharsets.UTF_8.name()), MQTT_QOS, SHOULD_RETAIN); - return mqttClient; - } catch (Exception e) { - throw new RuntimeException("While binding client " + deviceId, e); - } - } - - private MqttClient newMqttClient(String deviceId) { - try { - Preconditions.checkNotNull(registryId, "registryId is null"); - Preconditions.checkNotNull(deviceId, "deviceId is null"); - MqttClient mqttClient = new MqttClient(getBrokerUrl(), getClientId(deviceId), - new MemoryPersistence()); - return mqttClient; - } catch (Exception e) { - errorCounter.incrementAndGet(); - throw new RuntimeException("Creating new MQTT client " + deviceId, e); - } - } - - private MqttClient connectMqttClient(String deviceId) { - try { - if (!connectionLock.tryAcquire(INITIALIZE_TIME_MS, TimeUnit.MILLISECONDS)) { - throw new RuntimeException("Timeout waiting for connection lock"); - } - MqttClient mqttClient = newMqttClient(deviceId); - if (mqttClient.isConnected()) { - return mqttClient; - } - LOG.info("Attempting connection to " + registryId + ":" + deviceId); - - mqttClient.setCallback(new MqttCallbackHandler(deviceId)); - mqttClient.setTimeToWait(INITIALIZE_TIME_MS); - - MqttConnectOptions options = new MqttConnectOptions(); - options.setMqttVersion(MqttConnectOptions.MQTT_VERSION_3_1_1); - options.setUserName(UNUSED_ACCOUNT_NAME); - options.setMaxInflight(PUBLISH_THREAD_COUNT * 2); - options.setConnectionTimeout(INITIALIZE_TIME_MS); - - options.setPassword(createJwt()); - - mqttClient.connect(options); - - subscribeToUpdates(mqttClient, deviceId); - return mqttClient; - } catch (Exception e) { - throw new RuntimeException("While connecting mqtt client " + deviceId, e); - } finally { - connectionLock.release(); - } - } - - private char[] createJwt() throws Exception { 
- return createJwt(configuration.projectId, configuration.keyBytes, configuration.algorithm) - .toCharArray(); - } - - private String getClientId(String deviceId) { - // Create our MQTT client. The mqttClientId is a unique string that identifies this device. For - // Google Cloud IoT, it must be in the format below. - return String.format(CLIENT_ID_FORMAT, configuration.projectId, configuration.cloudRegion, - registryId, deviceId); - } - - private String getBrokerUrl() { - // Build the connection string for Google's Cloud IoT MQTT server. Only SSL connections are - // accepted. For server authentication, the JVM's root certificates are used. - return String.format(BROKER_URL_FORMAT, configuration.bridgeHostname, configuration.bridgePort); - } - - private String getMessageTopic(String deviceId, String topic) { - return String.format(MESSAGE_TOPIC_FORMAT, deviceId, topic); - } - - private void subscribeToUpdates(MqttClient client, String deviceId) { - subscribeTopic(client, String.format(CONFIG_UPDATE_TOPIC_FMT, deviceId)); - subscribeTopic(client, String.format(ERRORS_TOPIC_FMT, deviceId)); - } - - private void subscribeTopic(MqttClient client, String updateTopic) { - try { - client.subscribe(updateTopic); - } catch (MqttException e) { - throw new RuntimeException("While subscribing to MQTT topic " + updateTopic, e); - } - } - - public PublisherStats getStatistics() { - return new PublisherStats(); - } - - @SuppressWarnings("unchecked") - public void registerHandler(String deviceId, String mqttTopic, - Consumer handler, Class messageType) { - String key = getHandlerKey(getMessageTopic(deviceId, mqttTopic)); - if (handler == null) { - handlers.remove(key); - handlersType.remove(key); - } else if (handlers.put(key, (Consumer) handler) == null) { - handlersType.put(key, (Class) messageType); - } else { - throw new IllegalStateException("Overwriting existing handler for " + key); - } - } - - private String getHandlerKey(String configTopic) { - return 
String.format(HANDLER_KEY_FORMAT, registryId, configTopic); - } - - public void connect(String deviceId) { - getConnectedClient(deviceId); - } - - private class MqttCallbackHandler implements MqttCallback { - - private final String deviceId; - - MqttCallbackHandler(String deviceId) { - this.deviceId = deviceId; - } - - /** - * @see MqttCallback#connectionLost(Throwable) - */ - public void connectionLost(Throwable cause) { - LOG.warn("MQTT Connection Lost", cause); - } - - /** - * @see MqttCallback#deliveryComplete(IMqttDeliveryToken) - */ - public void deliveryComplete(IMqttDeliveryToken token) { - } - - /** - * @see MqttCallback#messageArrived(String, MqttMessage) - */ - public void messageArrived(String topic, MqttMessage message) { - String handlerKey = getHandlerKey(topic); - Consumer handler = handlers.get(handlerKey); - Class type = handlersType.get(handlerKey); - if (handler == null) { - onError.accept(new RuntimeException("No registered handler for " + handlerKey)); - } else if (message.toString().length() == 0) { - LOG.warn("Received message is empty for " + handlerKey); - handler.accept(null); - } else { - try { - handler.accept(OBJECT_MAPPER.readValue(message.toString(), type)); - } catch (Exception e) { - onError.accept(e); - } - } - } - } - - private void sendMessage(String deviceId, String mqttTopic, - byte[] mqttMessage) throws Exception { - LOG.debug("Sending message to " + mqttTopic); - getConnectedClient(deviceId).publish(mqttTopic, mqttMessage, MQTT_QOS, SHOULD_RETAIN); - publishCounter.incrementAndGet(); - } - - private MqttClient getConnectedClient(String deviceId) { - try { - String gatewayId = configuration.gatewayId; - if (gatewayId != null && !gatewayId.equals(deviceId)) { - return mqttClients.computeIfAbsent(deviceId, this::newBoundClient); - } - return mqttClients.computeIfAbsent(deviceId, this::connectMqttClient); - } catch (Exception e) { - throw new RuntimeException("While getting mqtt client " + deviceId + ": " + e.toString(), e); - } 
- } - - /** Load a PKCS8 encoded keyfile from the given path. */ - private PrivateKey loadKeyBytes(byte[] keyBytes, String algorithm) throws Exception { - try { - PKCS8EncodedKeySpec spec = new PKCS8EncodedKeySpec(keyBytes); - KeyFactory kf = KeyFactory.getInstance(algorithm); - return kf.generatePrivate(spec); - } catch (Exception e) { - throw new IllegalArgumentException("Loading key bytes", e); - } - } - - /** Create a Cloud IoT JWT for the given project id, signed with the given private key */ - protected String createJwt(String projectId, byte[] privateKeyBytes, String algorithm) - throws Exception { - DateTime now = new DateTime(); - // Create a JWT to authenticate this device. The device will be disconnected after the token - // expires, and will have to reconnect with a new token. The audience field should always be set - // to the GCP project id. - JwtBuilder jwtBuilder = - Jwts.builder() - .setIssuedAt(now.toDate()) - .setExpiration(now.plusMinutes(60).toDate()) - .setAudience(projectId); - - if (algorithm.equals("RS256")) { - PrivateKey privateKey = loadKeyBytes(privateKeyBytes, "RSA"); - return jwtBuilder.signWith(SignatureAlgorithm.RS256, privateKey).compact(); - } else if (algorithm.equals("ES256")) { - PrivateKey privateKey = loadKeyBytes(privateKeyBytes, "EC"); - return jwtBuilder.signWith(SignatureAlgorithm.ES256, privateKey).compact(); - } else { - throw new IllegalArgumentException( - "Invalid algorithm " + algorithm + ". 
Should be one of 'RS256' or 'ES256'."); - } - } - - public class PublisherStats { - public long clientCount = mqttClients.size(); - public int publishCount = publishCounter.getAndSet(0); - public int errorCount = errorCounter.getAndSet(0); - } -} diff --git a/pubber/src/main/java/daq/pubber/Pubber.java b/pubber/src/main/java/daq/pubber/Pubber.java deleted file mode 100644 index 5f28a08efa..0000000000 --- a/pubber/src/main/java/daq/pubber/Pubber.java +++ /dev/null @@ -1,282 +0,0 @@ -package daq.pubber; - -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Preconditions; -import daq.udmi.Entry; -import daq.udmi.Message; -import daq.udmi.Message.Pointset; -import daq.udmi.Message.PointsetState; -import daq.udmi.Message.State; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.File; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.Date; -import java.util.HashSet; -import java.util.Set; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.ScheduledFuture; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicInteger; - -public class Pubber { - - private static final Logger LOG = LoggerFactory.getLogger(Pubber.class); - private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper() - .setSerializationInclusion(JsonInclude.Include.NON_NULL); - - private static final String POINTSET_TOPIC = "events/pointset"; - private static final String SYSTEM_TOPIC = "events/system"; - private static final String STATE_TOPIC = "state"; - private static final String CONFIG_TOPIC = "config"; - private static final String ERROR_TOPIC = "errors"; - - private static final int MIN_REPORT_MS = 200; - private static final int DEFAULT_REPORT_MS = 5000; - private static final int 
CONFIG_WAIT_TIME_MS = 10000; - private static final int STATE_THROTTLE_MS = 1500; - private static final String CONFIG_ERROR_STATUS_KEY = "config_error"; - private static final int LOGGING_MOD_COUNT = 10; - - private final ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor(); - - private final Configuration configuration; - private final AtomicInteger messageDelayMs = new AtomicInteger(DEFAULT_REPORT_MS); - private final CountDownLatch configLatch = new CountDownLatch(1); - - private final State deviceState = new State(); - private final Pointset devicePoints = new Pointset(); - private final Set allPoints = new HashSet<>(); - - private MqttPublisher mqttPublisher; - private ScheduledFuture scheduledFuture; - private long lastStateTimeMs; - private int sendCount; - - public static void main(String[] args) throws Exception { - if (args.length != 1) { - throw new IllegalArgumentException("Expected [configPath] as argument"); - } - Pubber pubber = new Pubber(args[0]); - pubber.initialize(); - pubber.startConnection(); - LOG.info("Done with main"); - } - - private Pubber(String configFile) { - File configurationFile = new File(configFile); - LOG.info("Reading configuration from " + configurationFile.getAbsolutePath()); - try { - configuration = OBJECT_MAPPER.readValue(configurationFile, Configuration.class); - } catch (Exception e) { - throw new RuntimeException("While reading configuration file " + configurationFile.getAbsolutePath(), e); - } - info(String.format("Starting instance for project %s registry %s", - configuration.projectId, configuration.registryId)); - - initializeDevice(); - addPoint(new RandomPoint("superimposition_reading", 0, 100, "Celsius")); - addPoint(new RandomPoint("recalcitrant_angle", 0, 360, "deg" )); - addPoint(new RandomPoint("faulty_finding", 1, 1, "truth")); - } - - private void initializeDevice() { - deviceState.system.make_model = "DAQ_pubber"; - deviceState.system.firmware.version = "v1"; - 
deviceState.pointset = new PointsetState(); - devicePoints.extraField = configuration.extraField; - } - - private synchronized void maybeRestartExecutor(int intervalMs) { - if (scheduledFuture == null || intervalMs != messageDelayMs.get()) { - cancelExecutor(); - messageDelayMs.set(intervalMs); - startExecutor(); - } - } - - private synchronized void startExecutor() { - Preconditions.checkState(scheduledFuture == null); - int delay = messageDelayMs.get(); - LOG.info("Starting executor with send message delay " + delay); - scheduledFuture = executor - .scheduleAtFixedRate(this::sendMessages, delay, delay, TimeUnit.MILLISECONDS); - } - - private synchronized void cancelExecutor() { - if (scheduledFuture != null) { - scheduledFuture.cancel(false); - scheduledFuture = null; - } - } - - private void sendMessages() { - try { - sendDeviceMessage(configuration.deviceId); - updatePoints(); - if (sendCount % LOGGING_MOD_COUNT == 0) { - publishLogMessage(configuration.deviceId,"Sent " + sendCount + " messages"); - } - sendCount++; - } catch (Exception e) { - LOG.error("Fatal error during execution", e); - terminate(); - } - } - - private void updatePoints() { - allPoints.forEach(AbstractPoint::updateData); - } - - private void terminate() { - try { - info("Terminating"); - mqttPublisher.close(); - cancelExecutor(); - } catch (Exception e) { - info("Error terminating: " + e.getMessage()); - } - } - - private void startConnection() throws InterruptedException { - connect(); - boolean result = configLatch.await(CONFIG_WAIT_TIME_MS, TimeUnit.MILLISECONDS); - LOG.info("synchronized start config result " + result); - if (!result) { - mqttPublisher.close(); - } - } - - private void addPoint(AbstractPoint point) { - String pointName = point.getName(); - if (devicePoints.points.put(pointName, point.getData()) != null) { - throw new IllegalStateException("Duplicate pointName " + pointName); - } - deviceState.pointset.points.put(pointName, point.getState()); - allPoints.add(point); - } 
- - private void initialize() { - Preconditions.checkState(mqttPublisher == null, "mqttPublisher already defined"); - Preconditions.checkNotNull(configuration.keyFile, "configuration keyFile not defined"); - System.err.println("Loading device key file from " + configuration.keyFile); - configuration.keyBytes = getFileBytes(configuration.keyFile); - mqttPublisher = new MqttPublisher(configuration, this::reportError); - if (configuration.gatewayId != null) { - mqttPublisher.registerHandler(configuration.gatewayId, CONFIG_TOPIC, - this::configHandler, Message.Config.class); - mqttPublisher.registerHandler(configuration.gatewayId, ERROR_TOPIC, - this::errorHandler, GatewayError.class); - } - mqttPublisher.registerHandler(configuration.deviceId, CONFIG_TOPIC, - this::configHandler, Message.Config.class); - } - - private void connect() { - try { - mqttPublisher.connect(configuration.deviceId); - LOG.info("Connection complete."); - } catch (Exception e) { - LOG.error("Connection error", e); - LOG.error("Forcing termination"); - System.exit(-1); - } - } - - private void reportError(Exception toReport) { - if (toReport != null) { - LOG.error("Error receiving message: " + toReport); - Entry report = new Entry(toReport); - deviceState.system.statuses.put(CONFIG_ERROR_STATUS_KEY, report); - publishStateMessage(configuration.deviceId); - } else { - Entry previous = deviceState.system.statuses.remove(CONFIG_ERROR_STATUS_KEY); - if (previous != null) { - publishStateMessage(configuration.deviceId); - } - } - } - - private void info(String msg) { - LOG.info(msg); - } - - private void configHandler(Message.Config config) { - try { - info("Received new config " + config); - final int actualInterval; - if (config != null) { - Integer reportInterval = config.system == null ? null : config.system.report_interval_ms; - actualInterval = Integer.max(MIN_REPORT_MS, - reportInterval == null ? 
DEFAULT_REPORT_MS : reportInterval); - deviceState.system.last_config = config.timestamp; - } else { - actualInterval = DEFAULT_REPORT_MS; - } - maybeRestartExecutor(actualInterval); - configLatch.countDown(); - publishStateMessage(configuration.deviceId); - reportError(null); - } catch (Exception e) { - reportError(e); - } - } - - private void errorHandler(GatewayError error) { - // TODO: Handle error and give up on device. - info(String.format("%s for %s: %s", - error.error_type, error.device_id, error.description)); - } - - private byte[] getFileBytes(String dataFile) { - Path dataPath = Paths.get(dataFile); - try { - return Files.readAllBytes(dataPath); - } catch (Exception e) { - throw new RuntimeException("While getting data from " + dataPath.toAbsolutePath(), e); - } - } - - private void sendDeviceMessage(String deviceId) { - if (mqttPublisher.clientCount() == 0) { - LOG.error("No connected clients, exiting."); - System.exit(-2); - } - info(String.format("Sending test message for %s/%s", configuration.registryId, deviceId)); - devicePoints.timestamp = new Date(); - mqttPublisher.publish(deviceId, POINTSET_TOPIC, devicePoints); - } - - private void publishLogMessage(String deviceId, String logMessage) { - info(String.format("Sending log message for %s/%s", configuration.registryId, deviceId)); - Message.SystemEvent systemEvent = new Message.SystemEvent(); - systemEvent.logentries.add(new Entry(logMessage)); - mqttPublisher.publish(deviceId, SYSTEM_TOPIC, systemEvent); - } - - private void publishStateMessage(String deviceId) { - lastStateTimeMs = sleepUntil(lastStateTimeMs + STATE_THROTTLE_MS); - info("Sending state message for device " + deviceId); - deviceState.timestamp = new Date(); - mqttPublisher.publish(deviceId, STATE_TOPIC, deviceState); - } - - private long sleepUntil(long targetTimeMs) { - long currentTime = System.currentTimeMillis(); - long delay = targetTimeMs - currentTime; - try { - if (delay > 0) { - Thread.sleep(delay); - } - return 
System.currentTimeMillis(); - } catch (Exception e) { - throw new RuntimeException("While sleeping for " + delay, e); - } - } -} diff --git a/pubber/src/main/java/daq/pubber/RandomPoint.java b/pubber/src/main/java/daq/pubber/RandomPoint.java deleted file mode 100644 index dcf0ff0a03..0000000000 --- a/pubber/src/main/java/daq/pubber/RandomPoint.java +++ /dev/null @@ -1,42 +0,0 @@ -package daq.pubber; - -import daq.udmi.Message.PointData; -import daq.udmi.Message.PointState; - -public class RandomPoint implements AbstractPoint { - - private final String name; - private final double min; - private final double max; - private final PointData data = new PointData(); - private final PointState state = new PointState(); - - public RandomPoint(String name, double min, double max, String units) { - this.name = name; - this.min = min; - this.max = max; - this.state.fault = max == min; - this.state.units = units; - updateData(); - } - - @Override - public void updateData() { - data.present_value = Math.round(Math.random() * (max - min) + min); - } - - @Override - public PointState getState() { - return state; - } - - @Override - public String getName() { - return name; - } - - @Override - public PointData getData() { - return data; - } -} diff --git a/pubber/src/main/java/daq/udmi/Entry.java b/pubber/src/main/java/daq/udmi/Entry.java deleted file mode 100644 index 25201edc72..0000000000 --- a/pubber/src/main/java/daq/udmi/Entry.java +++ /dev/null @@ -1,26 +0,0 @@ -package daq.udmi; - -import java.io.ByteArrayOutputStream; -import java.io.PrintStream; -import java.util.Date; - -public class Entry { - public String message; - public String detail; - public String category = "com.acme.pubber"; - public Integer level = 500; - public Date timestamp = new Date(); - - public Entry(String message) { - this.message = message; - } - - public Entry(Exception e) { - message = e.toString(); - ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); - e.printStackTrace(new 
PrintStream(outputStream)); - detail = outputStream.toString(); - category = e.getStackTrace()[0].getClassName(); - level = 800; - } -} diff --git a/pubber/src/main/java/daq/udmi/Message.java b/pubber/src/main/java/daq/udmi/Message.java deleted file mode 100644 index c15f9c842c..0000000000 --- a/pubber/src/main/java/daq/udmi/Message.java +++ /dev/null @@ -1,76 +0,0 @@ -package daq.udmi; - -import java.util.ArrayList; -import java.util.Date; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -@SuppressWarnings("unused") -public class Message { - - public static class State extends UdmiBase { - public SystemState system = new SystemState(); - public PointsetState pointset; - } - - public static class Config extends UdmiBase { - public SystemConfig system; - public PointsetConfig pointset; - public GatewayConfig gateway; - } - - public static class Pointset extends UdmiBase { - public Map points = new HashMap<>(); - public Object extraField; - } - - public static class SystemEvent extends UdmiBase { - public List logentries = new ArrayList<>(); - } - - public static class PointsetState { - public Map points = new HashMap<>(); - } - - public static class PointsetConfig { - public Map points = new HashMap<>(); - } - - public static class PointConfig { - } - - public static class GatewayConfig { - public List proxy_ids; - } - - public static class SystemState { - public String make_model; - public Bundle firmware = new Bundle(); - public boolean operational; - public Date last_config; - public Map statuses = new HashMap<>(); - } - - public static class SystemConfig { - public Integer report_interval_ms; - } - - public static class PointData { - public Object present_value; - } - - public static class PointState { - public String units; - public Boolean fault; - } - - public static class Bundle { - public String version; - } - - public static class UdmiBase { - public Integer version = 1; - public Date timestamp = new Date(); - } -} diff --git 
a/resources/device_types/deltacontrols_o3-din-cpu/module_config.json b/resources/device_types/deltacontrols_o3-din-cpu/module_config.json index 066bf35abb..6859207787 100644 --- a/resources/device_types/deltacontrols_o3-din-cpu/module_config.json +++ b/resources/device_types/deltacontrols_o3-din-cpu/module_config.json @@ -15,9 +15,6 @@ "bacnet": { "enabled": true }, - "macoui": { - "enabled": true - }, "mudgee": { "enabled": true }, diff --git a/resources/device_types/distech_ecy-s1000/module_config.json b/resources/device_types/distech_ecy-s1000/module_config.json index d790899c5c..09e9fd37b9 100644 --- a/resources/device_types/distech_ecy-s1000/module_config.json +++ b/resources/device_types/distech_ecy-s1000/module_config.json @@ -15,9 +15,6 @@ "bacnet": { "enabled": true }, - "macoui": { - "enabled": true - }, "mudgee": { "enabled": true }, diff --git a/resources/setups/baseline/module_config.json b/resources/setups/baseline/module_config.json index 8ebd56cc57..5999aa59b6 100644 --- a/resources/setups/baseline/module_config.json +++ b/resources/setups/baseline/module_config.json @@ -1,15 +1,16 @@ { "modules": { "ipaddr": { - "timeout_sec": 300 + "enabled": false, + "timeout_sec": 900, + "port_flap_timeout_sec": 20, + "dhcp_ranges": [{"start": "192.168.0.1", "end": "192.168.255.254", "prefix_length": 16}] }, "pass": { "enabled": true }, "nmap": { - "enabled": true - }, - "macoui": { + "timeout_sec": 600, "enabled": true }, "switch": { @@ -19,6 +20,7 @@ "enabled": true }, "password": { + "dictionary_dir": "resources/faux", "enabled": true }, "bacext": { diff --git a/resources/setups/qualification/device_module_config.json b/resources/setups/qualification/device_module_config.json index 6019735d2b..131348b856 100644 --- a/resources/setups/qualification/device_module_config.json +++ b/resources/setups/qualification/device_module_config.json @@ -30,9 +30,6 @@ "ipaddr": { "timeout_sec": 120 }, - "macoui": { - "enabled": true - }, "mudgee": { "enabled": true }, diff 
--git a/resources/setups/qualification/device_type_module_config.json b/resources/setups/qualification/device_type_module_config.json index f183d5527c..0401a128b7 100644 --- a/resources/setups/qualification/device_type_module_config.json +++ b/resources/setups/qualification/device_type_module_config.json @@ -24,10 +24,10 @@ "ipaddr": { "timeout_sec": 120 }, - "macoui": { + "mudgee": { "enabled": true }, - "mudgee": { + "network": { "enabled": true }, "nmap": { diff --git a/resources/setups/qualification/system_module_config.json b/resources/setups/qualification/system_module_config.json index de3cc56461..1cc6fd9835 100644 --- a/resources/setups/qualification/system_module_config.json +++ b/resources/setups/qualification/system_module_config.json @@ -15,10 +15,10 @@ "ipaddr": { "timeout_sec": 120 }, - "macoui": { + "mudgee": { "enabled": true }, - "mudgee": { + "network": { "enabled": true }, "nmap": { @@ -90,6 +90,11 @@ "required": "pass", "expected": "Required Pass" }, + "connection.network.ntp_update": { + "category": "Network Time", + "required": "pass", + "expected": "Required Pass" + }, "connection.network.communication_type": { "category": "Communication", "required": "info", diff --git a/resources/setups/remediation/device_module_config.json b/resources/setups/remediation/device_module_config.json index 976761c762..1d14c6f7aa 100644 --- a/resources/setups/remediation/device_module_config.json +++ b/resources/setups/remediation/device_module_config.json @@ -26,10 +26,10 @@ "ipaddr": { "timeout_sec": 120 }, - "macoui": { + "mudgee": { "enabled": true }, - "mudgee": { + "network": { "enabled": true }, "nmap": { diff --git a/resources/setups/remediation/system_module_config.json b/resources/setups/remediation/system_module_config.json index 17e7793758..e7542de45d 100644 --- a/resources/setups/remediation/system_module_config.json +++ b/resources/setups/remediation/system_module_config.json @@ -15,10 +15,10 @@ "ipaddr": { "timeout_sec": 120 }, - "macoui": { + 
"mudgee": { "enabled": true }, - "mudgee": { + "network": { "enabled": true }, "nmap": { @@ -86,6 +86,11 @@ "required": "pass", "expected": "Recommended Pass" }, + "connection.network.ntp_update": { + "category": "Network Time", + "required": "pass", + "expected": "Recommended Pass" + }, "connection.network.communication_type": { "category": "Communication", "required": "info", diff --git a/resources/test_site/devices/AHU-1/metadata.json b/resources/test_site/devices/AHU-1/metadata.json index cdee5cc055..09eb5ce94d 100644 --- a/resources/test_site/devices/AHU-1/metadata.json +++ b/resources/test_site/devices/AHU-1/metadata.json @@ -1,13 +1,13 @@ { "pointset": { "points": { - "filter_alarm_pressure_status": { + "faulty_finding": { "units": "Bars" }, - "filter_differential_pressure_sensor": { + "recalcitrant_angle": { "units": "Degrees-Celsius" }, - "filter_differential_pressure_setpoint": { + "superimposition_reading": { "units": "No-units" } } diff --git a/resources/test_site/mac_addrs/3c5ab41e8f0b/module_config.json b/resources/test_site/mac_addrs/3c5ab41e8f0b/module_config.json index c999dd7e16..bb981518eb 100644 --- a/resources/test_site/mac_addrs/3c5ab41e8f0b/module_config.json +++ b/resources/test_site/mac_addrs/3c5ab41e8f0b/module_config.json @@ -3,10 +3,6 @@ "modules": { "hold": { "enabled": true - }, - "macoui": { - "enabled": true, - "timeout_sec": 1 } } } diff --git a/resources/test_site/module_config.json b/resources/test_site/module_config.json index 4d672b67aa..03f7d504d0 100644 --- a/resources/test_site/module_config.json +++ b/resources/test_site/module_config.json @@ -17,6 +17,9 @@ }, "manual": { "enabled": true + }, + "ssh": { + "enabled": false } }, "process": { @@ -44,7 +47,7 @@ "category": "Security", "expected": "Recommended" }, - "manual.test.travis": { + "manual.test.name": { "required": "pass", "category": "Security", "expected": "Recommended", diff --git a/schemas/simple/simple.json b/schemas/simple/simple.json deleted file mode 100644 
index fa31b8e8d0..0000000000 --- a/schemas/simple/simple.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "type" : "object", - "$schema": "http://json-schema.org/draft-07/schema#", - "properties" : { - "rectangle" : {"$ref" : "#/definitions/Rectangle" } - }, - "required": [ - "rectangle" - ], - "definitions" : { - "size" : { - "type" : "number", - "minimum" : 0 - }, - "Rectangle" : { - "type" : "object", - "required": [ - "a", - "b" - ], - "properties" : { - "a" : {"$ref" : "#/definitions/size"}, - "b" : {"$ref" : "#/definitions/size"} - } - } - } -} \ No newline at end of file diff --git a/schemas/simple/simple.tests/error.json b/schemas/simple/simple.tests/error.json deleted file mode 100644 index 361ce8baae..0000000000 --- a/schemas/simple/simple.tests/error.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "rectangle" : { - "a" : -4, - "b" : 5 - } -} diff --git a/schemas/simple/simple.tests/error.out b/schemas/simple/simple.tests/error.out deleted file mode 100644 index d5320ed8a3..0000000000 --- a/schemas/simple/simple.tests/error.out +++ /dev/null @@ -1,4 +0,0 @@ -Validating 1 schemas - Validating 1 files against simple.json - Against input simple.tests/error.json - #/rectangle/a: -4 is not greater or equal to 0 diff --git a/schemas/simple/simple.tests/example.json b/schemas/simple/simple.tests/example.json deleted file mode 100644 index e6751b6099..0000000000 --- a/schemas/simple/simple.tests/example.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "rectangle" : { - "a" : 4, - "b" : 5 - } -} diff --git a/schemas/simple/simple.tests/example.out b/schemas/simple/simple.tests/example.out deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/schemas/simple/simple.tests/simple.json b/schemas/simple/simple.tests/simple.json deleted file mode 100644 index fa31b8e8d0..0000000000 --- a/schemas/simple/simple.tests/simple.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "type" : "object", - "$schema": "http://json-schema.org/draft-07/schema#", - "properties" : { - "rectangle" : {"$ref" : 
"#/definitions/Rectangle" } - }, - "required": [ - "rectangle" - ], - "definitions" : { - "size" : { - "type" : "number", - "minimum" : 0 - }, - "Rectangle" : { - "type" : "object", - "required": [ - "a", - "b" - ], - "properties" : { - "a" : {"$ref" : "#/definitions/size"}, - "b" : {"$ref" : "#/definitions/size"} - } - } - } -} \ No newline at end of file diff --git a/schemas/simple/simple.tests/simple.out b/schemas/simple/simple.tests/simple.out deleted file mode 100644 index 85d5807ab3..0000000000 --- a/schemas/simple/simple.tests/simple.out +++ /dev/null @@ -1,4 +0,0 @@ -Validating 1 schemas - Validating 1 files against simple.json - Against input simple.tests/simple.json - #: required key [rectangle] not found diff --git a/schemas/udmi/README.md b/schemas/udmi/README.md deleted file mode 100644 index b0c450fa94..0000000000 --- a/schemas/udmi/README.md +++ /dev/null @@ -1,163 +0,0 @@ -# UDMI Schema - -The Universal Device Management Interface (UDMI) provides a high-level specification for the -management and operation of physical IoT systems. This data is typically exchanged -with a cloud entity that can maintain a "digital twin" or "shadow device" in the cloud. -Nominally meant for use with [Googe's Cloud IoT Core](https://cloud.google.com/iot/docs/), -as a schema it can be applied to any set of data or hosting setup. Additionally, the schema -has provisions for basic telemetry ingestion, such as datapoint streaming from an IoT device. - -By deisgn, this schema is intended to be: -* Universal: Apply to all subsystems in a building, not a singular vertical solution. -* Device: Operations on an IoT _device_, a managed entity in physical space. -* Management: Focus on device _management_, rather than command & control. -* Interface: Define an interface specification, rather than a client-library or -RPC mechanism. - -See the associated [UDMI Tech Stack](TECH_STACK.md) for details about transport mechanism -outside of the core schema definition. 
For questions and discussion pertaining to this topic, -please join/monitor the -[daq-users@googlegroups.com](https://groups.google.com/forum/#!forum/daq-users) email list - -## Use Cases - -The essence behind UDMI is an automated mechanism for IoT system management. Many current -systems require direct-to-device access, such as through a web browser or telnet/ssh session. -These techniques do not scale to robust managed ecosystems since they rely too heavily on -manual operation (aren't automated), and increase the security exposure of the system -(since they need to expose these management ports). - -UDMI is intended to support a few primary use-cases: -* _Telemetry Ingestion_: Ingest device data points in a standardized format. -* [_Gateway Proxy_](docs/gateway.md): Proxy data/connection for non-UDMI devices, -allowing adaptation to legacy systems. -* _On-Prem Actuation_: Ability to effect on-prem device behavior. -* _Device Testability_: e.g. Trigger a fake alarm to test reporting mechanims. -* _Commissioning Tools_: Streamline complete system setup and install. -* _Operational Diagnostics_: Make it easy for system operators to diagnoe basic faults. -* _Status and Logging_: Report system operational metrics to hosting infrastructure. -* _Key Rotation_: Manage encryption keys and certificates in accordance with best practice. -* _Credential Exchange_: Bootstrap higher-layer authentication to restricted resources. -* _Firmware Updates_: Initiate, monitor, and track firmware updates across an entire fleet -of devices. -* _On-Prem Discovery_: Enumerate and on-prem devices to aid setup or anomaly detection. - -All these situations are conceptually about _management_ of devices, which is conceptually -different than the _control_ or _operation_. These concepts are similar to the _management_, -_control_, and _data_ planes of -[Software Defined Networks](https://queue.acm.org/detail.cfm?id=2560327). 
-Once operational, the system should be able to operate completely autonomoulsy from the -management capabilities, which are only required to diagnose or tweak system behavior. - -## Design Philiosphy - -In order to provide for management automation, UDMI strives for the following principles: -* Secure and Authenticated: Requires a propertly secure and authenticated channel -from the device to managing infrastructure. -* Declarative Specification: The schema describes the _desired_ state of the system, -relying on the underlying mechanisms to match actual state with desired state. This is -conceptually similar to Kubernetes-style configuraiton files. -* Minimal Elegant Design: Initially underspecified, with an eye towards making it easy to -add new capabilities in the future. It is easier to add something than it is to remove it. -* Reduced Choices: In the long run, choice leads to more work -to implement, and more ambiguity. Strive towards having only _one_ way of doing each thing. -* Structure and Clarity: This is not a "compressed" format and not designed for -very large structures or high-bandwidth streams. -* Property Names:Uses snake_case convention for property names. -* Resource Names: Overall structure (when flattened to paths), follows the -[API Resource Names guidline](https://cloud.google.com/apis/design/resource_names). - -## Schema Structure - -Schemas are broken down into several top-level sub-schemas that are invoked for -different aspects of device management: -* Device _state_ ([example](state.tests/example.json)), sent from device to cloud, -defined by [state.json](state.json). There is one current _state_ per device, -which is considered sticky until a new state message is sent. -is comprised of several subsections (e.g. _system_ or _pointset_) that describe the -relevant sub-state components. -* Device _config_ ([example](config.tests/example.json)), passed from cloud to device, -defined by [config.json](config.json). 
There is one active _config_ per device, -which is considered current until a new config is recevied. -* Message _envelope_ ([example](envelope.tests/example.json)) for server-side -attributes of received messages, defined by [envelope.json](envelope.json). This is -automatically generated by the transport layer and is then available for server-side -processing. -* Device _metadata_ ([example](metadata.tests/example.json)) stored in the cloud about a device, -but not directly available to or on the device, defined by [metadata.json](metadata.json). -This is essentially a specification about how the device should be configured or -expectations about what the device should be doing. -* Streaming device telemetry, which can take on several different forms, depending on the intended -use, e.g.: - * Streaming _pointset_ ([example](pointset.tests/example.json)) from device to cloud, - defined by [pointset.json](pointset.json). _pointset_ is used for delivering a - set of data point telemetry. - * Core _system_ messages ([example](system.tests/example.json)) from devices, such as log - entries and access logs, defined by [system.json](system.json). - * Local _discover_ messages ([example](discover.tests/example.json)) that show the - results of local scans or probes to determine which devices are on the local network, - defined by [discover.json](discover.json). - -A device client implementation will typically only be aware of the _state_, _config_, and -one or more telemetry messages (e.g. _pointset_), while all others are meant for the supporting -infrastructure. Additionally, the _state_ and _config_ parts are comprised of several distinct -subsections (e.g. _system_, _pointset_, or _gateway_) that relate to various bits of functionality. - -## Validation - -To verify correct operation of a real system, follow the instructions outlined in the -[validator subsystem docs](../../docs/validator.md), which provides for a suitable -communication channel. 
Additional sample messages are easy to include in the regression -suite if there are new cases to test. - -## Message Detail Notes - -### State Message - -* See notes below about 'State status' fields. -* There is an implicit minimum update interval of _one second_ applied to state updates, and it -is considered an error to update device state more often than that. -* `last_config` should be the timestamp _from_ the `timestamp` field of the last successfully -parsed `config` message. - -### Config Message - -* `sample_rate_sec`: Sampling rate for the system, which should proactively send an -update (e.g. _pointset_, _logentry_, _discover_ message) at this interval. -* `sample_limit_sec`: Minimum time between sample updates. Updates that happen faster than this time -(e.g. due to _cov_ events) should be coalesced so that only the most recent update is sent. -* `fix_value`: Fix a value to be used during diagnostics and operational use. Should -override any operational values, but not override alarm conditions. -* `min_loglevel`: Indicates the minimum loglevel for reporting log messages below which log entries -should not be sent. See note below for a description of the level value. - -### Logentry Message - -* See notes below about 'logentry entries' fields. - -### State status and logentry entries fields - -The State and System/logentry messages both have `status` and `entries` sub-fields, respectivly, that -follow the same structure. -* State `status` entries represent 'sticky' conditions that persist until the situation is cleared, -e.g. "device disconnected". -* A `statuses` entry is a map of 'sticky' conditions that are keyed on a value that can be -used to manage updates by a particular (device dependent) subsystem. -* Logentry `entries` fields are transitory event that happen, e.g. "connection failed". -* The log `entries` field is an array that can be used to collaesce multiple log updates into -one message. 
-* Config parse errors should be represented as a system-level device state `status` entry. -* The `message` field sould be a one-line representation of the triggering condition. -* The `detail` field can be multi-line and include more detail, e.g. a complete program -stack-trace. -* The `category` field is a device-specific representation of which sub-system the message comes -from. In a Java environment, for example, it would be the fully qualified path name of the Class -triggering the message. -* A `status` or `statuses` `timestamp` field should be the timestamp the condition was triggered, -or most recently updated. It might be different than the top-level message `timestamp` if the -condition is not checked often or is sticky until it's cleared. -* A logentry `entries` `timestamp` field is the time that the event occured, which is potentially -different than the top-level `timestamp` field (which is when the log was sent). -* The status `level` should conform to the numerical -[Stackdriver LogEntry](https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#logseverity) -levels. The `DEFAULT` value of 0 is not allowed (lowest value is 100, maximum 800). diff --git a/schemas/udmi/TECH_STACK.md b/schemas/udmi/TECH_STACK.md deleted file mode 100644 index 86d3c0ab4e..0000000000 --- a/schemas/udmi/TECH_STACK.md +++ /dev/null @@ -1,46 +0,0 @@ -# UDMI Technology Stack - -The complete UDMI specificaiton (super set of the base schema), specifies a complete -technology stack for compliant IoT devices. - -# Core Requirements - -* [Google Cloud's MQTT Protocol Bridge](https://cloud.google.com/iot/docs/how-tos/mqtt-bridge). - * This is _not_ the same as a generic MQTT Broker, but it is compatible with standard client-side libraries. - * Other transports (non-Google MQTT, CoAP, etc...) are acceptable with prior approval. - * Connected to a specific Cloud IoT Registry designated for each site-specific project. -* Utilizes the MQTT Topic table listed below. 
-* JSON encoding following the core [UDMI Schema](README.md), specifying the semantic structure of the data. -* Passes the [DAQ Validation Tool](../../docs/validator.md) for all requirements. - -# MQTT Topic Table - -| Type | Category | subFolder | MQTT Topic | Schema File | -|----------|----------|-----------|----------------------------------------|---------------| -| state | state | _n/a_ | `/devices/{device_id}/state` | state.json | -| config | config | _n/a_ | `/devices/{device-id}/config` | config.json | -| pointset | event | pointset | `/devices/{device-id}/events/pointset` | pointset.json | -| system | event | system | `/devices/{device-id}/events/system` | system.json | - -# Backend Systems - -Any backend system (in a GCP project) should adhere to the following guidelines: -* All messages to/from the devices should conform to the UDMI schema payloads (pass validation). -* All exchanges with the devices should go through a PubSub topic: - * The _state_ and _event_ messages are published to a topic configured through the IoT Core registry. - * If necessary, any _config_ or _command_ messages should go through a PubSub topic, and then converted to the requisite Cloud IoT - config write using a simple cloud function. -* To make data persistent, it can be written to a back-end database, e.g. Firestore. See the `device_telemetry` and - `device_state` [example cloud functions](../../firebase/functions/index.js) for details. -* A similar function called `device_config` shows how PubSub can be used to update the Cloud IoT configuration. 
- -A config push can be tested with something like: - -``` -gcloud pubsub topics publish target \ - --attribute subFolder=config,deviceId=AHU-1,projectId=bos-daq-testing,cloudRegion=us-central1,deviceRegistryId=registrar_test \ - --message '{"version": 1, "timestamp": "2019-01-17T14:02:29.364Z"}' -``` - -The reason for the redirection of any data through a PubSub topic is so that the Cloud IoT registry, if necessary, -can be housed in a different cloud project from the backend applications. diff --git a/schemas/udmi/config.json b/schemas/udmi/config.json deleted file mode 100644 index 73f19b0b4b..0000000000 --- a/schemas/udmi/config.json +++ /dev/null @@ -1,33 +0,0 @@ -{ - "title": "Device Config Schema", - "type": "object", - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "required": [ - "timestamp", - "version" - ], - "properties": { - "timestamp": { - "type": "string", - "format": "date-time" - }, - "version": { - "enum": [ - 1 - ] - }, - "system": { - "$ref": "file:config_system.json#" - }, - "gateway": { - "$ref": "file:config_gateway.json#" - }, - "localnet": { - "$ref": "file:config_localnet.json#" - }, - "pointset": { - "$ref": "file:config_pointset.json#" - } - } -} diff --git a/schemas/udmi/config.tests/empty.json b/schemas/udmi/config.tests/empty.json deleted file mode 100644 index 2c63c08510..0000000000 --- a/schemas/udmi/config.tests/empty.json +++ /dev/null @@ -1,2 +0,0 @@ -{ -} diff --git a/schemas/udmi/config.tests/empty.out b/schemas/udmi/config.tests/empty.out deleted file mode 100644 index c9aa2803fc..0000000000 --- a/schemas/udmi/config.tests/empty.out +++ /dev/null @@ -1,6 +0,0 @@ -Validating 1 schemas - Validating 1 files against config.json - Against input config.tests/empty.json - #: 2 schema violations found - #: required key [timestamp] not found - #: required key [version] not found diff --git a/schemas/udmi/config.tests/errors.json b/schemas/udmi/config.tests/errors.json deleted file mode 100644 
index 407a7c9eb0..0000000000 --- a/schemas/udmi/config.tests/errors.json +++ /dev/null @@ -1,30 +0,0 @@ -{ - "version": 1, - "timestamp": "2018-08-26T21:39:29.364Z", - "properties": { - "make_model": "com.yoyodine.flux_capacitor", - "whoowhoo": true, - "release": "231_rev_8" - }, - "type": "config", - "system": { - }, - "points": { - }, - "pointset": { - "sample_rate_sec": "5", - "version": 1, - "id": "miXeD_CaSE", - "timestamp": "2018-08-26T21:39:29.364Z", - "properties": { - "device_id": "33895507", - "object_name": "UK-BRH-XX_AHU-001", - }, - "points": { - "return_air_temperature_sensor": { - "object_type": "analog_input", - "units": "Degrees Celsius" - } - } - } -} diff --git a/schemas/udmi/config.tests/errors.out b/schemas/udmi/config.tests/errors.out deleted file mode 100644 index 80edaa341f..0000000000 --- a/schemas/udmi/config.tests/errors.out +++ /dev/null @@ -1,16 +0,0 @@ -Validating 1 schemas - Validating 1 files against config.json - Against input config.tests/errors.json - #: 10 schema violations found - #/pointset: 7 schema violations found - #/pointset/points/return_air_temperature_sensor: 2 schema violations found - #/pointset/points/return_air_temperature_sensor: extraneous key [object_type] is not permitted - #/pointset/points/return_air_temperature_sensor: extraneous key [units] is not permitted - #/pointset/sample_rate_sec: expected type: Number, found: String - #/pointset: extraneous key [id] is not permitted - #/pointset: extraneous key [properties] is not permitted - #/pointset: extraneous key [timestamp] is not permitted - #/pointset: extraneous key [version] is not permitted - #: extraneous key [points] is not permitted - #: extraneous key [properties] is not permitted - #: extraneous key [type] is not permitted diff --git a/schemas/udmi/config.tests/example.json b/schemas/udmi/config.tests/example.json deleted file mode 100644 index 164fb358b2..0000000000 --- a/schemas/udmi/config.tests/example.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - 
"version": 1, - "timestamp": "2018-08-26T21:39:29.364Z", - "system": { - "min_loglevel": 500 - }, - "pointset": { - "sample_limit_sec": 2, - "sample_rate_sec": 500, - "points": { - "return_air_temperature_sensor": { - }, - "nexus_sensor": { - "fix_value": 21.1 - } - } - } -} diff --git a/schemas/udmi/config.tests/example.out b/schemas/udmi/config.tests/example.out deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/schemas/udmi/config.tests/fcu.json b/schemas/udmi/config.tests/fcu.json deleted file mode 100644 index 03380428c8..0000000000 --- a/schemas/udmi/config.tests/fcu.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "version": 1, - "timestamp": "2019-01-17T14:02:29.364Z", - "system": { - "max_update_ms": 50000, - "min_loglevel": 500 - }, - "pointset": { - "points": { - "space_temperature_sensor": { - }, - "fan_run_status": { - "fix_value": true - }, - "fan_run_enable": { - "fix_value": false - }, - "chilled_water_valve_percentage_command": { - "min_update_ms": 1000 - } - } - } -} diff --git a/schemas/udmi/config.tests/fcu.out b/schemas/udmi/config.tests/fcu.out deleted file mode 100644 index f81283bace..0000000000 --- a/schemas/udmi/config.tests/fcu.out +++ /dev/null @@ -1,6 +0,0 @@ -Validating 1 schemas - Validating 1 files against config.json - Against input config.tests/fcu.json - #: 2 schema violations found - #/pointset/points/chilled_water_valve_percentage_command: extraneous key [min_update_ms] is not permitted - #/system: extraneous key [max_update_ms] is not permitted diff --git a/schemas/udmi/config.tests/gateway.json b/schemas/udmi/config.tests/gateway.json deleted file mode 100644 index 294aabb2ce..0000000000 --- a/schemas/udmi/config.tests/gateway.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "version": 1, - "timestamp": "2018-08-26T21:39:29.364Z", - "gateway": { - "proxy_ids": [ "AHU-123", "SMS-81", "991" ] - } -} diff --git a/schemas/udmi/config.tests/gateway.out b/schemas/udmi/config.tests/gateway.out deleted file mode 100644 index 
1f85426706..0000000000 --- a/schemas/udmi/config.tests/gateway.out +++ /dev/null @@ -1,4 +0,0 @@ -Validating 1 schemas - Validating 1 files against config.json - Against input config.tests/gateway.json - #/gateway/proxy_ids/2: string [991] does not match pattern ^[A-Z]{3}-[1-9][0-9]{0,2}$ diff --git a/schemas/udmi/config.tests/proxy.json b/schemas/udmi/config.tests/proxy.json deleted file mode 100644 index 6f56202964..0000000000 --- a/schemas/udmi/config.tests/proxy.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "version": 1, - "timestamp": "2018-08-26T21:39:29.364Z", - "system": { - "min_loglevel": 500 - }, - "localnet": { - "subsystem": { - "bacnet": { - "local_id": "0x78ce1900" - } - } - }, - "pointset": { - "sample_limit_sec": 2, - "sample_rate_sec": 500, - "points": { - "return_air_temperature_sensor": { - "ref": "BV23.present_value" - } - } - } -} diff --git a/schemas/udmi/config.tests/proxy.out b/schemas/udmi/config.tests/proxy.out deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/schemas/udmi/config.tests/rotate.json b/schemas/udmi/config.tests/rotate.json deleted file mode 100644 index 652a698ba0..0000000000 --- a/schemas/udmi/config.tests/rotate.json +++ /dev/null @@ -1,16 +0,0 @@ -{ - "version": 1, - "timestamp": "2018-08-26T21:39:29.364Z", - "system": { - "auth_key": { - "private": 
"pkcs8:MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQD2PpibhV7vs1ZqGXsV3bCW2p1+WScg6QUNQQb1Ua8pjwIrOQzPfTROpAlxuBAlSbC+aDIz/NrAF0E7tNJx5N8Zk0ekIoqCEVGx/0XuJtyvSYESBclCD7bD3d6KUHcVOK/7hVo7nVnrEjjmihdsz1TSqxmIiNcSe55xboqtJBJLMb9yE646Y/P/kRKCOurR73h3a1N5ipVgpflyMVEW0z/B6GPTc4FRMCAv/6+Mp7v9kjZ/rJa7VwgSMLl/AJ1xyiH3ScQN2dBTCxeGlOu2Ed4v8Rse3OKbOyIbiXQqPeOdys7+CdAtng7qgDLQzinA1r+1YDeSgpLIEHnsnXHBBz1zAgMBAAECggEACm8ilkxzdmK6AarsvUN0jR/cVpM+hjV5glNEETcR3gfyRpSXR199zOtEfy4WeQ4DUD4oCm3ncwZJPFdwJ2VUUsgb3AeeqN5FAO7ZLrs01LSfpHzcR1FVJD2NhXzdXufVBSpkZWxIeB6AjLxDO9gZNwgK/+8UdfMJBrNxat7Ba7AtrYCaAcqh8ewsoGNJte9OC1ubLSvw5p/88XaBYyhN2MLrrOvv7hezsxVakUquPK0xCekV+ScK+6ezrtZVIkvg2ozlF2cffHRoQEjBDju/qQD3dsBAkqol7Lw25KntrM+wBSyCwD04eFzICeDYUBER20SeKEzYRCOek5TgKIeb8QKBgQD/syGWNfE7ZnlswdXYTaHCgCEgQg4N4wwqUva81agAuJbXYXPiBLq0qDiieyyPIT7qU/uCF+R25OJAl/228cVbkhyXa4TAjM5EAAuHeyOwJi+ZBE+c2Mo4Z4mJoXjLzNSvF+ytRQjoAXiErZ4+Kl6wI7zgeIA+SsA0Yy2qStJSKwKBgQD2iJ9bL0FtC/HkkgiCOblLJ1nSiMiTbDAcm9KbeeRDRVpbsYzdVkOoq1s5z6M6MdWVFIqmXL1ROlVyfesG5Dk3AbssbBt0qiF5ZXEF7N33Bqft/LW2U3mdwLVfQLJwtRZ/Uu+yGJ0y7tCEIdCsuaYRkNtZmSIU+ZcwUMr5ks5F2QKBgQC8R6mmkqfDhmxIod4VvOwsbO53c0wn+5RcoeRfHa/sf/9KLs8GkVbtaaTyN2KTLfbG0voyS+eFiHn0+DXw/MvG5qq48Im6wrOrLKFQrGKV9Tg9IwiARL16lPqYZlatMnE1UJeM6nVpaJPWloUb31UDu/z7CJ/dvmsS6Cia6Sc/KwKBgQC3LK/qmmPsV/G0uORljeok8uoESeltz/a3avfC2YBGk2MdugBF/HBtBFTV0XcgKCLfj9Gs5j8r+UG/vdtA1ZXFTx35VaHYvwf8IOknU+VgQ6vGYfvJqqA0HBkm2vU6VPKQS9kY5Lz4OQTpCA76Jz5C0vSH0AXIu+If3gfSA8gLkQKBgGcnKKp2lgIWh76ed4fpHD1dLH9naSF9GvxBEVAxfhfndMSx/hpkmbkO+78FPSDGqPLL5pc72davdrJfRg8aGi5+e2qb67y0Kd0+vlnUY/xv970/LEKDZmNhQreLTDo/wPpOSW75B6GjPhfNdc5znDUUibn6RMqyYcVOm8bLpqkZ" - } - }, - "pointset": { - "points": { - "return_air_temperature_sensor": { - "fix_value": 21.1 - } - } - } -} diff --git a/schemas/udmi/config.tests/rotate.out b/schemas/udmi/config.tests/rotate.out deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/schemas/udmi/config.tests/smartprimus.json b/schemas/udmi/config.tests/smartprimus.json deleted 
file mode 100644 index 971290d1d5..0000000000 --- a/schemas/udmi/config.tests/smartprimus.json +++ /dev/null @@ -1,21 +0,0 @@ -{ - "version": 1, - "timestamp": "2019-01-17T14:02:29.364Z", - "system": { - }, - "pointset": { - "sample_rate_sec": 2, - "points": { - "space_temperature_sensor": { - }, - "fan_run_status": { - "fix_value": true - }, - "fan_run_enable": { - "fix_value": false - }, - "chilled_water_valve_percentage_command": { - } - } - } -} diff --git a/schemas/udmi/config.tests/smartprimus.out b/schemas/udmi/config.tests/smartprimus.out deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/schemas/udmi/config_gateway.json b/schemas/udmi/config_gateway.json deleted file mode 100644 index be0ba44063..0000000000 --- a/schemas/udmi/config_gateway.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "title": "Gateway Config Snippet", - "type": "object", - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "required": [ - "proxy_ids" - ], - "properties": { - "proxy_ids": { - "type": "array", - "items": { - "type": "string", - "pattern": "^[A-Z]{3}-[1-9][0-9]{0,2}$" - } - } - } -} diff --git a/schemas/udmi/config_localnet.json b/schemas/udmi/config_localnet.json deleted file mode 100644 index 4177f240d6..0000000000 --- a/schemas/udmi/config_localnet.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "title": "Proxy Device Config Snippet", - "type": "object", - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "subsystem": { - "type": "object", - "patternProperties": { - "^[a-z0-9-]+$": { - "additionalProperties": false, - "properties": { - "local_id": { - "type": "string" - } - }, - "required": [ - "local_id" - ] - } - } - } - }, - "required": [ - "subsystem" - ] -} diff --git a/schemas/udmi/config_pointset.json b/schemas/udmi/config_pointset.json deleted file mode 100644 index a7ec32f936..0000000000 --- a/schemas/udmi/config_pointset.json +++ /dev/null @@ -1,34 +0,0 @@ -{ - 
"title": "pointset config snippet", - "type": "object", - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "sample_limit_sec": { - "type": "number", - "minimum": 1, - "maximum": 86400 - }, - "sample_rate_sec": { - "type": "number", - "minimum": 1, - "maximum": 86400 - }, - "points": { - "additionalProperties": false, - "patternProperties": { - "^[a-z][a-z0-9]*(_[a-z0-9]+)*$": { - "additionalProperties": false, - "properties": { - "ref": { - "type": "string" - }, - "fix_value": { - "type": ["number", "string", "boolean"] - } - } - } - } - } - } -} diff --git a/schemas/udmi/config_system.json b/schemas/udmi/config_system.json deleted file mode 100644 index 10a3da3ba2..0000000000 --- a/schemas/udmi/config_system.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "title": "System Config snippet", - "type": "object", - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "min_loglevel": { - "type": "number", - "minimum": 100, - "maximum": 800 - }, - "auth_key": { - "type": "object", - "additionalProperties": false, - "properties": { - "private": { - "type": "string" - } - }, - "required": [ - "private" - ] - } - } -} diff --git a/schemas/udmi/discover.json b/schemas/udmi/discover.json deleted file mode 100644 index 8abd70e24c..0000000000 --- a/schemas/udmi/discover.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "title": "Device discover schema", - "type": "object", - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "timestamp": { - "type": "string", - "format": "date-time" - }, - "version": { - "enum": [ - 1 - ] - }, - "protocol": { - "type": "string" - }, - "local_id": { - "type": "string" - }, - "points": { - "additionalProperties": false, - "patternProperties": { - "^[a-z][a-z0-9]*(_[a-z0-9]+)*$": { - "$ref": "#/definitions/point_property_names" - } - } - } - }, - "required": [ - "timestamp", - "version", - 
"protocol", - "local_id", - "points" - ], - "definitions": { - "point_property_names": { - "propertyNames": { - "oneOf": [ - { - "enum": [ - "units", - "present_value" - ] - } - ] - } - } - } -} diff --git a/schemas/udmi/discover.tests/empty.json b/schemas/udmi/discover.tests/empty.json deleted file mode 100644 index 2c63c08510..0000000000 --- a/schemas/udmi/discover.tests/empty.json +++ /dev/null @@ -1,2 +0,0 @@ -{ -} diff --git a/schemas/udmi/discover.tests/empty.out b/schemas/udmi/discover.tests/empty.out deleted file mode 100644 index f9fdc0c692..0000000000 --- a/schemas/udmi/discover.tests/empty.out +++ /dev/null @@ -1,9 +0,0 @@ -Validating 1 schemas - Validating 1 files against discover.json - Against input discover.tests/empty.json - #: 5 schema violations found - #: required key [local_id] not found - #: required key [points] not found - #: required key [protocol] not found - #: required key [timestamp] not found - #: required key [version] not found diff --git a/schemas/udmi/discover.tests/errors.json b/schemas/udmi/discover.tests/errors.json deleted file mode 100644 index aa3540a596..0000000000 --- a/schemas/udmi/discover.tests/errors.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "version": 1, - "timestamp": "2018-08-26T21:39:29.364Z", - "id": "sneakyCASE", - "properties": { - "$comment": "Common error cases for target telemetry." 
- }, - "points": { - "bad_entity_name_": { - "present_value": 21.30108642578125 - }, - "yoyo_motion_sensor": { - "bad_property_name": true - }, - "bad___sensor": { - "present_value": 21.30108642578125 - }, - "missing_present_value": { - }, - "old_properties": { - "properties": { - "present_value": true - } - }, - "magic_voice_recognizer": { - "present_value": { - "present_value": true - } - } - } -} diff --git a/schemas/udmi/discover.tests/errors.out b/schemas/udmi/discover.tests/errors.out deleted file mode 100644 index 2a972276f9..0000000000 --- a/schemas/udmi/discover.tests/errors.out +++ /dev/null @@ -1,15 +0,0 @@ -Validating 1 schemas - Validating 1 files against discover.json - Against input discover.tests/errors.json - #: 8 schema violations found - #/points: 4 schema violations found - #/points/old_properties/properties: #: 0 subschemas matched instead of one - #/points/old_properties/properties: properties is not a valid enum value - #/points/yoyo_motion_sensor/bad_property_name: #: 0 subschemas matched instead of one - #/points/yoyo_motion_sensor/bad_property_name: bad_property_name is not a valid enum value - #/points: extraneous key [bad___sensor] is not permitted - #/points: extraneous key [bad_entity_name_] is not permitted - #: extraneous key [id] is not permitted - #: extraneous key [properties] is not permitted - #: required key [local_id] not found - #: required key [protocol] not found diff --git a/schemas/udmi/discover.tests/example.json b/schemas/udmi/discover.tests/example.json deleted file mode 100644 index 9852ad3818..0000000000 --- a/schemas/udmi/discover.tests/example.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "version": 1, - "timestamp": "2018-08-26T21:39:29.364Z", - "local_id": "92EA09", - "protocol": "bacnet", - "points": { - "reading_value": { - "units": "C", - "present_value": 21.30108642578125 - }, - "yoyo_motion_sensor": { - "present_value": true - }, - "enum_value": { - "present_value": "hello" - } - } -} diff --git 
a/schemas/udmi/discover.tests/example.out b/schemas/udmi/discover.tests/example.out deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/schemas/udmi/docs/gateway.md b/schemas/udmi/docs/gateway.md deleted file mode 100644 index 33f31a0152..0000000000 --- a/schemas/udmi/docs/gateway.md +++ /dev/null @@ -1,120 +0,0 @@ -# Device Gateway - -The _gateway_ functionality is used for systems that have legacy, heritage, -or traditional devices that do not communicate directly to the cloud using -the [UDMI specification](../README.md). For example, an older BacNET based -system could use a gateway to translate on-prem communications into UDMI. - -The -[Google Clout IoT Core Gateway Documentation](https://cloud.google.com/iot/docs/how-tos/gateways) -for an overview of the cloud-side implementation of a gateway. UDMI, then, -specifies an additional layer of specification around the associated -message formats. - -Conceptually, there are two types of -entities involved: the _gateway device_, and the _proxied device_. Both of -these are 'devices' in the sense that they have an entry in a cloud registry -and have device-level UDMI data, but they have fundamentally different roles. - -The process of _discovery_, which is where something discovers other devices -on the local network, is conceptually related but functionally distinct and -separate than a gateway. - -## Gateway Operation - -There are two modes for gateway operation: _static_ and _dynamic_. In the -_dynamic_ mode, the gateway functionality if configured dynamically through -gateway _config_ messages, which tell it the local devices it should proxy -for. In a _static_ gateway configuraiton, the gateway will be statically -configured to proxy a set of devices, essentally ignoring any information -in the associated _config_ block. - -The general sequence of events for gateway operation is: -1. 
Optional metadata specifies configuration paramaters that should be used -at install time to properly (manually) setup the device. -2. (_dynamic_ only) On startup, the gateway connects to the cloud and receives a configuration -block that details which _proxy devices_ the gateway should proxy for. -4. Gateway 'attaches' (Cloud IoT terminology) to the proxied devices, -receiving a configuration block for each proxied device. Any attch errors are -indicated in the gateway _status_ block and sent along as a _logentry_ event. -5. (_dynamic_ only) The proxied device's _config_ block specifies any local connection -parameters for the proxied device, e.g. the BacNET device id. -6. The gateway proxies communication to/from the device, translating between -native (e.g. BacNET) communications and UDMI-based messages. - -### config - -The [gateway config block](../config.tests/gateway.json) -simply specifies the list of target proxy devices. -On a config update, the gateway is responsible for handling any change in -this list (added or removed devices). The details of proxied devices are -kept to a minimum here (IDs only) to avoid overflowing the allowed block -size in cases where there are a large number of devices. - -### state - -Any attach errors, e.g. the gateway can not successfully attach to the target -device, should be reported in the [gateway state](../state.tests/gateway.json) -and a _logentry_ message used to detail the -nature of the problem. If the gateway can attach successfully, any other -errors, e.g. the inability to communicate with the device over the local -network, should be indicated as part of the proxy device status block. - -### telemetry - -Telemety from the gateway would primarily consist of standard -[_logentry_](../logentry.tests/logentry.json) messages, which -provide a running comentary about gateway operation. Specificaly, if there -is an error attaching, then there should be appropriate logging to help -diagnose the problem. 
- -### metadata - -The gateway [metadata block](../metadata.tests/gateway.json) specifies -any information necessary either for the -initial (manual) configuration of the device or ongoing validation of -operation. E.g., if a gateway device has a unique MAC address used for -local communications, it would be indicated here. - -## Proxy Device Operation - -Proxy devices are those that have a logical cloud device entry (in a registry), -and are associated (bound) to a particular gateway. On-prem, the device -itself talks a local protocol (e.g. BacNET), but does not have a direct -cloud connection. - -### config - -[Proxy device config blocks](../config.tests/proxy.json) contain a special -_localnet_ section that -specifies information required by the gateway to contact the local device. -E.g., the fact that a device is 'BacNET' and also the device's BacNET object -ID. Based on this, the gateway can communicate with the target device and proxy -all other messages. - -Additionally, the gateway is responsible for proxying all other supported -operations of the config bundle. E.g., if a _pointset_ 'force_value" parameter -is specified, the gateway would need to convert that into the local protocol -and trigger the required functionality. - -### state - -There is no gateway-specific _state_ information, but similarly to _config_ the -gateway is responsible for proxying all relevant state from the local device -into the proxied device's state block. E.g., if the device is in an alarm -state, then the gateway would have to transform that from the local format -into the appropriate UDMI message. - -### telemetry - -Telemetry is handled similarly, with the gateway responsible for proxying data -from local devices through to UDMI. In many cases, this would be translating -specific device points into a [_pointset_ message](../pointset.tests/example.json). 
- -### metadata - -A [proxy device metadata section](../metadata.tests/proxy.json) describes -_localnet_ with the presence of the -device on a local network. This can/should be used for initial programming -and configuration of the device, or to validate proper device configuration. -The gateway implementation itself would not directly deal with this block. diff --git a/schemas/udmi/envelope.json b/schemas/udmi/envelope.json deleted file mode 100644 index 9458e08ba5..0000000000 --- a/schemas/udmi/envelope.json +++ /dev/null @@ -1,39 +0,0 @@ -{ - "title": "Message envelope schema", - "additionalProperties": true, - "properties": { - "deviceId": { - "type": "string", - "pattern": "^[A-Z]{2,6}-[0-9]{1,6}$" - }, - "deviceNumId": { - "type": "string", - "pattern": "^[0-9]+$" - }, - "deviceRegistryId": { - "type": "string", - "pattern": "^[a-zA-Z][-a-zA-Z0-9._+~%]*[a-zA-Z0-9]$" - }, - "projectId": { - "type": "string", - "pattern": "^([.a-z]+:)?[a-z][-a-z0-9]*[a-z0-9]$" - }, - "subFolder": { - "enum": [ - "config", - "discover", - "system", - "metadata", - "pointset", - "state" - ] - } - }, - "required": [ - "projectId", - "deviceRegistryId", - "deviceNumId", - "deviceId", - "subFolder" - ] -} diff --git a/schemas/udmi/envelope.tests/empty.json b/schemas/udmi/envelope.tests/empty.json deleted file mode 100644 index 2c63c08510..0000000000 --- a/schemas/udmi/envelope.tests/empty.json +++ /dev/null @@ -1,2 +0,0 @@ -{ -} diff --git a/schemas/udmi/envelope.tests/empty.out b/schemas/udmi/envelope.tests/empty.out deleted file mode 100644 index d4d84fce5c..0000000000 --- a/schemas/udmi/envelope.tests/empty.out +++ /dev/null @@ -1,9 +0,0 @@ -Validating 1 schemas - Validating 1 files against envelope.json - Against input envelope.tests/empty.json - #: 5 schema violations found - #: required key [deviceId] not found - #: required key [deviceNumId] not found - #: required key [deviceRegistryId] not found - #: required key [projectId] not found - #: required key [subFolder] not found 
diff --git a/schemas/udmi/envelope.tests/errors1.json b/schemas/udmi/envelope.tests/errors1.json deleted file mode 100644 index ff059c78e0..0000000000 --- a/schemas/udmi/envelope.tests/errors1.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "deviceRegistryId": "test/registry", - "deviceNumId": "921302198324X", - "deviceId": "fcu-1" -} diff --git a/schemas/udmi/envelope.tests/errors1.out b/schemas/udmi/envelope.tests/errors1.out deleted file mode 100644 index 601887a26a..0000000000 --- a/schemas/udmi/envelope.tests/errors1.out +++ /dev/null @@ -1,9 +0,0 @@ -Validating 1 schemas - Validating 1 files against envelope.json - Against input envelope.tests/errors1.json - #: 5 schema violations found - #/deviceId: string [fcu-1] does not match pattern ^[A-Z]{2,6}-[0-9]{1,6}$ - #/deviceNumId: string [921302198324X] does not match pattern ^[0-9]+$ - #/deviceRegistryId: string [test/registry] does not match pattern ^[a-zA-Z][-a-zA-Z0-9._+~%]*[a-zA-Z0-9]$ - #: required key [projectId] not found - #: required key [subFolder] not found diff --git a/schemas/udmi/envelope.tests/errors2.json b/schemas/udmi/envelope.tests/errors2.json deleted file mode 100644 index 9cbb163ade..0000000000 --- a/schemas/udmi/envelope.tests/errors2.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "deviceRegistryId": "test-registry", - "deviceNumId": "-9213923812", - "deviceId": "FCUs_02_NW_12" -} diff --git a/schemas/udmi/envelope.tests/errors2.out b/schemas/udmi/envelope.tests/errors2.out deleted file mode 100644 index 0c02908e03..0000000000 --- a/schemas/udmi/envelope.tests/errors2.out +++ /dev/null @@ -1,8 +0,0 @@ -Validating 1 schemas - Validating 1 files against envelope.json - Against input envelope.tests/errors2.json - #: 4 schema violations found - #/deviceId: string [FCUs_02_NW_12] does not match pattern ^[A-Z]{2,6}-[0-9]{1,6}$ - #/deviceNumId: string [-9213923812] does not match pattern ^[0-9]+$ - #: required key [projectId] not found - #: required key [subFolder] not found diff --git 
a/schemas/udmi/envelope.tests/example.json b/schemas/udmi/envelope.tests/example.json deleted file mode 100644 index b48f45a49a..0000000000 --- a/schemas/udmi/envelope.tests/example.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "projectId": "daq-test-suite", - "deviceRegistryId": "test_registry", - "deviceNumId": "921302198324", - "deviceId": "FCU-2", - "subFolder": "pointset" -} diff --git a/schemas/udmi/envelope.tests/example.out b/schemas/udmi/envelope.tests/example.out deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/schemas/udmi/envelope.tests/example2.json b/schemas/udmi/envelope.tests/example2.json deleted file mode 100644 index 572406ce32..0000000000 --- a/schemas/udmi/envelope.tests/example2.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "projectId": "daq-test-suite", - "deviceRegistryId": "test-registry", - "deviceNumId": "23812", - "deviceId": "FCU-002", - "subFolder": "system", -} diff --git a/schemas/udmi/envelope.tests/example2.out b/schemas/udmi/envelope.tests/example2.out deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/schemas/udmi/envelope.tests/lgtw.json b/schemas/udmi/envelope.tests/lgtw.json deleted file mode 100644 index 8e7b88cbd9..0000000000 --- a/schemas/udmi/envelope.tests/lgtw.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "projectId": "daq-test-suite", - "deviceRegistryId": "test_registry", - "deviceNumId": "921302198324", - "deviceId": "LGTW-2", - "subFolder": "discover" -} diff --git a/schemas/udmi/envelope.tests/lgtw.out b/schemas/udmi/envelope.tests/lgtw.out deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/schemas/udmi/metadata.json b/schemas/udmi/metadata.json deleted file mode 100644 index e01dbe16cc..0000000000 --- a/schemas/udmi/metadata.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "title": "Device metadata schema", - "type": "object", - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "required": [ - "timestamp", - "version", - "system" - ], - "properties": { 
- "timestamp": { - "type": "string", - "format": "date-time" - }, - "version": { - "enum": [ - 1 - ] - }, - "hash": { - "type": "string", - "pattern": "^[0-9a-z]{8}$" - }, - "cloud": { - "$ref": "file:metadata_cloud.json#" - }, - "system": { - "$ref": "file:metadata_system.json#" - }, - "gateway": { - "$ref": "file:metadata_gateway.json#" - }, - "localnet": { - "$ref": "file:metadata_localnet.json#" - }, - "pointset": { - "$ref": "file:metadata_pointset.json#" - } - } -} diff --git a/schemas/udmi/metadata.tests/empty.json b/schemas/udmi/metadata.tests/empty.json deleted file mode 100644 index 0967ef424b..0000000000 --- a/schemas/udmi/metadata.tests/empty.json +++ /dev/null @@ -1 +0,0 @@ -{} diff --git a/schemas/udmi/metadata.tests/empty.out b/schemas/udmi/metadata.tests/empty.out deleted file mode 100644 index 159f05c268..0000000000 --- a/schemas/udmi/metadata.tests/empty.out +++ /dev/null @@ -1,7 +0,0 @@ -Validating 1 schemas - Validating 1 files against metadata.json - Against input metadata.tests/empty.json - #: 3 schema violations found - #: required key [system] not found - #: required key [timestamp] not found - #: required key [version] not found diff --git a/schemas/udmi/metadata.tests/errors.json b/schemas/udmi/metadata.tests/errors.json deleted file mode 100644 index 395b19e45f..0000000000 --- a/schemas/udmi/metadata.tests/errors.json +++ /dev/null @@ -1,25 +0,0 @@ -{ - "version": 1, - "timestamp": "2018-08-26T21:39:29.364Z", - "system": { - "guid": "bim://04aEp5ymD_$u5IxhJN2aGi", - "location": { - "site": "New Zealand" - }, - "physical_tag": { - "asset": { - "site": "US-SFO-XYY_Noope!", - "name": "AHU-A01_extension11-option" - } - } - }, - "pointset": { - "rabbits": true, - "points": { - "return_air_temperature_sensor": { - "units": "Celsius", - "monkeys": "elephants" - } - } - } -} diff --git a/schemas/udmi/metadata.tests/errors.out b/schemas/udmi/metadata.tests/errors.out deleted file mode 100644 index c315c2fd40..0000000000 --- 
a/schemas/udmi/metadata.tests/errors.out +++ /dev/null @@ -1,16 +0,0 @@ -Validating 1 schemas - Validating 1 files against metadata.json - Against input metadata.tests/errors.json - #: 8 schema violations found - #/pointset: 3 schema violations found - #/pointset/points/return_air_temperature_sensor: 2 schema violations found - #/pointset/points/return_air_temperature_sensor/units: Celsius is not a valid enum value - #/pointset/points/return_air_temperature_sensor: extraneous key [monkeys] is not permitted - #/pointset: extraneous key [rabbits] is not permitted - #/system: 5 schema violations found - #/system/location/site: string [New Zealand] does not match pattern ^[A-Z]{2}-[A-Z]{3}-[A-Z0-9]{2,9}$ - #/system/physical_tag/asset: 3 schema violations found - #/system/physical_tag/asset/name: string [AHU-A01_extension11-option] does not match pattern ^[A-Z]{2,6}-[0-9]{1,6}$ - #/system/physical_tag/asset/site: string [US-SFO-XYY_Noope!] does not match pattern ^[A-Z]{2}-[A-Z]{3}-[A-Z0-9]{2,9}$ - #/system/physical_tag/asset: required key [guid] not found - #/system: extraneous key [guid] is not permitted diff --git a/schemas/udmi/metadata.tests/example.json b/schemas/udmi/metadata.tests/example.json deleted file mode 100644 index 4f1df64175..0000000000 --- a/schemas/udmi/metadata.tests/example.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "version": 1, - "timestamp": "2018-08-26T21:39:29.364Z", - "system": { - "location": { - "site": "US-SFO-XYY", - "section": "NW-2F", - "position": { - "x": 10, - "y": 20 - } - }, - "physical_tag": { - "asset": { - "guid": "bim://04aEp5ymD_$u5IxhJN2aGi", - "site": "US-SFO-XYY", - "name": "AHU-1" - } - }, - "aux": { - "suffix": "extention11-optional", - } - }, - "pointset": { - "points": { - "return_air_temperature_sensor": { - "units": "Degrees-Celsius" - } - } - } -} diff --git a/schemas/udmi/metadata.tests/example.out b/schemas/udmi/metadata.tests/example.out deleted file mode 100644 index e69de29bb2..0000000000 diff --git 
a/schemas/udmi/metadata.tests/example2.out b/schemas/udmi/metadata.tests/example2.out deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/schemas/udmi/metadata.tests/gateway.json b/schemas/udmi/metadata.tests/gateway.json deleted file mode 100644 index 2da945023e..0000000000 --- a/schemas/udmi/metadata.tests/gateway.json +++ /dev/null @@ -1,41 +0,0 @@ -{ - "version": 1, - "timestamp": "2018-08-26T21:39:29.364Z", - "system": { - "location": { - "site": "US-SFO-XYY", - "section": "NW-2F", - "position": { - "x": 10, - "y": 20 - } - }, - "physical_tag": { - "asset": { - "guid": "bim://04aEp5ymD_$u5IxhJN2aGi", - "site": "US-SFO-XYY", - "name": "AHU-01" - } - } - }, - "cloud": { - "auth_type": "RS256", - }, - "gateway": { - "proxy_ids": ["AHU-22"] - }, - "pointset": { - "points": { - "return_air_temperature_sensor": { - "units": "Degrees-Celsius" - } - } - }, - "localnet": { - "subsystem": { - "bacnet": { - "local_id": "0x991132ec" - } - } - } -} diff --git a/schemas/udmi/metadata.tests/gateway.out b/schemas/udmi/metadata.tests/gateway.out deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/schemas/udmi/metadata.tests/proxy.json b/schemas/udmi/metadata.tests/proxy.json deleted file mode 100644 index bbc522af3c..0000000000 --- a/schemas/udmi/metadata.tests/proxy.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "version": 1, - "timestamp": "2018-08-26T21:39:29.364Z", - "system": { - "location": { - "site": "US-SFO-XYY", - "section": "NW-2F", - "position": { - "x": 10, - "y": 20 - } - }, - "physical_tag": { - "asset": { - "guid": "bim://04aEp5ymD_$u5IxhJN2aGi", - "site": "US-SFO-XYY", - "name": "AHU-1", - } - }, - "aux": { - "suffix": "extention11-optional" - } - }, - "localnet": { - "subsystem": { - "bacnet": { - "local_id": "0x82eecd" - } - } - }, - "pointset": { - "points": { - "return_air_temperature_sensor": { - "units": "Degrees-Celsius", - "ref": "BV23.present_value" - } - } - }, - "gateway": { - "subsystem": "bacnet", - "gateway_id": "GAT-123" 
- } -} diff --git a/schemas/udmi/metadata.tests/proxy.out b/schemas/udmi/metadata.tests/proxy.out deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/schemas/udmi/metadata.tests/toomany.json b/schemas/udmi/metadata.tests/toomany.json deleted file mode 100644 index 6c5b9671e0..0000000000 --- a/schemas/udmi/metadata.tests/toomany.json +++ /dev/null @@ -1,2035 +0,0 @@ -{ - "system": { - "location": { - "site": "UK-LON-S2" - }, - "physical_tag": { - "asset": { - "name": "UK-LON-S2_LTGW-3", - "guid": "ifc://27UivR75r3481CVsBDvlfl" - } - } - }, - "pointset": { - "points": { - "bw4_group_brightness12": { - "units": "Percent" - }, - "bw4_group_brightness13": { - "units": "Percent" - }, - "bw4_group_brightness10": { - "units": "Percent" - }, - "bw4_group_brightness11": { - "units": "Percent" - }, - "bw4_group_brightness16": { - "units": "Percent" - }, - "bw4_group_brightness14": { - "units": "Percent" - }, - "bw4_group_brightness15": { - "units": "Percent" - }, - "bw4_light_level1": { - "units": "Luxes" - }, - "bw4_light_level2": { - "units": "Luxes" - }, - "bw4_light_level7": { - "units": "Luxes" - }, - "bw4_light_level8": { - "units": "Luxes" - }, - "bw4_light_level9": { - "units": "Luxes" - }, - "bw4_light_level3": { - "units": "Luxes" - }, - "bw4_light_level4": { - "units": "Luxes" - }, - "bw4_light_level5": { - "units": "Luxes" - }, - "bw4_light_level6": { - "units": "Luxes" - }, - "bw5_lamp_brightness61": { - "units": "Percent" - }, - "bw5_lamp_brightness62": { - "units": "Percent" - }, - "bw5_lamp_brightness63": { - "units": "Percent" - }, - "bw5_lamp_brightness64": { - "units": "Percent" - }, - "bw5_lamp_brightness60": { - "units": "Percent" - }, - "bw6_lamp_brightness63": { - "units": "Percent" - }, - "bw6_lamp_brightness62": { - "units": "Percent" - }, - "bw6_lamp_brightness61": { - "units": "Percent" - }, - "bw6_lamp_brightness60": { - "units": "Percent" - }, - "bw6_lamp_brightness64": { - "units": "Percent" - }, - "bw4_lamp_brightness60": { - 
"units": "Percent" - }, - "bw4_occupancy5": { - "units": "No-units" - }, - "bw5_lamp_brightness58": { - "units": "Percent" - }, - "bw4_lamp_brightness61": { - "units": "Percent" - }, - "bw4_occupancy6": { - "units": "No-units" - }, - "bw5_lamp_brightness59": { - "units": "Percent" - }, - "bw4_occupancy7": { - "units": "No-units" - }, - "bw4_occupancy8": { - "units": "No-units" - }, - "bw4_lamp_brightness64": { - "units": "Percent" - }, - "bw4_occupancy9": { - "units": "No-units" - }, - "bw5_lamp_brightness54": { - "units": "Percent" - }, - "bw5_lamp_brightness55": { - "units": "Percent" - }, - "bw4_lamp_brightness62": { - "units": "Percent" - }, - "bw5_lamp_brightness56": { - "units": "Percent" - }, - "bw4_lamp_brightness63": { - "units": "Percent" - }, - "bw5_lamp_brightness57": { - "units": "Percent" - }, - "bw5_lamp_brightness50": { - "units": "Percent" - }, - "bw5_lamp_brightness51": { - "units": "Percent" - }, - "bw5_lamp_brightness52": { - "units": "Percent" - }, - "bw5_lamp_brightness53": { - "units": "Percent" - }, - "bw4_occupancy1": { - "units": "No-units" - }, - "bw4_occupancy2": { - "units": "No-units" - }, - "bw4_occupancy3": { - "units": "No-units" - }, - "bw4_occupancy4": { - "units": "No-units" - }, - "bw1_group_brightness16": { - "units": "Percent" - }, - "bw1_group_brightness15": { - "units": "Percent" - }, - "bw1_group_brightness12": { - "units": "Percent" - }, - "bw1_group_brightness11": { - "units": "Percent" - }, - "bw1_group_brightness14": { - "units": "Percent" - }, - "bw1_group_brightness13": { - "units": "Percent" - }, - "bw1_group_brightness10": { - "units": "Percent" - }, - "bw5_lamp_brightness47": { - "units": "Percent" - }, - "bw5_lamp_brightness48": { - "units": "Percent" - }, - "bw5_lamp_brightness9": { - "units": "Percent" - }, - "bw5_lamp_brightness49": { - "units": "Percent" - }, - "bw5_lamp_brightness43": { - "units": "Percent" - }, - "bw6_occupancy7": { - "units": "No-units" - }, - "bw5_lamp_brightness44": { - "units": "Percent" 
- }, - "bw6_occupancy8": { - "units": "No-units" - }, - "bw5_lamp_brightness45": { - "units": "Percent" - }, - "bw6_occupancy9": { - "units": "No-units" - }, - "bw5_lamp_brightness46": { - "units": "Percent" - }, - "bw6_occupancy3": { - "units": "No-units" - }, - "bw2_lamp_brightness64": { - "units": "Percent" - }, - "bw5_lamp_brightness40": { - "units": "Percent" - }, - "bw6_occupancy4": { - "units": "No-units" - }, - "bw5_lamp_brightness41": { - "units": "Percent" - }, - "bw6_occupancy5": { - "units": "No-units" - }, - "bw5_lamp_brightness42": { - "units": "Percent" - }, - "bw6_occupancy6": { - "units": "No-units" - }, - "bw2_lamp_brightness61": { - "units": "Percent" - }, - "bw2_lamp_brightness60": { - "units": "Percent" - }, - "bw2_lamp_brightness63": { - "units": "Percent" - }, - "bw6_occupancy1": { - "units": "No-units" - }, - "bw2_lamp_brightness62": { - "units": "Percent" - }, - "bw6_occupancy2": { - "units": "No-units" - }, - "bw3_lamp_brightness6": { - "units": "Percent" - }, - "bw5_lamp_brightness36": { - "units": "Percent" - }, - "bw3_lamp_brightness5": { - "units": "Percent" - }, - "bw5_lamp_brightness37": { - "units": "Percent" - }, - "bw3_lamp_brightness4": { - "units": "Percent" - }, - "bw5_lamp_brightness38": { - "units": "Percent" - }, - "bw3_lamp_brightness3": { - "units": "Percent" - }, - "bw5_lamp_brightness39": { - "units": "Percent" - }, - "bw2_lamp_brightness58": { - "units": "Percent" - }, - "bw3_lamp_brightness2": { - "units": "Percent" - }, - "bw5_lamp_brightness32": { - "units": "Percent" - }, - "bw2_lamp_brightness57": { - "units": "Percent" - }, - "bw3_lamp_brightness1": { - "units": "Percent" - }, - "bw5_lamp_brightness33": { - "units": "Percent" - }, - "bw5_lamp_brightness34": { - "units": "Percent" - }, - "bw6_lamp_brightness29": { - "units": "Percent" - }, - "bw2_lamp_brightness59": { - "units": "Percent" - }, - "bw5_lamp_brightness35": { - "units": "Percent" - }, - "bw6_lamp_brightness28": { - "units": "Percent" - }, - 
"bw2_lamp_brightness54": { - "units": "Percent" - }, - "bw2_lamp_brightness53": { - "units": "Percent" - }, - "bw2_lamp_brightness56": { - "units": "Percent" - }, - "bw5_lamp_brightness30": { - "units": "Percent" - }, - "bw2_lamp_brightness55": { - "units": "Percent" - }, - "bw5_lamp_brightness31": { - "units": "Percent" - }, - "bw2_lamp_brightness50": { - "units": "Percent" - }, - "bw2_lamp_brightness52": { - "units": "Percent" - }, - "bw2_lamp_brightness51": { - "units": "Percent" - }, - "bw2_light_level1": { - "units": "Luxes" - }, - "bw3_lamp_brightness55": { - "units": "Percent" - }, - "bw6_lamp_brightness30": { - "units": "Percent" - }, - "bw2_light_level2": { - "units": "Luxes" - }, - "bw3_lamp_brightness54": { - "units": "Percent" - }, - "bw2_light_level3": { - "units": "Luxes" - }, - "bw3_lamp_brightness53": { - "units": "Percent" - }, - "bw2_light_level4": { - "units": "Luxes" - }, - "bw3_lamp_brightness52": { - "units": "Percent" - }, - "bw2_light_level5": { - "units": "Luxes" - }, - "bw3_lamp_brightness51": { - "units": "Percent" - }, - "bw6_light_level1": { - "units": "Luxes" - }, - "bw2_light_level6": { - "units": "Luxes" - }, - "bw3_lamp_brightness50": { - "units": "Percent" - }, - "bw6_light_level2": { - "units": "Luxes" - }, - "bw2_light_level7": { - "units": "Luxes" - }, - "bw6_light_level3": { - "units": "Luxes" - }, - "bw2_light_level8": { - "units": "Luxes" - }, - "bw6_light_level4": { - "units": "Luxes" - }, - "bw6_lamp_brightness38": { - "units": "Percent" - }, - "bw6_light_level5": { - "units": "Luxes" - }, - "bw6_lamp_brightness37": { - "units": "Percent" - }, - "bw6_light_level6": { - "units": "Luxes" - }, - "bw6_lamp_brightness36": { - "units": "Percent" - }, - "bw6_light_level7": { - "units": "Luxes" - }, - "bw6_lamp_brightness35": { - "units": "Percent" - }, - "bw6_light_level8": { - "units": "Luxes" - }, - "bw3_lamp_brightness59": { - "units": "Percent" - }, - "bw5_lamp_brightness29": { - "units": "Percent" - }, - 
"bw6_lamp_brightness34": { - "units": "Percent" - }, - "bw6_light_level9": { - "units": "Luxes" - }, - "bw3_lamp_brightness58": { - "units": "Percent" - }, - "bw6_lamp_brightness33": { - "units": "Percent" - }, - "bw3_lamp_brightness57": { - "units": "Percent" - }, - "bw6_lamp_brightness32": { - "units": "Percent" - }, - "bw3_lamp_brightness56": { - "units": "Percent" - }, - "bw6_lamp_brightness31": { - "units": "Percent" - }, - "bw5_lamp_brightness25": { - "units": "Percent" - }, - "bw5_lamp_brightness26": { - "units": "Percent" - }, - "bw5_lamp_brightness27": { - "units": "Percent" - }, - "bw5_lamp_brightness28": { - "units": "Percent" - }, - "bw2_lamp_brightness47": { - "units": "Percent" - }, - "bw5_lamp_brightness21": { - "units": "Percent" - }, - "bw2_lamp_brightness46": { - "units": "Percent" - }, - "bw5_lamp_brightness22": { - "units": "Percent" - }, - "bw6_lamp_brightness19": { - "units": "Percent" - }, - "bw2_lamp_brightness49": { - "units": "Percent" - }, - "bw5_lamp_brightness23": { - "units": "Percent" - }, - "bw6_lamp_brightness18": { - "units": "Percent" - }, - "bw2_lamp_brightness48": { - "units": "Percent" - }, - "bw5_lamp_brightness24": { - "units": "Percent" - }, - "bw6_lamp_brightness17": { - "units": "Percent" - }, - "bw2_lamp_brightness43": { - "units": "Percent" - }, - "bw2_lamp_brightness42": { - "units": "Percent" - }, - "bw2_lamp_brightness45": { - "units": "Percent" - }, - "bw2_lamp_brightness44": { - "units": "Percent" - }, - "bw5_lamp_brightness20": { - "units": "Percent" - }, - "bw2_lamp_brightness41": { - "units": "Percent" - }, - "bw2_lamp_brightness40": { - "units": "Percent" - }, - "bw3_lamp_brightness44": { - "units": "Percent" - }, - "bw3_lamp_brightness43": { - "units": "Percent" - }, - "bw3_lamp_brightness42": { - "units": "Percent" - }, - "bw3_lamp_brightness41": { - "units": "Percent" - }, - "bw3_lamp_brightness40": { - "units": "Percent" - }, - "bw6_lamp_brightness27": { - "units": "Percent" - }, - "bw6_lamp_brightness26": { 
- "units": "Percent" - }, - "bw6_lamp_brightness25": { - "units": "Percent" - }, - "bw3_lamp_brightness49": { - "units": "Percent" - }, - "bw6_lamp_brightness24": { - "units": "Percent" - }, - "bw3_lamp_brightness48": { - "units": "Percent" - }, - "bw5_lamp_brightness18": { - "units": "Percent" - }, - "bw6_lamp_brightness23": { - "units": "Percent" - }, - "bw3_lamp_brightness47": { - "units": "Percent" - }, - "bw5_lamp_brightness19": { - "units": "Percent" - }, - "bw6_lamp_brightness22": { - "units": "Percent" - }, - "bw3_lamp_brightness46": { - "units": "Percent" - }, - "bw6_lamp_brightness21": { - "units": "Percent" - }, - "bw3_lamp_brightness45": { - "units": "Percent" - }, - "bw6_lamp_brightness20": { - "units": "Percent" - }, - "bw5_lamp_brightness14": { - "units": "Percent" - }, - "bw2_lamp_brightness39": { - "units": "Percent" - }, - "bw5_lamp_brightness15": { - "units": "Percent" - }, - "bw5_lamp_brightness16": { - "units": "Percent" - }, - "bw5_lamp_brightness17": { - "units": "Percent" - }, - "bw2_lamp_brightness36": { - "units": "Percent" - }, - "bw5_lamp_brightness10": { - "units": "Percent" - }, - "bw2_lamp_brightness35": { - "units": "Percent" - }, - "bw5_lamp_brightness11": { - "units": "Percent" - }, - "bw2_lamp_brightness38": { - "units": "Percent" - }, - "bw5_lamp_brightness12": { - "units": "Percent" - }, - "bw2_lamp_brightness37": { - "units": "Percent" - }, - "bw5_lamp_brightness13": { - "units": "Percent" - }, - "bw2_lamp_brightness32": { - "units": "Percent" - }, - "bw2_lamp_brightness31": { - "units": "Percent" - }, - "bw2_lamp_brightness34": { - "units": "Percent" - }, - "bw2_lamp_brightness33": { - "units": "Percent" - }, - "bw2_lamp_brightness30": { - "units": "Percent" - }, - "bw6_lamp_brightness52": { - "units": "Percent" - }, - "bw3_light_level1": { - "units": "Luxes" - }, - "bw6_lamp_brightness51": { - "units": "Percent" - }, - "bw6_lamp_brightness50": { - "units": "Percent" - }, - "bw1_occupancy1": { - "units": "No-units" - }, - 
"bw1_occupancy5": { - "units": "No-units" - }, - "bw3_light_level8": { - "units": "Luxes" - }, - "bw1_occupancy4": { - "units": "No-units" - }, - "bw3_light_level9": { - "units": "Luxes" - }, - "bw6_lamp_brightness59": { - "units": "Percent" - }, - "bw1_occupancy3": { - "units": "No-units" - }, - "bw3_light_level6": { - "units": "Luxes" - }, - "bw6_lamp_brightness58": { - "units": "Percent" - }, - "bw1_occupancy2": { - "units": "No-units" - }, - "bw3_light_level7": { - "units": "Luxes" - }, - "bw6_lamp_brightness57": { - "units": "Percent" - }, - "bw1_occupancy9": { - "units": "No-units" - }, - "bw3_light_level4": { - "units": "Luxes" - }, - "bw6_lamp_brightness56": { - "units": "Percent" - }, - "bw1_occupancy8": { - "units": "No-units" - }, - "bw3_light_level5": { - "units": "Luxes" - }, - "bw6_lamp_brightness55": { - "units": "Percent" - }, - "bw1_occupancy7": { - "units": "No-units" - }, - "bw3_light_level2": { - "units": "Luxes" - }, - "bw6_lamp_brightness54": { - "units": "Percent" - }, - "bw1_occupancy6": { - "units": "No-units" - }, - "bw3_light_level3": { - "units": "Luxes" - }, - "bw6_lamp_brightness53": { - "units": "Percent" - }, - "bw2_lamp_brightness29": { - "units": "Percent" - }, - "bw2_lamp_brightness28": { - "units": "Percent" - }, - "bw2_lamp_brightness25": { - "units": "Percent" - }, - "bw2_lamp_brightness24": { - "units": "Percent" - }, - "bw2_lamp_brightness27": { - "units": "Percent" - }, - "bw2_lamp_brightness26": { - "units": "Percent" - }, - "bw6_lamp_brightness39": { - "units": "Percent" - }, - "bw2_lamp_brightness21": { - "units": "Percent" - }, - "bw2_lamp_brightness20": { - "units": "Percent" - }, - "bw2_lamp_brightness23": { - "units": "Percent" - }, - "bw2_lamp_brightness22": { - "units": "Percent" - }, - "bw6_lamp_brightness41": { - "units": "Percent" - }, - "bw6_lamp_brightness40": { - "units": "Percent" - }, - "bw3_lamp_brightness64": { - "units": "Percent" - }, - "bw3_lamp_brightness63": { - "units": "Percent" - }, - 
"bw3_lamp_brightness62": { - "units": "Percent" - }, - "bw3_lamp_brightness61": { - "units": "Percent" - }, - "bw3_lamp_brightness60": { - "units": "Percent" - }, - "bw6_lamp_brightness49": { - "units": "Percent" - }, - "bw6_lamp_brightness48": { - "units": "Percent" - }, - "bw6_lamp_brightness47": { - "units": "Percent" - }, - "bw6_lamp_brightness46": { - "units": "Percent" - }, - "bw6_lamp_brightness45": { - "units": "Percent" - }, - "bw3_lamp_brightness9": { - "units": "Percent" - }, - "bw6_lamp_brightness44": { - "units": "Percent" - }, - "bw3_lamp_brightness8": { - "units": "Percent" - }, - "bw6_lamp_brightness43": { - "units": "Percent" - }, - "bw3_lamp_brightness7": { - "units": "Percent" - }, - "bw6_lamp_brightness42": { - "units": "Percent" - }, - "bw1_lamp_brightness20": { - "units": "Percent" - }, - "bw1_lamp_brightness24": { - "units": "Percent" - }, - "bw1_group_brightness1": { - "units": "Percent" - }, - "bw1_lamp_brightness23": { - "units": "Percent" - }, - "bw1_group_brightness2": { - "units": "Percent" - }, - "bw1_lamp_brightness22": { - "units": "Percent" - }, - "bw1_group_brightness3": { - "units": "Percent" - }, - "bw1_lamp_brightness21": { - "units": "Percent" - }, - "bw1_lamp_brightness28": { - "units": "Percent" - }, - "bw1_lamp_brightness27": { - "units": "Percent" - }, - "bw1_lamp_brightness26": { - "units": "Percent" - }, - "bw1_lamp_brightness25": { - "units": "Percent" - }, - "bw1_lamp_brightness29": { - "units": "Percent" - }, - "bw1_lamp_brightness31": { - "units": "Percent" - }, - "bw1_light_level13": { - "units": "Luxes" - }, - "bw1_lamp_brightness30": { - "units": "Percent" - }, - "bw1_light_level14": { - "units": "Luxes" - }, - "bw1_light_level11": { - "units": "Luxes" - }, - "bw1_light_level12": { - "units": "Luxes" - }, - "bw1_lamp_brightness35": { - "units": "Percent" - }, - "bw1_lamp_brightness34": { - "units": "Percent" - }, - "bw1_lamp_brightness33": { - "units": "Percent" - }, - "bw1_light_level15": { - "units": "Luxes" - }, 
- "bw1_lamp_brightness32": { - "units": "Percent" - }, - "bw1_light_level16": { - "units": "Luxes" - }, - "bw1_lamp_brightness39": { - "units": "Percent" - }, - "bw1_lamp_brightness38": { - "units": "Percent" - }, - "bw1_lamp_brightness37": { - "units": "Percent" - }, - "bw1_lamp_brightness36": { - "units": "Percent" - }, - "bw1_light_level10": { - "units": "Luxes" - }, - "bw3_group_brightness10": { - "units": "Percent" - }, - "bw5_group_brightness15": { - "units": "Percent" - }, - "bw5_group_brightness16": { - "units": "Percent" - }, - "bw3_group_brightness12": { - "units": "Percent" - }, - "bw3_group_brightness11": { - "units": "Percent" - }, - "bw3_group_brightness14": { - "units": "Percent" - }, - "bw5_group_brightness11": { - "units": "Percent" - }, - "bw3_group_brightness13": { - "units": "Percent" - }, - "bw5_group_brightness12": { - "units": "Percent" - }, - "bw3_group_brightness16": { - "units": "Percent" - }, - "bw5_group_brightness13": { - "units": "Percent" - }, - "bw3_group_brightness15": { - "units": "Percent" - }, - "bw5_group_brightness14": { - "units": "Percent" - }, - "bw6_lamp_brightness16": { - "units": "Percent" - }, - "bw6_lamp_brightness15": { - "units": "Percent" - }, - "bw6_lamp_brightness14": { - "units": "Percent" - }, - "bw5_group_brightness10": { - "units": "Percent" - }, - "bw6_lamp_brightness13": { - "units": "Percent" - }, - "bw6_lamp_brightness12": { - "units": "Percent" - }, - "bw6_lamp_brightness11": { - "units": "Percent" - }, - "bw6_lamp_brightness10": { - "units": "Percent" - }, - "bw3_occupancy10": { - "units": "No-units" - }, - "bw6_occupancy16": { - "units": "No-units" - }, - "bw2_occupancy10": { - "units": "No-units" - }, - "bw2_occupancy11": { - "units": "No-units" - }, - "bw1_lamp_brightness4": { - "units": "Percent" - }, - "bw1_lamp_brightness13": { - "units": "Percent" - }, - "bw2_occupancy12": { - "units": "No-units" - }, - "bw3_occupancy14": { - "units": "No-units" - }, - "bw6_occupancy12": { - "units": "No-units" - 
}, - "bw1_lamp_brightness3": { - "units": "Percent" - }, - "bw1_lamp_brightness12": { - "units": "Percent" - }, - "bw2_occupancy13": { - "units": "No-units" - }, - "bw3_occupancy13": { - "units": "No-units" - }, - "bw6_occupancy13": { - "units": "No-units" - }, - "bw1_lamp_brightness2": { - "units": "Percent" - }, - "bw1_lamp_brightness11": { - "units": "Percent" - }, - "bw2_occupancy14": { - "units": "No-units" - }, - "bw3_occupancy12": { - "units": "No-units" - }, - "bw6_occupancy14": { - "units": "No-units" - }, - "bw1_lamp_brightness1": { - "units": "Percent" - }, - "bw1_lamp_brightness10": { - "units": "Percent" - }, - "bw2_occupancy15": { - "units": "No-units" - }, - "bw3_occupancy11": { - "units": "No-units" - }, - "bw6_occupancy15": { - "units": "No-units" - }, - "bw1_lamp_brightness17": { - "units": "Percent" - }, - "bw2_light_level9": { - "units": "Luxes" - }, - "bw1_lamp_brightness16": { - "units": "Percent" - }, - "bw1_lamp_brightness15": { - "units": "Percent" - }, - "bw1_lamp_brightness14": { - "units": "Percent" - }, - "bw1_lamp_brightness19": { - "units": "Percent" - }, - "bw1_lamp_brightness18": { - "units": "Percent" - }, - "bw1_group_brightness4": { - "units": "Percent" - }, - "bw2_occupancy16": { - "units": "No-units" - }, - "bw1_group_brightness5": { - "units": "Percent" - }, - "bw1_group_brightness6": { - "units": "Percent" - }, - "bw3_occupancy16": { - "units": "No-units" - }, - "bw6_occupancy10": { - "units": "No-units" - }, - "bw1_group_brightness7": { - "units": "Percent" - }, - "bw3_occupancy15": { - "units": "No-units" - }, - "bw6_occupancy11": { - "units": "No-units" - }, - "bw1_group_brightness8": { - "units": "Percent" - }, - "bw1_group_brightness9": { - "units": "Percent" - }, - "bw1_lamp_brightness64": { - "units": "Percent" - }, - "bw1_lamp_brightness63": { - "units": "Percent" - }, - "bw1_lamp_brightness62": { - "units": "Percent" - }, - "bw1_lamp_brightness61": { - "units": "Percent" - }, - "bw1_lamp_brightness8": { - "units": 
"Percent" - }, - "bw1_lamp_brightness7": { - "units": "Percent" - }, - "bw1_lamp_brightness6": { - "units": "Percent" - }, - "bw1_lamp_brightness5": { - "units": "Percent" - }, - "bw1_lamp_brightness60": { - "units": "Percent" - }, - "bw1_lamp_brightness9": { - "units": "Percent" - }, - "bw1_lamp_brightness42": { - "units": "Percent" - }, - "bw1_lamp_brightness41": { - "units": "Percent" - }, - "bw1_lamp_brightness40": { - "units": "Percent" - }, - "bw1_lamp_brightness46": { - "units": "Percent" - }, - "bw1_lamp_brightness45": { - "units": "Percent" - }, - "bw1_lamp_brightness44": { - "units": "Percent" - }, - "bw1_lamp_brightness43": { - "units": "Percent" - }, - "bw1_lamp_brightness49": { - "units": "Percent" - }, - "bw1_lamp_brightness48": { - "units": "Percent" - }, - "bw1_lamp_brightness47": { - "units": "Percent" - }, - "bw1_light_level3": { - "units": "Luxes" - }, - "bw1_light_level2": { - "units": "Luxes" - }, - "bw1_light_level1": { - "units": "Luxes" - }, - "bw1_light_level7": { - "units": "Luxes" - }, - "bw1_light_level6": { - "units": "Luxes" - }, - "bw1_light_level5": { - "units": "Luxes" - }, - "bw1_light_level4": { - "units": "Luxes" - }, - "bw1_lamp_brightness53": { - "units": "Percent" - }, - "bw1_lamp_brightness52": { - "units": "Percent" - }, - "bw1_lamp_brightness51": { - "units": "Percent" - }, - "bw1_lamp_brightness50": { - "units": "Percent" - }, - "bw1_lamp_brightness57": { - "units": "Percent" - }, - "bw1_lamp_brightness56": { - "units": "Percent" - }, - "bw1_lamp_brightness55": { - "units": "Percent" - }, - "bw1_lamp_brightness54": { - "units": "Percent" - }, - "bw1_lamp_brightness59": { - "units": "Percent" - }, - "bw1_light_level9": { - "units": "Luxes" - }, - "bw1_lamp_brightness58": { - "units": "Percent" - }, - "bw1_light_level8": { - "units": "Luxes" - }, - "bw4_lamp_brightness1": { - "units": "Percent" - }, - "bw4_lamp_brightness3": { - "units": "Percent" - }, - "bw4_lamp_brightness2": { - "units": "Percent" - }, - 
"bw4_lamp_brightness5": { - "units": "Percent" - }, - "bw4_lamp_brightness4": { - "units": "Percent" - }, - "bw4_lamp_brightness7": { - "units": "Percent" - }, - "bw4_lamp_brightness6": { - "units": "Percent" - }, - "bw5_occupancy8": { - "units": "No-units" - }, - "bw5_occupancy9": { - "units": "No-units" - }, - "bw5_occupancy6": { - "units": "No-units" - }, - "bw5_occupancy7": { - "units": "No-units" - }, - "bw5_occupancy1": { - "units": "No-units" - }, - "bw5_occupancy4": { - "units": "No-units" - }, - "bw5_occupancy5": { - "units": "No-units" - }, - "bw5_occupancy2": { - "units": "No-units" - }, - "bw5_occupancy3": { - "units": "No-units" - }, - "bw4_lamp_brightness9": { - "units": "Percent" - }, - "bw4_lamp_brightness8": { - "units": "Percent" - }, - "bw6_group_brightness16": { - "units": "Percent" - }, - "bw6_group_brightness7": { - "units": "Percent" - }, - "bw6_group_brightness14": { - "units": "Percent" - }, - "bw6_group_brightness8": { - "units": "Percent" - }, - "bw6_group_brightness15": { - "units": "Percent" - }, - "bw6_group_brightness9": { - "units": "Percent" - }, - "bw6_group_brightness12": { - "units": "Percent" - }, - "bw6_group_brightness13": { - "units": "Percent" - }, - "bw2_light_level15": { - "units": "Luxes" - }, - "bw6_group_brightness3": { - "units": "Percent" - }, - "bw2_light_level14": { - "units": "Luxes" - }, - "bw6_group_brightness4": { - "units": "Percent" - }, - "bw6_group_brightness5": { - "units": "Percent" - }, - "bw2_light_level16": { - "units": "Luxes" - }, - "bw6_group_brightness6": { - "units": "Percent" - }, - "bw2_light_level11": { - "units": "Luxes" - }, - "bw2_light_level10": { - "units": "Luxes" - }, - "bw2_light_level13": { - "units": "Luxes" - }, - "bw6_group_brightness1": { - "units": "Percent" - }, - "bw2_light_level12": { - "units": "Luxes" - }, - "bw6_group_brightness2": { - "units": "Percent" - }, - "bw2_group_brightness16": { - "units": "Percent" - }, - "bw2_group_brightness15": { - "units": "Percent" - }, - 
"bw6_group_brightness10": { - "units": "Percent" - }, - "bw2_group_brightness14": { - "units": "Percent" - }, - "bw6_group_brightness11": { - "units": "Percent" - }, - "bw2_group_brightness13": { - "units": "Percent" - }, - "bw2_group_brightness12": { - "units": "Percent" - }, - "bw2_group_brightness11": { - "units": "Percent" - }, - "bw2_group_brightness10": { - "units": "Percent" - }, - "bw5_group_brightness1": { - "units": "Percent" - }, - "bw5_group_brightness3": { - "units": "Percent" - }, - "bw5_group_brightness2": { - "units": "Percent" - }, - "bw5_group_brightness5": { - "units": "Percent" - }, - "bw5_group_brightness4": { - "units": "Percent" - }, - "bw5_group_brightness7": { - "units": "Percent" - }, - "bw5_group_brightness6": { - "units": "Percent" - }, - "bw5_group_brightness9": { - "units": "Percent" - }, - "bw5_group_brightness8": { - "units": "Percent" - }, - "bw4_light_level16": { - "units": "Luxes" - }, - "bw3_occupancy3": { - "units": "No-units" - }, - "bw3_occupancy2": { - "units": "No-units" - }, - "bw3_occupancy1": { - "units": "No-units" - }, - "bw5_light_level10": { - "units": "Luxes" - }, - "bw5_light_level11": { - "units": "Luxes" - }, - "bw5_light_level12": { - "units": "Luxes" - }, - "bw5_light_level13": { - "units": "Luxes" - }, - "bw5_light_level14": { - "units": "Luxes" - }, - "bw3_occupancy9": { - "units": "No-units" - }, - "bw3_occupancy8": { - "units": "No-units" - }, - "bw3_occupancy7": { - "units": "No-units" - }, - "bw3_occupancy6": { - "units": "No-units" - }, - "bw3_occupancy5": { - "units": "No-units" - }, - "bw3_occupancy4": { - "units": "No-units" - }, - "bw4_light_level12": { - "units": "Luxes" - }, - "bw4_light_level13": { - "units": "Luxes" - }, - "bw4_light_level14": { - "units": "Luxes" - }, - "bw4_light_level15": { - "units": "Luxes" - }, - "bw4_light_level10": { - "units": "Luxes" - }, - "bw4_light_level11": { - "units": "Luxes" - }, - "bw2_group_brightness5": { - "units": "Percent" - }, - "bw2_group_brightness6": { - 
"units": "Percent" - }, - "bw2_group_brightness3": { - "units": "Percent" - }, - "bw2_group_brightness4": { - "units": "Percent" - }, - "bw6_lamp_brightness1": { - "units": "Percent" - }, - "bw2_group_brightness9": { - "units": "Percent" - }, - "bw2_group_brightness7": { - "units": "Percent" - }, - "bw2_group_brightness8": { - "units": "Percent" - }, - "bw3_light_level10": { - "units": "Luxes" - }, - "bw6_lamp_brightness6": { - "units": "Percent" - }, - "bw6_lamp_brightness7": { - "units": "Percent" - }, - "bw3_light_level12": { - "units": "Luxes" - }, - "bw6_lamp_brightness8": { - "units": "Percent" - }, - "bw3_light_level11": { - "units": "Luxes" - }, - "bw6_lamp_brightness9": { - "units": "Percent" - }, - "bw3_light_level14": { - "units": "Luxes" - }, - "bw6_lamp_brightness2": { - "units": "Percent" - }, - "bw3_light_level13": { - "units": "Luxes" - }, - "bw6_lamp_brightness3": { - "units": "Percent" - }, - "bw3_light_level16": { - "units": "Luxes" - }, - "bw6_lamp_brightness4": { - "units": "Percent" - }, - "bw3_light_level15": { - "units": "Luxes" - }, - "bw6_lamp_brightness5": { - "units": "Percent" - }, - "bw2_lamp_brightness18": { - "units": "Percent" - }, - "bw2_lamp_brightness17": { - "units": "Percent" - }, - "bw2_lamp_brightness19": { - "units": "Percent" - }, - "bw2_lamp_brightness14": { - "units": "Percent" - }, - "bw2_lamp_brightness13": { - "units": "Percent" - }, - "bw2_lamp_brightness16": { - "units": "Percent" - }, - "bw2_lamp_brightness15": { - "units": "Percent" - }, - "bw2_lamp_brightness10": { - "units": "Percent" - }, - "bw2_lamp_brightness12": { - "units": "Percent" - }, - "bw2_lamp_brightness11": { - "units": "Percent" - }, - "bw2_lamp_brightness7": { - "units": "Percent" - }, - "bw2_lamp_brightness6": { - "units": "Percent" - }, - "bw2_lamp_brightness9": { - "units": "Percent" - }, - "bw2_lamp_brightness8": { - "units": "Percent" - }, - "bw3_lamp_brightness11": { - "units": "Percent" - }, - "bw3_lamp_brightness10": { - "units": "Percent" 
- }, - "bw3_lamp_brightness19": { - "units": "Percent" - }, - "bw3_lamp_brightness18": { - "units": "Percent" - }, - "bw3_lamp_brightness17": { - "units": "Percent" - }, - "bw3_lamp_brightness16": { - "units": "Percent" - }, - "bw2_group_brightness1": { - "units": "Percent" - }, - "bw3_lamp_brightness15": { - "units": "Percent" - }, - "bw6_light_level13": { - "units": "Luxes" - }, - "bw2_group_brightness2": { - "units": "Percent" - }, - "bw3_lamp_brightness14": { - "units": "Percent" - }, - "bw6_light_level12": { - "units": "Luxes" - }, - "bw3_lamp_brightness13": { - "units": "Percent" - }, - "bw6_light_level11": { - "units": "Luxes" - }, - "bw3_lamp_brightness12": { - "units": "Percent" - }, - "bw6_light_level10": { - "units": "Luxes" - }, - "bw2_occupancy2": { - "units": "No-units" - }, - "bw2_occupancy1": { - "units": "No-units" - }, - "bw4_lamp_brightness10": { - "units": "Percent" - }, - "bw2_occupancy8": { - "units": "No-units" - }, - "bw4_lamp_brightness13": { - "units": "Percent" - }, - "bw2_occupancy7": { - "units": "No-units" - }, - "bw4_lamp_brightness14": { - "units": "Percent" - }, - "bw4_lamp_brightness11": { - "units": "Percent" - }, - "bw2_occupancy9": { - "units": "No-units" - }, - "bw4_lamp_brightness12": { - "units": "Percent" - }, - "bw2_occupancy4": { - "units": "No-units" - }, - "bw4_lamp_brightness17": { - "units": "Percent" - }, - "bw2_occupancy3": { - "units": "No-units" - }, - "bw4_lamp_brightness18": { - "units": "Percent" - }, - "bw2_occupancy6": { - "units": "No-units" - }, - "bw4_lamp_brightness15": { - "units": "Percent" - }, - "bw2_occupancy5": { - "units": "No-units" - }, - "bw4_lamp_brightness16": { - "units": "Percent" - }, - "bw2_lamp_brightness3": { - "units": "Percent" - }, - "bw2_lamp_brightness2": { - "units": "Percent" - }, - "bw2_lamp_brightness5": { - "units": "Percent" - }, - "bw2_lamp_brightness4": { - "units": "Percent" - }, - "bw3_group_brightness1": { - "units": "Percent" - }, - "bw2_lamp_brightness1": { - "units": 
"Percent" - }, - "bw3_group_brightness6": { - "units": "Percent" - }, - "bw5_light_level15": { - "units": "Luxes" - }, - "bw3_group_brightness7": { - "units": "Percent" - }, - "bw5_light_level16": { - "units": "Luxes" - }, - "bw3_group_brightness8": { - "units": "Percent" - }, - "bw3_group_brightness9": { - "units": "Percent" - }, - "bw3_group_brightness2": { - "units": "Percent" - }, - "bw3_group_brightness3": { - "units": "Percent" - }, - "bw3_group_brightness4": { - "units": "Percent" - }, - "bw3_group_brightness5": { - "units": "Percent" - }, - "bw3_lamp_brightness33": { - "units": "Percent" - }, - "bw3_lamp_brightness32": { - "units": "Percent" - }, - "bw3_lamp_brightness31": { - "units": "Percent" - }, - "bw3_lamp_brightness30": { - "units": "Percent" - }, - "bw3_lamp_brightness39": { - "units": "Percent" - }, - "bw3_lamp_brightness38": { - "units": "Percent" - }, - "bw3_lamp_brightness37": { - "units": "Percent" - }, - "bw3_lamp_brightness36": { - "units": "Percent" - }, - "bw3_lamp_brightness35": { - "units": "Percent" - }, - "bw3_lamp_brightness34": { - "units": "Percent" - }, - "bw4_occupancy12": { - "units": "No-units" - }, - "bw5_occupancy14": { - "units": "No-units" - }, - "bw4_occupancy13": { - "units": "No-units" - }, - "bw5_occupancy13": { - "units": "No-units" - }, - "bw6_light_level16": { - "units": "Luxes" - }, - "bw4_occupancy10": { - "units": "No-units" - }, - "bw5_occupancy16": { - "units": "No-units" - }, - "bw6_light_level15": { - "units": "Luxes" - }, - "bw4_occupancy11": { - "units": "No-units" - }, - "bw5_occupancy15": { - "units": "No-units" - }, - "bw6_light_level14": { - "units": "Luxes" - }, - "bw1_occupancy14": { - "units": "No-units" - }, - "bw1_occupancy13": { - "units": "No-units" - }, - "bw1_occupancy16": { - "units": "No-units" - }, - "bw1_occupancy15": { - "units": "No-units" - }, - "bw1_occupancy10": { - "units": "No-units" - }, - "bw1_occupancy12": { - "units": "No-units" - }, - "bw1_occupancy11": { - "units": "No-units" - }, 
- "bw3_lamp_brightness22": { - "units": "Percent" - }, - "bw3_lamp_brightness21": { - "units": "Percent" - }, - "bw3_lamp_brightness20": { - "units": "Percent" - }, - "bw3_lamp_brightness29": { - "units": "Percent" - }, - "bw3_lamp_brightness28": { - "units": "Percent" - }, - "bw3_lamp_brightness27": { - "units": "Percent" - }, - "bw3_lamp_brightness26": { - "units": "Percent" - }, - "bw4_occupancy16": { - "units": "No-units" - }, - "bw5_occupancy10": { - "units": "No-units" - }, - "bw3_lamp_brightness25": { - "units": "Percent" - }, - "bw3_lamp_brightness24": { - "units": "Percent" - }, - "bw4_occupancy14": { - "units": "No-units" - }, - "bw5_occupancy12": { - "units": "No-units" - }, - "bw3_lamp_brightness23": { - "units": "Percent" - }, - "bw4_occupancy15": { - "units": "No-units" - }, - "bw5_occupancy11": { - "units": "No-units" - }, - "bw4_lamp_brightness42": { - "units": "Percent" - }, - "bw4_lamp_brightness43": { - "units": "Percent" - }, - "bw4_lamp_brightness40": { - "units": "Percent" - }, - "bw4_lamp_brightness41": { - "units": "Percent" - }, - "bw4_lamp_brightness46": { - "units": "Percent" - }, - "bw4_lamp_brightness47": { - "units": "Percent" - }, - "bw4_lamp_brightness44": { - "units": "Percent" - }, - "bw4_lamp_brightness45": { - "units": "Percent" - }, - "bw4_lamp_brightness48": { - "units": "Percent" - }, - "bw4_lamp_brightness49": { - "units": "Percent" - }, - "bw5_lamp_brightness3": { - "units": "Percent" - }, - "bw5_lamp_brightness4": { - "units": "Percent" - }, - "bw5_lamp_brightness1": { - "units": "Percent" - }, - "bw5_lamp_brightness2": { - "units": "Percent" - }, - "bw5_lamp_brightness7": { - "units": "Percent" - }, - "bw5_lamp_brightness8": { - "units": "Percent" - }, - "bw5_lamp_brightness5": { - "units": "Percent" - }, - "bw5_lamp_brightness6": { - "units": "Percent" - }, - "bw4_lamp_brightness50": { - "units": "Percent" - }, - "bw4_lamp_brightness53": { - "units": "Percent" - }, - "bw4_lamp_brightness54": { - "units": "Percent" - }, - 
"bw4_lamp_brightness51": { - "units": "Percent" - }, - "bw4_lamp_brightness52": { - "units": "Percent" - }, - "bw4_lamp_brightness57": { - "units": "Percent" - }, - "bw4_lamp_brightness58": { - "units": "Percent" - }, - "bw4_lamp_brightness55": { - "units": "Percent" - }, - "bw4_lamp_brightness56": { - "units": "Percent" - }, - "bw4_lamp_brightness59": { - "units": "Percent" - }, - "bw4_group_brightness8": { - "units": "Percent" - }, - "bw4_group_brightness7": { - "units": "Percent" - }, - "bw4_group_brightness6": { - "units": "Percent" - }, - "bw4_group_brightness5": { - "units": "Percent" - }, - "bw4_group_brightness4": { - "units": "Percent" - }, - "bw4_group_brightness3": { - "units": "Percent" - }, - "bw4_group_brightness2": { - "units": "Percent" - }, - "bw4_group_brightness1": { - "units": "Percent" - }, - "bw4_group_brightness9": { - "units": "Percent" - }, - "bw4_lamp_brightness20": { - "units": "Percent" - }, - "bw4_lamp_brightness21": { - "units": "Percent" - }, - "bw4_lamp_brightness24": { - "units": "Percent" - }, - "bw4_lamp_brightness25": { - "units": "Percent" - }, - "bw4_lamp_brightness22": { - "units": "Percent" - }, - "bw4_lamp_brightness23": { - "units": "Percent" - }, - "bw4_lamp_brightness28": { - "units": "Percent" - }, - "bw4_lamp_brightness29": { - "units": "Percent" - }, - "bw4_lamp_brightness26": { - "units": "Percent" - }, - "bw4_lamp_brightness27": { - "units": "Percent" - }, - "bw4_lamp_brightness19": { - "units": "Percent" - }, - "bw5_light_level2": { - "units": "Luxes" - }, - "bw5_light_level3": { - "units": "Luxes" - }, - "bw5_light_level1": { - "units": "Luxes" - }, - "bw5_light_level6": { - "units": "Luxes" - }, - "bw5_light_level7": { - "units": "Luxes" - }, - "bw5_light_level4": { - "units": "Luxes" - }, - "bw5_light_level5": { - "units": "Luxes" - }, - "bw5_light_level8": { - "units": "Luxes" - }, - "bw5_light_level9": { - "units": "Luxes" - }, - "bw4_lamp_brightness31": { - "units": "Percent" - }, - "bw4_lamp_brightness32": { 
- "units": "Percent" - }, - "bw4_lamp_brightness30": { - "units": "Percent" - }, - "bw4_lamp_brightness35": { - "units": "Percent" - }, - "bw4_lamp_brightness36": { - "units": "Percent" - }, - "bw4_lamp_brightness33": { - "units": "Percent" - }, - "bw4_lamp_brightness34": { - "units": "Percent" - }, - "bw4_lamp_brightness39": { - "units": "Percent" - }, - "bw4_lamp_brightness37": { - "units": "Percent" - }, - "bw4_lamp_brightness38": { - "units": "Percent" - } - } - }, - "version": 1, - "timestamp": "2020-03-05T14:42:59.743Z" -} diff --git a/schemas/udmi/metadata.tests/toomany.out b/schemas/udmi/metadata.tests/toomany.out deleted file mode 100644 index c4e33ac250..0000000000 --- a/schemas/udmi/metadata.tests/toomany.out +++ /dev/null @@ -1,6 +0,0 @@ -Validating 1 schemas - Validating 1 files against metadata.json - Against input metadata.tests/toomany.json - #: 2 schema violations found - #/pointset/points: maximum size: [150], found: [672] - #/system/physical_tag/asset/name: string [UK-LON-S2_LTGW-3] does not match pattern ^[A-Z]{2,6}-[0-9]{1,6}$ diff --git a/schemas/udmi/metadata_cloud.json b/schemas/udmi/metadata_cloud.json deleted file mode 100644 index 5ca41285a7..0000000000 --- a/schemas/udmi/metadata_cloud.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "title": "Cloud configuration metadata snippet", - "type": "object", - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "auth_type": { - "enum": [ - "RS256", - "RS256_X506" - ] - }, - "is_gateway": { - "type": "boolean" - } - }, - "required": [ - "auth_type" - ] -} diff --git a/schemas/udmi/metadata_gateway.json b/schemas/udmi/metadata_gateway.json deleted file mode 100644 index f15dee7344..0000000000 --- a/schemas/udmi/metadata_gateway.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "title": "Gateway metadata snippet", - "type": "object", - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "gateway_id": { - 
"type": "string", - "pattern": "^[A-Z]{3}-[1-9][0-9]{0,2}$" - }, - "subsystem": { - "type": "string", - "pattern": "^[a-z0-9-]+$" - }, - "proxy_ids": { - "type": "array", - "items": { - "type": "string", - "pattern": "^[A-Z]{3}-[1-9][0-9]{0,2}$" - } - } - }, - "oneOf": [ - { "required": ["gateway_id"] }, - { "required": ["proxy_ids"] } - ] -} diff --git a/schemas/udmi/metadata_localnet.json b/schemas/udmi/metadata_localnet.json deleted file mode 100644 index be7de102b5..0000000000 --- a/schemas/udmi/metadata_localnet.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "title": "Local network metadata snippet", - "type": "object", - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "subsystem": { - "type": "object", - "patternProperties": { - "^[a-z0-9-]+$": { - "additionalProperties": false, - "properties": { - "local_id": { - "type": "string" - } - }, - "required": [ - "local_id" - ] - } - } - } - }, - "required": [ - "subsystem" - ] -} diff --git a/schemas/udmi/metadata_pointset.json b/schemas/udmi/metadata_pointset.json deleted file mode 100644 index 4d6ac05dce..0000000000 --- a/schemas/udmi/metadata_pointset.json +++ /dev/null @@ -1,28 +0,0 @@ -{ - "title": "Pointset metadata snippet", - "type": "object", - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "points": { - "additionalProperties": false, - "maxProperties": 150, - "patternProperties": { - "^[a-z][a-z0-9]*(_[a-z0-9]+)*$": { - "additionalProperties": false, - "properties": { - "units": { - "$ref": "file:units.json#" - }, - "ref": { - "type": "string" - } - } - } - } - } - }, - "required": [ - "points" - ] -} diff --git a/schemas/udmi/metadata_system.json b/schemas/udmi/metadata_system.json deleted file mode 100644 index f1d48da2fd..0000000000 --- a/schemas/udmi/metadata_system.json +++ /dev/null @@ -1,86 +0,0 @@ -{ - "title": "System metadata snippet", - "type": "object", - "$schema": 
"http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "location": { - "type": "object", - "additionalProperties": false, - "properties": { - "site": { - "type": "string", - "pattern": "^[A-Z]{2}-[A-Z]{3}-[A-Z0-9]{2,9}$" - }, - "section": { - "type": "string", - "pattern": "^[A-Z0-9-]+$" - }, - "position": { - "type": "object", - "additionalProperties": false, - "properties": { - "x": { - "type": "number" - }, - "y": { - "type": "number" - } - }, - "required": [ - "x", - "y" - ] - } - }, - "required": [ - "site" - ] - }, - "physical_tag": { - "type": "object", - "additionalProperties": false, - "properties": { - "asset": { - "type": "object", - "additionalProperties": false, - "properties": { - "guid": { - "type": "string", - "pattern": "^[a-z]+://[-0-9a-zA-Z_$]+$" - }, - "site": { - "type": "string", - "pattern": "^[A-Z]{2}-[A-Z]{3}-[A-Z0-9]{2,9}$" - }, - "name": { - "type": "string", - "pattern": "^[A-Z]{2,6}-[0-9]{1,6}$" - } - }, - "required": [ - "guid", - "name" - ] - } - }, - "required": [ - "asset" - ] - }, - "aux": { - "type": "object", - "additionalProperties": false, - "properties": { - "suffix": { - "type": "string", - "pattern": "^[a-zA-Z0-9-]+$" - } - } - } - }, - "required": [ - "location", - "physical_tag" - ] -} diff --git a/schemas/udmi/pointset.json b/schemas/udmi/pointset.json deleted file mode 100644 index b18d31eda8..0000000000 --- a/schemas/udmi/pointset.json +++ /dev/null @@ -1,47 +0,0 @@ -{ - "title": "Pointset telemetry schema", - "type": "object", - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "timestamp": { - "type": "string", - "format": "date-time" - }, - "version": { - "enum": [ - 1 - ] - }, - "points": { - "additionalProperties": false, - "patternProperties": { - "^[a-z][a-z0-9]*(_[a-z0-9]+)*$": { - "$ref": "#/definitions/point_property_names" - } - } - } - }, - "required": [ - "timestamp", - "version", - "points" - ], - 
"definitions": { - "point_property_names": { - "type": "object", - "propertyNames": { - "oneOf": [ - { - "enum": [ - "present_value" - ] - } - ] - }, - "required": [ - "present_value" - ] - } - } -} diff --git a/schemas/udmi/pointset.tests/empty.json b/schemas/udmi/pointset.tests/empty.json deleted file mode 100644 index 2c63c08510..0000000000 --- a/schemas/udmi/pointset.tests/empty.json +++ /dev/null @@ -1,2 +0,0 @@ -{ -} diff --git a/schemas/udmi/pointset.tests/empty.out b/schemas/udmi/pointset.tests/empty.out deleted file mode 100644 index 5f645ca58a..0000000000 --- a/schemas/udmi/pointset.tests/empty.out +++ /dev/null @@ -1,7 +0,0 @@ -Validating 1 schemas - Validating 1 files against pointset.json - Against input pointset.tests/empty.json - #: 3 schema violations found - #: required key [points] not found - #: required key [timestamp] not found - #: required key [version] not found diff --git a/schemas/udmi/pointset.tests/errors.json b/schemas/udmi/pointset.tests/errors.json deleted file mode 100644 index 321ea7c4a5..0000000000 --- a/schemas/udmi/pointset.tests/errors.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "version": 1, - "timestamp": "2018-08-26T21:39:29.364Z", - "id": "sneakyCASE", - "comment$string": "world", - "properties": { - "$comment": "Common error cases for target telemetry." 
- }, - "points": { - "comment$string": "world", - "analogValue_1": { - "present_value": true - }, - "bad_entity_name_": { - "present_value": 21.30108642578125 - }, - "guid": "ab9402fa-2c5a-42d1-b4f3-d40b440dea13", - "yoyo_motion_sensor": { - "bad_property_name": true - }, - "bad_____sensor": { - "present_value": true - }, - "missing_present_value": { - }, - "old_properties": { - "properties": { - "present_value": true - } - }, - "magic_voice_recognizer": { - "present_value": { - "present_value": true - } - } - } -} diff --git a/schemas/udmi/pointset.tests/errors.out b/schemas/udmi/pointset.tests/errors.out deleted file mode 100644 index e973a0e733..0000000000 --- a/schemas/udmi/pointset.tests/errors.out +++ /dev/null @@ -1,22 +0,0 @@ -Validating 1 schemas - Validating 1 files against pointset.json - Against input pointset.tests/errors.json - #: 13 schema violations found - #/points: 10 schema violations found - #/points/guid: expected type: JSONObject, found: String - #/points/missing_present_value: required key [present_value] not found - #/points/old_properties: 2 schema violations found - #/points/old_properties/properties: #: 0 subschemas matched instead of one - #/points/old_properties/properties: properties is not a valid enum value - #/points/old_properties: required key [present_value] not found - #/points/yoyo_motion_sensor: 2 schema violations found - #/points/yoyo_motion_sensor/bad_property_name: #: 0 subschemas matched instead of one - #/points/yoyo_motion_sensor/bad_property_name: bad_property_name is not a valid enum value - #/points/yoyo_motion_sensor: required key [present_value] not found - #/points: extraneous key [analogValue_1] is not permitted - #/points: extraneous key [bad_____sensor] is not permitted - #/points: extraneous key [bad_entity_name_] is not permitted - #/points: extraneous key [comment$string] is not permitted - #: extraneous key [comment$string] is not permitted - #: extraneous key [id] is not permitted - #: extraneous key 
[properties] is not permitted diff --git a/schemas/udmi/pointset.tests/example.json b/schemas/udmi/pointset.tests/example.json deleted file mode 100644 index 7af10743e5..0000000000 --- a/schemas/udmi/pointset.tests/example.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "version": 1, - "timestamp": "2018-08-26T21:39:29.364Z", - "points": { - "reading_value": { - "present_value": 21.30108642578125 - }, - "nexus_sensor": { - "present_value": 21.1 - }, - "yoyo_motion_sensor": { - "present_value": true - }, - "enum_value": { - "present_value": "hello" - } - } -} diff --git a/schemas/udmi/pointset.tests/example.out b/schemas/udmi/pointset.tests/example.out deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/schemas/udmi/pointset.tests/fcu.json b/schemas/udmi/pointset.tests/fcu.json deleted file mode 100644 index 9b45dfbfe6..0000000000 --- a/schemas/udmi/pointset.tests/fcu.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "version": 1, - "timestamp": "2019-01-17T14:02:29.364Z", - "points": { - "space_temperature_sensor": { - "present_value": 21.30108642578125 - }, - "fan_run_status": { - "present_value": true - }, - "fan_run_enable": { - "present_value": false - }, - "chilled_water_valve_percentage_command": { - "present_value": 76 - } - } -} diff --git a/schemas/udmi/pointset.tests/fcu.out b/schemas/udmi/pointset.tests/fcu.out deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/schemas/udmi/pointset.tests/smartprimus.json b/schemas/udmi/pointset.tests/smartprimus.json deleted file mode 100644 index 9b45dfbfe6..0000000000 --- a/schemas/udmi/pointset.tests/smartprimus.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "version": 1, - "timestamp": "2019-01-17T14:02:29.364Z", - "points": { - "space_temperature_sensor": { - "present_value": 21.30108642578125 - }, - "fan_run_status": { - "present_value": true - }, - "fan_run_enable": { - "present_value": false - }, - "chilled_water_valve_percentage_command": { - "present_value": 76 - } - } -} diff --git 
a/schemas/udmi/pointset.tests/smartprimus.out b/schemas/udmi/pointset.tests/smartprimus.out deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/schemas/udmi/properties.json b/schemas/udmi/properties.json deleted file mode 100644 index 8b4375c07a..0000000000 --- a/schemas/udmi/properties.json +++ /dev/null @@ -1,29 +0,0 @@ -{ - "title": "Device Properties Schema", - "type": "object", - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "required": [ - "key_type", - "version", - "connect" - ], - "properties": { - "key_type": { - "enum": [ - "RSA_PEM", - "RSA_X509_PEM" - ] - }, - "version": { - "enum": [ - 1 - ] - }, - "connect": { - "enum": [ - "direct" - ] - } - } -} diff --git a/schemas/udmi/state.json b/schemas/udmi/state.json deleted file mode 100644 index 117881e3cb..0000000000 --- a/schemas/udmi/state.json +++ /dev/null @@ -1,31 +0,0 @@ -{ - "title": "Device State schema", - "type": "object", - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "required": [ - "timestamp", - "version", - "system" - ], - "properties": { - "timestamp": { - "type": "string", - "format": "date-time" - }, - "version": { - "enum": [ - 1 - ] - }, - "system": { - "$ref": "file:state_system.json#" - }, - "gateway": { - "$ref": "file:state_gateway.json#" - }, - "pointset": { - "$ref": "file:state_pointset.json#" - } - } -} diff --git a/schemas/udmi/state.tests/empty.json b/schemas/udmi/state.tests/empty.json deleted file mode 100644 index 2c63c08510..0000000000 --- a/schemas/udmi/state.tests/empty.json +++ /dev/null @@ -1,2 +0,0 @@ -{ -} diff --git a/schemas/udmi/state.tests/empty.out b/schemas/udmi/state.tests/empty.out deleted file mode 100644 index 09d7bf88cf..0000000000 --- a/schemas/udmi/state.tests/empty.out +++ /dev/null @@ -1,7 +0,0 @@ -Validating 1 schemas - Validating 1 files against state.json - Against input state.tests/empty.json - #: 3 schema violations found - #: required key 
[system] not found - #: required key [timestamp] not found - #: required key [version] not found diff --git a/schemas/udmi/state.tests/errors.json b/schemas/udmi/state.tests/errors.json deleted file mode 100644 index 5977f68e90..0000000000 --- a/schemas/udmi/state.tests/errors.json +++ /dev/null @@ -1,43 +0,0 @@ -{ - "version": 1, - "timestamp": "2018-08-26T21:39:29.364Z", - "id": "monkey_brains", - "system": { - "device_id": "33895507", - "device_status": "ok", - "object_name": "UK-BRH-XX_AHU-001", - "fling": "hello", - "firmware": { - "revision": "should be version" - }, - "system_status": "Operational", - "statuses": { - "default": { - "timestamp": "2018-08-26T21:39:30.364Z", - "level": 30 - } - }, - "status": "hunky-dory" - }, - "status": [ - { - "level": 30 - } - ], - "pointset": { - "points": { - "return_air_temperature_sensor": { - "object_type": "analog_input", - "instance_number": 4, - "cov_increment": 0.300000011920929, - "deadband": 0, - "rapt": "hello", - "high_limit": 0, - "low_limit": 0, - "resolution": 0.04952822998166084, - "units": "Degrees Celsius", - "status": "it's working!" 
- } - } - } -} diff --git a/schemas/udmi/state.tests/errors.out b/schemas/udmi/state.tests/errors.out deleted file mode 100644 index f58e247fc5..0000000000 --- a/schemas/udmi/state.tests/errors.out +++ /dev/null @@ -1,32 +0,0 @@ -Validating 1 schemas - Validating 1 files against state.json - Against input state.tests/errors.json - #: 24 schema violations found - #/pointset/points/return_air_temperature_sensor: 9 schema violations found - #/pointset/points/return_air_temperature_sensor/status: expected type: JSONObject, found: String - #/pointset/points/return_air_temperature_sensor: extraneous key [cov_increment] is not permitted - #/pointset/points/return_air_temperature_sensor: extraneous key [deadband] is not permitted - #/pointset/points/return_air_temperature_sensor: extraneous key [high_limit] is not permitted - #/pointset/points/return_air_temperature_sensor: extraneous key [instance_number] is not permitted - #/pointset/points/return_air_temperature_sensor: extraneous key [low_limit] is not permitted - #/pointset/points/return_air_temperature_sensor: extraneous key [object_type] is not permitted - #/pointset/points/return_air_temperature_sensor: extraneous key [rapt] is not permitted - #/pointset/points/return_air_temperature_sensor: extraneous key [resolution] is not permitted - #/system: 13 schema violations found - #/system/firmware: 2 schema violations found - #/system/firmware: extraneous key [revision] is not permitted - #/system/firmware: required key [version] not found - #/system/statuses/default: 3 schema violations found - #/system/statuses/default/level: 30 is not greater or equal to 100 - #/system/statuses/default: required key [category] not found - #/system/statuses/default: required key [message] not found - #/system: extraneous key [device_id] is not permitted - #/system: extraneous key [device_status] is not permitted - #/system: extraneous key [fling] is not permitted - #/system: extraneous key [object_name] is not permitted - #/system: 
extraneous key [status] is not permitted - #/system: extraneous key [system_status] is not permitted - #/system: required key [make_model] not found - #/system: required key [operational] not found - #: extraneous key [id] is not permitted - #: extraneous key [status] is not permitted diff --git a/schemas/udmi/state.tests/example.json b/schemas/udmi/state.tests/example.json deleted file mode 100644 index 92044b86a6..0000000000 --- a/schemas/udmi/state.tests/example.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "version": 1, - "timestamp": "2018-08-26T21:39:29.364Z", - "system": { - "make_model": "ACME Bird Trap", - "firmware": { - "version": "3.2a" - }, - "last_config": "2018-08-26T21:49:29.364Z", - "operational": true, - "statuses": { - "base_system": { - "message": "Tickity Boo", - "category": "device.state.com", - "timestamp": "2018-08-26T21:39:30.364Z", - "level": 600 - } - } - }, - "pointset": { - "points": { - "return_air_temperature_sensor": { - "units": "Celsius", - "status": { - "message": "Invalid sample time", - "category": "device.config.validate", - "timestamp": "2018-08-26T21:39:28.364Z", - "level": 800 - } - }, - "nexus_sensor": { - "units": "Celsius", - "source": "fix" - } - } - } -} diff --git a/schemas/udmi/state.tests/example.out b/schemas/udmi/state.tests/example.out deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/schemas/udmi/state.tests/fcu.json b/schemas/udmi/state.tests/fcu.json deleted file mode 100644 index b2a55220e8..0000000000 --- a/schemas/udmi/state.tests/fcu.json +++ /dev/null @@ -1,52 +0,0 @@ -{ - "version": 1, - "timestamp": "2019-01-17T14:02:29.364Z", - "system": { - "make_model": "EasyIO FW-14", - "firmware": { - "version": "3.2a" - }, - "last_config": "2019-01-14T21:49:29.364Z", - "operational": true, - "statuses": { - "base_system": { - "message": "Time on the device is not synchronized", - "category": "com.acme.sync", - "timestamp": "2019-01-17T13:29:47.364Z", - "level": 600 - } - } - }, - "pointset": { - 
"points": { - "space_temperature_sensor": { - "units": "Degrees-Celsius", - "status": { - "message": "Present value out of limits", - "category": "com.acme.device.regulator", - "timestamp": "2019-01-17T11:39:28.364Z", - "level": 400 - } - }, - "fan_run_status": { - "status": { - "message": "Value overridden by fix_value", - "category": "com.acme.device.monitor", - "timestamp": "2019-01-17T10:59:11.364Z", - "level": 300 - } - }, - "fan_run_enable": { - "status": { - "message": "Value overridden by fix_value", - "category": "com.acme.device.manager", - "timestamp": "2019-01-17T13:14:55.364Z", - "level": 300 - } - }, - "chilled_water_valve_percentage_command": { - "units": "Percent" - } - } - } -} diff --git a/schemas/udmi/state.tests/fcu.out b/schemas/udmi/state.tests/fcu.out deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/schemas/udmi/state.tests/gateway.json b/schemas/udmi/state.tests/gateway.json deleted file mode 100644 index 72bbdbc4bb..0000000000 --- a/schemas/udmi/state.tests/gateway.json +++ /dev/null @@ -1,23 +0,0 @@ -{ - "version": 1, - "timestamp": "2018-08-26T21:39:29.364Z", - "system": { - "make_model": "ACME Gateway v2", - "firmware": { - "version": "3.2a" - }, - "last_config": "2018-08-26T21:49:29.364Z", - "operational": true, - "statuses": { - "base_system": { - "message": "Tickity Boo", - "category": "device.state.com", - "timestamp": "2018-08-26T21:39:30.364Z", - "level": 600 - } - } - }, - "gateway": { - "error_ids": [ "991", "SMS-91" ] - } -} diff --git a/schemas/udmi/state.tests/gateway.out b/schemas/udmi/state.tests/gateway.out deleted file mode 100644 index ae5507e2c2..0000000000 --- a/schemas/udmi/state.tests/gateway.out +++ /dev/null @@ -1,4 +0,0 @@ -Validating 1 schemas - Validating 1 files against state.json - Against input state.tests/gateway.json - #/gateway/error_ids/0: string [991] does not match pattern ^[A-Z]{3}-[1-9][0-9]{0,2}$ diff --git a/schemas/udmi/state.tests/rotate.json b/schemas/udmi/state.tests/rotate.json 
deleted file mode 100644 index 9c77e543ea..0000000000 --- a/schemas/udmi/state.tests/rotate.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "version": 1, - "timestamp": "2018-08-26T21:39:29.364Z", - "system": { - "make_model": "ACME Bird Trap", - "firmware": { - "version": "3.2a" - }, - "auth_key": { - "private_hash": "sha512:4e61746f21abe6708ca81a45a1851b82efd1f3ad7f9e6f6fc2dcf431e0ff95cdbcc6f5940a4bfb77df7aeb2f057d19cf5f234a664775edc66175025a14a87c3b" - }, - "last_config": "2018-08-26T21:49:29.364Z", - "operational": true, - "statuses": { - "base_system": { - "message": "Tickity Boo", - "category": "device.state.com", - "timestamp": "2018-08-26T21:39:30.364Z", - "level": 600 - } - } - }, - "pointset": { - "points": { - "return_air_temperature_sensor": { - "units": "Celsius", - "status": { - "message": "Invalid sample time", - "category": "device.config.validate", - "timestamp": "2018-08-26T21:39:28.364Z", - "level": 800 - } - } - } - } -} - diff --git a/schemas/udmi/state.tests/rotate.out b/schemas/udmi/state.tests/rotate.out deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/schemas/udmi/state_gateway.json b/schemas/udmi/state_gateway.json deleted file mode 100644 index 45dfd6f35c..0000000000 --- a/schemas/udmi/state_gateway.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "title": "Gateway Config Snippet", - "type": "object", - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "required": [ - "error_ids" - ], - "properties": { - "error_ids": { - "type": "array", - "items": { - "type": "string", - "pattern": "^[A-Z]{3}-[1-9][0-9]{0,2}$" - } - } - } -} diff --git a/schemas/udmi/state_pointset.json b/schemas/udmi/state_pointset.json deleted file mode 100644 index b6d44061bd..0000000000 --- a/schemas/udmi/state_pointset.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "title": "pointset state snippet", - "type": "object", - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - 
"points": { - "additionalProperties": false, - "patternProperties": { - "^[a-z][a-z0-9]*(_[a-z0-9]+)*$": { - "additionalProperties": false, - "properties": { - "fault": { - "type": "boolean" - }, - "units": { - "type": "string", - }, - "source": { - "enum": [ - "fix" - ] - }, - "status": { - "$ref": "file:system.json#/definitions/entry" - } - } - } - } - } - }, - "required": [ - "points" - ] -} diff --git a/schemas/udmi/state_system.json b/schemas/udmi/state_system.json deleted file mode 100644 index 5fb326ac93..0000000000 --- a/schemas/udmi/state_system.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "title": "System state snippet", - "type": "object", - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "make_model": { - "type": "string" - }, - "auth_key": { - "type": "object", - "additionalProperties": false, - "properties": { - "private_hash": { - "type": "string" - } - }, - "required": [ - "private_hash" - ] - }, - "firmware": { - "type": "object", - "additionalProperties": false, - "properties": { - "version": { - "type": "string" - } - }, - "required": [ - "version" - ] - }, - "last_config": { - "type": "string", - "format": "date-time" - }, - "operational": { - "type": "boolean" - }, - "statuses": { - "type": "object", - "additionalProperties": false, - "patternProperties": { - "^[a-z][a-z0-9]*(_[a-z0-9]+)*$": { - "$ref": "file:system.json#/definitions/entry" - } - } - } - }, - "required": [ - "make_model", - "firmware", - "operational" - ] -} diff --git a/schemas/udmi/system.json b/schemas/udmi/system.json deleted file mode 100644 index ca3d33d2d4..0000000000 --- a/schemas/udmi/system.json +++ /dev/null @@ -1,62 +0,0 @@ -{ - "title": "Log entry schema", - "type": "object", - "$schema": "http://json-schema.org/draft-07/schema#", - "additionalProperties": false, - "properties": { - "timestamp": { - "type": "string", - "format": "date-time" - }, - "version": { - "enum": [ - 1 - ] - }, - "logentries": { - 
"type": "array", - "items": { - "$ref": "#/definitions/entry" - } - } - }, - "required": [ - "timestamp", - "version" - ], - "definitions": { - "entry": { - "type": "object", - "additionalProperties": false, - "properties": { - "message": { - "type": "string" - }, - "detail": { - "type": "string" - }, - "category": { - "type": "string", - "pattern": "^[a-z][.a-zA-Z]*[a-zA-Z]$" - }, - "timestamp": { - "type": "string", - "format": "date-time" - }, - "level": { - "$comment": "https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#logseverity", - "type": "integer", - "multipleOf": 1, - "minimum": 100, - "maximum": 800 - } - }, - "required": [ - "message", - "category", - "timestamp", - "level" - ] - } - } -} diff --git a/schemas/udmi/system.tests/empty.json b/schemas/udmi/system.tests/empty.json deleted file mode 100644 index 2c63c08510..0000000000 --- a/schemas/udmi/system.tests/empty.json +++ /dev/null @@ -1,2 +0,0 @@ -{ -} diff --git a/schemas/udmi/system.tests/empty.out b/schemas/udmi/system.tests/empty.out deleted file mode 100644 index ad7bdffef5..0000000000 --- a/schemas/udmi/system.tests/empty.out +++ /dev/null @@ -1,6 +0,0 @@ -Validating 1 schemas - Validating 1 files against system.json - Against input system.tests/empty.json - #: 2 schema violations found - #: required key [timestamp] not found - #: required key [version] not found diff --git a/schemas/udmi/system.tests/errors.json b/schemas/udmi/system.tests/errors.json deleted file mode 100644 index ddf4c70815..0000000000 --- a/schemas/udmi/system.tests/errors.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "version": 1, - "timestamp": "2018-08-26T21:39:29.364Z", - "logentries": [ - { - "detail": "someplace, sometime", - "category": "com.testCategory$", - "level": 60 - }, - "nope" - ] -} diff --git a/schemas/udmi/system.tests/errors.out b/schemas/udmi/system.tests/errors.out deleted file mode 100644 index 3479a7a113..0000000000 --- a/schemas/udmi/system.tests/errors.out +++ /dev/null @@ -1,10 +0,0 
@@ -Validating 1 schemas - Validating 1 files against system.json - Against input system.tests/errors.json - #/logentries: 5 schema violations found - #/logentries/0: 4 schema violations found - #/logentries/0/category: string [com.testCategory$] does not match pattern ^[a-z][.a-zA-Z]*[a-zA-Z]$ - #/logentries/0/level: 60 is not greater or equal to 100 - #/logentries/0: required key [message] not found - #/logentries/0: required key [timestamp] not found - #/logentries/1: expected type: JSONObject, found: String diff --git a/schemas/udmi/system.tests/example.json b/schemas/udmi/system.tests/example.json deleted file mode 100644 index 139253f721..0000000000 --- a/schemas/udmi/system.tests/example.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "version": 1, - "timestamp": "2018-08-26T21:39:29.364Z", - "logentries": [ - { - "message": "things are happening", - "detail": "someplace, sometime", - "timestamp": "2018-08-26T21:39:19.364Z", - "category": "com.testCategory", - "level": 600 - }, - { - "message": "something else happened", - "timestamp": "2018-08-26T21:39:39.364Z", - "detail": "someplace, sometime", - "category": "com.testCategory", - "level": 700 - } - ] -} diff --git a/schemas/udmi/system.tests/example.out b/schemas/udmi/system.tests/example.out deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/schemas/udmi/system.tests/fcu.json b/schemas/udmi/system.tests/fcu.json deleted file mode 100644 index ed4a8c7f1b..0000000000 --- a/schemas/udmi/system.tests/fcu.json +++ /dev/null @@ -1,19 +0,0 @@ -{ - "version": 1, - "timestamp": "2018-08-26T21:39:29.364Z", - "logentries": [ - { - "message": "System Booted", - "timestamp": "2018-08-26T20:39:19.364Z", - "category": "com.acme.system", - "level": 300 - }, - { - "message": "Device communication failed", - "detail": "Connection attempt to device 3564 failed", - "timestamp": "2018-08-26T21:39:19.364Z", - "category": "com.acme.comms", - "level": 700 - } - ] -} diff --git a/schemas/udmi/system.tests/fcu.out 
b/schemas/udmi/system.tests/fcu.out deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/schemas/udmi/units.json b/schemas/udmi/units.json deleted file mode 100644 index 3e375667d1..0000000000 --- a/schemas/udmi/units.json +++ /dev/null @@ -1,194 +0,0 @@ -{ - "description": "Taken from standard BACnet engineering units", - "enum": [ - "Square-meters", - "Square-feet", - "Milliamperes", - "Amperes", - "Ohms", - "Volts", - "Kilo-volts", - "Mega-volts", - "Volt-amperes", - "Kilo-volt-amperes", - "Mega-volt-amperes", - "Volt-amperes-reactive", - "Kilo-volt-amperes-reactive", - "Mega-volt-amperes-reactive", - "Degrees-phase", - "Power-factor", - "Joules", - "Kilojoules", - "Watt-hours", - "Kilowatt-hours", - "BTUs", - "Therms", - "Ton-hours", - "Joules-per-kilogram-dry-air", - "BTUs-per-pound-dry-air", - "Cycles-per-hour", - "Cycles-per-minute", - "Hertz", - "Grams-of-water-per-kilogram-dry-air", - "Percent-relative-humidity", - "Millimeters", - "Meters", - "Inches", - "Feet", - "Watts-per-square-foot", - "Watts-per-square-meter", - "Lumens", - "Luxes", - "Foot-candles", - "Kilograms", - "Pounds-mass", - "Tons", - "Kilograms-per-second", - "Kilograms-per-minute", - "Kilograms-per-hour", - "Pounds-mass-per-minute", - "Pounds-mass-per-hour", - "Watts", - "Kilowatts", - "Megawatts", - "BTUs-per-hour", - "Horsepower", - "Tons-refrigeration", - "Pascals", - "Kilopascals", - "Bars", - "Pounds-force-per-square-inch", - "Centimeters-of-water", - "Inches-of-water", - "Millimeters-of-mercury", - "Centimeters-of-mercury", - "Inches-of-mercury", - "Degrees-Celsius", - "Degrees-Kelvin", - "Degrees-Fahrenheit", - "Degree-days-Celsius", - "Degree-days-Fahrenheit", - "Years", - "Months", - "Weeks", - "Days", - "Hours", - "Minutes", - "Seconds", - "Meters-per-second", - "Kilometers-per-hour", - "Feet-per-second", - "Feet-per-minute", - "Miles-per-hour", - "Cubic-feet", - "Cubic-meters", - "Imperial-gallons", - "Liters", - "Us-gallons", - "Cubic-feet-per-minute", - 
"Cubic-meters-per-second", - "Imperial-gallons-per-minute", - "Liters-per-second", - "Liters-per-minute", - "Us-gallons-per-minute", - "Degrees-angular", - "Degrees-Celsius-per-hour", - "Degrees-Celsius-per-minute", - "Degrees-Fahrenheit-per-hour", - "Degrees-Fahrenheit-per-minute", - "No-units", - "Parts-per-million", - "Parts-per-billion", - "Percent", - "Percent-per-second", - "Per-minute", - "Per-second", - "Psi-per-Degree-Fahrenheit", - "Radians", - "Revolutions-per-minute", - "Currency1", - "Currency2", - "Currency3", - "Currency4", - "Currency5", - "Currency6", - "Currency7", - "Currency8", - "Currency9", - "Currency10", - "Square-inches", - "Square-centimeters", - "BTUs-per-pound", - "Centimeters", - "Pounds-mass-per-second", - "Delta-Degrees-Fahrenheit", - "Delta-Degrees-Kelvin", - "Kilohms", - "Megohms", - "Millivolts", - "Kilojoules-per-kilogram", - "Megajoules", - "Joules-per-degree-Kelvin", - "Joules-per-kilogram-degree-Kelvin", - "Kilohertz", - "Megahertz", - "Per-hour", - "Milliwatts", - "Hectopascals", - "Millibars", - "Cubic-meters-per-hour", - "Liters-per-hour", - "Kilowatt-hours-per-square-meter", - "Kilowatt-hours-per-square-foot", - "Megajoules-per-square-meter", - "Megajoules-per-square-foot", - "Watts-per-square-meter-Degree-Kelvin", - "Cubic-feet-per-second", - "Percent-obscuration-per-foot", - "Percent-obscuration-per-meter", - "Milliohms", - "Megawatt-hours", - "Kilo-BTUs", - "Mega-BTUs", - "Kilojoules-per-kilogram-dry-air", - "Megajoules-per-kilogram-dry-air", - "Kilojoules-per-degree-Kelvin", - "Megajoules-per-degree-Kelvin", - "Newton", - "Grams-per-second", - "Grams-per-minute", - "Tons-per-hour", - "Kilo-BTUs-per-hour", - "Hundredths-seconds", - "Milliseconds", - "Newton-meters", - "Millimeters-per-second", - "Millimeters-per-minute", - "Meters-per-minute", - "Meters-per-hour", - "Cubic-meters-per-minute", - "Meters-per-second-per-second", - "Amperes-per-meter", - "Amperes-per-square-meter", - "Ampere-square-meters", - "Farads", - 
"Henrys", - "Ohm-meters", - "Siemens", - "Siemens-per-meter", - "Teslas", - "Volts-per-degree-Kelvin", - "Volts-per-meter", - "Webers", - "Candelas", - "Candelas-per-square-meter", - "Kelvins-per-hour", - "Kelvins-per-minute", - "Joule-seconds", - "Square-meters-per-Newton", - "Kilogram-per-cubic-meter", - "Newton-seconds", - "Newtons-per-meter", - "Watts-per-meter-per-degree-Kelvin" - ] -} diff --git a/subset/bacnet/bacnetTests/gradle/wrapper/gradle-wrapper.properties b/subset/bacnet/bacnetTests/gradle/wrapper/gradle-wrapper.properties index 622ab64a3c..12d38de6a4 100644 --- a/subset/bacnet/bacnetTests/gradle/wrapper/gradle-wrapper.properties +++ b/subset/bacnet/bacnetTests/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,5 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-6.6.1-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/subset/bacnet/bacnetTests/src/main/java/helper/FileManager.java b/subset/bacnet/bacnetTests/src/main/java/helper/FileManager.java index 20ac76297b..35ef1bc27c 100644 --- a/subset/bacnet/bacnetTests/src/main/java/helper/FileManager.java +++ b/subset/bacnet/bacnetTests/src/main/java/helper/FileManager.java @@ -4,61 +4,71 @@ public class FileManager { - private String filePath = ""; - private String csvName = "pics"; - private String csvExtension = ".csv"; - private boolean debug = false; + private String filePath = ""; + private String csvName = "pics"; + private String csvExtension = ".csv"; + private boolean debug = false; - public boolean checkDevicePicCSV() { - String csvFolder = getCSVPath(); - try{ - File[] listFiles = new File(csvFolder).listFiles(); - for (int i = 0; i < listFiles.length; i++) { - if (listFiles[i].isFile()) { - String fileName = listFiles[i].getName(); - if (fileName.contains(csvName) - && fileName.endsWith(csvExtension)) { - 
System.out.println("pics.csv file found in " + csvFolder); - setFilePath(fileName); - return true; - } - } - } - String errorMessage = "pics.csv not found.\n"; - System.err.println(errorMessage); - } catch(Exception e) { - System.out.println("Error in reading " + csvName + csvExtension + " in " + csvFolder); + /** + * Checks if pics.csv exists. + * @return if pics.csv exists + */ + public boolean checkDevicePicCSV() { + String csvFolder = getCSVPath(); + try { + File[] listFiles = new File(csvFolder).listFiles(); + for (int i = 0; i < listFiles.length; i++) { + if (listFiles[i].isFile()) { + String fileName = listFiles[i].getName(); + if (fileName.contains(csvName) + && fileName.endsWith(csvExtension)) { + System.out.println("pics.csv file found in " + csvFolder); + setFilePath(fileName); + return true; + } } - return false; + } + String errorMessage = "pics.csv not found.\n"; + System.err.println(errorMessage); + } catch (Exception e) { + System.out.println("Error in reading " + csvName + csvExtension + " in " + csvFolder); } + return false; + } - private void setFilePath(String fileName) { - String absolute_path = getCSVPath(); - this.filePath = absolute_path + "/" + fileName; - } + private void setFilePath(String fileName) { + String absolutePath = getCSVPath(); + this.filePath = absolutePath + "/" + fileName; + } - public String getFilePath() { - return this.filePath; - } + public String getFilePath() { + return this.filePath; + } - public String getAbsolutePath() { - String absolute_path = ""; - String system_path = System.getProperty("user.dir"); - System.out.println("system_path: " + system_path); - String[] path_arr = system_path.split("/"); - for (int count = 0; count < path_arr.length; count++) { - if (path_arr[count].equals("bacnetTests")) { - break; - } - absolute_path += path_arr[count] + "/"; - } - return absolute_path; + /** + * Returns absolute path to the working directory. 
+ */ + public String getAbsolutePath() { + String absolutePath = ""; + String systemPath = System.getProperty("user.dir"); + System.out.println("system_path: " + systemPath); + String[] pathArr = systemPath.split("/"); + for (int count = 0; count < pathArr.length; count++) { + if (pathArr[count].equals("bacnetTests")) { + break; + } + absolutePath += pathArr[count] + "/"; } + return absolutePath; + } - public String getCSVPath() { - if (debug) { - return "src/main/resources"; - } - return "/config/type"; + /** + * Returns directory pics.csv is located within test container + */ + public String getCSVPath() { + if (debug) { + return "src/main/resources"; } + return "/config/type"; + } } diff --git a/subset/bacnet/bacnetTests/src/main/resources/pics.csv b/subset/bacnet/bacnetTests/src/main/resources/pics.csv index bedf7117dd..aab3985dec 100644 --- a/subset/bacnet/bacnetTests/src/main/resources/pics.csv +++ b/subset/bacnet/bacnetTests/src/main/resources/pics.csv @@ -1,486 +1,486 @@ -Bacnet_Object_Type,Bacnet_Object_Property,Property_Datatype,Conformance_Code,Supported, -Bacnet_Analogue_Input,Object_Identifier,BACnetObjectIdentifier,R,TRUE, -Bacnet_Analogue_Input,Object_Name,CharacterString,W,TRUE, -Bacnet_Analogue_Input,Object_Type,BACnetObjectType,R,TRUE, -Bacnet_Analogue_Input,Present_Value,REAL,R,TRUE, - ,Description,CharacterString,O,TRUE, -Bacnet_Analogue_Input,Device_Type,,O,TRUE, -Bacnet_Analogue_Input,Status_Flags,BACnetStatusFlags,R,TRUE, -Bacnet_Analogue_Input,Event_State,BACnetEventState,R,TRUE, -Bacnet_Analogue_Input,Reliability,BACnetReliability,O,TRUE, -Bacnet_Analogue_Input,Out_Of_Service,BOOLEAN,W,TRUE, -Bacnet_Analogue_Input,Update_Interval,,O,TRUE, -Bacnet_Analogue_Input,Units,,R,TRUE, -Bacnet_Analogue_Input,Min_Pres_Value,REAL,O,TRUE, -Bacnet_Analogue_Input,Max_Pres_Value,REAL,O,TRUE, -Bacnet_Analogue_Input,Resolution,,O,TRUE, -Bacnet_Analogue_Input,COV_Increment,REAL,O,TRUE, -Bacnet_Analogue_Input,COV_Period,,O,TRUE, 
-Bacnet_Analogue_Input,COV_Min_Send_Time,,O,TRUE, -Bacnet_Analogue_Input,Time_Delay,,O,TRUE, -Bacnet_Analogue_Input,Notification_Class,Unsigned,O,TRUE, -Bacnet_Analogue_Input,High_Limit,,O,TRUE, -Bacnet_Analogue_Input,Low_Limit,,O,TRUE, -Bacnet_Analogue_Input,Deadband,,O,TRUE, -Bacnet_Analogue_Input,Limit_Enable,,O,TRUE, -Bacnet_Analogue_Input,Event_Enable,BACnetEventTransitionBits,O,TRUE, -Bacnet_Analogue_Input,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, -Bacnet_Analogue_Input,Notify_Type,BACnetNotifyType,O,TRUE, -Bacnet_Analogue_Input,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, -Bacnet_Analogue_Input,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, -Bacnet_Analogue_Input,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, -Bacnet_Analogue_Input,Event_Detection_Enable,BOOLEAN,O,TRUE, -Bacnet_Analogue_Input,Time_Delay_Normal,,O,TRUE, -Bacnet_Analogue_Input,Event_Algorithm_Inhibit,,O,TRUE, -Bacnet_Analogue_Input,Event_Algorithm_Inhibit_Ref,,O,TRUE, -Bacnet_Analogue_Input,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE, -Bacnet_Analogue_Output,Object_Identifier,BACnetObjectIdentifier,R,TRUE, -Bacnet_Analogue_Output,Object_Name,CharacterString,W,TRUE, -Bacnet_Analogue_Output,Object_Type,BACnetObjectType,R,TRUE, -Bacnet_Analogue_Output,Present_Value,REAL,W,TRUE, -Bacnet_Analogue_Output,Description,CharacterString,O,TRUE, -Bacnet_Analogue_Output,Device_Type,,O,TRUE, -Bacnet_Analogue_Output,Status_Flags,BACnetStatusFlags,R,TRUE, -Bacnet_Analogue_Output,Event_State,BACnetEventState,R,TRUE, -Bacnet_Analogue_Output,Reliability,BACnetReliability,O,TRUE, -Bacnet_Analogue_Output,Out_Of_Service,BOOLEAN,W,TRUE, -Bacnet_Analogue_Output,Units,,R,TRUE, -Bacnet_Analogue_Output,Min_Present_Value,,O,TRUE, -Bacnet_Analogue_Output,Max_Present_Value,,O,TRUE, -Bacnet_Analogue_Output,Resolution,,O,TRUE, -Bacnet_Analogue_Output,Priority_Array,,R,TRUE, -Bacnet_Analogue_Output,Relinquish_Default,,W,TRUE, 
-Bacnet_Analogue_Output,COV_Increment,REAL,O,TRUE, -Bacnet_Analogue_Output,COV_Period,,O,TRUE, -Bacnet_Analogue_Output,COV_Min_Send_Time,,O,TRUE, -Bacnet_Analogue_Output,Time_Delay,,O,TRUE, -Bacnet_Analogue_Output,Notification_Class,Unsigned,O,TRUE, -Bacnet_Analogue_Output,High_Limit,,O,TRUE, -Bacnet_Analogue_Output,Low_Limit,,O,TRUE, -Bacnet_Analogue_Output,Deadband,,O,TRUE, -Bacnet_Analogue_Output,Limit_Enable,,O,TRUE, -Bacnet_Analogue_Output,Event_Enable,BACnetEventTransitionBits,O,TRUE, -Bacnet_Analogue_Output,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, -Bacnet_Analogue_Output,Notify_Type,BACnetNotifyType,O,TRUE, -Bacnet_Analogue_Output,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, -Bacnet_Analogue_Output,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, -Bacnet_Analogue_Output,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, -Bacnet_Analogue_Output,Event_Detection_Enable,BOOLEAN,O,TRUE, -Bacnet_Analogue_Output,Time_Delay_Normal,,O,TRUE, -Bacnet_Analogue_Output,Event_Algorithm_Inhibit,,O,TRUE, -Bacnet_Analogue_Output,Event_Algorithm_Inhibit_Ref,,O,TRUE, -Bacnet_Analogue_Output,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE, -Bacnet_Analogue_Value,Object_Identifier,BACnetObjectIdentifier,O,TRUE, -Bacnet_Analogue_Value,Object_Name,CharacterString,O,TRUE, -Bacnet_Analogue_Value,Object_Type,BACnetObjectType,O,TRUE, -Bacnet_Analogue_Value,Present_Value,REAL,O,TRUE, -Bacnet_Analogue_Value,Description,CharacterString,O,TRUE, -Bacnet_Analogue_Value,Status_Flags,BACnetStatusFlags,O,TRUE, -Bacnet_Analogue_Value,Event_State,BACnetEventState,O,TRUE, -Bacnet_Analogue_Value,Reliability,BACnetReliability,O,TRUE, -Bacnet_Analogue_Value,Out_Of_Service,BOOLEAN,O,TRUE, -Bacnet_Analogue_Value,Units,,O,TRUE, -Bacnet_Analogue_Value,Priority_Array,,O,TRUE, -Bacnet_Analogue_Value,Relinquish_Default,,O,TRUE, -Bacnet_Analogue_Value,Min_Present_Value,,O,TRUE, -Bacnet_Analogue_Value,Max_Present_Value,,O,TRUE, 
-Bacnet_Analogue_Value,COV_Increment,REAL,O,TRUE, -Bacnet_Analogue_Value,COV_Period,,O,TRUE, -Bacnet_Analogue_Value,COV_Min_Send_Time,,O,TRUE, -Bacnet_Analogue_Value,Time_Delay,,O,TRUE, -Bacnet_Analogue_Value,Notification_Class,Unsigned,O,TRUE, -Bacnet_Analogue_Value,High_Limit,,O,TRUE, -Bacnet_Analogue_Value,Low_Limit,,O,TRUE, -Bacnet_Analogue_Value,Deadband,,O,TRUE, -Bacnet_Analogue_Value,Limit_Enable,,O,TRUE, -Bacnet_Analogue_Value,Event_Enable,BACnetEventTransitionBits,O,TRUE, -Bacnet_Analogue_Value,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, -Bacnet_Analogue_Value,Notify_Type,BACnetNotifyType,O,TRUE, -Bacnet_Analogue_Value,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, -Bacnet_Analogue_Value,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, -Bacnet_Analogue_Value,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, -Bacnet_Analogue_Value,Event_Detection_Enable,BOOLEAN,O,TRUE, -Bacnet_Analogue_Value,Time_Delay_Normal,,O,TRUE, -Bacnet_Analogue_Value,Event_Algorithm_Inhibit,,O,TRUE, -Bacnet_Analogue_Value,Event_Algorithm_Inhibit_Ref,,O,TRUE, -Bacnet_Analogue_Value,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE, -Bacnet_Binary_Input,Object_Identifier,BACnetObjectIdentifier,R,TRUE, -Bacnet_Binary_Input,Object_Name,CharacterString,W,TRUE, -Bacnet_Binary_Input,Object_Type,BACnetObjectType,R,TRUE, -Bacnet_Binary_Input,Present_Value,REAL,R,TRUE, -Bacnet_Binary_Input,Description,CharacterString,O,TRUE, -Bacnet_Binary_Input,Device_Type,,O,TRUE, -Bacnet_Binary_Input,Status_Flags,BACnetStatusFlags,R,TRUE, -Bacnet_Binary_Input,Event_State,BACnetEventState,R,TRUE, -Bacnet_Binary_Input,Reliability,BACnetReliability,O,TRUE, -Bacnet_Binary_Input,Out_Of_Service,BOOLEAN,W,TRUE, -Bacnet_Binary_Input,Polarity,,R,TRUE, -Bacnet_Binary_Input,Inactive_Text,,O,TRUE, -Bacnet_Binary_Input,Active_Text,,O,TRUE, -Bacnet_Binary_Input,Change_Of_State_Time,,O,TRUE, -Bacnet_Binary_Input,Change_Of_State_Count,,O,TRUE, 
-Bacnet_Binary_Input,Time_Of_State_Count_Reset,,O,TRUE, -Bacnet_Binary_Input,Elapsed_Active_Time,,O,TRUE, -Bacnet_Binary_Input,Time_Of_Active_Time_Reset,,O,TRUE, -Bacnet_Binary_Input,COV_Period,,O,TRUE, -Bacnet_Binary_Input,COV_Min_Send_Time,,O,TRUE, -Bacnet_Binary_Input,Time_Delay,,O,TRUE, -Bacnet_Binary_Input,Notification_Class,Unsigned,O,TRUE, -Bacnet_Binary_Input,Alarm_Value,,O,TRUE, -Bacnet_Binary_Input,Event_Enable,BACnetEventTransitionBits,O,TRUE, -Bacnet_Binary_Input,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, -Bacnet_Binary_Input,Notify_Type,BACnetNotifyType,O,TRUE, -Bacnet_Binary_Input,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, -Bacnet_Binary_Input,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, -Bacnet_Binary_Input,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, -Bacnet_Binary_Input,Event_Detection_Enable,BOOLEAN,O,TRUE, -Bacnet_Binary_Input,Time_Delay_Normal,,O,TRUE, -Bacnet_Binary_Input,Event_Algorithm_Inhibit,,O,TRUE, -Bacnet_Binary_Input,Event_Algorithm_Inhibit_Ref,,O,TRUE, -Bacnet_Binary_Input,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE, -Bacnet_Binary_Output,Object_Identifier,BACnetObjectIdentifier,O,TRUE, -Bacnet_Binary_Output,Object_Name,CharacterString,O,TRUE, -Bacnet_Binary_Output,Object_Type,BACnetObjectType,O,TRUE, -Bacnet_Binary_Output,Present_Value,REAL,O,TRUE, -Bacnet_Binary_Output,Description,CharacterString,O,TRUE, -Bacnet_Binary_Output,Device_Type,,O,TRUE, -Bacnet_Binary_Output,Status_Flags,BACnetStatusFlags,O,TRUE, -Bacnet_Binary_Output,Event_State,BACnetEventState,O,TRUE, -Bacnet_Binary_Output,Reliability,BACnetReliability,O,TRUE, -Bacnet_Binary_Output,Out_Of_Service,BOOLEAN,O,TRUE, -Bacnet_Binary_Output,Polarity,,O,TRUE, -Bacnet_Binary_Output,Inactive_Text,,O,TRUE, -Bacnet_Binary_Output,Active_Text,,O,TRUE, -Bacnet_Binary_Output,Change_Of_State_Time,,O,TRUE, -Bacnet_Binary_Output,Change_Of_State_Count,,O,TRUE, 
-Bacnet_Binary_Output,Time_Of_State_Count_Reset,,O,TRUE, -Bacnet_Binary_Output,Elapsed_Active_Time,,O,TRUE, -Bacnet_Binary_Output,Time_Of_Active_Time_Reset,,O,TRUE, -Bacnet_Binary_Output,Minimum_Off_Time,,O,TRUE, -Bacnet_Binary_Output,Minimum_On_Time,,O,TRUE, -Bacnet_Binary_Output,Priority_Array,,O,TRUE, -Bacnet_Binary_Output,Relinquish_Default,,O,TRUE, -Bacnet_Binary_Output,COV_Period,,O,TRUE, -Bacnet_Binary_Output,COV_Min_Send_Time,,O,TRUE, -Bacnet_Binary_Output,Time_Delay,,O,TRUE, -Bacnet_Binary_Output,Notification_Class,Unsigned,O,TRUE, -Bacnet_Binary_Output,Feedback_Value,,O,TRUE, -Bacnet_Binary_Output,Event_Enable,BACnetEventTransitionBits,O,TRUE, -Bacnet_Binary_Output,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, -Bacnet_Binary_Output,Notify_Type,BACnetNotifyType,O,TRUE, -Bacnet_Binary_Output,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, -Bacnet_Binary_Output,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, -Bacnet_Binary_Output,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, -Bacnet_Binary_Output,Event_Detection_Enable,BOOLEAN,O,TRUE, -Bacnet_Binary_Output,Time_Delay_Normal,,O,TRUE, -Bacnet_Binary_Output,Event_Algorithm_Inhibit,,O,TRUE, -Bacnet_Binary_Output,Event_Algorithm_Inhibit_Ref,,O,TRUE, -Bacnet_Binary_Output,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, -Bacnet_Binary_Value,Object_Identifier,BACnetObjectIdentifier,O,TRUE, -Bacnet_Binary_Value,Object_Name,CharacterString,O,TRUE, -Bacnet_Binary_Value,Object_Type,BACnetObjectType,O,TRUE, -Bacnet_Binary_Value,Present_Value,REAL,O,TRUE, -Bacnet_Binary_Value,Description,CharacterString,O,TRUE, -Bacnet_Binary_Value,Status_Flags,BACnetStatusFlags,O,TRUE, -Bacnet_Binary_Value,Event_State,BACnetEventState,O,TRUE, -Bacnet_Binary_Value,Reliability,BACnetReliability,O,TRUE, -Bacnet_Binary_Value,Out_Of_Service,BOOLEAN,O,TRUE, -Bacnet_Binary_Value,Inactive_Text,,O,TRUE, -Bacnet_Binary_Value,Active_Text,,O,TRUE, 
-Bacnet_Binary_Value,Change_Of_State_Time,,O,TRUE, -Bacnet_Binary_Value,Change_Of_State_Count,,O,TRUE, -Bacnet_Binary_Value,Time_Of_State_Count_Reset,,O,TRUE, -Bacnet_Binary_Value,Elapsed_Active_Time,,O,TRUE, -Bacnet_Binary_Value,Time_Of_Active_Time_Reset,,O,TRUE, -Bacnet_Binary_Value,Minimum_Off_Time,,O,TRUE, -Bacnet_Binary_Value,Minimum_On_Time,,O,TRUE, -Bacnet_Binary_Value,Priority_Array,,O,TRUE, -Bacnet_Binary_Value,Relinquish_Default,,O,TRUE, -Bacnet_Binary_Value,COV_Period,,O,TRUE, -Bacnet_Binary_Value,COV_Min_Send_Time,,O,TRUE, -Bacnet_Binary_Value,Time_Delay,,O,TRUE, -Bacnet_Binary_Value,Notification_Class,Unsigned,O,TRUE, -Bacnet_Binary_Value,Alarm_Value,,O,TRUE, -Bacnet_Binary_Value,Event_Enable,BACnetEventTransitionBits,O,TRUE, -Bacnet_Binary_Value,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, -Bacnet_Binary_Value,Notify_Type,BACnetNotifyType,O,TRUE, -Bacnet_Binary_Value,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, -Bacnet_Binary_Value,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, -Bacnet_Binary_Value,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, -Bacnet_Binary_Value,Event_Detection_Enable,BOOLEAN,O,TRUE, -Bacnet_Binary_Value,Time_Delay_Normal,,O,TRUE, -Bacnet_Binary_Value,Event_Algorithm_Inhibit,,O,TRUE, -Bacnet_Binary_Value,Event_Algorithm_Inhibit_Ref,,O,TRUE, -Bacnet_Binary_Value,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, -Bacnet_Calendar,Object_Identifier,BACnetObjectIdentifier,O,TRUE, -Bacnet_Calendar,Object_Name,CharacterString,O,TRUE, -Bacnet_Calendar,Object_Type,BACnetObjectType,O,TRUE, -Bacnet_Calendar,Present_Value,REAL,O,TRUE, -Bacnet_Calendar,Description,CharacterString,O,TRUE, -Bacnet_Calendar,Date_List,,O,TRUE, -Bacnet_Calendar,Time_To_Next_State,,O,TRUE, -Bacnet_Calendar,Next_State,,O,TRUE, -Bacnet_Calendar,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, -Device,Object_Identifier,BACnetObjectIdentifier,W,TRUE, 
-Device,Object_Name,CharacterString,W,TRUE, -Device,Object_Type,BACnetObjectType,R,TRUE, -Device,System_Status,,R,TRUE, -Device,Vendor_Name,,R,TRUE, -Device,Vendor_Identifier,,R,TRUE, -Device,Model_Name,,R,TRUE, -Device,Firmware_Revision,,R,TRUE, -Device,Application_Software_Version,,R,TRUE, -Device,Location,,O,TRUE, -Device,Description,CharacterString,O,TRUE, -Device,Protocol_Version,,R,TRUE, -Device,Protocol_Revision,,R,TRUE, -Device,Protocol_Services_Supported,,R,TRUE, -Device,Protocol_Object_Types_Supported,,R,TRUE, -Device,Object_List,,R,TRUE, -Device,Max_APDU_Length_Accepted,,R,TRUE, -Device,Segmentation_Supported,,R,TRUE, -Device,Max_Segments_Accepted,,O,TRUE, -Device,Local_Date,,O,TRUE, -Device,Local_Time,,O,TRUE, -Device,UTC_Offset,,O,TRUE, -Device,Daylight_Savings_Status,,O,TRUE, -Device,Apdu_Segment_Timeout,,O,TRUE, -Device,APDU_Timeout,,W,TRUE, -Device,Number_Of_APDU_Retries,,W,TRUE, -Device,Time_Synchronization_Recipients,,O,TRUE, -Device,Device_Address_Binding,,R,TRUE, -Device,Database_Revision,,R,TRUE, -Device,Configuration_Files,,O,TRUE, -Device,Last_Restore_Time,,O,TRUE, -Device,Backup_Failure_Timeout,,O,TRUE, -Device,Backup_Preparation_Time,,O,TRUE, -Device,Restore_Preparation_Time,,O,TRUE, -Device,Restore_Completion_Time,,O,TRUE, -Device,Backup_And_Restore_State,,O,TRUE, -Device,Active_COV_Subscriptions,,O,TRUE, -Device,Last_Restart_Reason,,O,TRUE, -Device,Time_Of_Device_Restart,,O,TRUE, -Device,Restart_Notification_Recipients,,O,TRUE, -Device,Utc_Time_Synchronization_Recipients,,O,TRUE, -Device,Max_Master,,O,TRUE, -Device,Max_Info_Frames,,O,TRUE, -Device,Time_Synchronization_Interval,,O,TRUE, -Device,Align_Intervals,,O,TRUE, -Device,Interval_Offset,,O,TRUE, -Device,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE, -Event_Enrollment,Object_Identifier,BACnetObjectIdentifier,O,TRUE, -Event_Enrollment,Object_Name,CharacterString,O,TRUE, -Event_Enrollment,Object_Type,BACnetObjectType,O,TRUE, 
-Event_Enrollment,Description,CharacterString,O,TRUE, -Event_Enrollment,Event_Type,,O,TRUE, -Event_Enrollment,Notify_Type,BACnetNotifyType,O,TRUE, -Event_Enrollment,Event_Parameters,,O,TRUE, -Event_Enrollment,Object_Property_Reference,,O,TRUE, -Event_Enrollment,Event_State,BACnetEventState,O,TRUE, -Event_Enrollment,Event_Enable,BACnetEventTransitionBits,O,TRUE, -Event_Enrollment,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, -Event_Enrollment,Notification_Class,Unsigned,O,TRUE, -Event_Enrollment,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, -Event_Enrollment,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, -Event_Enrollment,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, -Event_Enrollment,Event_Detection_Enable,BOOLEAN,O,TRUE, -Event_Enrollment,Time_Delay_Normal,,O,TRUE, -Event_Enrollment,Status_Flags,BACnetStatusFlags,O,TRUE, -Event_Enrollment,Reliability,BACnetReliability,O,TRUE, -Event_Enrollment,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, -Bacnet_File,Object_Identifier,BACnetObjectIdentifier,O,TRUE, -Bacnet_File,Object_Name,CharacterString,O,TRUE, -Bacnet_File,Object_Type,BACnetObjectType,O,TRUE, -Bacnet_File,Description,CharacterString,O,TRUE, -Bacnet_File,File_Type,,O,TRUE, -Bacnet_File,File_Size,,O,TRUE, -Bacnet_File,Modification_Date,,O,TRUE, -Bacnet_File,Archive,,O,TRUE, -Bacnet_File,Read_Only,,O,TRUE, -Bacnet_File,File_Access_Method,,O,TRUE, -Bacnet_File,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, -Bacnet_Loop,Object_Identifier,BACnetObjectIdentifier,O,TRUE, -Bacnet_Loop,Object_Name,CharacterString,O,TRUE, -Bacnet_Loop,Object_Type,BACnetObjectType,O,TRUE, -Bacnet_Loop,Present_Value,REAL,O,TRUE, -Bacnet_Loop,Description,CharacterString,O,TRUE, -Bacnet_Loop,Status_Flags,BACnetStatusFlags,O,TRUE, -Bacnet_Loop,Event_State,BACnetEventState,O,TRUE, -Bacnet_Loop,Reliability,BACnetReliability,O,TRUE, -Bacnet_Loop,Out_Of_Service,BOOLEAN,O,TRUE, 
-Bacnet_Loop,Update_Interval,,O,TRUE, -Bacnet_Loop,Output_Units,,O,TRUE, -Bacnet_Loop,Manipulated_Variable_Reference,,O,TRUE, -Bacnet_Loop,Controlled_Variable_Reference,,O,TRUE, -Bacnet_Loop,Controlled_Variable_Value,,O,TRUE, -Bacnet_Loop,Controlled_Variable_Units,,O,TRUE, -Bacnet_Loop,Setpoint_Reference,,O,TRUE, -Bacnet_Loop,Setpoint,,O,TRUE, -Bacnet_Loop,Action,,O,TRUE, -Bacnet_Loop,Proportional_Constant,,O,TRUE, -Bacnet_Loop,Proportional_Constant_Units,,O,TRUE, -Bacnet_Loop,Integral_Constant,,O,TRUE, -Bacnet_Loop,Integral_Constant_Units,,O,TRUE, -Bacnet_Loop,Derivative_Constant,,O,TRUE, -Bacnet_Loop,Derivative_Constant_Units,,O,TRUE, -Bacnet_Loop,Bias,,O,TRUE, -Bacnet_Loop,Maximum_Output,,O,TRUE, -Bacnet_Loop,Minimum_Output,,O,TRUE, -Bacnet_Loop,Priority_For_Writing,Unsigned(1..16),O,TRUE, -Bacnet_Loop,LoopDeadband,,O,TRUE, -Bacnet_Loop,Saturation_Time,,O,TRUE, -Bacnet_Loop,COV_Increment,REAL,O,TRUE, -Bacnet_Loop,COV_Period,,O,TRUE, -Bacnet_Loop,COV_Min_Send_Time,,O,TRUE, -Bacnet_Loop,Ramp_Time,,O,TRUE, -Bacnet_Loop,Saturation_Time_Low_Limit_Enable,,O,TRUE, -Bacnet_Loop,Saturation_Time_High_Limit_Enable,,O,TRUE, -Bacnet_Loop,Time_Delay,,O,TRUE, -Bacnet_Loop,Notification_Class,Unsigned,O,TRUE, -Bacnet_Loop,Error_Limit,,O,TRUE, -Bacnet_Loop,Deadband,,O,TRUE, -Bacnet_Loop,Event_Enable,BACnetEventTransitionBits,O,TRUE, -Bacnet_Loop,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, -Bacnet_Loop,Notify_Type,BACnetNotifyType,O,TRUE, -Bacnet_Loop,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, -Bacnet_Loop,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, -Bacnet_Loop,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, -Bacnet_Loop,Event_Detection_Enable,BOOLEAN,O,TRUE, -Bacnet_Loop,Time_Delay_Normal,,O,TRUE, -Bacnet_Loop,Event_Algorithm_Inhibit,,O,TRUE, -Bacnet_Loop,Event_Algorithm_Inhibit_Ref,,O,TRUE, -Bacnet_Loop,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, 
-Bacnet_Multi-state_Input,Object_Identifier,BACnetObjectIdentifier,O,TRUE, -Bacnet_Multi-state_Input,Object_Name,CharacterString,O,TRUE, -Bacnet_Multi-state_Input,Object_Type,BACnetObjectType,O,TRUE, -Bacnet_Multi-state_Input,Present_Value,REAL,O,TRUE, -Bacnet_Multi-state_Input,Description,CharacterString,O,TRUE, -Bacnet_Multi-state_Input,Device_Type,,O,TRUE, -Bacnet_Multi-state_Input,Status_Flags,BACnetStatusFlags,O,TRUE, -Bacnet_Multi-state_Input,Event_State,BACnetEventState,O,TRUE, -Bacnet_Multi-state_Input,Reliability,BACnetReliability,O,TRUE, -Bacnet_Multi-state_Input,Out_Of_Service,BOOLEAN,O,TRUE, -Bacnet_Multi-state_Input,Number_of_States,,O,TRUE, -Bacnet_Multi-state_Input,State_Text,,O,TRUE, -Bacnet_Multi-state_Input,COV_Period,,O,TRUE, -Bacnet_Multi-state_Input,COV_Min_Send_Time,,O,TRUE, -Bacnet_Multi-state_Input,Time_Delay,,O,TRUE, -Bacnet_Multi-state_Input,Notification_Class,Unsigned,O,TRUE, -Bacnet_Multi-state_Input,Alarm_Values,,O,TRUE, -Bacnet_Multi-state_Input,Fault_Values,,O,TRUE, -Bacnet_Multi-state_Input,Event_Enable,BACnetEventTransitionBits,O,TRUE, -Bacnet_Multi-state_Input,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, -Bacnet_Multi-state_Input,Notify_Type,BACnetNotifyType,O,TRUE, -Bacnet_Multi-state_Input,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, -Bacnet_Multi-state_Input,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, -Bacnet_Multi-state_Input,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, -Bacnet_Multi-state_Input,Event_Detection_Enable,BOOLEAN,O,TRUE, -Bacnet_Multi-state_Input,Time_Delay_Normal,,O,TRUE, -Bacnet_Multi-state_Input,Event_Algorithm_Inhibit,,O,TRUE, -Bacnet_Multi-state_Input,Event_Algorithm_Inhibit_Ref,,O,TRUE, -Bacnet_Multi-state_Input,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, -Bacnet_Multi-state_Value,Object_Identifier,BACnetObjectIdentifier,O,TRUE, -Bacnet_Multi-state_Value,Object_Name,CharacterString,O,TRUE, 
-Bacnet_Multi-state_Value,Object_Type,BACnetObjectType,O,TRUE, -Bacnet_Multi-state_Value,Present_Value,REAL,O,TRUE, -Bacnet_Multi-state_Value,Description,CharacterString,O,TRUE, -Bacnet_Multi-state_Value,Status_Flags,BACnetStatusFlags,O,TRUE, -Bacnet_Multi-state_Value,Event_State,BACnetEventState,O,TRUE, -Bacnet_Multi-state_Value,Reliability,BACnetReliability,O,TRUE, -Bacnet_Multi-state_Value,Out_Of_Service,BOOLEAN,O,TRUE, -Bacnet_Multi-state_Value,Number_of_States,,O,TRUE, -Bacnet_Multi-state_Value,State_Text,,O,TRUE, -Bacnet_Multi-state_Value,Priority_Array,,O,TRUE, -Bacnet_Multi-state_Value,Relinquish_Default,,O,TRUE, -Bacnet_Multi-state_Value,COV_Period,,O,TRUE, -Bacnet_Multi-state_Value,COV_Min_Send_Time,,O,TRUE, -Bacnet_Multi-state_Value,Time_Delay,,O,TRUE, -Bacnet_Multi-state_Value,Notification_Class,Unsigned,O,TRUE, -Bacnet_Multi-state_Value,Alarm_Values,,O,TRUE, -Bacnet_Multi-state_Value,Fault_Values,,O,TRUE, -Bacnet_Multi-state_Value,Event_Enable,BACnetEventTransitionBits,O,TRUE, -Bacnet_Multi-state_Value,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, -Bacnet_Multi-state_Value,Notify_Type,BACnetNotifyType,O,TRUE, -Bacnet_Multi-state_Value,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, -Bacnet_Multi-state_Value,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, -Bacnet_Multi-state_Value,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, -Bacnet_Multi-state_Value,Event_Detection_Enable,BOOLEAN,O,TRUE, -Bacnet_Multi-state_Value,Time_Delay_Normal,,O,TRUE, -Bacnet_Multi-state_Value,Event_Algorithm_Inhibit,,O,TRUE, -Bacnet_Multi-state_Value,Event_Algorithm_Inhibit_Ref,,O,TRUE, -Bacnet_Multi-state_Value,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, -Bacnet_Program,Object_Identifier,BACnetObjectIdentifier,O,TRUE, -Bacnet_Program,Object_Name,CharacterString,O,TRUE, -Bacnet_Program,Object_Type,BACnetObjectType,O,TRUE, -Bacnet_Program,Description,CharacterString,O,TRUE, 
-Bacnet_Program,Program_State,,O,TRUE, -Bacnet_Program,Program_Change,,O,TRUE, -Bacnet_Program,Description_Of_Halt,,O,TRUE, -Bacnet_Program,Reason_For_Halt,,O,TRUE, -Bacnet_Program,Status_Flags,BACnetStatusFlags,O,TRUE, -Bacnet_Program,Reliability,BACnetReliability,O,TRUE, -Bacnet_Program,Out_Of_Service,BOOLEAN,O,TRUE, -Bacnet_Program,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, -Bacnet_Notification,Object_Identifier,BACnetObjectIdentifier,O,TRUE, -Bacnet_Notification,Object_Name,CharacterString,O,TRUE, -Bacnet_Notification,Object_Type,BACnetObjectType,O,TRUE, -Bacnet_Notification,Description,CharacterString,O,TRUE, -Bacnet_Notification,Notification_Class,Unsigned,O,TRUE, -Bacnet_Notification,Priority,,O,TRUE, -Bacnet_Notification,Ack_Required,,O,TRUE, -Bacnet_Notification,Recipient_List,,O,TRUE, -Bacnet_Notification,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, -Bacnet_Schedule,Object_Identifier,BACnetObjectIdentifier,O,TRUE, -Bacnet_Schedule,Object_Name,CharacterString,O,TRUE, -Bacnet_Schedule,Object_Type,BACnetObjectType,O,TRUE, -Bacnet_Schedule,Description,CharacterString,O,TRUE, -Bacnet_Schedule,Present_Value,REAL,O,TRUE, -Bacnet_Schedule,Effective_Period,,O,TRUE, -Bacnet_Schedule,Weekly_Schedule,,O,TRUE, -Bacnet_Schedule,Exception_Schedule,,O,TRUE, -Bacnet_Schedule,Schedule_Default,,O,TRUE, -Bacnet_Schedule,List_Of_Object_Property_References,,O,TRUE, -Bacnet_Schedule,Priority_For_Writing,Unsigned(1..16),O,TRUE, -Bacnet_Schedule,Status_Flags,BACnetStatusFlags,O,TRUE, -Bacnet_Schedule,Reliability,BACnetReliability,O,TRUE, -Bacnet_Schedule,Out_Of_Service,BOOLEAN,O,TRUE, -Bacnet_Schedule,Time_To_Next_State,,O,TRUE, -Bacnet_Schedule,Next_State,,O,TRUE, -Bacnet_Schedule,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, -Bacnet_Trend_Log,Object_Identifier,BACnetObjectIdentifier,O,TRUE, -Bacnet_Trend_Log,Object_Name,CharacterString,O,TRUE, -Bacnet_Trend_Log,Object_Type,BACnetObjectType,O,TRUE, 
-Bacnet_Trend_Log,Description,CharacterString,O,TRUE, -Bacnet_Trend_Log,Enable,,O,TRUE, -Bacnet_Trend_Log,Start_Time,,O,TRUE, -Bacnet_Trend_Log,Stop_Time,,O,TRUE, -Bacnet_Trend_Log,Log_Device_Object_Property,,O,TRUE, -Bacnet_Trend_Log,Log_Interval,,O,TRUE, -Bacnet_Trend_Log,Cov_Resubscription_Interval,,O,TRUE, -Bacnet_Trend_Log,Client_Cov_Increment,,O,TRUE, -Bacnet_Trend_Log,Stop_When_Full,,O,TRUE, -Bacnet_Trend_Log,Buffer_Size,,O,TRUE, -Bacnet_Trend_Log,Log_Buffer,,O,TRUE, -Bacnet_Trend_Log,Record_Count,,O,TRUE, -Bacnet_Trend_Log,Total_Record_Count,,O,TRUE, -Bacnet_Trend_Log,Logging_Type,,O,TRUE, -Bacnet_Trend_Log,Align_Intervals,,O,TRUE, -Bacnet_Trend_Log,Interval_Offset,,O,TRUE, -Bacnet_Trend_Log,Trigger,,O,TRUE, -Bacnet_Trend_Log,Status_Flags,BACnetStatusFlags,O,TRUE, -Bacnet_Trend_Log,Reliability,BACnetReliability,O,TRUE, -Bacnet_Trend_Log,Notification_Threshold,,O,TRUE, -Bacnet_Trend_Log,Records_Since_Notification,,O,TRUE, -Bacnet_Trend_Log,Last_Notify_Record,,O,TRUE, -Bacnet_Trend_Log,Event_State,BACnetEventState,O,TRUE, -Bacnet_Trend_Log,Notification_Class,Unsigned,O,TRUE, -Bacnet_Trend_Log,Event_Enable,BACnetEventTransitionBits,O,TRUE, -Bacnet_Trend_Log,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, -Bacnet_Trend_Log,Notify_Type,BACnetNotifyType,O,TRUE, -Bacnet_Trend_Log,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, -Bacnet_Trend_Log,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, -Bacnet_Trend_Log,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, -Bacnet_Trend_Log,Event_Detection_Enable,BOOLEAN,O,TRUE, -Bacnet_Trend_Log,Event_Algorithm_Inhibit,,O,TRUE, -Bacnet_Trend_Log,Event_Algorithm_Inhibit_Ref,,O,TRUE, +Bacnet_Object_Type,Bacnet_Object_Property,Property_Datatype,Conformance_Code,Supported, +Bacnet_Analogue_Input,Object_Identifier,BACnetObjectIdentifier,R,TRUE, +Bacnet_Analogue_Input,Object_Name,CharacterString,W,TRUE, +Bacnet_Analogue_Input,Object_Type,BACnetObjectType,R,TRUE, 
+Bacnet_Analogue_Input,Present_Value,REAL,R,TRUE, + ,Description,CharacterString,O,TRUE, +Bacnet_Analogue_Input,Device_Type,,O,TRUE, +Bacnet_Analogue_Input,Status_Flags,BACnetStatusFlags,R,TRUE, +Bacnet_Analogue_Input,Event_State,BACnetEventState,R,TRUE, +Bacnet_Analogue_Input,Reliability,BACnetReliability,O,TRUE, +Bacnet_Analogue_Input,Out_Of_Service,BOOLEAN,W,TRUE, +Bacnet_Analogue_Input,Update_Interval,,O,TRUE, +Bacnet_Analogue_Input,Units,,R,TRUE, +Bacnet_Analogue_Input,Min_Pres_Value,REAL,O,TRUE, +Bacnet_Analogue_Input,Max_Pres_Value,REAL,O,TRUE, +Bacnet_Analogue_Input,Resolution,,O,TRUE, +Bacnet_Analogue_Input,COV_Increment,REAL,O,TRUE, +Bacnet_Analogue_Input,COV_Period,,O,TRUE, +Bacnet_Analogue_Input,COV_Min_Send_Time,,O,TRUE, +Bacnet_Analogue_Input,Time_Delay,,O,TRUE, +Bacnet_Analogue_Input,Notification_Class,Unsigned,O,TRUE, +Bacnet_Analogue_Input,High_Limit,,O,TRUE, +Bacnet_Analogue_Input,Low_Limit,,O,TRUE, +Bacnet_Analogue_Input,Deadband,,O,TRUE, +Bacnet_Analogue_Input,Limit_Enable,,O,TRUE, +Bacnet_Analogue_Input,Event_Enable,BACnetEventTransitionBits,O,TRUE, +Bacnet_Analogue_Input,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, +Bacnet_Analogue_Input,Notify_Type,BACnetNotifyType,O,TRUE, +Bacnet_Analogue_Input,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, +Bacnet_Analogue_Input,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Analogue_Input,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Analogue_Input,Event_Detection_Enable,BOOLEAN,O,TRUE, +Bacnet_Analogue_Input,Time_Delay_Normal,,O,TRUE, +Bacnet_Analogue_Input,Event_Algorithm_Inhibit,,O,TRUE, +Bacnet_Analogue_Input,Event_Algorithm_Inhibit_Ref,,O,TRUE, +Bacnet_Analogue_Input,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE, +Bacnet_Analogue_Output,Object_Identifier,BACnetObjectIdentifier,R,TRUE, +Bacnet_Analogue_Output,Object_Name,CharacterString,W,TRUE, +Bacnet_Analogue_Output,Object_Type,BACnetObjectType,R,TRUE, 
+Bacnet_Analogue_Output,Present_Value,REAL,W,TRUE, +Bacnet_Analogue_Output,Description,CharacterString,O,TRUE, +Bacnet_Analogue_Output,Device_Type,,O,TRUE, +Bacnet_Analogue_Output,Status_Flags,BACnetStatusFlags,R,TRUE, +Bacnet_Analogue_Output,Event_State,BACnetEventState,R,TRUE, +Bacnet_Analogue_Output,Reliability,BACnetReliability,O,TRUE, +Bacnet_Analogue_Output,Out_Of_Service,BOOLEAN,W,TRUE, +Bacnet_Analogue_Output,Units,,R,TRUE, +Bacnet_Analogue_Output,Min_Present_Value,,O,TRUE, +Bacnet_Analogue_Output,Max_Present_Value,,O,TRUE, +Bacnet_Analogue_Output,Resolution,,O,TRUE, +Bacnet_Analogue_Output,Priority_Array,,R,TRUE, +Bacnet_Analogue_Output,Relinquish_Default,,W,TRUE, +Bacnet_Analogue_Output,COV_Increment,REAL,O,TRUE, +Bacnet_Analogue_Output,COV_Period,,O,TRUE, +Bacnet_Analogue_Output,COV_Min_Send_Time,,O,TRUE, +Bacnet_Analogue_Output,Time_Delay,,O,TRUE, +Bacnet_Analogue_Output,Notification_Class,Unsigned,O,TRUE, +Bacnet_Analogue_Output,High_Limit,,O,TRUE, +Bacnet_Analogue_Output,Low_Limit,,O,TRUE, +Bacnet_Analogue_Output,Deadband,,O,TRUE, +Bacnet_Analogue_Output,Limit_Enable,,O,TRUE, +Bacnet_Analogue_Output,Event_Enable,BACnetEventTransitionBits,O,TRUE, +Bacnet_Analogue_Output,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, +Bacnet_Analogue_Output,Notify_Type,BACnetNotifyType,O,TRUE, +Bacnet_Analogue_Output,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, +Bacnet_Analogue_Output,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Analogue_Output,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Analogue_Output,Event_Detection_Enable,BOOLEAN,O,TRUE, +Bacnet_Analogue_Output,Time_Delay_Normal,,O,TRUE, +Bacnet_Analogue_Output,Event_Algorithm_Inhibit,,O,TRUE, +Bacnet_Analogue_Output,Event_Algorithm_Inhibit_Ref,,O,TRUE, +Bacnet_Analogue_Output,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE, +Bacnet_Analogue_Value,Object_Identifier,BACnetObjectIdentifier,O,TRUE, 
+Bacnet_Analogue_Value,Object_Name,CharacterString,O,TRUE, +Bacnet_Analogue_Value,Object_Type,BACnetObjectType,O,TRUE, +Bacnet_Analogue_Value,Present_Value,REAL,O,TRUE, +Bacnet_Analogue_Value,Description,CharacterString,O,TRUE, +Bacnet_Analogue_Value,Status_Flags,BACnetStatusFlags,O,TRUE, +Bacnet_Analogue_Value,Event_State,BACnetEventState,O,TRUE, +Bacnet_Analogue_Value,Reliability,BACnetReliability,O,TRUE, +Bacnet_Analogue_Value,Out_Of_Service,BOOLEAN,O,TRUE, +Bacnet_Analogue_Value,Units,,O,TRUE, +Bacnet_Analogue_Value,Priority_Array,,O,TRUE, +Bacnet_Analogue_Value,Relinquish_Default,,O,TRUE, +Bacnet_Analogue_Value,Min_Present_Value,,O,TRUE, +Bacnet_Analogue_Value,Max_Present_Value,,O,TRUE, +Bacnet_Analogue_Value,COV_Increment,REAL,O,TRUE, +Bacnet_Analogue_Value,COV_Period,,O,TRUE, +Bacnet_Analogue_Value,COV_Min_Send_Time,,O,TRUE, +Bacnet_Analogue_Value,Time_Delay,,O,TRUE, +Bacnet_Analogue_Value,Notification_Class,Unsigned,O,TRUE, +Bacnet_Analogue_Value,High_Limit,,O,TRUE, +Bacnet_Analogue_Value,Low_Limit,,O,TRUE, +Bacnet_Analogue_Value,Deadband,,O,TRUE, +Bacnet_Analogue_Value,Limit_Enable,,O,TRUE, +Bacnet_Analogue_Value,Event_Enable,BACnetEventTransitionBits,O,TRUE, +Bacnet_Analogue_Value,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, +Bacnet_Analogue_Value,Notify_Type,BACnetNotifyType,O,TRUE, +Bacnet_Analogue_Value,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, +Bacnet_Analogue_Value,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Analogue_Value,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Analogue_Value,Event_Detection_Enable,BOOLEAN,O,TRUE, +Bacnet_Analogue_Value,Time_Delay_Normal,,O,TRUE, +Bacnet_Analogue_Value,Event_Algorithm_Inhibit,,O,TRUE, +Bacnet_Analogue_Value,Event_Algorithm_Inhibit_Ref,,O,TRUE, +Bacnet_Analogue_Value,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE, +Bacnet_Binary_Input,Object_Identifier,BACnetObjectIdentifier,R,TRUE, 
+Bacnet_Binary_Input,Object_Name,CharacterString,W,TRUE, +Bacnet_Binary_Input,Object_Type,BACnetObjectType,R,TRUE, +Bacnet_Binary_Input,Present_Value,REAL,R,TRUE, +Bacnet_Binary_Input,Description,CharacterString,O,TRUE, +Bacnet_Binary_Input,Device_Type,,O,TRUE, +Bacnet_Binary_Input,Status_Flags,BACnetStatusFlags,R,TRUE, +Bacnet_Binary_Input,Event_State,BACnetEventState,R,TRUE, +Bacnet_Binary_Input,Reliability,BACnetReliability,O,TRUE, +Bacnet_Binary_Input,Out_Of_Service,BOOLEAN,W,TRUE, +Bacnet_Binary_Input,Polarity,,R,TRUE, +Bacnet_Binary_Input,Inactive_Text,,O,TRUE, +Bacnet_Binary_Input,Active_Text,,O,TRUE, +Bacnet_Binary_Input,Change_Of_State_Time,,O,TRUE, +Bacnet_Binary_Input,Change_Of_State_Count,,O,TRUE, +Bacnet_Binary_Input,Time_Of_State_Count_Reset,,O,TRUE, +Bacnet_Binary_Input,Elapsed_Active_Time,,O,TRUE, +Bacnet_Binary_Input,Time_Of_Active_Time_Reset,,O,TRUE, +Bacnet_Binary_Input,COV_Period,,O,TRUE, +Bacnet_Binary_Input,COV_Min_Send_Time,,O,TRUE, +Bacnet_Binary_Input,Time_Delay,,O,TRUE, +Bacnet_Binary_Input,Notification_Class,Unsigned,O,TRUE, +Bacnet_Binary_Input,Alarm_Value,,O,TRUE, +Bacnet_Binary_Input,Event_Enable,BACnetEventTransitionBits,O,TRUE, +Bacnet_Binary_Input,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, +Bacnet_Binary_Input,Notify_Type,BACnetNotifyType,O,TRUE, +Bacnet_Binary_Input,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, +Bacnet_Binary_Input,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Binary_Input,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Binary_Input,Event_Detection_Enable,BOOLEAN,O,TRUE, +Bacnet_Binary_Input,Time_Delay_Normal,,O,TRUE, +Bacnet_Binary_Input,Event_Algorithm_Inhibit,,O,TRUE, +Bacnet_Binary_Input,Event_Algorithm_Inhibit_Ref,,O,TRUE, +Bacnet_Binary_Input,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE, +Bacnet_Binary_Output,Object_Identifier,BACnetObjectIdentifier,O,TRUE, +Bacnet_Binary_Output,Object_Name,CharacterString,O,TRUE, 
+Bacnet_Binary_Output,Object_Type,BACnetObjectType,O,TRUE, +Bacnet_Binary_Output,Present_Value,REAL,O,TRUE, +Bacnet_Binary_Output,Description,CharacterString,O,TRUE, +Bacnet_Binary_Output,Device_Type,,O,TRUE, +Bacnet_Binary_Output,Status_Flags,BACnetStatusFlags,O,TRUE, +Bacnet_Binary_Output,Event_State,BACnetEventState,O,TRUE, +Bacnet_Binary_Output,Reliability,BACnetReliability,O,TRUE, +Bacnet_Binary_Output,Out_Of_Service,BOOLEAN,O,TRUE, +Bacnet_Binary_Output,Polarity,,O,TRUE, +Bacnet_Binary_Output,Inactive_Text,,O,TRUE, +Bacnet_Binary_Output,Active_Text,,O,TRUE, +Bacnet_Binary_Output,Change_Of_State_Time,,O,TRUE, +Bacnet_Binary_Output,Change_Of_State_Count,,O,TRUE, +Bacnet_Binary_Output,Time_Of_State_Count_Reset,,O,TRUE, +Bacnet_Binary_Output,Elapsed_Active_Time,,O,TRUE, +Bacnet_Binary_Output,Time_Of_Active_Time_Reset,,O,TRUE, +Bacnet_Binary_Output,Minimum_Off_Time,,O,TRUE, +Bacnet_Binary_Output,Minimum_On_Time,,O,TRUE, +Bacnet_Binary_Output,Priority_Array,,O,TRUE, +Bacnet_Binary_Output,Relinquish_Default,,O,TRUE, +Bacnet_Binary_Output,COV_Period,,O,TRUE, +Bacnet_Binary_Output,COV_Min_Send_Time,,O,TRUE, +Bacnet_Binary_Output,Time_Delay,,O,TRUE, +Bacnet_Binary_Output,Notification_Class,Unsigned,O,TRUE, +Bacnet_Binary_Output,Feedback_Value,,O,TRUE, +Bacnet_Binary_Output,Event_Enable,BACnetEventTransitionBits,O,TRUE, +Bacnet_Binary_Output,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, +Bacnet_Binary_Output,Notify_Type,BACnetNotifyType,O,TRUE, +Bacnet_Binary_Output,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, +Bacnet_Binary_Output,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Binary_Output,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Binary_Output,Event_Detection_Enable,BOOLEAN,O,TRUE, +Bacnet_Binary_Output,Time_Delay_Normal,,O,TRUE, +Bacnet_Binary_Output,Event_Algorithm_Inhibit,,O,TRUE, +Bacnet_Binary_Output,Event_Algorithm_Inhibit_Ref,,O,TRUE, 
+Bacnet_Binary_Output,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, +Bacnet_Binary_Value,Object_Identifier,BACnetObjectIdentifier,O,TRUE, +Bacnet_Binary_Value,Object_Name,CharacterString,O,TRUE, +Bacnet_Binary_Value,Object_Type,BACnetObjectType,O,TRUE, +Bacnet_Binary_Value,Present_Value,REAL,O,TRUE, +Bacnet_Binary_Value,Description,CharacterString,O,TRUE, +Bacnet_Binary_Value,Status_Flags,BACnetStatusFlags,O,TRUE, +Bacnet_Binary_Value,Event_State,BACnetEventState,O,TRUE, +Bacnet_Binary_Value,Reliability,BACnetReliability,O,TRUE, +Bacnet_Binary_Value,Out_Of_Service,BOOLEAN,O,TRUE, +Bacnet_Binary_Value,Inactive_Text,,O,TRUE, +Bacnet_Binary_Value,Active_Text,,O,TRUE, +Bacnet_Binary_Value,Change_Of_State_Time,,O,TRUE, +Bacnet_Binary_Value,Change_Of_State_Count,,O,TRUE, +Bacnet_Binary_Value,Time_Of_State_Count_Reset,,O,TRUE, +Bacnet_Binary_Value,Elapsed_Active_Time,,O,TRUE, +Bacnet_Binary_Value,Time_Of_Active_Time_Reset,,O,TRUE, +Bacnet_Binary_Value,Minimum_Off_Time,,O,TRUE, +Bacnet_Binary_Value,Minimum_On_Time,,O,TRUE, +Bacnet_Binary_Value,Priority_Array,,O,TRUE, +Bacnet_Binary_Value,Relinquish_Default,,O,TRUE, +Bacnet_Binary_Value,COV_Period,,O,TRUE, +Bacnet_Binary_Value,COV_Min_Send_Time,,O,TRUE, +Bacnet_Binary_Value,Time_Delay,,O,TRUE, +Bacnet_Binary_Value,Notification_Class,Unsigned,O,TRUE, +Bacnet_Binary_Value,Alarm_Value,,O,TRUE, +Bacnet_Binary_Value,Event_Enable,BACnetEventTransitionBits,O,TRUE, +Bacnet_Binary_Value,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, +Bacnet_Binary_Value,Notify_Type,BACnetNotifyType,O,TRUE, +Bacnet_Binary_Value,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, +Bacnet_Binary_Value,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Binary_Value,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Binary_Value,Event_Detection_Enable,BOOLEAN,O,TRUE, +Bacnet_Binary_Value,Time_Delay_Normal,,O,TRUE, +Bacnet_Binary_Value,Event_Algorithm_Inhibit,,O,TRUE, 
+Bacnet_Binary_Value,Event_Algorithm_Inhibit_Ref,,O,TRUE, +Bacnet_Binary_Value,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, +Bacnet_Calendar,Object_Identifier,BACnetObjectIdentifier,O,TRUE, +Bacnet_Calendar,Object_Name,CharacterString,O,TRUE, +Bacnet_Calendar,Object_Type,BACnetObjectType,O,TRUE, +Bacnet_Calendar,Present_Value,REAL,O,TRUE, +Bacnet_Calendar,Description,CharacterString,O,TRUE, +Bacnet_Calendar,Date_List,,O,TRUE, +Bacnet_Calendar,Time_To_Next_State,,O,TRUE, +Bacnet_Calendar,Next_State,,O,TRUE, +Bacnet_Calendar,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, +Device,Object_Identifier,BACnetObjectIdentifier,W,TRUE, +Device,Object_Name,CharacterString,W,TRUE, +Device,Object_Type,BACnetObjectType,R,TRUE, +Device,System_Status,,R,TRUE, +Device,Vendor_Name,,R,TRUE, +Device,Vendor_Identifier,,R,TRUE, +Device,Model_Name,,R,TRUE, +Device,Firmware_Revision,,R,TRUE, +Device,Application_Software_Version,,R,TRUE, +Device,Location,,O,TRUE, +Device,Description,CharacterString,O,TRUE, +Device,Protocol_Version,,R,TRUE, +Device,Protocol_Revision,,R,TRUE, +Device,Protocol_Services_Supported,,R,TRUE, +Device,Protocol_Object_Types_Supported,,R,TRUE, +Device,Object_List,,R,TRUE, +Device,Max_APDU_Length_Accepted,,R,TRUE, +Device,Segmentation_Supported,,R,TRUE, +Device,Max_Segments_Accepted,,O,TRUE, +Device,Local_Date,,O,TRUE, +Device,Local_Time,,O,TRUE, +Device,UTC_Offset,,O,TRUE, +Device,Daylight_Savings_Status,,O,TRUE, +Device,Apdu_Segment_Timeout,,O,TRUE, +Device,APDU_Timeout,,W,TRUE, +Device,Number_Of_APDU_Retries,,W,TRUE, +Device,Time_Synchronization_Recipients,,O,TRUE, +Device,Device_Address_Binding,,R,TRUE, +Device,Database_Revision,,R,TRUE, +Device,Configuration_Files,,O,TRUE, +Device,Last_Restore_Time,,O,TRUE, +Device,Backup_Failure_Timeout,,O,TRUE, +Device,Backup_Preparation_Time,,O,TRUE, +Device,Restore_Preparation_Time,,O,TRUE, +Device,Restore_Completion_Time,,O,TRUE, +Device,Backup_And_Restore_State,,O,TRUE, 
+Device,Active_COV_Subscriptions,,O,TRUE, +Device,Last_Restart_Reason,,O,TRUE, +Device,Time_Of_Device_Restart,,O,TRUE, +Device,Restart_Notification_Recipients,,O,TRUE, +Device,Utc_Time_Synchronization_Recipients,,O,TRUE, +Device,Max_Master,,O,TRUE, +Device,Max_Info_Frames,,O,TRUE, +Device,Time_Synchronization_Interval,,O,TRUE, +Device,Align_Intervals,,O,TRUE, +Device,Interval_Offset,,O,TRUE, +Device,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE, +Event_Enrollment,Object_Identifier,BACnetObjectIdentifier,O,TRUE, +Event_Enrollment,Object_Name,CharacterString,O,TRUE, +Event_Enrollment,Object_Type,BACnetObjectType,O,TRUE, +Event_Enrollment,Description,CharacterString,O,TRUE, +Event_Enrollment,Event_Type,,O,TRUE, +Event_Enrollment,Notify_Type,BACnetNotifyType,O,TRUE, +Event_Enrollment,Event_Parameters,,O,TRUE, +Event_Enrollment,Object_Property_Reference,,O,TRUE, +Event_Enrollment,Event_State,BACnetEventState,O,TRUE, +Event_Enrollment,Event_Enable,BACnetEventTransitionBits,O,TRUE, +Event_Enrollment,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, +Event_Enrollment,Notification_Class,Unsigned,O,TRUE, +Event_Enrollment,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, +Event_Enrollment,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, +Event_Enrollment,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, +Event_Enrollment,Event_Detection_Enable,BOOLEAN,O,TRUE, +Event_Enrollment,Time_Delay_Normal,,O,TRUE, +Event_Enrollment,Status_Flags,BACnetStatusFlags,O,TRUE, +Event_Enrollment,Reliability,BACnetReliability,O,TRUE, +Event_Enrollment,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, +Bacnet_File,Object_Identifier,BACnetObjectIdentifier,O,TRUE, +Bacnet_File,Object_Name,CharacterString,O,TRUE, +Bacnet_File,Object_Type,BACnetObjectType,O,TRUE, +Bacnet_File,Description,CharacterString,O,TRUE, +Bacnet_File,File_Type,,O,TRUE, +Bacnet_File,File_Size,,O,TRUE, +Bacnet_File,Modification_Date,,O,TRUE, 
+Bacnet_File,Archive,,O,TRUE, +Bacnet_File,Read_Only,,O,TRUE, +Bacnet_File,File_Access_Method,,O,TRUE, +Bacnet_File,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, +Bacnet_Loop,Object_Identifier,BACnetObjectIdentifier,O,TRUE, +Bacnet_Loop,Object_Name,CharacterString,O,TRUE, +Bacnet_Loop,Object_Type,BACnetObjectType,O,TRUE, +Bacnet_Loop,Present_Value,REAL,O,TRUE, +Bacnet_Loop,Description,CharacterString,O,TRUE, +Bacnet_Loop,Status_Flags,BACnetStatusFlags,O,TRUE, +Bacnet_Loop,Event_State,BACnetEventState,O,TRUE, +Bacnet_Loop,Reliability,BACnetReliability,O,TRUE, +Bacnet_Loop,Out_Of_Service,BOOLEAN,O,TRUE, +Bacnet_Loop,Update_Interval,,O,TRUE, +Bacnet_Loop,Output_Units,,O,TRUE, +Bacnet_Loop,Manipulated_Variable_Reference,,O,TRUE, +Bacnet_Loop,Controlled_Variable_Reference,,O,TRUE, +Bacnet_Loop,Controlled_Variable_Value,,O,TRUE, +Bacnet_Loop,Controlled_Variable_Units,,O,TRUE, +Bacnet_Loop,Setpoint_Reference,,O,TRUE, +Bacnet_Loop,Setpoint,,O,TRUE, +Bacnet_Loop,Action,,O,TRUE, +Bacnet_Loop,Proportional_Constant,,O,TRUE, +Bacnet_Loop,Proportional_Constant_Units,,O,TRUE, +Bacnet_Loop,Integral_Constant,,O,TRUE, +Bacnet_Loop,Integral_Constant_Units,,O,TRUE, +Bacnet_Loop,Derivative_Constant,,O,TRUE, +Bacnet_Loop,Derivative_Constant_Units,,O,TRUE, +Bacnet_Loop,Bias,,O,TRUE, +Bacnet_Loop,Maximum_Output,,O,TRUE, +Bacnet_Loop,Minimum_Output,,O,TRUE, +Bacnet_Loop,Priority_For_Writing,Unsigned(1..16),O,TRUE, +Bacnet_Loop,LoopDeadband,,O,TRUE, +Bacnet_Loop,Saturation_Time,,O,TRUE, +Bacnet_Loop,COV_Increment,REAL,O,TRUE, +Bacnet_Loop,COV_Period,,O,TRUE, +Bacnet_Loop,COV_Min_Send_Time,,O,TRUE, +Bacnet_Loop,Ramp_Time,,O,TRUE, +Bacnet_Loop,Saturation_Time_Low_Limit_Enable,,O,TRUE, +Bacnet_Loop,Saturation_Time_High_Limit_Enable,,O,TRUE, +Bacnet_Loop,Time_Delay,,O,TRUE, +Bacnet_Loop,Notification_Class,Unsigned,O,TRUE, +Bacnet_Loop,Error_Limit,,O,TRUE, +Bacnet_Loop,Deadband,,O,TRUE, +Bacnet_Loop,Event_Enable,BACnetEventTransitionBits,O,TRUE, 
+Bacnet_Loop,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, +Bacnet_Loop,Notify_Type,BACnetNotifyType,O,TRUE, +Bacnet_Loop,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, +Bacnet_Loop,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Loop,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Loop,Event_Detection_Enable,BOOLEAN,O,TRUE, +Bacnet_Loop,Time_Delay_Normal,,O,TRUE, +Bacnet_Loop,Event_Algorithm_Inhibit,,O,TRUE, +Bacnet_Loop,Event_Algorithm_Inhibit_Ref,,O,TRUE, +Bacnet_Loop,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, +Bacnet_Multi-state_Input,Object_Identifier,BACnetObjectIdentifier,O,TRUE, +Bacnet_Multi-state_Input,Object_Name,CharacterString,O,TRUE, +Bacnet_Multi-state_Input,Object_Type,BACnetObjectType,O,TRUE, +Bacnet_Multi-state_Input,Present_Value,REAL,O,TRUE, +Bacnet_Multi-state_Input,Description,CharacterString,O,TRUE, +Bacnet_Multi-state_Input,Device_Type,,O,TRUE, +Bacnet_Multi-state_Input,Status_Flags,BACnetStatusFlags,O,TRUE, +Bacnet_Multi-state_Input,Event_State,BACnetEventState,O,TRUE, +Bacnet_Multi-state_Input,Reliability,BACnetReliability,O,TRUE, +Bacnet_Multi-state_Input,Out_Of_Service,BOOLEAN,O,TRUE, +Bacnet_Multi-state_Input,Number_of_States,,O,TRUE, +Bacnet_Multi-state_Input,State_Text,,O,TRUE, +Bacnet_Multi-state_Input,COV_Period,,O,TRUE, +Bacnet_Multi-state_Input,COV_Min_Send_Time,,O,TRUE, +Bacnet_Multi-state_Input,Time_Delay,,O,TRUE, +Bacnet_Multi-state_Input,Notification_Class,Unsigned,O,TRUE, +Bacnet_Multi-state_Input,Alarm_Values,,O,TRUE, +Bacnet_Multi-state_Input,Fault_Values,,O,TRUE, +Bacnet_Multi-state_Input,Event_Enable,BACnetEventTransitionBits,O,TRUE, +Bacnet_Multi-state_Input,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, +Bacnet_Multi-state_Input,Notify_Type,BACnetNotifyType,O,TRUE, +Bacnet_Multi-state_Input,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, +Bacnet_Multi-state_Input,Event_Message_Texts,BACnetARRAY[3] of 
CharacterString,O,TRUE, +Bacnet_Multi-state_Input,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Multi-state_Input,Event_Detection_Enable,BOOLEAN,O,TRUE, +Bacnet_Multi-state_Input,Time_Delay_Normal,,O,TRUE, +Bacnet_Multi-state_Input,Event_Algorithm_Inhibit,,O,TRUE, +Bacnet_Multi-state_Input,Event_Algorithm_Inhibit_Ref,,O,TRUE, +Bacnet_Multi-state_Input,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, +Bacnet_Multi-state_Value,Object_Identifier,BACnetObjectIdentifier,O,TRUE, +Bacnet_Multi-state_Value,Object_Name,CharacterString,O,TRUE, +Bacnet_Multi-state_Value,Object_Type,BACnetObjectType,O,TRUE, +Bacnet_Multi-state_Value,Present_Value,REAL,O,TRUE, +Bacnet_Multi-state_Value,Description,CharacterString,O,TRUE, +Bacnet_Multi-state_Value,Status_Flags,BACnetStatusFlags,O,TRUE, +Bacnet_Multi-state_Value,Event_State,BACnetEventState,O,TRUE, +Bacnet_Multi-state_Value,Reliability,BACnetReliability,O,TRUE, +Bacnet_Multi-state_Value,Out_Of_Service,BOOLEAN,O,TRUE, +Bacnet_Multi-state_Value,Number_of_States,,O,TRUE, +Bacnet_Multi-state_Value,State_Text,,O,TRUE, +Bacnet_Multi-state_Value,Priority_Array,,O,TRUE, +Bacnet_Multi-state_Value,Relinquish_Default,,O,TRUE, +Bacnet_Multi-state_Value,COV_Period,,O,TRUE, +Bacnet_Multi-state_Value,COV_Min_Send_Time,,O,TRUE, +Bacnet_Multi-state_Value,Time_Delay,,O,TRUE, +Bacnet_Multi-state_Value,Notification_Class,Unsigned,O,TRUE, +Bacnet_Multi-state_Value,Alarm_Values,,O,TRUE, +Bacnet_Multi-state_Value,Fault_Values,,O,TRUE, +Bacnet_Multi-state_Value,Event_Enable,BACnetEventTransitionBits,O,TRUE, +Bacnet_Multi-state_Value,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, +Bacnet_Multi-state_Value,Notify_Type,BACnetNotifyType,O,TRUE, +Bacnet_Multi-state_Value,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, +Bacnet_Multi-state_Value,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Multi-state_Value,Event_Message_Texts_Config,BACnetARRAY[3] of 
CharacterString,O,TRUE, +Bacnet_Multi-state_Value,Event_Detection_Enable,BOOLEAN,O,TRUE, +Bacnet_Multi-state_Value,Time_Delay_Normal,,O,TRUE, +Bacnet_Multi-state_Value,Event_Algorithm_Inhibit,,O,TRUE, +Bacnet_Multi-state_Value,Event_Algorithm_Inhibit_Ref,,O,TRUE, +Bacnet_Multi-state_Value,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, +Bacnet_Program,Object_Identifier,BACnetObjectIdentifier,O,TRUE, +Bacnet_Program,Object_Name,CharacterString,O,TRUE, +Bacnet_Program,Object_Type,BACnetObjectType,O,TRUE, +Bacnet_Program,Description,CharacterString,O,TRUE, +Bacnet_Program,Program_State,,O,TRUE, +Bacnet_Program,Program_Change,,O,TRUE, +Bacnet_Program,Description_Of_Halt,,O,TRUE, +Bacnet_Program,Reason_For_Halt,,O,TRUE, +Bacnet_Program,Status_Flags,BACnetStatusFlags,O,TRUE, +Bacnet_Program,Reliability,BACnetReliability,O,TRUE, +Bacnet_Program,Out_Of_Service,BOOLEAN,O,TRUE, +Bacnet_Program,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, +Bacnet_Notification,Object_Identifier,BACnetObjectIdentifier,O,TRUE, +Bacnet_Notification,Object_Name,CharacterString,O,TRUE, +Bacnet_Notification,Object_Type,BACnetObjectType,O,TRUE, +Bacnet_Notification,Description,CharacterString,O,TRUE, +Bacnet_Notification,Notification_Class,Unsigned,O,TRUE, +Bacnet_Notification,Priority,,O,TRUE, +Bacnet_Notification,Ack_Required,,O,TRUE, +Bacnet_Notification,Recipient_List,,O,TRUE, +Bacnet_Notification,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, +Bacnet_Schedule,Object_Identifier,BACnetObjectIdentifier,O,TRUE, +Bacnet_Schedule,Object_Name,CharacterString,O,TRUE, +Bacnet_Schedule,Object_Type,BACnetObjectType,O,TRUE, +Bacnet_Schedule,Description,CharacterString,O,TRUE, +Bacnet_Schedule,Present_Value,REAL,O,TRUE, +Bacnet_Schedule,Effective_Period,,O,TRUE, +Bacnet_Schedule,Weekly_Schedule,,O,TRUE, +Bacnet_Schedule,Exception_Schedule,,O,TRUE, +Bacnet_Schedule,Schedule_Default,,O,TRUE, +Bacnet_Schedule,List_Of_Object_Property_References,,O,TRUE, 
+Bacnet_Schedule,Priority_For_Writing,Unsigned(1..16),O,TRUE, +Bacnet_Schedule,Status_Flags,BACnetStatusFlags,O,TRUE, +Bacnet_Schedule,Reliability,BACnetReliability,O,TRUE, +Bacnet_Schedule,Out_Of_Service,BOOLEAN,O,TRUE, +Bacnet_Schedule,Time_To_Next_State,,O,TRUE, +Bacnet_Schedule,Next_State,,O,TRUE, +Bacnet_Schedule,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE, +Bacnet_Trend_Log,Object_Identifier,BACnetObjectIdentifier,O,TRUE, +Bacnet_Trend_Log,Object_Name,CharacterString,O,TRUE, +Bacnet_Trend_Log,Object_Type,BACnetObjectType,O,TRUE, +Bacnet_Trend_Log,Description,CharacterString,O,TRUE, +Bacnet_Trend_Log,Enable,,O,TRUE, +Bacnet_Trend_Log,Start_Time,,O,TRUE, +Bacnet_Trend_Log,Stop_Time,,O,TRUE, +Bacnet_Trend_Log,Log_Device_Object_Property,,O,TRUE, +Bacnet_Trend_Log,Log_Interval,,O,TRUE, +Bacnet_Trend_Log,Cov_Resubscription_Interval,,O,TRUE, +Bacnet_Trend_Log,Client_Cov_Increment,,O,TRUE, +Bacnet_Trend_Log,Stop_When_Full,,O,TRUE, +Bacnet_Trend_Log,Buffer_Size,,O,TRUE, +Bacnet_Trend_Log,Log_Buffer,,O,TRUE, +Bacnet_Trend_Log,Record_Count,,O,TRUE, +Bacnet_Trend_Log,Total_Record_Count,,O,TRUE, +Bacnet_Trend_Log,Logging_Type,,O,TRUE, +Bacnet_Trend_Log,Align_Intervals,,O,TRUE, +Bacnet_Trend_Log,Interval_Offset,,O,TRUE, +Bacnet_Trend_Log,Trigger,,O,TRUE, +Bacnet_Trend_Log,Status_Flags,BACnetStatusFlags,O,TRUE, +Bacnet_Trend_Log,Reliability,BACnetReliability,O,TRUE, +Bacnet_Trend_Log,Notification_Threshold,,O,TRUE, +Bacnet_Trend_Log,Records_Since_Notification,,O,TRUE, +Bacnet_Trend_Log,Last_Notify_Record,,O,TRUE, +Bacnet_Trend_Log,Event_State,BACnetEventState,O,TRUE, +Bacnet_Trend_Log,Notification_Class,Unsigned,O,TRUE, +Bacnet_Trend_Log,Event_Enable,BACnetEventTransitionBits,O,TRUE, +Bacnet_Trend_Log,Acked_Transitions,BACnetEventTransitionBits,O,TRUE, +Bacnet_Trend_Log,Notify_Type,BACnetNotifyType,O,TRUE, +Bacnet_Trend_Log,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE, +Bacnet_Trend_Log,Event_Message_Texts,BACnetARRAY[3] of 
CharacterString,O,TRUE, +Bacnet_Trend_Log,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE, +Bacnet_Trend_Log,Event_Detection_Enable,BOOLEAN,O,TRUE, +Bacnet_Trend_Log,Event_Algorithm_Inhibit,,O,TRUE, +Bacnet_Trend_Log,Event_Algorithm_Inhibit_Ref,,O,TRUE, Bacnet_Trend_Log,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE, \ No newline at end of file diff --git a/subset/cloud/Dockerfile.test_udmi b/subset/cloud/Dockerfile.test_udmi index 220bd01ba0..ee5fa4c721 100644 --- a/subset/cloud/Dockerfile.test_udmi +++ b/subset/cloud/Dockerfile.test_udmi @@ -9,11 +9,15 @@ FROM daqf/aardvark:latest RUN $AG update && $AG install openjdk-11-jre RUN $AG update && $AG install openjdk-11-jdk git -COPY validator/ validator/ +RUN $AG update && $AG install curl + +COPY udmi/validator/ validator/ RUN validator/bin/build -COPY schemas/udmi/ schemas/udmi/ +COPY udmi/schema/ schema/ COPY subset/cloud/test_udmi . +COPY resources/test_site/ local/ + CMD ./test_udmi diff --git a/subset/cloud/test_udmi b/subset/cloud/test_udmi index 4b0cfb2a32..0f016e18e6 100755 --- a/subset/cloud/test_udmi +++ b/subset/cloud/test_udmi @@ -1,4 +1,5 @@ #!/bin/bash -e + source reporting.sh REPORT=/tmp/report.txt @@ -7,23 +8,27 @@ LOG=/tmp/udmi.log # Necessary to reach gcp. Should be done by framework but this works for now. 
route add default gw $GATEWAY_IP +ip addr route -n -ping -c 2 172.217.164.106 || true +ping -c 2 $GATEWAY_IP arp -n -ip addr gcp_cred=/config/inst/gcp_service_account.json gcp_topic=target -schema_path=schemas/udmi +schema_path=schema +subscription=daq-validator-dev message_types="state pointset system" device_id=`jq -r .device_id /config/device/module_config.json` # Do basic network connectivity check +echo "nameserver 8.8.8.8" >> /etc/resolv.conf cat /etc/resolv.conf -ping -c 2 172.217.164.106 -ping -c 2 pubsub.googleapis.com +ping -c 2 google.com || true # blocked on github actions +echo "********GOOGLE PAGE********" +curl google.com +echo "***************************" if [ "$device_id" == null ]; then skip="No device id" elif [ ! -f $gcp_cred ]; then @@ -55,15 +60,14 @@ echo Using credentials from $GOOGLE_APPLICATION_CREDENTIALS echo Extracted project $project_id echo Extracted service $service_id echo Configured topic is $gcp_topic -echo Configured schema is $schema_path echo Target device is $device_id echo -timeout 60 validator/bin/validate $PWD/$schema_path pubsub:$gcp_topic $service_id-$HOSTNAME || true +timeout 90 validator/bin/validate $project_id $schema_path pubsub $subscription local/ || true function message_report { message_type=$1 - base=validations/devices/$device_id/$message_type + base=out/devices/$device_id/$message_type ls -l $base* || true if [ -f "$base.out" ]; then @@ -87,3 +91,7 @@ function message_report { for message_type in $message_types; do message_report $message_type done + +fgrep RESULT $REPORT + +echo Done with test_udmi diff --git a/subset/connection/Dockerfile.test_macoui b/subset/connection/Dockerfile.test_macoui deleted file mode 100644 index 6624495d00..0000000000 --- a/subset/connection/Dockerfile.test_macoui +++ /dev/null @@ -1,17 +0,0 @@ -FROM daqf/aardvark:latest - -RUN $AG update && $AG install openjdk-8-jre - -RUN $AG update && $AG install openjdk-8-jdk git - -RUN $AG update && $AG install curl - -COPY 
subset/connection/ . - -RUN mkdir -p mac_oui/src/main/resources - -RUN curl https://svn.nmap.org/nmap/nmap-mac-prefixes > mac_oui/src/main/resources/macList.txt - -RUN cd mac_oui && ./gradlew shadowJar - -CMD ["./test_macoui"] diff --git a/subset/connection/build.conf b/subset/connection/build.conf deleted file mode 100644 index 5c585856af..0000000000 --- a/subset/connection/build.conf +++ /dev/null @@ -1,2 +0,0 @@ -build subset/connection -add macoui diff --git a/subset/connection/mac_oui/.project b/subset/connection/mac_oui/.project deleted file mode 100644 index cd2d52f077..0000000000 --- a/subset/connection/mac_oui/.project +++ /dev/null @@ -1,23 +0,0 @@ - - - mac_oui - Project mac_oui created by Buildship. - - - - - org.eclipse.jdt.core.javabuilder - - - - - org.eclipse.buildship.core.gradleprojectbuilder - - - - - - org.eclipse.jdt.core.javanature - org.eclipse.buildship.core.gradleprojectnature - - diff --git a/subset/connection/readme.md b/subset/connection/readme.md deleted file mode 100644 index f1166e933c..0000000000 --- a/subset/connection/readme.md +++ /dev/null @@ -1,20 +0,0 @@ -# Connection testing - -## test_macoui -The MAC OUI test looks up the manufacturer information for the mac address of the device under test. - -### Note for test developers -The functional test code is included in the `mac_oui/src/main/java` folder. - -The `macList.txt` file containing the MAC OUI database is downloaded at build time by the container specified in -the `Dockerfile.test_macoui` file. - -If java code requires debugging in an IDE, then it will require the `macList.txt` to be placed under the -`mac_oui/src/main/resources/` folder. Use the curl command from the `Dockerfile.test_macoui` file to download and -place the file locally into your project. This `.txt` file is git ignored to avoid being included as a -static resource on the source code repo. - -### Conditions for mac_oui - - pass -> if the MAC OUI matches the mac prefix IEEE registration. 
- - fail -> if the MAC OUI does not match with any of the mac prefixes. - diff --git a/subset/manual/readme.md b/subset/manual/readme.md index ea2056eff6..a5b300a370 100644 --- a/subset/manual/readme.md +++ b/subset/manual/readme.md @@ -2,10 +2,14 @@ ## Manual Tests -Some tests cannot be automated with DAQ although these may be required. To facilitate a single test report which incorporates all tests undertaken on a device, the `manual` test can be used to input the results into reports produced by DAQ. +Some tests cannot be automated with DAQ although these may be required. +To facilitate a single test report which incorporates all tests +undertaken on a device, the `manual` test can be used to input +the results into reports produced by DAQ. ## Configuration -Manual tests including results are inserted into the device's `module_config.json` and marked by `"type": "manual"`. +Manual tests including results are inserted into the device's +`module_config.json` and marked by `"type": "manual"`. ``` "tests": { @@ -14,7 +18,7 @@ Manual tests including results are inserted into the device's `module_config.jso "enabled": true, "type": "manual", "result": "required", - "outcome": "pass" + "outcome": "pass", "summary" : "summary note in results table", "test_log" : "additional information in report appendix" } @@ -38,4 +42,4 @@ Test description Test description -------------------- RESULT pass manual.test.name Manual test - Test summary -``` \ No newline at end of file +``` diff --git a/subset/network/Dockerfile.test_network b/subset/network/Dockerfile.test_network index fbbb2fdec7..ef4b204207 100644 --- a/subset/network/Dockerfile.test_network +++ b/subset/network/Dockerfile.test_network @@ -1,8 +1,21 @@ FROM daqf/aardvark:latest -RUN $AG update && $AG install python netcat +RUN $AG update && $AG install openjdk-8-jre -COPY subset/network/network_tests.py . -COPY subset/network/test_network . 
+RUN $AG update && $AG install openjdk-8-jdk git + +RUN $AG update && $AG install python python-setuptools python-pip netcat + +RUN $AG update && $AG install curl + +RUN pip install scapy + +COPY subset/network/ . + +RUN mkdir -p mac_oui/src/main/resources + +RUN curl https://svn.nmap.org/nmap/nmap-mac-prefixes > mac_oui/src/main/resources/macList.txt + +RUN cd mac_oui && ./gradlew shadowJar CMD ["./test_network"] diff --git a/subset/network/NTPClient/src/main/java/Main.java b/subset/network/NTPClient/src/main/java/Main.java deleted file mode 100644 index b9e53b2833..0000000000 --- a/subset/network/NTPClient/src/main/java/Main.java +++ /dev/null @@ -1,81 +0,0 @@ -import java.io.IOException; -import java.net.*; -import java.text.DecimalFormat; -import java.util.concurrent.Executors; -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; - - -public class Main { - static final double SECONDS_FROM_01_01_1900_TO_01_01_1970 = 2208988800.0; - static String serverName = "time.google.com"; - static int PORT = 123; - static int timerPeriod = 10; - - public static void main(String[] args) { - if (args.length < 2) { - throw new IllegalArgumentException("Usage: server_name port timerPeriod"); - } - serverName = args[0]; - PORT = Integer.parseInt(args[1]); - timerPeriod = Integer.parseInt(args[2]); - - Runnable senderRunnable = new Runnable() { - @Override - public void run() { - try { - sendRequest(); - } catch (IOException e) { - System.out.println(e.getMessage()); - } - } - }; - ScheduledExecutorService executor = Executors.newScheduledThreadPool(1); - executor.scheduleAtFixedRate(senderRunnable, 0, timerPeriod, TimeUnit.SECONDS); - } - - private static void sendRequest() throws IOException { - // Send request - DatagramSocket socket = new DatagramSocket(); - InetAddress address = InetAddress.getByName(serverName); - byte[] buf = new NtpMessage(SECONDS_FROM_01_01_1900_TO_01_01_1970).toByteArray(); - DatagramPacket packet = - new 
DatagramPacket(buf, buf.length, address, PORT); - - // Set the transmit timestamp *just* before sending the packet - NtpMessage.encodeTimestamp(packet.getData(), 40, - (System.currentTimeMillis() / 1000.0) + SECONDS_FROM_01_01_1900_TO_01_01_1970); - sendPacket(socket, packet, buf); - } - - private static void sendPacket(DatagramSocket socket, DatagramPacket packet, byte[] buf) throws IOException { - socket.send(packet); - - // Get response - System.out.println("NTP request sent, waiting for response...\n"); - packet = new DatagramPacket(buf, buf.length); - socket.receive(packet); - - // Immediately record the incoming timestamp - double destinationTimestamp = - (System.currentTimeMillis() / 1000.0) + SECONDS_FROM_01_01_1900_TO_01_01_1970; - - // Process response - NtpMessage msg = new NtpMessage(packet.getData()); - double roundTripDelay = (destinationTimestamp-msg.originateTimestamp) - - (msg.transmitTimestamp-msg.receiveTimestamp); - double localClockOffset = - ((msg.receiveTimestamp - msg.originateTimestamp) + - (msg.transmitTimestamp - destinationTimestamp)) / 2; - - // Display response - System.out.println("NTP server: " + serverName); - System.out.println(msg.toString()); - System.out.println("Dest. 
timestamp: " + - NtpMessage.timestampToString(destinationTimestamp)); - System.out.println("Round-trip delay: " + - new DecimalFormat("0.00").format(roundTripDelay * 1000) + " ms"); - System.out.println("Local clock offset: " + - new DecimalFormat("0.00").format(localClockOffset * 1000) + " ms"); - } -} diff --git a/subset/network/NTPClient/src/main/java/NtpMessage.java b/subset/network/NTPClient/src/main/java/NtpMessage.java deleted file mode 100644 index cfea458b1e..0000000000 --- a/subset/network/NTPClient/src/main/java/NtpMessage.java +++ /dev/null @@ -1,206 +0,0 @@ -import java.text.DecimalFormat; -import java.text.SimpleDateFormat; -import java.util.Date; - -public class NtpMessage { - public byte leapIndicator = 0; - public byte version = 3; - public byte mode = 0; - public short stratum = 0; - public byte pollInterval = 0; - public byte precision = 0; - public double rootDelay = 0; - public double rootDispersion = 0; - public byte[] referenceIdentifier = {0, 0, 0, 0}; - public double referenceTimestamp = 0; - public double originateTimestamp = 0; - public double receiveTimestamp = 0; - public double transmitTimestamp = 0; - - /** - * Constructs a new NtpMessage from an array of bytes. 
- */ - public NtpMessage(byte[] array) { - // See the packet format diagram in RFC 2030 for details - leapIndicator = (byte)((array[0] >> 6) & 0x3); - version = (byte)((array[0] >> 3) & 0x7); - mode = (byte)(array[0] & 0x7); - stratum = unsignedByteToShort(array[1]); - pollInterval = array[2]; - precision = array[3]; - - rootDelay = (array[4] * 256.0) + - unsignedByteToShort(array[5]) + - (unsignedByteToShort(array[6]) / 256.0) + - (unsignedByteToShort(array[7]) / 65536.0); - - rootDispersion = (unsignedByteToShort(array[8]) * 256.0) + - unsignedByteToShort(array[9]) + - (unsignedByteToShort(array[10]) / 256.0) + - (unsignedByteToShort(array[11]) / 65536.0); - - referenceIdentifier[0] = array[12]; - referenceIdentifier[1] = array[13]; - referenceIdentifier[2] = array[14]; - referenceIdentifier[3] = array[15]; - - referenceTimestamp = decodeTimestamp(array, 16); - originateTimestamp = decodeTimestamp(array, 24); - receiveTimestamp = decodeTimestamp(array, 32); - transmitTimestamp = decodeTimestamp(array, 40); - } - - /** - * Constructs a new NtpMessage in client -> server mode, and sets the - * transmit timestamp to the current time. - */ - public NtpMessage(double SECONDS_FROM_01_01_1900_TO_01_01_1970) { - this.mode = 3; - this.transmitTimestamp = (System.currentTimeMillis() / 1000.0) + SECONDS_FROM_01_01_1900_TO_01_01_1970; - } - - /** - * This method constructs the data bytes of a raw NTP packet. 
- */ - public byte[] toByteArray() { - // All bytes are automatically set to 0 - byte[] p = new byte[48]; - - p[0] = (byte)(leapIndicator << 6 | version << 3 | mode); - p[1] = (byte)stratum; - p[2] = (byte)pollInterval; - p[3] = (byte)precision; - - // root delay is a signed 16.16-bit FP, in Java an int is 32-bits - int l = (int)(rootDelay * 65536.0); - p[4] = (byte)((l >> 24) & 0xFF); - p[5] = (byte)((l >> 16) & 0xFF); - p[6] = (byte)((l >> 8) & 0xFF); - p[7] = (byte)(l & 0xFF); - - // root dispersion is an unsigned 16.16-bit FP, in Java there are no - // unsigned primitive types, so we use a long which is 64-bits - long ul = (long)(rootDispersion * 65536.0); - p[8] = (byte)((ul >> 24) & 0xFF); - p[9] = (byte)((ul >> 16) & 0xFF); - p[10] = (byte)((ul >> 8) & 0xFF); - p[11] = (byte)(ul & 0xFF); - - p[12] = referenceIdentifier[0]; - p[13] = referenceIdentifier[1]; - p[14] = referenceIdentifier[2]; - p[15] = referenceIdentifier[3]; - - encodeTimestamp(p, 16, referenceTimestamp); - encodeTimestamp(p, 24, originateTimestamp); - encodeTimestamp(p, 32, receiveTimestamp); - encodeTimestamp(p, 40, transmitTimestamp); - - return p; - } - - /** - * Returns a string representation of a NtpMessage - */ - public String toString() { - String precisionStr = - new DecimalFormat("0.#E0").format(Math.pow(2, precision)); - - return "Leap indicator: " + leapIndicator + "\n" + - "Version: " + version + "\n" + - "Mode: " + mode + "\n" + - "Stratum: " + stratum + "\n" + - "Poll: " + pollInterval + "\n" + - "Precision: " + precision + " (" + precisionStr + " seconds)\n" + - "Root delay: " + new DecimalFormat("0.00").format(rootDelay * 1000) + " ms\n" + - "Root dispersion: " + new DecimalFormat("0.00").format(rootDispersion * 1000) + " ms\n" + - "Reference identifier: " + referenceIdentifierToString(referenceIdentifier, stratum, version) + "\n" + - "Reference timestamp: " + timestampToString(referenceTimestamp) + "\n" + - "Originate timestamp: " + timestampToString(originateTimestamp) + 
"\n" + - "Receive timestamp: " + timestampToString(receiveTimestamp) + "\n" + - "Transmit timestamp: " + timestampToString(transmitTimestamp); - } - - /** - * Converts an unsigned byte to a short. By default, Java assumes that - * a byte is signed. - */ - public static short unsignedByteToShort(byte b) { - if((b & 0x80) == 0x80) return (short)(128 + (b & 0x7f)); - else return (short)b; - } - - /** - * Will read 8 bytes of a message beginning at pointer - * and return it as a double, according to the NTP 64-bit timestamp - * format. - */ - public static double decodeTimestamp(byte[] array, int pointer) { - double r = 0.0; - - for(int i = 0; i < 8; i++) - { - r += unsignedByteToShort(array[pointer + i]) * Math.pow(2, (3 - i) * 8); - } - - return r; - } - - /** - * Encodes a timestamp in the specified position in the message - */ - public static void encodeTimestamp(byte[] array, int pointer, double timestamp) { - // Converts a double into a 64-bit fixed point - for(int i = 0; i < 8; i++) { - // 2^24, 2^16, 2^8, .. 2^-32 - double base = Math.pow(2, (3 - i) * 8); - // Capture byte value - array[pointer + i] = (byte)(timestamp / base); - // Subtract captured value from remaining total - timestamp = timestamp - (double)(unsignedByteToShort(array[pointer + i]) * base); - } - array[7] = (byte)(Math.random() * 255.0); - } - - /** - * Returns a timestamp (number of seconds since 00:00 1-Jan-1900) as a - * formatted date/time string. - */ - public static String timestampToString(double timestamp) { - if(timestamp == 0) return "0"; - double utc = timestamp - (2208988800.0); - long ms = (long)(utc * 1000.0); - String date = new SimpleDateFormat("dd-MMM-yyyy HH:mm:ss").format(new Date(ms)); - double fraction = timestamp - ((long)timestamp); - String fractionSting = new DecimalFormat(".000000").format(fraction); - return date + fractionSting; - } - - /** - * Returns a string representation of a reference identifier according - * to the rules set out in RFC 2030. 
- */ - public static String referenceIdentifierToString(byte[] ref, short stratum, byte version) { - if(stratum == 0 || stratum == 1) - { - return new String(ref); - } - else if(version == 3) - { - return unsignedByteToShort(ref[0]) + "." + - unsignedByteToShort(ref[1]) + "." + - unsignedByteToShort(ref[2]) + "." + - unsignedByteToShort(ref[3]); - } - // In NTP Version 4 secondary servers, this is the low order 32 bits - // of the latest transmit timestamp of the reference source. - else if(version == 4) - { - return "" + ((unsignedByteToShort(ref[0]) / 256.0) + - (unsignedByteToShort(ref[1]) / 65536.0) + - (unsignedByteToShort(ref[2]) / 16777216.0) + - (unsignedByteToShort(ref[3]) / 4294967296.0)); - } - return ""; - } -} diff --git a/subset/network/README.md b/subset/network/README.md new file mode 100644 index 0000000000..677a519931 --- /dev/null +++ b/subset/network/README.md @@ -0,0 +1,76 @@ +# Network Tests + +## General Network Tests + +### connection.min_send +- Located in network_tests.py, started up in test_network. +- Check if a device sends any data packet at a frequency of less than five minutes. + +#### Result cases: +- PASS: The time between packets is measured - pass if time between any two packets is less than five minutes (deals with case where a monitor scan is long) +- FAIL: If data packets are sent, and there are packets with time interval of less than five minutes found, then fail. +- SKIP: If no data packets are sent and the monitor scan period is short, the test will skip instead of failing. + +### communication.type.broadcast +- Located in network_tests.py, started up in test_network. +- This test counts the number of unicast, broadcast and multicast packets sent out by reading from the .pcap file that DAQ has created during runtime. + +#### Result cases: +This is an 'info' test, it does not have a pass/fail/skip case. + + +## NTP Tests +The NTP tests inspect the client NTP version and the device's ability to update its clock precisely. 
+ +### Note for test developers +The functional test code is included in the `ntp_tests.py` file. + +The test reads packets from startup.pcap and monitor.pcap. + +If the python code needs debugging, the pip module `scapy` is required (`pip install scapy`). + +### NTP Test conditions +| Test ID | Info | Pass | Fail | Skip | +|---|---|---|---|---| +| connection.network.ntp_support | Are the received NTP packets using NTP v4? | NTP version is 4 | NTP version is not 4 | No NTP packets are received | +| connection.network.ntp_update | Does the device demonstrate updating its clock using NTP? | Device clock is synchronized | Device clock is not synchronized | Not enough NTP packets are received | + +#### NTP Support #### +The version of NTP used by the client is extracted from the fist client (outbound) NTP packets discovered in startup.pcap. + +#### NTP Update #### +The following criteria are used to determine whether a DUT has synced its clock with the NTP server provided by DAQ: + - A minimum of 2 NTP packets are present in startup.pcap and monitor.pcap (one potential poll). + - A minimum of 2 NTP packets have been exchanged between the DUT and the DAQ-provided NTP server. + - A valid NTP poll is present. Consisting of a client-server exchange. + - The calculated offset is less than 0.128 seconds and the final poll does not have a leap indicator of 3 (unsynchronized). + +When calculating the offset, the latest valid poll is inspected. A value of 0.128s is the maximum offset used to determine whether a device is considered in-sync with the NTP server because NTPv4 is capable of accuracy of tens of milliseconds. + + +## MAC OUI +The MAC OUI test looks up the manufacturer information for the mac address of the device under test. + +### Note for test developers +The functional test code is included in the `mac_oui/src/main/java` folder. 
+ +The `macList.txt` file containing the MAC OUI database is downloaded at build time by the container specified in +the `Dockerfile.test_macoui` file. + +If java code requires debugging in an IDE, then it will require the `macList.txt` to be placed under the +`mac_oui/src/main/resources/` folder. Use the curl command from the `Dockerfile.test_macoui` file to download and +place the file locally into your project. This `.txt` file is git ignored to avoid being included as a +static resource on the source code repo. + +### Conditions for mac_oui + - pass -> if the MAC OUI matches the mac prefix IEEE registration. + - fail -> if the MAC OUI does not match with any of the mac prefixes. + + +## DNS Tests +Check Device uses the DNS server from DHCP and resolves hostnames + +### Conditions for connection.dns.hostname_connect + - pass -> if the device uses the DNS server from DHCP, and resolves a hostname + - fail -> device uses a DNS serveer other than the server fron DHCP + - skip -> device did not send any DNS requests \ No newline at end of file diff --git a/subset/network/debug_generate_capture.py b/subset/network/debug_generate_capture.py deleted file mode 100644 index 410837145f..0000000000 --- a/subset/network/debug_generate_capture.py +++ /dev/null @@ -1,20 +0,0 @@ -import subprocess -import time -import sys - -arguments = sys.argv - -capture_time = int(arguments[1]) -eth_interface = arguments[2] - -cap_pcap_file = 'capture.pcap' - -tcpdump_capture_unlimited_byte_packets = 'tcpdump -i {e} -s0 -w {c}'.format(e=eth_interface, c=cap_pcap_file) - -def shell_command_without_result(command, wait_time, terminate_flag): - process = subprocess.Popen(command, universal_newlines=True, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - time.sleep(wait_time) - if terminate_flag: - process.terminate() - -shell_command_without_result(tcpdump_capture_unlimited_byte_packets, capture_time, True) diff --git a/subset/network/dns_tests.py b/subset/network/dns_tests.py new 
file mode 100644 index 0000000000..f8ebfde258 --- /dev/null +++ b/subset/network/dns_tests.py @@ -0,0 +1,183 @@ +""" + This script can be called to run DNS related test. + +""" +from __future__ import absolute_import +import subprocess +import sys + +import re +import datetime + +arguments = sys.argv + +test_request = str(arguments[1]) +cap_pcap_file = str(arguments[2]) +device_address = str(arguments[3]) + +report_filename = 'dns_tests.txt' +min_packet_length_bytes = 20 +max_packets_in_report = 10 +port_list = [] +ignore = '%%' +summary_text = '' +result = 'fail' +dash_break_line = '--------------------\n' + +DESCRIPTION_HOSTNAME_CONNECT = 'Check device uses the DNS server from DHCP and resolves hostnames' + +TCPDUMP_DATE_FORMAT = "%Y-%m-%d %H:%M:%S.%f" + +IP_REGEX = r'(([0-9]{1,3}\.){3}[0-9]{1,3})' +RDATA_REGEX = r'' + +DNS_SERVER_HOST = '.2' + + +def write_report(string_to_append): + print(string_to_append.strip()) + with open(report_filename, 'a+') as file_open: + file_open.write(string_to_append) + + +def exec_tcpdump(tcpdump_filter, capture_file=None): + """ + Args + tcpdump_filter: Filter to pass onto tcpdump file + capture_file: Optional capture file to look + + Returns + List of packets matching the filter + """ + + capture_file = cap_pcap_file if capture_file is None else capture_file + command = 'tcpdump -tttt -n -r {} {}'.format(capture_file, tcpdump_filter) + + process = subprocess.Popen(command, + universal_newlines=True, + shell=True, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + text = str(process.stdout.read()).rstrip() + + if text: + return text.split("\n") + + return [] + + +def add_summary(text): + global summary_text + summary_text = summary_text + " " + text if summary_text else text + + +def get_dns_server_from_ip(ip_address): + """ + Returns the IP address of the DNS server provided by DAQ + + Args + ip_address: IP address of the device under test + + Returns + IP address of DNS server + """ + + return re.sub(r'\.\d+$', 
DNS_SERVER_HOST, ip_address) + + +def check_communication_for_response(response_line): + """ + Given a line from the TCPdump output for DNS responses + Look through the packet capture to see if any communitication to the + IP addresses from the DNS + + Args + tcpdump_line: Line from tcpdump filtered to DNS resposnes + + Returns + True/False if the device has communicated with an IP from the + DNS response after it has recieved it + """ + + response_time = datetime.datetime.strptime(response_line[:26], TCPDUMP_DATE_FORMAT) + + # Use regex to extract all IP addresses in the response + matches = re.findall(IP_REGEX, response_line) + + # The first two IP addresses are the source/destination + ip_addresses = matches[2:] + + for address in ip_addresses: + packets = exec_tcpdump('dst host {}'.format(address[0])) + for packet in packets: + packet_time = datetime.datetime.strptime(packet[:26], TCPDUMP_DATE_FORMAT) + if packet_time > response_time: + return True + + return False + + +def test_dns(target_ip): + """ Runs the connection.dns.hostname_connect test + + Checks that: + i) the device sends DNS requests + ii) the device uses the DNS server from DHCP + iii) the device uses an IP address recieved from the DNS server + + Args + target_ip: IP address of the device + """ + + # Get server IP of the DHCP server + dhcp_dns_ip = get_dns_server_from_ip(target_ip) + + # Check if the device has sent any DNS requests + filter_to_dns = 'dst port 53 and src host {}'.format(target_ip) + to_dns = exec_tcpdump(filter_to_dns) + num_query_dns = len(to_dns) + + if num_query_dns == 0: + add_summary('Device did not send any DNS requests') + return 'skip' + + # Check if the device only sent DNS requests to the DHCP Server + filter_to_dhcp_dns = 'dst port 53 and src host {} and dst host {}' \ + .format(target_ip, dhcp_dns_ip) + + to_dhcp_dns = exec_tcpdump(filter_to_dhcp_dns) + num_query_dhcp_dns = len(to_dhcp_dns) + + if num_query_dns > num_query_dhcp_dns: + add_summary('Device sent DNS 
requests to servers other than the DHCP provided server') + return 'fail' + + # Retrieve responses from DNS + filter_dns_response = 'src port 53 and src host {}'.format(dhcp_dns_ip) + dns_responses = exec_tcpdump(filter_dns_response) + + num_dns_responses = len(dns_responses) + + if num_dns_responses == 0: + add_summary('No results recieved from DNS server') + return 'fail' + + # Check that the device has sent data packets to any of the IP addresses it has recieved + # it has recieved from the DNS requests + + for response in dns_responses: + if check_communication_for_response(response): + add_summary('Device sends DNS requests and resolves host names') + return 'pass' + + add_summary('Device did not send data to IP addresses retrieved from the DNS server') + return 'fail' + + +write_report("{b}{t}\n{b}".format(b=dash_break_line, t=test_request)) + +if test_request == 'connection.dns.hostname_connect': + write_report("{d}\n{b}".format(b=dash_break_line, d=DESCRIPTION_HOSTNAME_CONNECT)) + result = test_dns(device_address) + +write_report("RESULT {r} {t} {s}\n".format(r=result, t=test_request, s=summary_text.strip())) diff --git a/subset/connection/mac_oui/.classpath b/subset/network/mac_oui/.classpath similarity index 100% rename from subset/connection/mac_oui/.classpath rename to subset/network/mac_oui/.classpath diff --git a/subset/connection/mac_oui/.gitignore b/subset/network/mac_oui/.gitignore similarity index 100% rename from subset/connection/mac_oui/.gitignore rename to subset/network/mac_oui/.gitignore diff --git a/subset/connection/mac_oui/.settings/org.eclipse.buildship.core.prefs b/subset/network/mac_oui/.settings/org.eclipse.buildship.core.prefs similarity index 100% rename from subset/connection/mac_oui/.settings/org.eclipse.buildship.core.prefs rename to subset/network/mac_oui/.settings/org.eclipse.buildship.core.prefs diff --git a/subset/connection/mac_oui/build.gradle b/subset/network/mac_oui/build.gradle similarity index 90% rename from 
subset/connection/mac_oui/build.gradle rename to subset/network/mac_oui/build.gradle index 476315bb29..0f0dc95fe6 100644 --- a/subset/connection/mac_oui/build.gradle +++ b/subset/network/mac_oui/build.gradle @@ -3,7 +3,7 @@ buildscript { jcenter() } dependencies { - classpath "com.github.jengelman.gradle.plugins:shadow:5.2.0" + classpath "com.github.jengelman.gradle.plugins:shadow:6.0.0" } } diff --git a/subset/connection/mac_oui/gradle/wrapper/gradle-wrapper.jar b/subset/network/mac_oui/gradle/wrapper/gradle-wrapper.jar similarity index 100% rename from subset/connection/mac_oui/gradle/wrapper/gradle-wrapper.jar rename to subset/network/mac_oui/gradle/wrapper/gradle-wrapper.jar diff --git a/subset/network/NTPClient/gradle/wrapper/gradle-wrapper.properties b/subset/network/mac_oui/gradle/wrapper/gradle-wrapper.properties similarity index 92% rename from subset/network/NTPClient/gradle/wrapper/gradle-wrapper.properties rename to subset/network/mac_oui/gradle/wrapper/gradle-wrapper.properties index 622ab64a3c..8d8e8abe86 100644 --- a/subset/network/NTPClient/gradle/wrapper/gradle-wrapper.properties +++ b/subset/network/mac_oui/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,5 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists +distributionUrl=https\://services.gradle.org/distributions/gradle-6.6.1-bin.zip diff --git a/subset/security/security_passwords/gradlew b/subset/network/mac_oui/gradlew similarity index 100% rename from subset/security/security_passwords/gradlew rename to subset/network/mac_oui/gradlew diff --git a/subset/connection/mac_oui/gradlew.bat b/subset/network/mac_oui/gradlew.bat similarity index 100% rename from subset/connection/mac_oui/gradlew.bat rename to subset/network/mac_oui/gradlew.bat diff --git a/subset/connection/mac_oui/mac_oui.iml b/subset/network/mac_oui/mac_oui.iml similarity index 
100% rename from subset/connection/mac_oui/mac_oui.iml rename to subset/network/mac_oui/mac_oui.iml diff --git a/subset/connection/mac_oui/settings.gradle b/subset/network/mac_oui/settings.gradle similarity index 100% rename from subset/connection/mac_oui/settings.gradle rename to subset/network/mac_oui/settings.gradle diff --git a/subset/connection/mac_oui/src/main/java/MacLookup.java b/subset/network/mac_oui/src/main/java/MacLookup.java similarity index 100% rename from subset/connection/mac_oui/src/main/java/MacLookup.java rename to subset/network/mac_oui/src/main/java/MacLookup.java diff --git a/subset/connection/mac_oui/src/main/java/Main.java b/subset/network/mac_oui/src/main/java/Main.java similarity index 100% rename from subset/connection/mac_oui/src/main/java/Main.java rename to subset/network/mac_oui/src/main/java/Main.java diff --git a/subset/connection/mac_oui/src/main/java/ReportHandler.java b/subset/network/mac_oui/src/main/java/ReportHandler.java similarity index 91% rename from subset/connection/mac_oui/src/main/java/ReportHandler.java rename to subset/network/mac_oui/src/main/java/ReportHandler.java index 3f85070b5c..6b691cbfd4 100644 --- a/subset/connection/mac_oui/src/main/java/ReportHandler.java +++ b/subset/network/mac_oui/src/main/java/ReportHandler.java @@ -5,7 +5,7 @@ public class ReportHandler { String report = "Mac OUI Test\n"; - File reportFile = new File("report/report.txt"); + File reportFile = new File("/report/macoui.txt"); public void addText(String text) { report += text + '\n'; diff --git a/subset/connection/mac_oui/src/main/java/RetrieveList.java b/subset/network/mac_oui/src/main/java/RetrieveList.java similarity index 100% rename from subset/connection/mac_oui/src/main/java/RetrieveList.java rename to subset/network/mac_oui/src/main/java/RetrieveList.java diff --git a/subset/network/network_tests.py b/subset/network/network_tests.py index 6dbaca3f85..8fafee8856 100644 --- a/subset/network/network_tests.py +++ 
b/subset/network/network_tests.py @@ -1,16 +1,26 @@ +""" + This script can be called to run a specific network module test. + Currently supports: + - connection.min_send + - connection.dhcp_long + - protocol.app_min_send + - communication.type.broadcast + - network.ntp.support + Usage: python network_tests.py + E.g. python network_tests.py connection.min_send $MONITOR $TARGET_IP +""" import subprocess, time, sys, json +import re +import datetime + arguments = sys.argv test_request = str(arguments[1]) cap_pcap_file = str(arguments[2]) device_address = str(arguments[3]) -if test_request == 'protocol.app_min_send': - module_config = str(arguments[4]) - infastructure_excludes = str(arguments[5]) - -report_filename = 'report.txt' +report_filename = 'network_tests.txt' min_packet_length_bytes = 20 max_packets_in_report = 10 port_list = [] @@ -18,24 +28,28 @@ summary_text = '' result = 'fail' dash_break_line = '--------------------\n' + description_min_send = 'Device sends data at a frequency of less than 5 minutes.' -description_dhcp_long = 'Device sends ARP request on DHCP lease expiry.' -description_app_min_send = 'Device sends application packets at a frequency of less than 5 minutes.' description_communication_type = 'Device sends unicast or broadcast packets.' -description_ntp_support = 'Device sends NTP request packets.' 
-tcpdump_display_all_packets = 'tcpdump -n src host ' + device_address + ' -r ' + cap_pcap_file +tcpdump_display_all_packets = 'tcpdump -tttt -n src host ' + device_address + ' -r ' + cap_pcap_file tcpdump_display_udp_bacnet_packets = 'tcpdump -n udp dst portrange 47808-47809 -r ' + cap_pcap_file -tcpdump_display_arp_packets = 'tcpdump arp -r ' + cap_pcap_file -tcpdump_display_ntp_packets = 'tcpdump dst port 123 -r ' + cap_pcap_file -tcpdump_display_eapol_packets = 'tcpdump port 1812 or port 1813 or port 3799 -r ' + cap_pcap_file +tcpdump_display_arp_packets = 'tcpdump arp -n src host ' + device_address + ' -r ' + cap_pcap_file + tcpdump_display_broadcast_packets = 'tcpdump broadcast and src host ' + device_address + ' -r ' + cap_pcap_file +tcpdump_display_multicast_packets = 'tcpdump -n \'ip[16] & 240 = 224\' -r ' + cap_pcap_file + +system_conf_file = "/config/inst/system.conf" +tcpdump_date_format = "%Y-%m-%d %H:%M:%S.%f" +min_send_seconds = 300 +min_send_duration = "5 minutes" def write_report(string_to_append): print(string_to_append.strip()) with open(report_filename, 'a+') as file_open: file_open.write(string_to_append) + def shell_command_with_result(command, wait_time, terminate_flag): process = subprocess.Popen(command, universal_newlines=True, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) text = process.stdout.read() @@ -45,16 +59,19 @@ def shell_command_with_result(command, wait_time, terminate_flag): process.terminate() return str(text) + def add_packet_count_to_report(packet_type, packet_count): - write_report("{i} {t} Packets recieved={p}\n".format(i=ignore, t=packet_type, p=packet_count)) + write_report("{i} {t} packets received={p}\n".format(i=ignore, t=packet_type, p=packet_count)) + def add_packet_info_to_report(packets_received): - packet_list = packets_received.rstrip().split("\n") + packet_list = packets_received.strip().split("\n") outnum = min(len(packet_list), max_packets_in_report) for x in range(0, outnum): write_report("{i} 
{p}\n".format(i=ignore, p=packet_list[x])) write_report("{i} packets_count={p}\n".format(i=ignore, p=len(packet_list))) + def decode_shell_result(shell_result): if len(shell_result) > min_packet_length_bytes: packet_request_list = shell_result.rstrip().split("\n") @@ -62,135 +79,143 @@ def decode_shell_result(shell_result): return packets_received return 0 + def packets_received_count(shell_result): if shell_result is None: return 0 else: return decode_shell_result(shell_result) -def load_json_config(json_filename): - with open(json_filename, 'r') as json_file: - return json.load(json_file) - -def add_to_port_list(port_map): - global port_list - for port, port_info in port_map.items(): - for key, value in port_info.items(): - if key == 'allowed': - if value == True: - port_list.append(port) - -def remove_from_port_list(port_map): - global port_list - for exclude in port_map: - for port in port_list: - if port == exclude: - port_list.remove(exclude) - -def decode_json_config(config_file, map_name, action): - dictionary = load_json_config(config_file) - for key, value in dictionary.items(): - if key == map_name: - for protocol, info in value.items(): - if protocol == 'udp' or protocol == 'tcp': - for ports, port_map in info.items(): - if action == 'add': - add_to_port_list(port_map) - elif action == 'remove': - remove_from_port_list(port_map) + +def get_scan_length(config_file): + """ Gets length of the monitor.pcap scan + + Reads the system.conf file to and returns the length of the monitor_scan + + Args: + config_file: Location of system.conf file within test container + + Returns: + Length of monitor scan in seconds + + If not defined, or system.conf could not be found + returns false + """ + + scan_length = False + try: + with open(config_file) as file: + for line in file: + match = re.search(r'^monitor_scan_sec=(\d+)', line) + if match: + matched_length = int(match.group(1)) + # If scan length = 0 or not found, then monitor scan does not exist + scan_length = 
matched_length if matched_length > 0 else False + return scan_length + except Exception as e: + write_report("Error encountered reading system.conf {}".format(e)) + return False + + +def add_summary(text): + global summary_text + summary_text = summary_text + " " + text if summary_text else text + def test_connection_min_send(): + """ Runs the connection.min_send test + + Tests if the device sends data packets of any type (inc data, NTP, etc) + within a period of 5 minutes by looking through the monitor.pcap file + + The length of test can be configured using the min_send_seconds variable + at the start of the file + """ + + # Get scan length + scan_length = get_scan_length(system_conf_file) + min_send_delta = datetime.timedelta(seconds=min_send_seconds) + min_send_pass = False + + # The test scans the monitor.pcap, so if it's not found skip + if not scan_length: + add_summary("DAQ monitor scan not running, test skipped") + return 'skip' + arp_shell_result = shell_command_with_result(tcpdump_display_arp_packets, 0, False) arp_packets_received = packets_received_count(arp_shell_result) if arp_packets_received > 0: add_summary("ARP packets received.") + shell_result = shell_command_with_result(tcpdump_display_all_packets, 0, False) - all_packets_received = packets_received_count(shell_result) - app_packets_received = all_packets_received - arp_packets_received - if app_packets_received > 0: - add_summary("Other packets received.") - print('min_send_packets', arp_packets_received, all_packets_received) + all_packets = shell_result.splitlines() + + # Loop through tcpdump result and measure the time between succesive packets + for i, packet in enumerate(all_packets): + # datetime is the first 26 characters of the line + packet_time = datetime.datetime.strptime(packet[:26], tcpdump_date_format) + + if i == 0: + previous_packet_time = packet_time + continue + + delta = packet_time - previous_packet_time + if delta < min_send_delta: + min_send_pass = True + break + + 
previous_packet_time = packet_time + add_packet_info_to_report(shell_result) - return 'pass' if app_packets_received > 0 else 'fail' -def test_connection_dhcp_long(): - shell_result = shell_command_with_result(tcpdump_display_arp_packets, 0, False) - arp_packets_received = packets_received_count(shell_result) - if arp_packets_received > 0: - add_summary("ARP packets received.") - add_packet_info_to_report(shell_result) - return 'pass' - else: - return 'fail' + if not min_send_pass: + if scan_length > min_send_seconds: + add_summary('Data packets were not sent at a frequency less than ' + + min_send_duration) + return 'fail' + else: + add_summary('Please set DAQ monitor scan to be greater than ' + + min_send_duration) + return 'skip' + + add_summary('Data packets were sent at a frequency of less than ' + + min_send_duration) + return 'pass' -def test_protocol_app_min_send(): - """ - reads module_config json file and adds ports to port_list - read infastructure_excludes json file and removes ports from port_list (temporarily commented) - """ - decode_json_config(module_config, 'servers', 'add') - print('port_list:') - app_packets_received = 0 - for port in port_list: - try: - tcpdump_command = 'tcpdump port {p} -r {c}'.format(p=port, c=cap_pcap_file) - shell_result = shell_command_with_result(tcpdump_command, 2, False) - for_port = packets_received_count(shell_result) - app_packets_received += for_port - print('app_packets_received', port, for_port) - add_packet_info_to_report(shell_result) - except Exception as e: - print(e) - print('app_packets_received', app_packets_received) - if app_packets_received > 0: - add_summary("Application packets received.") - return 'pass' - else: - return 'fail' def test_communication_type_broadcast(): - shell_result = shell_command_with_result(tcpdump_display_broadcast_packets, 0, False) - broadcast_packets_received = packets_received_count(shell_result) - if broadcast_packets_received > 0: + """ Runs the communication.type.broadcast 
DAQ test. + Counts the number of unicast, broadcast and multicast packets sent. + """ + + broadcast_result = shell_command_with_result(tcpdump_display_broadcast_packets, 0, False) + broadcast_packets = packets_received_count(broadcast_result) + if broadcast_packets > 0: add_summary("Broadcast packets received.") - add_packet_count_to_report("Broadcast", broadcast_packets_received) - shell_result = shell_command_with_result(tcpdump_display_all_packets, 0, False) - all_packets_received = packets_received_count(shell_result) - if (all_packets_received - broadcast_packets_received) > 0: + add_packet_count_to_report("Broadcast", broadcast_packets) + + multicast_result = shell_command_with_result(tcpdump_display_multicast_packets, 0, False) + multicast_packets = packets_received_count(multicast_result) + if multicast_packets > 0: + add_summary("Multicast packets received.") + add_packet_count_to_report("Multicast", multicast_packets) + + unicast_result = shell_command_with_result(tcpdump_display_all_packets, 0, False) + unicast_packets = packets_received_count(unicast_result) - broadcast_packets - multicast_packets + if unicast_packets > 0: add_summary("Unicast packets received.") - add_packet_count_to_report("Unicast", all_packets_received - broadcast_packets_received) - return 'info' + add_packet_count_to_report("Unicast", unicast_packets) -def test_ntp_support(): - shell_result = shell_command_with_result(tcpdump_display_ntp_packets, 0, False) - ntp_packets_received = packets_received_count(shell_result) - if ntp_packets_received > 0: - add_summary("NTP packets received.") - add_packet_info_to_report(shell_result) - return 'pass' - else: - return 'fail' + return 'info' -def add_summary(text): - global summary_text - summary_text = summary_text + " " + text if summary_text else text write_report("{b}{t}\n{b}".format(b=dash_break_line, t=test_request)) if test_request == 'connection.min_send': write_report("{d}\n{b}".format(b=dash_break_line, d=description_min_send)) 
result = test_connection_min_send() -elif test_request == 'connection.dhcp_long': - write_report("{d}\n{b}".format(b=dash_break_line, d=description_dhcp_long)) - result = test_connection_dhcp_long() -elif test_request == 'protocol.app_min_send': - write_report("{d}\n{b}".format(b=dash_break_line, d=description_app_min_send)) - result = test_protocol_app_min_send() elif test_request == 'communication.type.broadcast': write_report("{d}\n{b}".format(b=dash_break_line, d=description_communication_type)) result = test_communication_type_broadcast() -elif test_request == 'network.ntp.support': - write_report("{d}\n{b}".format(b=dash_break_line, d=description_ntp_support)) - result = test_ntp_support() write_report("RESULT {r} {t} {s}\n".format(r=result, t=test_request, s=summary_text.strip())) diff --git a/subset/network/ntp_tests.py b/subset/network/ntp_tests.py new file mode 100644 index 0000000000..6d0eb6b13b --- /dev/null +++ b/subset/network/ntp_tests.py @@ -0,0 +1,170 @@ +from __future__ import absolute_import, division +from scapy.all import NTP, rdpcap +import sys +import os + +arguments = sys.argv + +test_request = str(arguments[1]) +startup_pcap_file = str(arguments[2]) +monitor_pcap_file = str(arguments[3]) + +report_filename = 'ntp_tests.txt' +ignore = '%%' +summary_text = '' +result = 'fail' +dash_break_line = '--------------------\n' +description_ntp_support = 'Device supports NTP version 4.' +description_ntp_update = 'Device synchronizes its time to the NTP server.' + +NTP_VERSION_PASS = 4 +LOCAL_PREFIX = '10.20.' 
+NTP_SERVER_SUFFIX = '.2' +MODE_CLIENT = 3 +MODE_SERVER = 4 +YEAR_2500 = 16725225600 +SECONDS_BETWEEN_1900_1970 = 2208988800 +OFFSET_ALLOWANCE = 0.128 +LEAP_ALARM = 3 + + +def write_report(string_to_append): + with open(report_filename, 'a+') as file_open: + file_open.write(string_to_append) + + +# Extracts the NTP version from the first client NTP packet +def ntp_client_version(capture): + client_packets = ntp_packets(capture, MODE_CLIENT) + if len(client_packets) == 0: + return None + return ntp_payload(client_packets[0]).version + + +# Filters the packets by type (NTP) +def ntp_packets(capture, mode=None): + packets = [] + for packet in capture: + if NTP in packet: + ip = packet.payload + udp = ip.payload + ntp = udp.payload + if mode is None or mode == ntp.mode: + packets.append(packet) + return packets + + +# Extracts the NTP payload from a packet of type NTP +def ntp_payload(packet): + ip = packet.payload + udp = ip.payload + ntp = udp.payload + return ntp + + +def test_ntp_support(): + capture = rdpcap(startup_pcap_file) + if len(capture) > 0: + version = ntp_client_version(capture) + if version is None: + add_summary("No NTP packets received.") + return 'skip' + if version == NTP_VERSION_PASS: + add_summary("Using NTPv" + str(NTP_VERSION_PASS) + ".") + return 'pass' + else: + add_summary("Not using NTPv" + str(NTP_VERSION_PASS) + ".") + return 'fail' + else: + add_summary("No NTP packets received.") + return 'skip' + + +def test_ntp_update(): + startup_capture = rdpcap(startup_pcap_file) + packets = ntp_packets(startup_capture) + if os.path.isfile(monitor_pcap_file): + monitor_capture = rdpcap(monitor_pcap_file) + packets += ntp_packets(monitor_capture) + if len(packets) < 2: + add_summary("Not enough NTP packets received.") + return 'skip' + # Check that DAQ NTP server has been used + using_local_server = False + local_ntp_packets = [] + for packet in packets: + # Packet is to or from local NTP server + if ((packet.payload.dst.startswith(LOCAL_PREFIX) and 
+ packet.payload.dst.endswith(NTP_SERVER_SUFFIX)) or + (packet.payload.src.startswith(LOCAL_PREFIX) and + packet.payload.src.endswith(NTP_SERVER_SUFFIX))): + using_local_server = True + local_ntp_packets.append(packet) + if not using_local_server or len(local_ntp_packets) < 2: + add_summary("Device clock not synchronized with local NTP server.") + return 'fail' + # Obtain the latest NTP poll + p1 = p2 = p3 = p4 = None + for i in range(len(local_ntp_packets)): + if p1 is None: + if ntp_payload(local_ntp_packets[i]).mode == MODE_CLIENT: + p1 = local_ntp_packets[i] + elif p2 is None: + if ntp_payload(local_ntp_packets[i]).mode == MODE_SERVER: + p2 = local_ntp_packets[i] + else: + p1 = local_ntp_packets[i] + elif p3 is None: + if ntp_payload(local_ntp_packets[i]).mode == MODE_CLIENT: + p3 = local_ntp_packets[i] + elif p4 is None: + if ntp_payload(local_ntp_packets[i]).mode == MODE_SERVER: + p4 = local_ntp_packets[i] + p1 = p3 + p2 = p4 + p3 = p4 = None + else: + p3 = local_ntp_packets[i] + if p1 is None or p2 is None: + add_summary("Device clock not synchronized with local NTP server.") + return 'fail' + t1 = ntp_payload(p1).sent + t2 = ntp_payload(p1).time + t3 = ntp_payload(p2).sent + t4 = ntp_payload(p2).time + + # Timestamps are inconsistenly either from 1900 or 1970 + if t1 > YEAR_2500: + t1 = t1 - SECONDS_BETWEEN_1900_1970 + if t2 > YEAR_2500: + t2 = t2 - SECONDS_BETWEEN_1900_1970 + if t3 > YEAR_2500: + t3 = t3 - SECONDS_BETWEEN_1900_1970 + if t4 > YEAR_2500: + t4 = t4 - SECONDS_BETWEEN_1900_1970 + + offset = abs((t2 - t1) + (t3 - t4))/2 + if offset < OFFSET_ALLOWANCE and not ntp_payload(p1).leap == LEAP_ALARM: + add_summary("Device clock synchronized.") + return 'pass' + else: + add_summary("Device clock not synchronized with local NTP server.") + return 'fail' + + +def add_summary(text): + global summary_text + summary_text = summary_text + " " + text if summary_text else text + + +write_report("{b}{t}\n{b}".format(b=dash_break_line, t=test_request)) + + +if 
test_request == 'connection.network.ntp_support': + write_report("{d}\n{b}".format(b=dash_break_line, d=description_ntp_support)) + result = test_ntp_support() +elif test_request == 'connection.network.ntp_update': + write_report("{d}\n{b}".format(b=dash_break_line, d=description_ntp_update)) + result = test_ntp_update() + +write_report("RESULT {r} {t} {s}\n".format(r=result, t=test_request, s=summary_text.strip())) diff --git a/subset/connection/test_macoui b/subset/network/run_macoui_test similarity index 89% rename from subset/connection/test_macoui rename to subset/network/run_macoui_test index 8abab5c5ce..fba57c38c6 100755 --- a/subset/connection/test_macoui +++ b/subset/network/run_macoui_test @@ -1,8 +1,10 @@ #!/bin/bash -e source reporting.sh -REPORT=/tmp/report.txt -LOCAL_REPORT=report/report.txt +TARGET_MAC=$1 +REPORT=$2 + +LOCAL_REPORT=/report/macoui.txt CONFIG=/config/device/module_config.json LOG=/tmp/nmap.log RESULT_LINES=/tmp/result_lines.txt @@ -18,8 +20,6 @@ java -jar mac_oui/build/libs/mac_oui-all.jar $TARGET_MAC RESULT_AND_SUMMARY="$(grep "RESULT" $LOCAL_REPORT)" grep -v "RESULT" $LOCAL_REPORT >> $REDACTED_LOG -# For testing module timeout. 
-sleep 10 TEST_RESULT=$(cat $REDACTED_LOG) diff --git a/subset/network/test_network b/subset/network/test_network index 9e25ef0add..15b642f456 100755 --- a/subset/network/test_network +++ b/subset/network/test_network @@ -2,14 +2,25 @@ REPORT=/tmp/report.txt +STARTUP=/scans/startup.pcap MONITOR=/scans/monitor.pcap -MODULE_CONFIG=/config/device/module_config.json -EXCLUDES=infastructure_excludes.json -python network_tests.py connection.dhcp_long $MONITOR $TARGET_IP +# General Network Tests python network_tests.py connection.min_send $MONITOR $TARGET_IP python network_tests.py communication.type.broadcast $MONITOR $TARGET_IP -python network_tests.py protocol.app_min_send $MONITOR $TARGET_IP $MODULE_CONFIG $EXCLUDES -python network_tests.py network.ntp.support $MONITOR $TARGET_IP -cat report.txt >> $REPORT +cat network_tests.txt >> $REPORT + +# NTP Tests +python ntp_tests.py connection.network.ntp_support $STARTUP $MONITOR +python ntp_tests.py connection.network.ntp_update $STARTUP $MONITOR + +cat ntp_tests.txt >> $REPORT + +# MACOUI Test +./run_macoui_test $TARGET_MAC $REPORT + +# DNS Tests +python dns_tests.py connection.dns.hostname_connect $MONITOR $TARGET_IP + +cat dns_tests.txt >> $REPORT diff --git a/subset/pentests/readme.md b/subset/pentests/readme.md index 83b4c8ae32..845555fe11 100644 --- a/subset/pentests/readme.md +++ b/subset/pentests/readme.md @@ -33,12 +33,20 @@ Tests included in this module: ## test_nmap The nmap module uses the nmap tool to check open ports and validates them in relation to the policy that is set in the module_config.json file or files. 
+It also checks that there isn't any HTTP server running on any open port Tests included in this module: - security.nmap.ports +- security.nmap.http ### Conditions for security.nmap.ports - pass -> all the ports configured in module_config.json agree with the allow/deny policy - fail -> one or more of the ports configured in module_config.json do not agree with the allow/deny policy + +### Conditions for security.nmap.http + +- pass -> there is no HTTP server running on any of the open ports +- fail -> one or more of the ports is running an HTTP server + diff --git a/subset/pentests/test_nmap b/subset/pentests/test_nmap index 5fec9ac7ac..06159d0b21 100755 --- a/subset/pentests/test_nmap +++ b/subset/pentests/test_nmap @@ -2,18 +2,31 @@ source reporting.sh CONFIG=/config/device/module_config.json - REPORT=/tmp/report.txt -LOG=/tmp/nmap.log -OPENPORTSLIST_LOG=/tmp/nmap.ports.log -REDACTED_LOG=/tmp/nmap.report.log -TEST_NAME="security.ports.nmap" -TEST_DESCRIPTION="Automatic TCP/UDP port scan using nmap" -SUMMARY="" +# security.nmap.ports test variables +REPORT_NMAP=/tmp/report_nmap.txt +LOG_NMAP=/tmp/nmap.log +OPENPORTSLIST_LOG_NMAP=/tmp/nmap.ports.log +REDACTED_LOG_NMAP=/tmp/nmap.report.log +TEST_NAME_NMAP="security.nmap.ports" +TEST_DESCRIPTION_NMAP="Automatic TCP/UDP port scan using nmap" +SUMMARY_NMAP="" + +# security.nmap.http test variables +REPORT_HTTP=/tmp/report_http.txt +LOG_HTTP=/tmp/http.log +OPENPORTSLIST_LOG_HTTP=/tmp/http.ports.log +REDACTED_LOG_HTTP=/tmp/http.report.log +TEST_NAME_HTTP="security.nmap.http" +TEST_DESCRIPTION_HTTP="Check that the device does not have open ports exposing an unencrypted web interface using HTTP" +SUMMARY_HTTP="" -rm -f $LOG $REDACTED_LOG $OPENPORTSLIST_LOG $REPORT +# remove temporary files +rm -f $LOG_NMAP $REDACTED_LOG_NMAP $OPENPORTSLIST_LOG_NMAP $REPORT_NMAP +rm -f $LOG_HTTP $REDACTED_LOG_HTTP $OPENPORTSLIST_LOG_HTTP $REPORT_HTTP +# configure and run the security.nmap.ports test if [ -f $CONFIG ]; then echo Extracting 
servers config from $CONFIG else @@ -31,7 +44,7 @@ nc -nzv $TARGET_IP -w 5 23 || true sleep 1 option="-sT" -portslist=-p1-1024 +portslist=-p1-65535 if [ -f $CONFIG ]; then # get list of ports to be scanned from module_config.json ports="U:" @@ -50,21 +63,20 @@ if [ -f $CONFIG ]; then if [ $portslist != "U:T:" ]; then portslist="-p$portslist" else - portslist=-p1-1024 + portslist=-p1-65535 fi fi echo -e "\nTesting target $TARGET_IP to check open ports $portslist" -nmap -v -n -T5 $option --host-timeout=4m --open $portslist -oG $LOG $TARGET_IP > /dev/null -cat $LOG +nmap -v -n -T5 $option --host-timeout=4m --open $portslist -oG $LOG_NMAP $TARGET_IP > /dev/null +cat $LOG_NMAP -touch $REDACTED_LOG -cat $LOG | tee -a $REDACTED_LOG -touch $REPORT -touch $OPENPORTSLIST_LOG +touch $REDACTED_LOG_NMAP +cat $LOG_NMAP | tee -a $REDACTED_LOG_NMAP +touch $REPORT_NMAP +touch $OPENPORTSLIST_LOG_NMAP rm -f .fail -openportslist=" - " -grep -oh '[0-9]*/open[^[:space:]]*' $LOG | while IFS=/ read -ra parts; do +grep -oh '[0-9]*/open[^[:space:]]*' $LOG_NMAP | while IFS=/ read -ra parts; do state=${parts[1]} if [ "$state" == open ]; then if [ -f $CONFIG ]; then @@ -73,14 +85,14 @@ grep -oh '[0-9]*/open[^[:space:]]*' $LOG | while IFS=/ read -ra parts; do allowed=$(jq ".servers.$proto.ports.\"$port\".allowed" $CONFIG) if [ "$allowed" != true ]; then touch .fail - echo Failing ${parts[*]} | sed 's/,$//' | tee -a $REDACTED_LOG - echo ${parts[0]}"," | tee -a $OPENPORTSLIST_LOG + echo Failing ${parts[*]} | sed 's/,$//' | tee -a $REDACTED_LOG_NMAP + echo -n ${parts[0]}"," | tee -a $OPENPORTSLIST_LOG_NMAP else - echo Allowing ${parts[*]} | sed 's/,$//' | tee -a $REDACTED_LOG + echo Allowing ${parts[*]} | sed 's/,$//' | tee -a $REDACTED_LOG_NMAP fi else - echo Open port ${parts[*]} | sed 's/,$//' | tee -a $REDACTED_LOG - echo ${parts[0]}"," | tee -a $OPENPORTSLIST_LOG + echo Open port ${parts[*]} | sed 's/,$//' | tee -a $REDACTED_LOG_NMAP + echo -n ${parts[0]}"," | tee -a $OPENPORTSLIST_LOG_NMAP 
touch .fail fi fi @@ -88,19 +100,67 @@ done if [ -f .fail ]; then echo Open ports: - cat $REDACTED_LOG + cat $REDACTED_LOG_NMAP + result=fail + SUMMARY_NMAP="Some disallowed ports are open: `cat $OPENPORTSLIST_LOG_NMAP | sed 's/,$//'`." +else + echo No invalid ports found. | tee -a $REDACTED_LOG_NMAP + result=pass + SUMMARY_NMAP="Only allowed ports found open." +fi + +RESULT_AND_SUMMARY_NMAP="RESULT $result $TEST_NAME_NMAP $SUMMARY_NMAP" + +# configure and run the security.nmap.http test +option="-A --script http-methods" # Full NSE scan with the http-methods script +portslist=-p- # scan all ports +echo -e "\nTesting target $TARGET_IP to check open ports exposing HTTP servers" +nmap -v -n -T5 $option --host-timeout=4m --open $portslist -oG $LOG_HTTP $TARGET_IP > /dev/null +cat $LOG_HTTP + +touch $REDACTED_LOG_HTTP +cat $LOG_HTTP | tee -a $REDACTED_LOG_HTTP +touch $REPORT_HTTP +touch $OPENPORTSLIST_LOG_HTTP +rm -f .fail +grep -oh '[0-9]*/open[^[:space:]]*' $LOG_HTTP | while IFS=/ read -ra parts; do + state=${parts[1]} + if [ "$state" == open ]; then + port=${parts[0]} + proto=${parts[4]} + echo $proto + if [ "$proto" == http ]; then + touch .fail + echo Failing ${parts[*]} | sed 's/,$//' | tee -a $REDACTED_LOG_HTTP + echo -n ${parts[0]}"," | tee -a $OPENPORTSLIST_LOG_HTTP + fi + fi +done + +if [ -f .fail ]; then + echo Open http ports: + cat $REDACTED_LOG_HTTP result=fail - SUMMARY="Some disallowed ports are open: `cat $OPENPORTSLIST_LOG | sed 's/,$//'`" + SUMMARY_HTTP="Some ports are running http servers: `cat $OPENPORTSLIST_LOG_HTTP | sed 's/,$//'`." else - echo No invalid ports found. | tee -a $REDACTED_LOG + echo No running http servers have been found. | tee -a $REDACTED_LOG_HTTP result=pass - SUMMARY="Only allowed ports found open." + SUMMARY_HTTP="No running http servers have been found." 
fi -RESULT_AND_SUMMARY="RESULT $result $TEST_NAME $SUMMARY" +RESULT_AND_SUMMARY_HTTP="RESULT $result $TEST_NAME_HTTP $SUMMARY_HTTP" + +# output test results to the report file +write_out_result $REPORT_NMAP \ + "$TEST_NAME_NMAP" \ + "$TEST_DESCRIPTION_NMAP" \ + "$(cat $REDACTED_LOG_NMAP)" \ + "$RESULT_AND_SUMMARY_NMAP" + +write_out_result $REPORT_HTTP \ + "$TEST_NAME_HTTP" \ + "$TEST_DESCRIPTION_HTTP" \ + "$(cat $REDACTED_LOG_HTTP)" \ + "$RESULT_AND_SUMMARY_HTTP" -write_out_result $REPORT \ - "$TEST_NAME" \ - "$TEST_DESCRIPTION" \ - "$(cat $REDACTED_LOG)" \ - "$RESULT_AND_SUMMARY" +cat $REPORT_NMAP $REPORT_HTTP > $REPORT \ No newline at end of file diff --git a/subset/security/Dockerfile.test_password b/subset/security/Dockerfile.test_password index cbe38728b0..fb4f7481f1 100644 --- a/subset/security/Dockerfile.test_password +++ b/subset/security/Dockerfile.test_password @@ -1,11 +1,9 @@ FROM daqf/aardvark:latest -RUN $AG update && $AG install openjdk-8-jre -RUN $AG update && $AG install openjdk-8-jdk git -RUN $AG update && $AG install curl -RUN $AG update && $AG install ncrack hydra nmap +# Get dependencies +RUN $AG update && $AG install curl ncrack medusa nmap git -COPY subset/security . -RUN cd security_passwords && ./gradlew shadowJar -RUN ls -l security_passwords/build/libs/security_passwords-1.0-SNAPSHOT-all.jar +COPY subset/security/password . + +# Run the test CMD ["./test_password"] diff --git a/subset/security/Dockerfile.test_ssh b/subset/security/Dockerfile.test_ssh new file mode 100644 index 0000000000..aa701b5550 --- /dev/null +++ b/subset/security/Dockerfile.test_ssh @@ -0,0 +1,7 @@ +FROM daqf/aardvark:latest + +RUN $AG update && $AG install nmap + +COPY subset/security/test_ssh . 
+ +CMD ./test_ssh diff --git a/subset/security/build.conf b/subset/security/build.conf index 763d155e46..26876f4343 100644 --- a/subset/security/build.conf +++ b/subset/security/build.conf @@ -1,3 +1,4 @@ build subset/security add tls add password +add ssh diff --git a/subset/security/password/create_brute_force_dictionaries b/subset/security/password/create_brute_force_dictionaries new file mode 100755 index 0000000000..14345dc17e --- /dev/null +++ b/subset/security/password/create_brute_force_dictionaries @@ -0,0 +1,148 @@ +#!/bin/bash + +# A script to retrieve, and collate several raw brute force dictionaries into a colon separated file with format :username:password, and also to two separate username and password lists. +# +# Make sure this script is run in the password directory. +# +# Usage ./create_brute_force_dictionaries + +RAW_DICTIONARY_DIR="resources/raw" +DEFAULT_DICTIONARY_DIR="resources/default" + +MANUFACTURER_DEFAULTS_DICTIONARY="$RAW_DICTIONARY_DIR/manufacturer.csv" +SSH_DICTIONARY="$RAW_DICTIONARY_DIR/ssh.txt" +TELNET_DICTIONARY="$RAW_DICTIONARY_DIR/telnet.txt" + +OUTPUT_DICTIONARY="$DEFAULT_DICTIONARY_DIR/dictionary.txt" +TMP_DICTIONARY="$DEFAULT_DICTIONARY_DIR/tmp_dictionary.txt" +USERNAMES_LIST="$DEFAULT_DICTIONARY_DIR/usernames.txt" +PASSWORDS_LIST="$DEFAULT_DICTIONARY_DIR/passwords.txt" + +# Retrieve the raw dictionary files from the gitlab source. 
+# $1 Manufacturer dictionary +# $2 SSH dictionary +# $3 Telnet dictionary +# $4 Raw directory +function retrieve_raw_dictionaries() { + mkdir -p $4 + + curl "https://gitlab.com/kalilinux/packages/seclists/-/raw/094459e5d757faccfcb44375a2e4c9602d5984d4/Passwords/Default-Credentials/default-passwords.csv" \ + --create-dirs --output $1 + curl "https://gitlab.com/kalilinux/packages/seclists/-/raw/094459e5d757faccfcb44375a2e4c9602d5984d4/Passwords/Default-Credentials/ssh-betterdefaultpasslist.txt" \ + --create-dirs --output $2 + curl "https://gitlab.com/kalilinux/packages/seclists/-/raw/094459e5d757faccfcb44375a2e4c9602d5984d4/Passwords/Default-Credentials/telnet-betterdefaultpasslist.txt" \ + --create-dirs --output $3 +} + +# Create the temporary and output dictionaries, and remove existing ones. +# $1 Temporary dictionary file +# $2 Output dictionary file +# $3 Default dictionary directory +function create_dictionary() { + mkdir -p $3 + + if [ -f $1 ]; then + rm $1 + fi + + if [ -f $2 ]; then + rm $2 + fi + + touch $1 + touch $2 +} + +# Helps avoid certain special characters which cause grep to fail +# $1 Line string in file +function line_is_invalid_csv() { + echo $1 | grep -sqE "^\".*\"|\s|,," +} + +# Helps avoid certain special usernames/passwords which cause grep to fail. +# $1 Username or password string +function username_or_password_is_invalid() { + echo $1 | grep -sqE "|" +} + +# Append the username and password pair into the output dictionary with colon separation. +# $1 Username +# $2 Password +# $3 Output dictionary +function add_colon_pair_to_output_dictionary() { + if ! username_or_password_is_invalid $1; then + if ! username_or_password_is_invalid $2; then + echo "$1:$2" >> $3 + fi + fi +} + +# Convert lines in the manufacturer csv file into colon separated username:password pairs, then append them to file. +# $1 Raw dictionary +# $2 Output dictionary +function append_manufacturer_csv_to_output_dictionary() { + while read LINE + do + if ! 
line_is_invalid_csv "$LINE"; then + IFS=',' read -ra CREDENTIAL_ARRAY <<< "$LINE" + USERNAME="${CREDENTIAL_ARRAY[1]}" + PASSWORD="${CREDENTIAL_ARRAY[2]}" + add_colon_pair_to_output_dictionary $USERNAME $PASSWORD $2 + fi + done < $1 +} + +# Add colon separated txt file into the output dictionary. +# $1 Output dictionary file +# $2 Colon separated file +function append_colon_separated_file_to_output_dictionary() { + cat $2 >> $1 +} + +# Runs a few cleanup commands which do the following: +# - Remove trailing whitespace +# - Sort and remove duplicates +# - Finally, add a colon at the start of each line to make it suitable for use in medusa +# +# $1 Temporary dictionary file +# $2 Output dictionary file +function clean_output_dictionary() { + cat $1 | sed -E 's/\s+$//' | sort -u | sed -E 's/^/:/' > $2 +} + +# Removes the necessary bits from the full dictionary to create separate files for usernames and passwords. +# $1 Output dictionary +# $2 Usernames file +# $3 Passwords file +function create_username_and_password_list_from_dictionary() { + cat $1 | sed -E 's/^://' | sed -E 's/:.*$//' > $2 + cat $1 | sed -E 's/^.*://' > $3 +} + +# Main function: + +echo Creating credential files... +create_dictionary $TMP_DICTIONARY $OUTPUT_DICTIONARY $DEFAULT_DICTIONARY_DIR + +echo Retrieving raw dictionaries from sources +retrieve_raw_dictionaries $MANUFACTURER_DEFAULTS_DICTIONARY $SSH_DICTIONARY $TELNET_DICTIONARY $RAW_DICTIONARY_DIR + +echo Parsing CSV file... +append_manufacturer_csv_to_output_dictionary $MANUFACTURER_DEFAULTS_DICTIONARY $TMP_DICTIONARY + +echo Parsing SSH passwords file... +append_colon_separated_file_to_output_dictionary $TMP_DICTIONARY $SSH_DICTIONARY + +echo Parsing telnet passwords file... +append_colon_separated_file_to_output_dictionary $TMP_DICTIONARY $TELNET_DICTIONARY + +echo Cleaning up output dictionary... +clean_output_dictionary $TMP_DICTIONARY $OUTPUT_DICTIONARY + +echo Creating extra dictionaries... 
+create_username_and_password_list_from_dictionary $OUTPUT_DICTIONARY $USERNAMES_LIST $PASSWORDS_LIST + +echo Removing temporary dictionary... +rm $TMP_DICTIONARY + +echo Done! diff --git a/subset/security/password/resources/default/dictionary.txt b/subset/security/password/resources/default/dictionary.txt new file mode 100644 index 0000000000..681fba73a6 --- /dev/null +++ b/subset/security/password/resources/default/dictionary.txt @@ -0,0 +1,1271 @@ +:11111111:11111111 +:11111:x-admin +:123:234 +:1234:1234 +:22222222:22222222 +:**23646:23646 +:**266344:266344 +:266344:266344 +:2800:2800 +:31994:31994 +:666666:666666 +:7654321:7654321 +:880175445:11223344 +:888888:888888 +:acer:acer +:acitoolkit:acitoolkit +:Adam:29111991 +:ADAMS:WOOD +:ADLDEMO:ADLDEMO +:adm:adm +:admin:0 +:admin:000000 +:admin:1111 +:admin:1111111 +:admin:123 +:admin:123123 +:admin:1234 +:admin:12345 +:admin:123456 +:Admin:123456 +:admin:1234admin +:Admin:123qwe +:admin:1988 +:Admin:1988 +:Admin1:Admin1 +:admin1:password +:admin:2222 +:admin:22222 +:admin2:changeme +:admin:4321 +:Admin:5001 +:admin:abc123 +:admin:access +:admin:admin +:admin:!admin +:Admin:admin +:Admin:Admin +:ADMIN:admin +:ADMIN:ADMIN +:admin:admin000 +:Admin:admin1 +:admin:admin123 +:admin:admin1234 +:admin:adminadmin +:admin:adslolitec +:admin:adslroot +:admin:AitbISP4eCiG +:admin:allot +:admin:alphaadmin +:ADMIN:alphacom +:admin:AlpheusDigital1010 +:admin:amigosw1 +:admin:asante +:admin:Ascend +:admin:asd +:Admin:atc456 +:admin:atlantis +:admin:avocent +:admin:axis2 +:admin:barney +:admin:barricade +:Admin:Barricade +:admin:bintec +:admin:broadband +:admin:brocade1 +:admin:cat1029 +:admin:changeit +:admin:changeme +:admin:cisco +:admin:comcomcom +:admin:conexant +:admin:default +:admin:demo +:admin:detmond +:admin:diamond +:admin:dmr99 +:admin:draadloos +:Admin:Emerson1 +:admin:epicrouter +:Admin:epicrouter +:admin@example.com:admin +:admin:extendnet +:admin:funkwerk +:admin:gvt12345 +:admin:hagpolm1 +:admin:hello 
+:admin:hipchat +:admin:hp.com +:Admin:ImageFolio +:admin:imss7.0 +:admin:infrant1 +:admin:insecure +:admin:ip20 +:admin:ip21 +:admin:ip3000 +:admin:ip305Beheer +:admin:ip400 +:admin:ironport +:admin:isee +:Administrator:0000 +:administrator:1234 +:Administrator:3ware +:Administrator:adaptec +:Administrator:admin +:ADMINISTRATOR:admin +:administrator:administrator +:Administrator:Administrator +:ADMINISTRATOR:ADMINISTRATOR +:administrator:Amx1234! +:administrator:asecret +:Administrator:changeme +:Administrator:Fiery.1 +:Administrator:Gateway +:Administrator:ggdaseuaimhrke +:Administrator:letmein +:Administrator:manage +:administrator:password +:Administrator:password +:administrator:PlsChgMe! +:Administrator:p@ssw0rd +:Administrator:public +:administrator:root +:administrator:RSAAppliance +:Administrator:smcadmin +:Administrator:storageserver +:Administrator:Unidesk1 +:Administrator:vision2 +:Administrator:Vision2 +:admin:j5Brn9 +:admin:Janitza +:admin:jboss4 +:ADMIN:JETSPEED +:admin:jvc +:admin:leviton +:admin:linga +:admin:ManagementConsole2015 +:admin:meinsm +:admin:michelangelo +:admin:microbusiness +:admin:mono +:admin:motorola +:admin:mp3mystic +:admin:mu +:admin:muze +:admin:my_DEMARC +:admin:netadmin +:admin:NetCache +:admin:netscreen +:admin:NetSeq +:admin:NetSurvibox +:Admin:No +:admin:none +:admin:novell +:admin:noway +:admin:OCS +:admin:OkiLAN +:admin:pass +:Admin:Pass +:admin:password +:Admin:password +:ADMIN:PASSWORD +:admin:passwort +:admin:peribit +:admin:pfsense +:admin:phplist +:admin:private +:admin:public +:admin:pwp +:admin:rainbow +:admin:raritan +:admin:readwrite +:admin:rmnetlm +:admin:root +:Admin:SECRET123 +:admin:secure +:admin:security +:admin:setup +:admin:Sharp +:admin:smallbusiness +:admin:smcadmin +:adminstat:OCS +:adminstrator:changeme +:Admin:Su +:admin:superuser +:admin:su@psir +:admin:surecom +:admin:switch +:admin:symantec +:admin:symbol +:admin:Symbol +:admin:synnet +:admin:sysAdmin +:admin:system +:admin:TANDBERG 
+:admin:tegile +:admin:tlJwpbo6 +:admin:tomcat +:admin:tsunami +:adminttd:adminttd +:admin:urchin +:adminuser:OCS +:admin:utstar +:adminview:OCS +:admin:waav +:Admin:wago +:admin:welcome +:ADMIN:WELCOME +:admin:x-admin +:admin:year2000 +:admin:ZmqVfoSIP +:admin:zoomadsl +:adsl:adsl1234 +:adtec:none +:ADVMAIL:HP +:Alphanetworks:wapnd03cm_dkbs_dap2555 +:Alphanetworks:wapnd04cm_dkbs_dap3525 +:Alphanetworks:wapnd15_dlob_dap1522b +:Alphanetworks:wrgac01_dlob.hans_dir865 +:Alphanetworks:wrgg15_di524 +:Alphanetworks:wrgg19_c_dlwbr_dir300 +:Alphanetworks:wrgn22_dlwbr_dir615 +:Alphanetworks:wrgn23_dlwbr_dir300b +:Alphanetworks:wrgn23_dlwbr_dir600b +:Alphanetworks:wrgn28_dlob_dir412 +:Alphanetworks:wrgn39_dlob.hans_dir645 +:Alphanetworks:wrgn39_dlob.hans_dir645_V1 +:Alphanetworks:wrgn49_dlob_dir600b +:Alphanetworks:wrgnd08_dlob_dir815 +:amx:Amx1234! +:amx:password +:ANDY:SWORDFISH +:anon:anon +:anonymous:anonymous +:anonymous:any +:anonymous:any@ +:anonymous:Exabyte +:anonymous:password +:Any:12345 +:Any:Any +:(any):TENmanUFactOryPOWER +:AP:AP +:aparker@geometrixx.info:aparker +:apc:apc +:APPLSYS:APPLSYS +:APPLSYS:FND +:APPLSYSPUB:FNDPUB +:APPS:APPS +:APPUSER:APPUSER +:AQ:AQ +:AQDEMO:AQDEMO +:AQJAVA:AQJAVA +:AQUSER:AQUSER +:ARAdmin:AR#Admin# +:ARCHIVIST:ARCHIVIST +:AUDIOUSER:AUDIOUSER +:AURORA@ORB@UNAUTHENTICATED:INVALID +:AURORA$ORB$UNAUTHENTICATED:INVALID +:author:author +:autocad:autocad +:BACKUP:BACKUP +:backuponly:backuponly1 +:backuprestore:backuprestore1 +:basisk:basisk +:Basisk:Basisk +:bbs:bbs +:bbsd-client:changeme2 +:bbsd-client:NULL +:BC4J:BC4J +:bciim:bciimpw +:bcim:bcimpw +:bcms:bcmspw +:bcnas:bcnaspw +:bewan:bewan +:bin:sys +:Blaeri:22332323 +:BLAKE:PAPER +:blue:bluepw +:Bobo:hello +:both:tomcat +:bpel:bpel +:BRIO_ADMIN:BRIO_ADMIN +:browse:browsepw +:browse:looker +:bubba:(unknown) +:cablecom:router +:cac_admin:cacadmin +:CATALOG:CATALOG +:c-comatic:xrtwk318 +:ccrusr:ccrusr +:CDEMO82:CDEMO82 +:CDEMOCOR:CDEMOCOR +:CDEMORID:CDEMORID +:CDEMOUCB:CDEMOUCB 
+:cellit:cellit +:CENTRA:CENTRA +:cgadmin:cgadmin +:checkfs:checkfs +:checkfsys:checkfsys +:checksys:checksys +:CHEY_ARCHSVR:CHEY_ARCHSVR +:CICSUSER:CISSUS +:CIDS:CIDS +:cirros:cubswin:) +:CIS:CIS +:CISCO15:otbu+1 +:cisco:cisco +:Cisco:Cisco +:CISINFO:CISINFO +:citel:password +:CLARK:CLOTH +:client:client +:cloudera:cloudera +:cmaker:cmaker +:CMSBATCH:CMSBATCH +:cn=orcladmin:welcome +:Coco:hello +:comcast:1234 +:COMPANY:COMPANY +:COMPIERE:COMPIERE +:computer:repair +:conferencing:admin +:config:biodata +:corecess:corecess +:core:phpreactor +:CQSCHEMAUSER:PASSWORD +:craft:craft +:craft:craftpw +:craft:crftpw +:Craft:crftpw +:(created):telus00 +:(created):telus99 +:crowd­-openid-­server:password +:Crowd:password +:CSG:SESAME +:CSMIG:CSMIG +:ctb_admin:sap123 +:CTXDEMO:CTXDEMO +:CTXSYS:CTXSYS +:cusadmin:highspeed +:cust:custpw +:customer:none +:dadmin:dadmin +:dadmin:dadmin01 +:daemon:daemon +:davox:davox +:db2fenc1:db2fenc1 +:db2inst1:db2inst1 +:dbase:dbase +:DBA:SQL +:DBDCCICS:DBDCCIC +:DBI:MUMBLEFRATZ +:DBSNMP:DBSNMP +:DDIC:19920706 +:debian:debian +:debian:sixaola +:debian:temppwd +:debug:d.e.b.u.g +:debug:gubed +:debug:synnet +:d.e.b.u.g:User +:default: +:default:antslq +:default:OxhlwSG8 +:default:S2fGqNFs +:default:video +:default:WLAN_AP +:defug:synnet +:DEMO8:DEMO8 +:DEMO9:DEMO9 +:demo:demo +:DEMO:DEMO +:demo:fai +:Demo:password +:demos:demos +:DES:DES +:deskalt:password +:deskman:changeme +:desknorm:password +:deskres:password +:DEV2000_DEMOS:DEV2000_DEMOS +:dev:dev +:Developer:isdev +:device:apc +:device:device +:diag:danger +:diag:switch +:DIP:DIP +:DISCOVERER_ADMIN:DISCOVERER_ADMIN +:distrib:distrib0 +:disttech:4tas +:disttech:disttech +:disttech:etas +:D-Link:D-Link +:dm:telnet +:dni:dni +:dos:dos +:dpn:changeme +:draytek:1234 +:Draytek:1234 +:DSGATEWAY:DSGATEWAY +:DSL:DSL +:DSSYS:DSSYS +:DTA:TJM +:dvstation:dvst10n +:eagle:eagle +:EARLYWATCH:SUPPORT +:echo:echo +:echo:User +:egcr:ergc +:EJSADMIN:EJSADMIN +:elk_user:forensics +:emaq:4133 +:EMP:EMP 
+:enable:cisco +:eng:engineer +:engmode:hawk201 +:enisa:enisa +:enquiry:enquirypw +:ESTOREUSER:ESTORE +:eurek:eurek +:EVENT:EVENT +:EXFSYS:EXFSYS +:expert:expert +:Factory:56789 +:factory:Fact4EMC +:fal:fal +:fam:fam +:fastwire:fw +:fax:fax +:FAX:FAX +:FAXUSER:FAXUSER +:FAXWORKS:FAXWORKS +:fg_sysadmin:password +:field:field +:FIELD:HPONLY +:FIELD:LOTUS +:FIELD:MANAGER +:FIELD:MGR +:FIELD:SERVICE +:field:support +:FIELD:SUPPORT +:FINANCE:FINANCE +:firstsite:firstsite +:Flo:hello +:FND:FND +:FORSE:FORSE +:FROSTY:SNOWMAN +:ftp_admi:kilo1987 +:ftp:ftp +:ftp_inst:pbxk1064 +:ftp_nmc:tuxalize +:ftp_oper:help1954 +:ftpuser:password +:ftp:video +:fwadmin:xceladmin +:fwupgrade:fwupgrade +:games:games +:GATEWAY:GATEWAY +:Gearguy:Geardog +:GE:GE +:geosolutions:Geos +:glftpd:glftpd +:GL:GL +:god1:12345 +:god2:12345 +:gopher:gopher +:GPFD:GPFD +:GPLD:GPLD +:guest:1234 +:guest:12345 +:guest1:guest +:guest1:guest1 +:guest:guest +:Guest:guest +:Guest:Guest +:GUEST:GUEST +:guest:guestgue +:GUEST:GUESTGUE +:GUEST:GUESTGUEST +:guest:Janitza +:guest:truetime +:GUEST:TSEUG +:guest:User +:guru:*3noguru +:halt:halt +:HCPARK:HCPARK +:HELLO:FIELD.SUPPORT +:hello:hello +:HELLO:MANAGER.SYS +:HELLO:MGR.SYS +:HELLO:OP.OPERATOR +:helpdesk:OCS +:HLW:HLW +:(hostname/ipaddress):sysadmin +:HPLASER:HPLASER +:HPSupport:badg3r5 +:HR:HR +:hsa:hsadb +:hscroot:abc123 +:HTTP:HTTP +:hunter:hunter +:hxeadm:HXEHana1 +:ibm:2222 +:ibm:password +:ibm:service +:IBMUSER:SYS1 +:iclock:timely +:ilom-admin:ilom-admin +:ilom-operator:ilom-operator +:images:images +:IMAGEUSER:IMAGEUSER +:IMEDIA:IMEDIA +:inads:inads +:inads:indspw +:informix:informix +:init:initpw +:installer:1000 +:installer:installer +:install:install +:install:secret +:intel:intel +:intermec:intermec +:internal:oracle +:IntraStack:Asante +:IntraSwitch:Asante +:ioFTPD:ioFTPD +:IS_$hostname:IS_$hostname +:itsadmin:init +:james:james +:jdoe@geometrixx.info:jdoe +:JMUSER:JMUSER +:Joe:hello +:joe:password +:JONES:STEEL +:JWARD:AIROPLANE +:keyscan:KEYSCAN 
+:khan:kahn +:kodi:kodi +:l2:l2 +:L2LDEMO:L2LDEMO +:l3:l3 +:LASER:LASER +:LASERWRITER:LASERWRITER +:LBACSYS:LBACSYS +:LDAP_Anonymous:LdapPassword_1 +:leo:leo +:LIBRARIAN:SHELVES +:Liebert:Liebert +:live:live +:LocalAdministrator:#l@$ak#.lk;0@P +:localadmin:localadmin +:locate:locatepw +:login:0000 +:login:access +:login:admin +:login:password +:lpadmin:lpadmin +:lpadm:lpadm +:lp:bin +:lp:lineprin +:lp:lp +:LR-ISDN:LR-ISDN +:lynx:lynx +:m1122:m1122 +:m202:m202 +:MAIL:HPOFFICE +:mail:mail +:MAIL:MAIL +:MAIL:MPE +:MAIL:REMOTE +:MAIL:TELESUP +:maintainer:admin +:maintainer:pbcpbn(add-serial-number) +:maint:maint +:MAINT:MAINT +:maint:maintpw +:maint:ntacdmax +:maint:password +:maint:rwmaint +:Manager:Admin +:MANAGER:COGNOS +:manager:friend +:MANAGER:HPOFFICE +:MANAGER:ITF3000 +:manager:manager +:Manager:Manager +:MANAGER:SECURITY +:managers:managers +:MANAGER:SYS +:MANAGER:TCH +:MANAGER:TELESUP +:man:man +:manuf:xxyyzz +:mary:password +:master:master +:MASTER:PASSWORD +:master:themaster01 +:MayGion:maygion.com +:McdataSE:redips +:MCUser:MCUser1 +:MD110:help +:MDDEMO_CLERK:CLERK +:MDDEMO:MDDEMO +:MDDEMO_MGR:MGR +:MDSYS:MDSYS +:mediator:mediator +:me:me +:memotec:supervisor +:Menara:Menara +:mfd:mfd +:MFG:MFG +:mg3500:merlin +:MGE:VESOFT +:MGR:CAROLIAN +:MGR:CCC +:MGR:CNAS +:MGR:COGNOS +:MGR:CONV +:MGR:HPDESK +:MGR:HPOFFICE +:MGR:HPONLY +:MGR:HPP187 +:MGR:HPP189 +:MGR:HPP196 +:MGR:INTX3 +:MGR:ITF3000 +:MGR:NETBASE +:MGR:REGO +:MGR:RJE +:MGR:ROBELLE +:MGR:SECURITY +:MGR:SYS +:MGR:TELESUP +:MGR:VESOFT +:MGR:WORD +:MGR:XLSERVER +:MGWUSER:MGWUSER +:MICRO:RSX +:MIGRATE:MIGRATE +:MILLER:MILLER +:misp:Password1234 +:mlusr:mlusr +:MMO2:MMO2 +:mobile:dottie +:MODTEST:YES +:Moe:hello +:monitor:monitor +:MOREAU:MOREAU +:mountfs:mountfs +:mountfsys:mountfsys +:mountsys:mountsys +:MSHOME:MSHOME +:mso:w0rkplac3rul3s +:MTSSYS:MTSSYS +:MTS_USER:MTS_PASSWORD +:MTYSYS:MTYSYS +:museadmin:Muse!Admin +:musi1921:Musi%1921 +:musi1921:Musii%1921 +:MXAGENT:MXAGENT +:myshake:shakeme 
+:naadmin:naadmin +:n.a:guardone +:NAMES:NAMES +:nao:nao +:NAU:NAU +:ncrm:ncrm +:netbotz:netbotz +:netlink:netlink +:NetLinx:password +:netman:netman +:netopia:netopia +:netrangr:attack +:netscreen:netscreen +:NETWORK:NETWORK +:news:news +:newuser:wampp +:nexthink:123456 +:NICONEX:NICONEX +:nm2user:nm2user +:nms:nmspw +:nobody:nobody +:none:0 +:none:4321 +:none:admin +:none:blank +:none:none +:none:private +:none:sysadm +:nop:12345 +:nop:123454 +:NSA:nsa +:OAS_PUBLIC:OAS_PUBLIC +:OCITEST:OCITEST +:ODM_MTR:MTRPW +:ODM:ODM +:ODSCOMMON:ODSCOMMON +:ods:ods +:ODS:ODS +:OEMADM:OEMADM +:OEMREP:OEMREP +:OE:OE +:OLAPDBA:OLAPDBA +:OLAPSVR:INSTANCE +:OLAPSYS:MANAGER +:OMWB_EMULATION:ORACLE +:onlime_r:12345 +:OO:OO +:openhabian:openhabian +:OPENSPIRIT:OPENSPIRIT +:OPERATIONS:OPERATIONS +:OPERATNS:OPERATNS +:operator:admin +:operator:$chwarzepumpe +:OPERATOR:COGNOS +:OPERATOR:DISC +:operator:mercury +:operator:operator +:Operator:Operator +:OPERATOR:SUPPORT +:OPERATOR:SYS +:OPERATOR:SYSTEM +:Oper:Oper +:OPER:OPER +:op:op +:op:operator +:oracle:oracle +:ORAREGSYS:ORAREGSYS +:ORASSO:ORASSO +:ORDPLUGINS:ORDPLUGINS +:ORDSYS:ORDSYS +:osbash:osbash +:osboxes:osboxes.org +:osmc:osmc +:OSP22:OSP22 +:OUTLN:OUTLN +:overseer:overseer +:OWA:OWA +:OWA_PUBLIC:OWA_PUBLIC +:OWNER:OWNER +:PACSLinkIP:NetServer +:PANAMA:PANAMA +:patrol:patrol +:PATROL:PATROL +:PBX:PBX +:PCUSER:SYS +:pepino:pepino +:PERFSTAT:PERFSTAT +:PFCUser:240653C9467E45 +:piranha:piranha +:piranha:q +:pi:raspberry +:PLEX:PLEX +:plexuser:rasplex +:PLMIMService:NetServer +:PLSQL:SUPERSECRET +:PM:PM +:pnadmin:pnadmin +:PO7:PO7 +:PO8:PO8 +:politically:correct +:poll:tech +:Polycom:SpIp +:PO:PO +:PORTAL30_DEMO:PORTAL30_DEMO +:PORTAL30:PORTAL30 +:PORTAL30:PORTAL31 +:PORTAL30_PUBLIC:PORTAL30_PUBLIC +:PORTAL30_SSO:PORTAL30_SSO +:PORTAL30_SSO_PS:PORTAL30_SSO_PS +:PORTAL30_SSO_PUBLIC:PORTAL30_SSO_PUBLIC +:POST:BASE +:postmaster:postmast +:POST:POST +:POWERCARTUSER:POWERCARTUSER +:POWERCHUTE:APC +:powerdown:powerdown 
+:praisenetwork:perfectpraise +:PRIMARY:PRIMARY +:primenet:primenet +:primenet:primeos +:primeos:prime +:primeos:primeos +:prime:prime +:prime:primeos +:primos_cs:prime +:primos_cs:primos +:PRINTER:PRINTER +:PRINT:PRINT +:PRODCICS:PRODCICS +:PRODDTA:PRODDTA +:PROG:PROG +:prtgadmin:prtgadmin +:PSEAdmin:$secure$ +:public:publicpass +:PUBSUB1:PUBSUB1 +:PUBSUB:PUBSUB +:pw:pwpw +:pwrchute:pwrchute +:pyimagesearch:deeplearning +:qbf77101:hexakisoctahedron +:QDBA:QDBA +:qpgmr:qpgmr +:QS_ADM:QS_ADM +:QS_CBADM:QS_CBADM +:QS_CB:QS_CB +:QS_CS:QS_CS +:qsecofr:11111111 +:qsecofr:22222222 +:qsecofr:qsecofr +:qserv:qserv +:QS_ES:QS_ES +:QS_OS:QS_OS +:QS:QS +:QSRV:11111111 +:QSRV:22222222 +:qsrvbas:qsrvbas +:qsrv:qsrv +:QSRV:QSRV +:qsvr:ibmcel +:qsvr:qsvr +:QS_WS:QS_WS +:qsysopr:qsysopr +:quser:quser +:radware:radware +:RAID:hpt +:rapport:r@p8p0r+ +:rcust:rcustpw +:rdc123:rdc123 +:readonly:apc +:readonly:lucenttech2 +:read:synnet +:readwrite:lucenttech1 +:recover:recover +:redline:redline +:remnux:malware +:REPADMIN:REPADMIN +:replication-receiver:replication-receiver +:Replicator:iscopy +:replicator:replicator +:REP_MANAGER:DEMO +:REPORTS_USER:OEM_TEMP +:REP_OWNER:DEMO +:REP_OWNER:REP_OWNER +:RE:RE +:restoreonly:restoreonly1 +:rje:rje +:RMAIL:RMAIL +:RMAN:RMAN +:RMUser1:password +:RNIServiceManager:NetServer +:Rodopi:Rodopi +:role1:role1 +:role1:tomcat +:role:changethis +:root:00000000 +:root:1001chin +:root:1111 +:root:1234 +:root:12345 +:root:123456 +:root:20080826 +:root:3ep5w2u +:root:54321 +:root:5up +:root:666666 +:root:7ujMko0admin +:root:7ujMko0vizxv +:root:888888 +:root:8RttoTriz +:root:admin +:root:ahetzip8 +:root:alpine +:root:anko +:root:anni2013 +:root:arcsight +:root:ascend +:root:attack +:root:ax400 +:root:bagabu +:root:blablabla +:root:blackarch +:root:blender +:root:brightmail +:root:calvin +:root:cat1029 +:root:ceadmin +:root:changeme +:root:changeonfirstlogin +:root:changethis +:root:china123 +:root:Cisco +:root:ciwuxe +:root:cms500 +:root:cubox-i 
+:root:cxlinux +:root:D13HH[ +:root:dasdec1 +:root:davox +:root:debian +:root:default +:root:dottie +:root:dreambox +:root:fai +:root:fibranne +:root:fidel123 +:root:freenas +:root:ggdaseuaimhrke +:root:GM8182 +:root:hi3518 +:root:hp +:root:ikwb +:root:indigo +:root:juantech +:root:jvbzd +:root:klv123 +:root:klv1234 +:root:kn1TG7psLu +:root:leostream +:root:libreelec +:root:linux +:root:logapp +:root:manager +:root:max2play +:root:mozart +:root:mpegvideo +:root:Mua'dib +:root:MuZhlo9n%8!G +:root:nas4free +:root:NeXT +:root:NM1$88 +:root:nokia +:root:nosoup4u +:root:nsi +:root:oelinux123 +:root:openelec +:root:openmediavault +:root:orion99 +:root:osboxes.org +:root:palosanto +:root:par0t +:root:pass +:root:passw0rd +:root:password +:root:p@ck3tf3nc3 +:root:pixmet2003 +:root:plex +:root:qwasyx21 +:root:rasplex +:root:realtek +:root:resumix +:root:root +:root:!root +:ROOT:ROOT +:root:root01 +:root:ROOT500 +:root:rootme +:root:rootpasswd +:root:screencast +:root:secur4u +:root:Serv4EMC +:root:sipwise +:root:sixaola +:root:stxadmin +:root:sun123 +:root:system +:root:t00lk1t +:root:t0talc0ntr0l4! +:root:TANDBERG +:root:timeserver +:root:toor +:root:tslinux +:root:ubnt +:root:ubuntu1404 +:root:uClinux +:root:unitrends1 +:root:user +:root:vagrant +:root:vertex25 +:root:video +:root:vizxv +:Root:wago +:root:wyse +:root:xc3511 +:root:xmhdipc +:root:xoa +:root:ys123456 +:root:zlxx +:root:zlxx. 
+:root:Zte521 +:ro:ro +:RSBCMON:SYS +:rwa:rwa +:rw:rw +:sa:changeonfirstlogin +:SAMPLE:SAMPLE +:sansforensics:forensics +:sans:training +:SAP*:06071992 +:SAP*:7061992 +:SA:PASSWORD +:SAPCPIC:admin +:SAPCPIC:ADMIN +:SAP*:PASS +:SAPR3:SAP +:SAP:SAPR3 +:sa:sasasa +:savelogs:crash +:scmadmin:scmchangeme +:sconsole:12345 +:SCOTT:TIGER +:SDOS_ICSAP:SDOS_ICSAP +:SECDEMO:SECDEMO +:secofr:secofr +:security:security +:sedacm:secacm +:self:system +:SERVICECONSUMER1:SERVICECONSUMER1 +:service:service +:Service:Service +:service:smile +:servlet:manager +:setpriv:system +:setup:changeme +:setup:changeme! +:setup:setup +:SH:SH +:shutdown:shutdown +:signa:signa +:siteadmin:siteadmin +:siteadmin:toplayer +:SITEMINDER:SITEMINDER +:SLIDE:SLIDEPW +:smc:smcadmin +:SMDR:SECONDARY +:snmp:nopasswd +:snmp:snmp +:spcl:0000 +:SPOOLMAN:HPOFFICE +:$SRV:$SRV +:ssladmin:ssladmin +:ssp:ssp +:stackato:stackato +:STARTER:STARTER +:status:readonly +:stratacom:stratauser +:STRAT_USER:STRAT_PASSWD +:stuccoboy:100198 +:super:5777364 +:__super:(caclulated) +:superdba:admin +:super:juniper123 +:superman:21241036 +:superman:talent +:super:super +:super.super:master +:super:superpass +:super:surt +:superuser:123456 +:superuser:admin +:SUPERUSER:ANS#150 +:superuser:asante +:SuperUser:kronites +:superuser:superuser +:SUPERVISOR:HARRIS +:SUPERVISOR:NETFRAME +:SUPERVISOR:NF +:SUPERVISOR:NFI +:supervisor:PlsChgMe! 
+:supervisor:supervisor +:SUPERVISOR:SUPERVISOR +:SUPERVISOR:SYSTEM +:supervisor:visor +:support:h179350 +:support:support +:support:supportpw +:support:symantec +:su:super +:sweex:mysweex +:SWPRO:SWPRO +:SWUSER:SWUSER +:Symbol:Symbol +:SYMPA:SYMPA +:sync:sync +:sysadm:admin +:sysadm:Admin +:sysadm:admpw +:sysadmin:master +:sysadmin:nortel +:sysadmin:password +:sysadmin:sysadmin +:sysadm:sysadm +:SYSADM:SYSADM +:sysadm:sysadmpw +:sysadm:syspw +:SYSA:SYSA +:sys:bin +:sysbin:sysbin +:sys:change_on_install +:SYS:CHANGE_ON_INSTALL +:SYSDBA:masterkey +:SYS:D_SYSPW +:SYSMAN:oem_temp +:SYSMAN:OEM_TEMP +:sysopr:sysopr +:Sysop:Sysop +:sys:sys +:sys:system +:system:adminpwd +:system_admin:system_admin +:SYSTEM:D_SYSTPW +:system:isp +:system:manager +:SYSTEM:MANAGER +:system/manager:sys/change_on_install +:system:mnet +:system:password +:system:prime +:system:security +:system:sys +:system:system +:system:weblogic +:sys:uplink +:t3admin:Trintech +:TAHITI:TAHITI +:target:password +:tasman:tasmannet +:Tasman:Tasmannet +:TDOS_ICSAP:TDOS_ICSAP +:teacher:password +:tech:field +:tech:nician +:technician:yZgO8Bvj +:tech:tech +:telecom:telecom +:Telecom:Telecom +:tele:tele +:tellabs:tellabs#1 +:temp1:password +:TESTPILOT:TESTPILOT +:test:test +:TEST:TEST +:tiger:tiger123 +:tomcat:changethis +:tomcat:tomcat +:toor:logapp +:topicalt:password +:topicnorm:password +:topicres:password +:TRACESRV:TRACE +:TRACESVR:TRACE +:TRAVEL:TRAVEL +:trmcnfg:trmcnfg +:trouble:trouble +:TSDEV:TSDEV +:TSUSER:TSUSER +:TURBINE:TURBINE +:ubnt:ubnt +:ucenik23:ucenik +:ULTIMATE:ULTIMATE +:umountfs:umountfs +:umountfsys:umountfsys +:umountsys:umountsys +:unix:unix +:USER0:USER0 +:User:1001 +:User:1234 +:User:19750407 +:USER1:USER1 +:USER2:USER2 +:USER3:USER3 +:USER4:USER4 +:USER5:USER5 +:USER6:USER6 +:USER7:USER7 +:USER8:USER8 +:USER9:USER9 +:user_analyst:demo +:user_approver:demo +:user_author:demo +:user_checker:demo +:user_designer:demo +:user_editor:demo +:user_expert:demo +:USERID:PASSW0RD 
+:USERID:PASSWORD +:user:Janitza +:user_marketer:demo +:username:password +:Username:password +:Username:Password +:user:none +:userNotUsed:userNotU +:user:password +:User:Password +:user_pricer:demo +:user:public +:user_publisher:demo +:USER_TEMPLATE:USER_TEMPLATE +:user:tivonpw +:user:user +:User:user +:User:User +:USER:USER +:user:user0000 +:user:USERP +:UTLBSTATU:UTLESTAT +:uucpadm:uucpadm +:uucp:uucp +:uwmadmin:password +:vagrant:vagrant +:VCSRV:VCSRV +:veda:12871 +:vgnadmin:vgnadmin +:viewuser:viewuser1 +:VIF_DEVELOPER:VIF_DEV_PWD +:vikram:singh +:VIRUSER:VIRUSER +:VNC:winterm +:volition:volition +:vpasp:vpasp +:VRR1:VRR1 +:VTAM:VTAM +:WANGTEK:WANGTEK +:webadmin:1234 +:WebAdmin:Admin +:webadmin:webadmin +:WebAdmin:WebBoard +:webadmin:webibm +:WEBADM:password +:WEBCAL01:WEBCAL01 +:webdb:webdb +:WEBDB:WEBDB +:webguest:1 +:weblogic:weblogic +:webmaster:webmaster +:WEBREAD:WEBREAD +:webshield:webshieldchangeme +:web:web +:whd:whd +:WINDOWS_PASSTHRU:WINDOWS_PASSTHRU +:WINSABRE:SABRE +:WINSABRE:WINSABRE +:WKSYS:WKSYS +:wlcsystem:wlcsystem +:wlpisystem:wlpisystem +:wlseuser:wlsepassword +:wlse:wlsedb +:WP:HPOFFICE +:wpsadmin:wpsadmin +:wradmin:trancell +:write:private +:write:synnet +:wVQxyQec:eomjbOBLLwbZeiKV +:WWWUSER:WWWUSER +:www:www +:WWW:WWW +:xd:xd +:xmi_demo:sap123 +:XPRT:XPRT +:XXSESS_MGRYY:X#1833 diff --git a/subset/security/password/resources/default/passwords.txt b/subset/security/password/resources/default/passwords.txt new file mode 100644 index 0000000000..d9f319e79c --- /dev/null +++ b/subset/security/password/resources/default/passwords.txt @@ -0,0 +1,1271 @@ +11111111 +x-admin +234 +1234 +22222222 +23646 +266344 +266344 +2800 +31994 +666666 +7654321 +11223344 +888888 +acer +acitoolkit +29111991 +WOOD +ADLDEMO +adm +0 +000000 +1111 +1111111 +123 +123123 +1234 +12345 +123456 +123456 +1234admin +123qwe +1988 +1988 +Admin1 +password +2222 +22222 +changeme +4321 +5001 +abc123 +access +admin +!admin +admin +Admin +admin +ADMIN +admin000 +admin1 +admin123 
+admin1234 +adminadmin +adslolitec +adslroot +AitbISP4eCiG +allot +alphaadmin +alphacom +AlpheusDigital1010 +amigosw1 +asante +Ascend +asd +atc456 +atlantis +avocent +axis2 +barney +barricade +Barricade +bintec +broadband +brocade1 +cat1029 +changeit +changeme +cisco +comcomcom +conexant +default +demo +detmond +diamond +dmr99 +draadloos +Emerson1 +epicrouter +epicrouter +admin +extendnet +funkwerk +gvt12345 +hagpolm1 +hello +hipchat +hp.com +ImageFolio +imss7.0 +infrant1 +insecure +ip20 +ip21 +ip3000 +ip305Beheer +ip400 +ironport +isee +0000 +1234 +3ware +adaptec +admin +admin +administrator +Administrator +ADMINISTRATOR +Amx1234! +asecret +changeme +Fiery.1 +Gateway +ggdaseuaimhrke +letmein +manage +password +password +PlsChgMe! +p@ssw0rd +public +root +RSAAppliance +smcadmin +storageserver +Unidesk1 +vision2 +Vision2 +j5Brn9 +Janitza +jboss4 +JETSPEED +jvc +leviton +linga +ManagementConsole2015 +meinsm +michelangelo +microbusiness +mono +motorola +mp3mystic +mu +muze +my_DEMARC +netadmin +NetCache +netscreen +NetSeq +NetSurvibox +No +none +novell +noway +OCS +OkiLAN +pass +Pass +password +password +PASSWORD +passwort +peribit +pfsense +phplist +private +public +pwp +rainbow +raritan +readwrite +rmnetlm +root +SECRET123 +secure +security +setup +Sharp +smallbusiness +smcadmin +OCS +changeme +Su +superuser +su@psir +surecom +switch +symantec +symbol +Symbol +synnet +sysAdmin +system +TANDBERG +tegile +tlJwpbo6 +tomcat +tsunami +adminttd +urchin +OCS +utstar +OCS +waav +wago +welcome +WELCOME +x-admin +year2000 +ZmqVfoSIP +zoomadsl +adsl1234 +none +HP +wapnd03cm_dkbs_dap2555 +wapnd04cm_dkbs_dap3525 +wapnd15_dlob_dap1522b +wrgac01_dlob.hans_dir865 +wrgg15_di524 +wrgg19_c_dlwbr_dir300 +wrgn22_dlwbr_dir615 +wrgn23_dlwbr_dir300b +wrgn23_dlwbr_dir600b +wrgn28_dlob_dir412 +wrgn39_dlob.hans_dir645 +wrgn39_dlob.hans_dir645_V1 +wrgn49_dlob_dir600b +wrgnd08_dlob_dir815 +Amx1234! 
+password +SWORDFISH +anon +anonymous +any +any@ +Exabyte +password +12345 +Any +TENmanUFactOryPOWER +AP +aparker +apc +APPLSYS +FND +FNDPUB +APPS +APPUSER +AQ +AQDEMO +AQJAVA +AQUSER +AR#Admin# +ARCHIVIST +AUDIOUSER +INVALID +INVALID +author +autocad +BACKUP +backuponly1 +backuprestore1 +basisk +Basisk +bbs +changeme2 +NULL +BC4J +bciimpw +bcimpw +bcmspw +bcnaspw +bewan +sys +22332323 +PAPER +bluepw +hello +tomcat +bpel +BRIO_ADMIN +browsepw +looker +(unknown) +router +cacadmin +CATALOG +xrtwk318 +ccrusr +CDEMO82 +CDEMOCOR +CDEMORID +CDEMOUCB +cellit +CENTRA +cgadmin +checkfs +checkfsys +checksys +CHEY_ARCHSVR +CISSUS +CIDS +) +CIS +otbu+1 +cisco +Cisco +CISINFO +password +CLOTH +client +cloudera +cmaker +CMSBATCH +welcome +hello +1234 +COMPANY +COMPIERE +repair +admin +biodata +corecess +phpreactor +PASSWORD +craft +craftpw +crftpw +crftpw +telus00 +telus99 +password +password +SESAME +CSMIG +sap123 +CTXDEMO +CTXSYS +highspeed +custpw +none +dadmin +dadmin01 +daemon +davox +db2fenc1 +db2inst1 +dbase +SQL +DBDCCIC +MUMBLEFRATZ +DBSNMP +19920706 +debian +sixaola +temppwd +d.e.b.u.g +gubed +synnet +User + +antslq +OxhlwSG8 +S2fGqNFs +video +WLAN_AP +synnet +DEMO8 +DEMO9 +demo +DEMO +fai +password +demos +DES +password +changeme +password +password +DEV2000_DEMOS +dev +isdev +apc +device +danger +switch +DIP +DISCOVERER_ADMIN +distrib0 +4tas +disttech +etas +D-Link +telnet +dni +dos +changeme +1234 +1234 +DSGATEWAY +DSL +DSSYS +TJM +dvst10n +eagle +SUPPORT +echo +User +ergc +EJSADMIN +forensics +4133 +EMP +cisco +engineer +hawk201 +enisa +enquirypw +ESTORE +eurek +EVENT +EXFSYS +expert +56789 +Fact4EMC +fal +fam +fw +fax +FAX +FAXUSER +FAXWORKS +password +field +HPONLY +LOTUS +MANAGER +MGR +SERVICE +support +SUPPORT +FINANCE +firstsite +hello +FND +FORSE +SNOWMAN +kilo1987 +ftp +pbxk1064 +tuxalize +help1954 +password +video +xceladmin +fwupgrade +games +GATEWAY +Geardog +GE +Geos +glftpd +GL +12345 +12345 +gopher +GPFD +GPLD +1234 +12345 +guest +guest1 +guest +guest 
+Guest +GUEST +guestgue +GUESTGUE +GUESTGUEST +Janitza +truetime +TSEUG +User +*3noguru +halt +HCPARK +FIELD.SUPPORT +hello +MANAGER.SYS +MGR.SYS +OP.OPERATOR +OCS +HLW +sysadmin +HPLASER +badg3r5 +HR +hsadb +abc123 +HTTP +hunter +HXEHana1 +2222 +password +service +SYS1 +timely +ilom-admin +ilom-operator +images +IMAGEUSER +IMEDIA +inads +indspw +informix +initpw +1000 +installer +install +secret +intel +intermec +oracle +Asante +Asante +ioFTPD +IS_$hostname +init +james +jdoe +JMUSER +hello +password +STEEL +AIROPLANE +KEYSCAN +kahn +kodi +l2 +L2LDEMO +l3 +LASER +LASERWRITER +LBACSYS +LdapPassword_1 +leo +SHELVES +Liebert +live +#l@$ak#.lk;0@P +localadmin +locatepw +0000 +access +admin +password +lpadmin +lpadm +bin +lineprin +lp +LR-ISDN +lynx +m1122 +m202 +HPOFFICE +mail +MAIL +MPE +REMOTE +TELESUP +admin +pbcpbn(add-serial-number) +maint +MAINT +maintpw +ntacdmax +password +rwmaint +Admin +COGNOS +friend +HPOFFICE +ITF3000 +manager +Manager +SECURITY +managers +SYS +TCH +TELESUP +man +xxyyzz +password +master +PASSWORD +themaster01 +maygion.com +redips +MCUser1 +help +CLERK +MDDEMO +MGR +MDSYS +mediator +me +supervisor +Menara +mfd +MFG +merlin +VESOFT +CAROLIAN +CCC +CNAS +COGNOS +CONV +HPDESK +HPOFFICE +HPONLY +HPP187 +HPP189 +HPP196 +INTX3 +ITF3000 +NETBASE +REGO +RJE +ROBELLE +SECURITY +SYS +TELESUP +VESOFT +WORD +XLSERVER +MGWUSER +RSX +MIGRATE +MILLER +Password1234 +mlusr +MMO2 +dottie +YES +hello +monitor +MOREAU +mountfs +mountfsys +mountsys +MSHOME +w0rkplac3rul3s +MTSSYS +MTS_PASSWORD +MTYSYS +Muse!Admin +Musi%1921 +Musii%1921 +MXAGENT +shakeme +naadmin +guardone +NAMES +nao +NAU +ncrm +netbotz +netlink +password +netman +netopia +attack +netscreen +NETWORK +news +wampp +123456 +NICONEX +nm2user +nmspw +nobody +0 +4321 +admin +blank +none +private +sysadm +12345 +123454 +nsa +OAS_PUBLIC +OCITEST +MTRPW +ODM +ODSCOMMON +ods +ODS +OEMADM +OEMREP +OE +OLAPDBA +INSTANCE +MANAGER +ORACLE +12345 +OO +openhabian +OPENSPIRIT +OPERATIONS +OPERATNS +admin 
+$chwarzepumpe +COGNOS +DISC +mercury +operator +Operator +SUPPORT +SYS +SYSTEM +Oper +OPER +op +operator +oracle +ORAREGSYS +ORASSO +ORDPLUGINS +ORDSYS +osbash +osboxes.org +osmc +OSP22 +OUTLN +overseer +OWA +OWA_PUBLIC +OWNER +NetServer +PANAMA +patrol +PATROL +PBX +SYS +pepino +PERFSTAT +240653C9467E45 +piranha +q +raspberry +PLEX +rasplex +NetServer +SUPERSECRET +PM +pnadmin +PO7 +PO8 +correct +tech +SpIp +PO +PORTAL30_DEMO +PORTAL30 +PORTAL31 +PORTAL30_PUBLIC +PORTAL30_SSO +PORTAL30_SSO_PS +PORTAL30_SSO_PUBLIC +BASE +postmast +POST +POWERCARTUSER +APC +powerdown +perfectpraise +PRIMARY +primenet +primeos +prime +primeos +prime +primeos +prime +primos +PRINTER +PRINT +PRODCICS +PRODDTA +PROG +prtgadmin +$secure$ +publicpass +PUBSUB1 +PUBSUB +pwpw +pwrchute +deeplearning +hexakisoctahedron +QDBA +qpgmr +QS_ADM +QS_CBADM +QS_CB +QS_CS +11111111 +22222222 +qsecofr +qserv +QS_ES +QS_OS +QS +11111111 +22222222 +qsrvbas +qsrv +QSRV +ibmcel +qsvr +QS_WS +qsysopr +quser +radware +hpt +r@p8p0r+ +rcustpw +rdc123 +apc +lucenttech2 +synnet +lucenttech1 +recover +redline +malware +REPADMIN +replication-receiver +iscopy +replicator +DEMO +OEM_TEMP +DEMO +REP_OWNER +RE +restoreonly1 +rje +RMAIL +RMAN +password +NetServer +Rodopi +role1 +tomcat +changethis +00000000 +1001chin +1111 +1234 +12345 +123456 +20080826 +3ep5w2u +54321 +5up +666666 +7ujMko0admin +7ujMko0vizxv +888888 +8RttoTriz +admin +ahetzip8 +alpine +anko +anni2013 +arcsight +ascend +attack +ax400 +bagabu +blablabla +blackarch +blender +brightmail +calvin +cat1029 +ceadmin +changeme +changeonfirstlogin +changethis +china123 +Cisco +ciwuxe +cms500 +cubox-i +cxlinux +D13HH[ +dasdec1 +davox +debian +default +dottie +dreambox +fai +fibranne +fidel123 +freenas +ggdaseuaimhrke +GM8182 +hi3518 +hp +ikwb +indigo +juantech +jvbzd +klv123 +klv1234 +kn1TG7psLu +leostream +libreelec +linux +logapp +manager +max2play +mozart +mpegvideo +Mua'dib +MuZhlo9n%8!G +nas4free +NeXT +NM1$88 +nokia +nosoup4u +nsi +oelinux123 +openelec 
+openmediavault +orion99 +osboxes.org +palosanto +par0t +pass +passw0rd +password +p@ck3tf3nc3 +pixmet2003 +plex +qwasyx21 +rasplex +realtek +resumix +root +!root +ROOT +root01 +ROOT500 +rootme +rootpasswd +screencast +secur4u +Serv4EMC +sipwise +sixaola +stxadmin +sun123 +system +t00lk1t +t0talc0ntr0l4! +TANDBERG +timeserver +toor +tslinux +ubnt +ubuntu1404 +uClinux +unitrends1 +user +vagrant +vertex25 +video +vizxv +wago +wyse +xc3511 +xmhdipc +xoa +ys123456 +zlxx +zlxx. +Zte521 +ro +SYS +rwa +rw +changeonfirstlogin +SAMPLE +forensics +training +06071992 +7061992 +PASSWORD +admin +ADMIN +PASS +SAP +SAPR3 +sasasa +crash +scmchangeme +12345 +TIGER +SDOS_ICSAP +SECDEMO +secofr +security +secacm +system +SERVICECONSUMER1 +service +Service +smile +manager +system +changeme +changeme! +setup +SH +shutdown +signa +siteadmin +toplayer +SITEMINDER +SLIDEPW +smcadmin +SECONDARY +nopasswd +snmp +0000 +HPOFFICE +$SRV +ssladmin +ssp +stackato +STARTER +readonly +stratauser +STRAT_PASSWD +100198 +5777364 +(caclulated) +admin +juniper123 +21241036 +talent +super +master +superpass +surt +123456 +admin +ANS#150 +asante +kronites +superuser +HARRIS +NETFRAME +NF +NFI +PlsChgMe! 
+supervisor +SUPERVISOR +SYSTEM +visor +h179350 +support +supportpw +symantec +super +mysweex +SWPRO +SWUSER +Symbol +SYMPA +sync +admin +Admin +admpw +master +nortel +password +sysadmin +sysadm +SYSADM +sysadmpw +syspw +SYSA +bin +sysbin +change_on_install +CHANGE_ON_INSTALL +masterkey +D_SYSPW +oem_temp +OEM_TEMP +sysopr +Sysop +sys +system +adminpwd +system_admin +D_SYSTPW +isp +manager +MANAGER +sys/change_on_install +mnet +password +prime +security +sys +system +weblogic +uplink +Trintech +TAHITI +password +tasmannet +Tasmannet +TDOS_ICSAP +password +field +nician +yZgO8Bvj +tech +telecom +Telecom +tele +tellabs#1 +password +TESTPILOT +test +TEST +tiger123 +changethis +tomcat +logapp +password +password +password +TRACE +TRACE +TRAVEL +trmcnfg +trouble +TSDEV +TSUSER +TURBINE +ubnt +ucenik +ULTIMATE +umountfs +umountfsys +umountsys +unix +USER0 +1001 +1234 +19750407 +USER1 +USER2 +USER3 +USER4 +USER5 +USER6 +USER7 +USER8 +USER9 +demo +demo +demo +demo +demo +demo +demo +PASSW0RD +PASSWORD +Janitza +demo +password +password +Password +none +userNotU +password +Password +demo +public +demo +USER_TEMPLATE +tivonpw +user +user +User +USER +user0000 +USERP +UTLESTAT +uucpadm +uucp +password +vagrant +VCSRV +12871 +vgnadmin +viewuser1 +VIF_DEV_PWD +singh +VIRUSER +winterm +volition +vpasp +VRR1 +VTAM +WANGTEK +1234 +Admin +webadmin +WebBoard +webibm +password +WEBCAL01 +webdb +WEBDB +1 +weblogic +webmaster +WEBREAD +webshieldchangeme +web +whd +WINDOWS_PASSTHRU +SABRE +WINSABRE +WKSYS +wlcsystem +wlpisystem +wlsepassword +wlsedb +HPOFFICE +wpsadmin +trancell +private +synnet +eomjbOBLLwbZeiKV +WWWUSER +www +WWW +xd +sap123 +XPRT +X#1833 diff --git a/subset/security/password/resources/default/usernames.txt b/subset/security/password/resources/default/usernames.txt new file mode 100644 index 0000000000..cd9867b8cb --- /dev/null +++ b/subset/security/password/resources/default/usernames.txt @@ -0,0 +1,1271 @@ +11111111 +11111 +123 +1234 +22222222 +**23646 +**266344 
+266344 +2800 +31994 +666666 +7654321 +880175445 +888888 +acer +acitoolkit +Adam +ADAMS +ADLDEMO +adm +admin +admin +admin +admin +admin +admin +admin +admin +admin +Admin +admin +Admin +admin +Admin +Admin1 +admin1 +admin +admin +admin2 +admin +Admin +admin +admin +admin +admin +Admin +Admin +ADMIN +ADMIN +admin +Admin +admin +admin +admin +admin +admin +admin +admin +admin +ADMIN +admin +admin +admin +admin +admin +Admin +admin +admin +admin +admin +admin +Admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +Admin +admin +Admin +admin@example.com +admin +admin +admin +admin +admin +admin +admin +Admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +Administrator +administrator +Administrator +Administrator +Administrator +ADMINISTRATOR +administrator +Administrator +ADMINISTRATOR +administrator +administrator +Administrator +Administrator +Administrator +Administrator +Administrator +Administrator +administrator +Administrator +administrator +Administrator +Administrator +administrator +administrator +Administrator +Administrator +Administrator +Administrator +Administrator +admin +admin +admin +ADMIN +admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +Admin +admin +admin +admin +admin +admin +admin +Admin +admin +Admin +ADMIN +admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +Admin +admin +admin +admin +admin +admin +admin +adminstat +adminstrator +Admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +admin +adminttd +admin +adminuser +admin +adminview +admin +Admin +admin +ADMIN +admin +admin +admin +admin +adsl +adtec +ADVMAIL +Alphanetworks +Alphanetworks +Alphanetworks +Alphanetworks +Alphanetworks +Alphanetworks +Alphanetworks +Alphanetworks +Alphanetworks +Alphanetworks +Alphanetworks +Alphanetworks +Alphanetworks 
+Alphanetworks +amx +amx +ANDY +anon +anonymous +anonymous +anonymous +anonymous +anonymous +Any +Any +(any) +AP +aparker@geometrixx.info +apc +APPLSYS +APPLSYS +APPLSYSPUB +APPS +APPUSER +AQ +AQDEMO +AQJAVA +AQUSER +ARAdmin +ARCHIVIST +AUDIOUSER +AURORA@ORB@UNAUTHENTICATED +AURORA$ORB$UNAUTHENTICATED +author +autocad +BACKUP +backuponly +backuprestore +basisk +Basisk +bbs +bbsd-client +bbsd-client +BC4J +bciim +bcim +bcms +bcnas +bewan +bin +Blaeri +BLAKE +blue +Bobo +both +bpel +BRIO_ADMIN +browse +browse +bubba +cablecom +cac_admin +CATALOG +c-comatic +ccrusr +CDEMO82 +CDEMOCOR +CDEMORID +CDEMOUCB +cellit +CENTRA +cgadmin +checkfs +checkfsys +checksys +CHEY_ARCHSVR +CICSUSER +CIDS +cirros +CIS +CISCO15 +cisco +Cisco +CISINFO +citel +CLARK +client +cloudera +cmaker +CMSBATCH +cn=orcladmin +Coco +comcast +COMPANY +COMPIERE +computer +conferencing +config +corecess +core +CQSCHEMAUSER +craft +craft +craft +Craft +(created) +(created) +crowd­-openid-­server +Crowd +CSG +CSMIG +ctb_admin +CTXDEMO +CTXSYS +cusadmin +cust +customer +dadmin +dadmin +daemon +davox +db2fenc1 +db2inst1 +dbase +DBA +DBDCCICS +DBI +DBSNMP +DDIC +debian +debian +debian +debug +debug +debug +d.e.b.u.g +default +default +default +default +default +default +defug +DEMO8 +DEMO9 +demo +DEMO +demo +Demo +demos +DES +deskalt +deskman +desknorm +deskres +DEV2000_DEMOS +dev +Developer +device +device +diag +diag +DIP +DISCOVERER_ADMIN +distrib +disttech +disttech +disttech +D-Link +dm +dni +dos +dpn +draytek +Draytek +DSGATEWAY +DSL +DSSYS +DTA +dvstation +eagle +EARLYWATCH +echo +echo +egcr +EJSADMIN +elk_user +emaq +EMP +enable +eng +engmode +enisa +enquiry +ESTOREUSER +eurek +EVENT +EXFSYS +expert +Factory +factory +fal +fam +fastwire +fax +FAX +FAXUSER +FAXWORKS +fg_sysadmin +field +FIELD +FIELD +FIELD +FIELD +FIELD +field +FIELD +FINANCE +firstsite +Flo +FND +FORSE +FROSTY +ftp_admi +ftp +ftp_inst +ftp_nmc +ftp_oper +ftpuser +ftp +fwadmin +fwupgrade +games +GATEWAY +Gearguy +GE +geosolutions 
+glftpd +GL +god1 +god2 +gopher +GPFD +GPLD +guest +guest +guest1 +guest1 +guest +Guest +Guest +GUEST +guest +GUEST +GUEST +guest +guest +GUEST +guest +guru +halt +HCPARK +HELLO +hello +HELLO +HELLO +HELLO +helpdesk +HLW +(hostname/ipaddress) +HPLASER +HPSupport +HR +hsa +hscroot +HTTP +hunter +hxeadm +ibm +ibm +ibm +IBMUSER +iclock +ilom-admin +ilom-operator +images +IMAGEUSER +IMEDIA +inads +inads +informix +init +installer +installer +install +install +intel +intermec +internal +IntraStack +IntraSwitch +ioFTPD +IS_$hostname +itsadmin +james +jdoe@geometrixx.info +JMUSER +Joe +joe +JONES +JWARD +keyscan +khan +kodi +l2 +L2LDEMO +l3 +LASER +LASERWRITER +LBACSYS +LDAP_Anonymous +leo +LIBRARIAN +Liebert +live +LocalAdministrator +localadmin +locate +login +login +login +login +lpadmin +lpadm +lp +lp +lp +LR-ISDN +lynx +m1122 +m202 +MAIL +mail +MAIL +MAIL +MAIL +MAIL +maintainer +maintainer +maint +MAINT +maint +maint +maint +maint +Manager +MANAGER +manager +MANAGER +MANAGER +manager +Manager +MANAGER +managers +MANAGER +MANAGER +MANAGER +man +manuf +mary +master +MASTER +master +MayGion +McdataSE +MCUser +MD110 +MDDEMO_CLERK +MDDEMO +MDDEMO_MGR +MDSYS +mediator +me +memotec +Menara +mfd +MFG +mg3500 +MGE +MGR +MGR +MGR +MGR +MGR +MGR +MGR +MGR +MGR +MGR +MGR +MGR +MGR +MGR +MGR +MGR +MGR +MGR +MGR +MGR +MGR +MGR +MGR +MGWUSER +MICRO +MIGRATE +MILLER +misp +mlusr +MMO2 +mobile +MODTEST +Moe +monitor +MOREAU +mountfs +mountfsys +mountsys +MSHOME +mso +MTSSYS +MTS_USER +MTYSYS +museadmin +musi1921 +musi1921 +MXAGENT +myshake +naadmin +n.a +NAMES +nao +NAU +ncrm +netbotz +netlink +NetLinx +netman +netopia +netrangr +netscreen +NETWORK +news +newuser +nexthink +NICONEX +nm2user +nms +nobody +none +none +none +none +none +none +none +nop +nop +NSA +OAS_PUBLIC +OCITEST +ODM_MTR +ODM +ODSCOMMON +ods +ODS +OEMADM +OEMREP +OE +OLAPDBA +OLAPSVR +OLAPSYS +OMWB_EMULATION +onlime_r +OO +openhabian +OPENSPIRIT +OPERATIONS +OPERATNS +operator +operator +OPERATOR +OPERATOR 
+operator +operator +Operator +OPERATOR +OPERATOR +OPERATOR +Oper +OPER +op +op +oracle +ORAREGSYS +ORASSO +ORDPLUGINS +ORDSYS +osbash +osboxes +osmc +OSP22 +OUTLN +overseer +OWA +OWA_PUBLIC +OWNER +PACSLinkIP +PANAMA +patrol +PATROL +PBX +PCUSER +pepino +PERFSTAT +PFCUser +piranha +piranha +pi +PLEX +plexuser +PLMIMService +PLSQL +PM +pnadmin +PO7 +PO8 +politically +poll +Polycom +PO +PORTAL30_DEMO +PORTAL30 +PORTAL30 +PORTAL30_PUBLIC +PORTAL30_SSO +PORTAL30_SSO_PS +PORTAL30_SSO_PUBLIC +POST +postmaster +POST +POWERCARTUSER +POWERCHUTE +powerdown +praisenetwork +PRIMARY +primenet +primenet +primeos +primeos +prime +prime +primos_cs +primos_cs +PRINTER +PRINT +PRODCICS +PRODDTA +PROG +prtgadmin +PSEAdmin +public +PUBSUB1 +PUBSUB +pw +pwrchute +pyimagesearch +qbf77101 +QDBA +qpgmr +QS_ADM +QS_CBADM +QS_CB +QS_CS +qsecofr +qsecofr +qsecofr +qserv +QS_ES +QS_OS +QS +QSRV +QSRV +qsrvbas +qsrv +QSRV +qsvr +qsvr +QS_WS +qsysopr +quser +radware +RAID +rapport +rcust +rdc123 +readonly +readonly +read +readwrite +recover +redline +remnux +REPADMIN +replication-receiver +Replicator +replicator +REP_MANAGER +REPORTS_USER +REP_OWNER +REP_OWNER +RE +restoreonly +rje +RMAIL +RMAN +RMUser1 +RNIServiceManager +Rodopi +role1 +role1 +role +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +ROOT +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root +root 
+Root +root +root +root +root +root +root +root +root +ro +RSBCMON +rwa +rw +sa +SAMPLE +sansforensics +sans +SAP* +SAP* +SA +SAPCPIC +SAPCPIC +SAP* +SAPR3 +SAP +sa +savelogs +scmadmin +sconsole +SCOTT +SDOS_ICSAP +SECDEMO +secofr +security +sedacm +self +SERVICECONSUMER1 +service +Service +service +servlet +setpriv +setup +setup +setup +SH +shutdown +signa +siteadmin +siteadmin +SITEMINDER +SLIDE +smc +SMDR +snmp +snmp +spcl +SPOOLMAN +$SRV +ssladmin +ssp +stackato +STARTER +status +stratacom +STRAT_USER +stuccoboy +super +__super +superdba +super +superman +superman +super +super.super +super +super +superuser +superuser +SUPERUSER +superuser +SuperUser +superuser +SUPERVISOR +SUPERVISOR +SUPERVISOR +SUPERVISOR +supervisor +supervisor +SUPERVISOR +SUPERVISOR +supervisor +support +support +support +support +su +sweex +SWPRO +SWUSER +Symbol +SYMPA +sync +sysadm +sysadm +sysadm +sysadmin +sysadmin +sysadmin +sysadmin +sysadm +SYSADM +sysadm +sysadm +SYSA +sys +sysbin +sys +SYS +SYSDBA +SYS +SYSMAN +SYSMAN +sysopr +Sysop +sys +sys +system +system_admin +SYSTEM +system +system +SYSTEM +system/manager +system +system +system +system +system +system +system +sys +t3admin +TAHITI +target +tasman +Tasman +TDOS_ICSAP +teacher +tech +tech +technician +tech +telecom +Telecom +tele +tellabs +temp1 +TESTPILOT +test +TEST +tiger +tomcat +tomcat +toor +topicalt +topicnorm +topicres +TRACESRV +TRACESVR +TRAVEL +trmcnfg +trouble +TSDEV +TSUSER +TURBINE +ubnt +ucenik23 +ULTIMATE +umountfs +umountfsys +umountsys +unix +USER0 +User +User +User +USER1 +USER2 +USER3 +USER4 +USER5 +USER6 +USER7 +USER8 +USER9 +user_analyst +user_approver +user_author +user_checker +user_designer +user_editor +user_expert +USERID +USERID +user +user_marketer +username +Username +Username +user +userNotUsed +user +User +user_pricer +user +user_publisher +USER_TEMPLATE +user +user +User +User +USER +user +user +UTLBSTATU +uucpadm +uucp +uwmadmin +vagrant +VCSRV +veda +vgnadmin +viewuser +VIF_DEVELOPER 
+vikram +VIRUSER +VNC +volition +vpasp +VRR1 +VTAM +WANGTEK +webadmin +WebAdmin +webadmin +WebAdmin +webadmin +WEBADM +WEBCAL01 +webdb +WEBDB +webguest +weblogic +webmaster +WEBREAD +webshield +web +whd +WINDOWS_PASSTHRU +WINSABRE +WINSABRE +WKSYS +wlcsystem +wlpisystem +wlseuser +wlse +WP +wpsadmin +wradmin +write +write +wVQxyQec +WWWUSER +www +WWW +xd +xmi_demo +XPRT +XXSESS_MGRYY diff --git a/subset/security/password/resources/faux/dictionary.txt b/subset/security/password/resources/faux/dictionary.txt new file mode 100644 index 0000000000..8caf47c4a5 --- /dev/null +++ b/subset/security/password/resources/faux/dictionary.txt @@ -0,0 +1,4 @@ +:user:pass +:admin:default +:root:user +:default:pass \ No newline at end of file diff --git a/subset/security/password/resources/faux/passwords.txt b/subset/security/password/resources/faux/passwords.txt new file mode 100644 index 0000000000..d2cc81dc7d --- /dev/null +++ b/subset/security/password/resources/faux/passwords.txt @@ -0,0 +1,4 @@ +pass +default +user +pass \ No newline at end of file diff --git a/subset/security/password/resources/faux/usernames.txt b/subset/security/password/resources/faux/usernames.txt new file mode 100644 index 0000000000..5b38a1c32c --- /dev/null +++ b/subset/security/password/resources/faux/usernames.txt @@ -0,0 +1,4 @@ +user +admin +root +default \ No newline at end of file diff --git a/subset/security/password/resources/raw/manufacturer.csv b/subset/security/password/resources/raw/manufacturer.csv new file mode 100644 index 0000000000..5f1ac893c3 --- /dev/null +++ b/subset/security/password/resources/raw/manufacturer.csv @@ -0,0 +1,2850 @@ +Vendor,Username,Password,Comments +"2Wire, Inc.",http,, +360 Systems,factory,factory, +3COM,3comcso,RIP000,Resets all passwords to defaults +3COM,,12345, +3COM,,1234admin, +3COM,,, +3COM,,ANYCOM, +3COM,,ILMI, +3COM,,PASSWORD, +3COM,,admin, +3COM,,comcomcom, +3COM,,, +3COM,,PASSWORD, +3COM,,admin, +3COM,Admin,Admin, +3COM,Administrator,, 
+3COM,Administrator,admin, +3COM,Type User: FORCE,, +3COM,User,Password, +3COM,adm,, +3COM,admin,1234admin, +3COM,admin,, +3COM,admin,admin, +3COM,admin,comcomcom, +3COM,admin,password, +3COM,admin,synnet, +3COM,adminttd,adminttd, +3COM,debug,synnet, +3COM,defug,synnet, +3COM,manager,manager, +3COM,monitor,monitor, +3COM,none,admin, +3COM,read,synnet, +3COM,recover,recover,http://support.3com.com/infodeli/tools/switches/ss3/4900/dha1770-0aaa04/htm/support/problemsolving/cliproblems.htm +3COM,recovery,recovery,Unit must be powered off +3COM,root,!root,http://support.3com.com/infodeli/tools/remote/ocradsl/20/812_cli20.pdfhttp://support.3com.com/infodeli/tools/remote/ocremote/brouters/840/2sysadmin.htm +3COM,security,security, +3COM,tech,, +3COM,tech,tech, +3COM,write,synnet, +3M,VOL-0215,,http://multimedia.3m.com/mws/mediawebserver?6666660Zjcf6lVs6EVs666xa9COrrrrQ- +3M,volition,,http://multimedia.3m.com/mws/mediawebserver?6666660Zjcf6lVs6EVs666xa9COrrrrQ- +3M,volition,volition, +3ware,Administrator,3ware, +ACCTON,,0000, +ACCTON,__super,(caclulated),http://www.vettebak.nl/hak/ +ACCTON,admin,, +ACCTON,manager,manager, +ACCTON,monitor,monitor, +ACCTON,none,0, +ADC Kentrox,,secret, +ADC Kentrox,,secret, +ADIC,admin,password, +ADIC,admin,secure, +ADP,sysadmin,master, +ADT,,2580,http://krebsonsecurity.com/2013/01/does-your-alarm-have-a-default-duress-code/ +ADTRAN,admin,password, +AIRAYA Corp,Airaya,Airaya,http://www.airaya.com/support/guides/WirelessGRID-Manual_O.pdf +ALLNET,admin,admin, +ALLNET,admin,password, +ALLNET,none,admin, +AMI,,A.M.I, +AMI,,AM, +AMI,,AMI, +AMI,,AMI!SW, +AMI,,AMI.KEY, +AMI,,AMI.KEZ, +AMI,,AMI?SW, +AMI,,AMIPSWD, +AMI,,AMISETUP, +AMI,,AMI_SW, +AMI,,AMI~, +AMI,,BIOSPASS, +AMI,,CMOSPWD, +AMI,,HEWITT RAND, +AMI,,aammii, +AMI,,A.M.I, +AMI,,AM, +AMI,,AMI, +AMI,,AMI!SW, +AMI,,AMI.KEY, +AMI,,AMI.KEZ, +AMI,,AMI?SW, +AMI,,AMIAMI, +AMI,,AMIDECOD, +AMI,,AMIPSWD, +AMI,,AMISETUP, +AMI,,AMI_SW, +AMI,,AMI~, +AMI,,BIOSPASS, +AMI,,HEWITT RAND, +AMI,,aammii, 
+AMX,,1988,http://www.amx.com/techsupport/PDFs/981.pdf +AMX,,,http://www.amx.com/techsupport/PDFs/981.pdf +AMX,,admin,http://www.amx.com/techsupport/PDFs/981.pdf +AMX,Admin,1988,http://www.amx.com/techsupport/PDFs/981.pdf +AMX,Administrator,vision2,http://www.amx.com/techsupport/PDFs/981.pdf +AMX,NetLinx,password,http://www.amx.com/techsupport/PDFs/981.pdf +AMX,admin,1988,http://www.amx.com/techsupport/PDFs/981.pdf +AMX,admin,admin,http://www.amx.com/techsupport/PDFs/981.pdf +AMX,administrator,password,http://www.amx.com/techsupport/PDFs/981.pdf +AMX,guest,guest,http://www.amx.com/techsupport/PDFs/981.pdf +AMX,root,mozart,http://www.amx.com/techsupport/PDFs/981.pdf +AOC,,admin, +APACHE,admin,jboss4, +APC,(any),TENmanUFactOryPOWER, +APC,,serial number of the Call-UPS, +APC,,serial number of the Share-UPS, +APC,,TENmanUFactOryPOWER, +APC,,backdoor, +APC,POWERCHUTE,APC, +APC,apc,apc, +APC,device,apc,https://www.jlab.org/Hall-D/Documents/manuals/APC%20stuff/AP9630%209631%20UPS%20Network%20Management%20Card%202%20User's%20Guide%20firmware%20V5.1.1.pdf +APC,device,device, +APC,readonly,apc,https://www.jlab.org/Hall-D/Documents/manuals/APC%20stuff/AP9630%209631%20UPS%20Network%20Management%20Card%202%20User's%20Guide%20firmware%20V5.1.1.pdf +ARtem,,admin, +ASMAX,admin,epicrouter, +AST,,SnuFG5, +AST,,SnuFG5, +AT&T,,mcp, +ATL,Service,5678,Tape Library Service Access +ATL,operator,1234,Tape Library Operator Access +AVM,,0, +AVM,,, +AWARD,,1322222, +AWARD,,256256, +AWARD,,589589, +AWARD,,589721, +AWARD,,, +AWARD,,?award, +AWARD,,AWARD SW, +AWARD,,AWARD?SW, +AWARD,,AWARD_PW, +AWARD,,AWARD_SW, +AWARD,,Award, +AWARD,,BIOS, +AWARD,,CONCAT, +AWARD,,HELGA-S, +AWARD,,HEWITT RAND, +AWARD,,HLT, +AWARD,,PASSWORD, +AWARD,,SER, +AWARD,,SKY_FOX, +AWARD,,SWITCHES_SW, +AWARD,,SW_AWARD, +AWARD,,SZYX, +AWARD,,Sxyz, +AWARD,,TTPTHA, +AWARD,,TzqF, +AWARD,,ZAAADA, +AWARD,,aLLy, +AWARD,,aPAf, +AWARD,,admin, +AWARD,,alfarome, +AWARD,,award.sw, +AWARD,,award_?, +AWARD,,award_ps, +AWARD,,awkward, 
+AWARD,,biosstar, +AWARD,,biostar, +AWARD,,condo, +AWARD,,djonet, +AWARD,,efmukl, +AWARD,,g6PJ, +AWARD,,h6BB, +AWARD,,j09F, +AWARD,,j256, +AWARD,,j262, +AWARD,,j322, +AWARD,,j64, +AWARD,,lkw peter, +AWARD,,lkwpeter, +AWARD,,setup, +AWARD,,t0ch20x, +AWARD,,t0ch88, +AWARD,,wodj, +AWARD,,zbaaaca, +AWARD,,zjaaadc, +AXUS,,0,Storage DAS SATA to SCSI/FC +Accelerated Networks,sysadm,anicust, +Aceex,admin,, +Acer,,, +Actiontec,,, +Actiontec,admin,password,Verizon Fios Setup +AdComplete.com,Admin1,Admin1, +Adaptec,Administrator,adaptec, +AddPac Technology,root,router, +Addon,admin,admin, +Adobe,admin,admin,https://docs.adobe.com/docs/v5_2/html-resources/cq5_guide_power_user/ch07s02.html#sect_default_users_and_groups +Adobe,anonymous,anonymous,http://resources.infosecinstitute.com/adobe-cq-pentesting-guide-part-1/ +Adobe,aparker@geometrixx.info,aparker,http://resources.infosecinstitute.com/adobe-cq-pentesting-guide-part-1/ +Adobe,author,author,https://docs.adobe.com/docs/v5_2/html-resources/cq5_guide_power_user/ch07s02.html#sect_default_users_and_groups +Adobe,jdoe@geometrixx.info,jdoe,http://resources.infosecinstitute.com/adobe-cq-pentesting-guide-part-1/ +Adobe,replication-receiver,replication-receiver,http://resources.infosecinstitute.com/adobe-cq-pentesting-guide-part-1/ +Adobe,vgnadmin,vgnadmin,http://dev.day.com/content/docs/en/crx/connectors/vignette/current.html +Adtech,root,ax400, +Adtran,,adtran, +Adtran,admin,password,http://www.adtran.com/pub/Library/Quick_Start_Guides/Public_View/NetVanta%203430%20Quick%20Start%20Guide.pdf +Advanced Integration,,Advance, +Advanced Integration,,Advance, +Advantek Networks,admin,, +Aethra,admin,password, +AirLink Plus,,admin, +AirTies RT-210,admin,admin, +Airlink,,admin, +Aironet,,, +Airway,,0000, +Aladdin,root,kn1TG7psLu, +Alcatel,,,http://www.speedtouch.com/support.htm +Alcatel,,admin, +Alcatel,,1064, +Alcatel,SUPERUSER,ANS#150, +Alcatel Thomson,admin,admin, +Alcatel,adfexc,adfexc,thanks to Nicolas Gregoire +Alcatel,admin,switch, 
+Alcatel,at4400,at4400,thanks to Nicolas Gregoire +Alcatel,client,client, +Alcatel,dhs3mt,dhs3mt,thanks to Nicolas Gregoire +Alcatel,dhs3pms,dhs3pms,thanks to Nicolas Gregoire +Alcatel,diag,switch, +Alcatel,ftp_admi,kilo1987, +Alcatel,ftp_inst,pbxk1064, +Alcatel,ftp_nmc,tuxalize, +Alcatel,ftp_oper,help1954, +Alcatel,halt,tlah,thanks to Nicolas Gregoire +Alcatel,install,llatsni,thanks to Nicolas Gregoire +Alcatel,kermit,kermit,thanks to Nicolas Gregoire +Alcatel,mtch,mtch,thanks to Nicolas Gregoire +Alcatel,mtcl,, +Alcatel,mtcl,mtcl,thanks to Nicolas Gregoire +Alcatel,root,letacla,thanks to Nicolas Gregoire +Alcatel,root,permit,Perm/Config port 38036 +Alcatel,superuser,superuser, +Alien Technology,alien,alien,http://seclists.org/fulldisclosure/2010/May/63 +Alien Technology,root,alien,http://seclists.org/fulldisclosure/2010/May/63 +Allied Telesyn,,manager, +Allied Telesyn,,admin, +Allied Telesyn,admin,, +Allied Telesyn,manager,admin, +Allied Telesyn,manager,friend, +Allied Telesyn,manager,manager, +Allied Telesyn,root,, +Allied Telesyn,secoff,secoff, +Allnet,admin,admin,http://www.allnet.de/ +Allot,admin,allot, +Allot,root,bagabu, +Alteon,admin,, +Alteon,admin,admin, +Alteon,admin,linga, +Ambit,root,, +Ambit,root,root, +Ambit,user,user, +Amigo,admin,epicrouter, +Amino,,leaves,http://www.vsicam.com/files/documents/AmiNet/AmiNet_and_AVN_Configuration_Manual.pdf +Amino,,snake,http://www.vsicam.com/files/documents/AmiNet/AmiNet_and_AVN_Configuration_Manual.pdf +Amitech,admin,admin, +AmpJuke,admin,pass, +Amptron,,Polrty, +Amptron,,Polrty, +Andover Controls,acc,acc, +Apache Project,jj,, +Apache,admin,, +Apache,admin,admin, +Apache,admin,j5Brn9, +Apache,admin,tomcat, +Apache,both,tomcat, +Apache,role,changethis, +Apache,role1,role1, +Apache,role1,tomcat, +Apache,root,changethis, +Apache,root,root, +Apache,tomcat,changethis, +Apache,tomcat,tomcat, +Apple,,public,See Apple article number 58613 +Apple,,xyzzy, +Apple,,admin,see Apple article number 107518 +Apple,,password,See 
Apple article number 106597 +Apple Computer,,public, +Apple Computer,,xyzzy, +Apple,admin,public, +Apple,mobile,dottie, +Apple,root,admin, +Apple,root,alpine, +Applied Innovations,scout,scout, +Areca,admin,0, +Arescom,,atc123, +Arlotto,admin,123456, +Arris,admin,password, +Arrowpoint,,, +Arrowpoint,admin,system, +Aruba,admin,admin, +Arun,123,234, +Asante,IntraStack,Asante, +Asante,IntraSwitch,Asante, +Asante,admin,asante, +Asante,superuser,, +Asante,superuser,asante, +Ascend,,ascend, +Ascend,,ascend, +Ascend,readonly,lucenttech2, +Ascend,readwrite,lucenttech1, +Ascend,root,ascend, +Ascom,,3ascotel, +Aspect,DTA,TJM, +Aspect,customer,none, +Asus,,admin, +Asus,admin,admin, +Asus,adsl,adsl1234, +Atlantis,admin,atlantis, +Atlassian,Crowd,password,http://www.commandfive.com/papers/C5_TA_2013_3925_AtlassianCrowd.pdf +Atlassian,Demo,password,http://www.commandfive.com/papers/C5_TA_2013_3925_AtlassianCrowd.pdf +Atlassian,Username,password,http://www.commandfive.com/papers/C5_TA_2013_3925_AtlassianCrowd.pdf +Atlassian,crowd­-openid-­server,password,http://www.commandfive.com/papers/C5_TA_2013_3925_AtlassianCrowd.pdf +Attachmate,,PASSWORD, +Audioactive,,telos, +Autodesk,autocad,autocad, +Avaya,,Craftr4, +Avaya,,, +Avaya,,admin, +Avaya,Administrator,ggdaseuaimhrke, +Avaya,Craft,crftpw, +Avaya,admin,admin,https://downloads.avaya.com/css/P8/documents/100173462 +Avaya,admin,admin123, +Avaya,admin,barney, +Avaya,admin,password,https://downloads.avaya.com/css/P8/documents/100181785 +Avaya,craft,, +Avaya,craft,crftpw, +Avaya,dadmin,dadmin, +Avaya,dadmin,dadmin01, +Avaya,diag,danger, +Avaya,manuf,xxyyzz, +Avaya,root,ROOT500, +Avaya,root,cms500, +Avaya,root,ggdaseuaimhrke, +Avaya,root,root, +Avenger News System (ANS),,Administrative, +Avocent,root,tslinux, +Award,,lkwpeter, +Award,,1322222, +Award,,256256, +Award,,?award, +Award,,AWARD_SW, +Award,,BIOS, +Award,,CONCAT, +Award,,CONDO, +Award,,HELGA-S, +Award,,HEWITT RAND, +Award,,HLT, +Award,,PASSWORD, +Award,,SER, +Award,,SKY_FOX, 
+Award,,SWITCHES_SW, +Award,,SY_MB, +Award,,SZYX, +Award,,Sxyz, +Award,,TTPTHA, +Award,,TzqF, +Award,,aLLy, +Award,,aPAf, +Award,,admin, +Award,,alfarome, +Award,,award, +Award,,awkward, +Award,,biosstar, +Award,,biostar, +Award,,g6PJ, +Award,,h6BB, +Award,,j09F, +Award,,j256, +Award,,j262, +Award,,j322, +Award,,j64, +Award,,lkw peter, +Award,,lkwpeter, +Award,,setup, +Award,,t0ch20x, +Award,,t0ch88, +Award,,wodj, +Award,,zbaaaca, +Axis,,, +Axis Communications,root,pass, +Axis,root,pass, +Axway,setup,setup,https://cdn.axway.com/u/documentation/secure_transport/5.3.0/SecureTransport_GettingStartedGuide_allOS_en.pdf +Aztech,admin,admin, +Aztech,isp,isp,backdoor � not in all f/w versions +Aztech,root,admin, +BBR-4MG and,root,, +BEA,system,weblogic, +BECU,musi1921,Musii%1921, +BLACKBOX,Administrator,public, +BMC Software,Administrator,the same all over, +BMC Software,Best1_User,BackupU$r,http://krebsonsecurity.com/2014/01/new-clues-in-the-target-breach/ +BMC,patrol,patrol, +BNI,USER,USER, +BT,admin,admin, +"Barco, Inc.",,clickshare,https://www.barco.com/tde/(2331390682231610)/R5900004/08/Barco_InstallationManual_R5900004_08__ClickShare-CSC-1-Installation-Guide.pdf +"Barco, Inc.",admin,admin,https://www.barco.com/tde/(2331390682231610)/R5900004/08/Barco_InstallationManual_R5900004_08__ClickShare-CSC-1-Installation-Guide.pdf +Barracuda,admin,admin,http://www.barracudanetworks.com/ns/downloads/Quick_Start_Guides/QSG_Barracuda_SSLVPN_US.pdf +Barracuda,ssladmin,ssladmin,http://www.barracudanetworks.com/ns/downloads/Quick_Start_Guides/QSG_Barracuda_SSLVPN_US.pdf +Bausch Datacom,admin,epicrouter, +Bay Networks,,NetICs, +Bay Networks,,NetICs, +Bay Networks,Manager,, +Bay Networks,User,, +Bay Networks,security,security, +Beetel,admin,admin, +Beetel,admin,password, +Belkin,,MiniAP, +Belkin,,admin, +Belkin,admin,none, +Benq,admin,admin, +Best Practical Solutions,root,password,http://requesttracker.wikia.com/wiki/RecoverRootPassword +BestPractical,root,password, 
+Bewan,bewan,bewan, +Billion,,, +Billion,admin,admin, +BinTec,,snmp-Trap,by rootkid +BinTec,Admin,No, +BinTec,admin,bintec, +Bintec,admin,bintec, +Bintec,admin,funkwerk, +Biodata,,Babylon, +Biodata,config,biodata, +Biostar,,Biostar, +Biostar,,Q54arwms, +Biostar,,Biostar, +Biostar,,Q54arwms, +Biscom,admin,admin,http://ticsoftware.helpserve.com/Knowledgebase/Article/GetAttachment/48/40 +BizDesign,Admin,ImageFolio, +Black Widow Web Design Ltd,admin,nimda, +Blaeri,Blaeri,22332323, +Blitzz Technologies,admin,admin, +Blue Coat Systems,admin,articon, +Bluecoat,admin,admin, +Bomgar,admin,password, +Borland,,, +Borland,politically,correct, +Bosch,live,live, +Bosch,service,service, +Bosch,user,user, +Breezecom,,, +Breezecom,,Helpdesk, +Breezecom,,Master, +Breezecom,,Super, +Breezecom,,laflaf, +Breezecom,,Helpdesk, +Breezecom,,Master, +Breezecom,,Super, +Breezecom,,laflaf, +Broadlogic,admin,admin, +Broadlogic,installer,installer, +Broadlogic,webadmin,webadmin, +Brocade,admin,brocade1, +Brocade,admin,password, +Brocade,factory,Fact4EMC, +Brocade,root,Serv4EMC, +Brocade,root,fibranne, +Brocade,root,fivranne,by Nicolas Gregoire +Brocade,user,password,Also on other SAN equipment +Brother,,access, +Brother,,access, +Brother Industries Ltd.,,00000000, +Brother Industries Ltd.,,12345678, +Brother Industries Ltd.,admin,access, +Brother,admin,access, +Buffalo Technology,admin,password, +Buffalo,root,, +Buffalo/MELCO,root,, +Busybox,admin,admin, +CA Process Automation,pamadmin,pamadmin, +CGI World,,protection, +CNET,admin,1234, +CNet,Admin,admin, +COM3,admin,admin, +CTX International,,CTX_123, +CTX International,,CTX_123, +Cable And Wireless,admin,1234, +Cabletron,,, +Cabletron,netman,, +Canon/Brother,7654321,7654321, +Canon,,0, +Capricorn Infotech India,,1234567890,http://www.isecurity.info/downloads/eToken_Basic_Operation_Guide_1.0.pdf +CareStream Health,KeyOperator,DV5800,http://www.spectrumxray.com/sites/default/files/pdfs/Carestream-DryView-5800-5850.pdf +CareStream 
Health,LocalService,DV5800,http://www.spectrumxray.com/sites/default/files/pdfs/Carestream-DryView-5800-5850.pdf +Carsten Schmitz,admin,password,http://docs.limesurvey.org/Installation&structure=English+Instructions+for+LimeSurvey#Connect_to_the_administration_script_for_the_first_time +Cayman,,, +Cayman,admin,(serial number), +Cayman,admin,, +Cayman,},, +Celerity,mediator,mediator, +Celerity,root,Mua'dib, +Cellit,cellit,cellit, +Ceragon Networks,root,tooridu,http://www.kb.cert.org/vuls/id/936356 +Chase Research,,iolan, +Check Point,admin,admin, +Check Point,admin,adminadmin, +Checkpoint,admin,abc123, +Checkpoint,admin,admin, +Chuming Chen,administrator,adminpass, +CipherTrust,admin,password, +Ciphertrust,admin,password, +Cisco, EAdmin,, +Cisco, UAMIS_,, +Cisco, UNITY_,, +Cisco, UOMNI_,, +Cisco, UVPIM_,, +Cisco,,, +Cisco,,Cisco, +Cisco,,Cisco router, +Cisco,,_Cisco, +Cisco,,c, +Cisco,,cc, +Cisco,,changeit,http://160.78.48.20/vpn/software/How_to_use_Webvpn_with_Citrix_Metaframe.pdf +Cisco,,cisco, +Cisco,,letmein, +Cisco,,public/private/secret, +Cisco,,riverhead,http://www.cisco.com/en/US/products/ps5888/prod_release_note09186a0080237333.html +Cisco,,Cisco router, +Cisco,,ILMI, +Cisco,,c, +Cisco,,cable-docsis, +Cisco,,cc, +Cisco,,cisco, +Cisco,Administrator,admin, +Cisco,Administrator,changeme, +Cisco,CISCO15,otbu+1, +Cisco,Cisco,Cisco, +Cisco,ESubscriber,, +Cisco,End User,7936, +Cisco,admin,, +Cisco,admin,admin, +Cisco,admin,changeme, +Cisco,admin,cisco, +Cisco,admin,default, +Cisco,admin,diamond, +Cisco,admin,tsunami, +Cisco,bbsd-client,NULL, +Cisco,bbsd-client,changeme2, +Cisco,bubba,(unknown), +Cisco,cisco,, +Cisco,cisco,cisco, +Cisco,cmaker,cmaker, +Cisco,enable,, +Cisco,enable,cisco, +Cisco,guest,, +Cisco,hsa,hsadb, +Cisco,hsa,hsadb , +Cisco,netrangr,attack, +Cisco,pnadmin,pnadmin, +Cisco,praisenetwork,perfectpraise, +Cisco,private ReadWrite access,secret, +Cisco,public ReadOnly access,secret, +Cisco,ripeop,, +Cisco,root ,attack, +Cisco,root,Cisco, 
+Cisco,root,attack, +Cisco,root,blender, +Cisco,root,password,Added by DPL admin. From +Cisco,root,secur4u,http://www.cisco.com/c/en/us/td/docs/security/physical_security/video_surveillance/network/vsm/6_3/user_guide/cvsm_6_3/overview.html#wp1035089 +Cisco,sa,, +Cisco,technician,2 + last 4 of Audio, +Cisco,uwmadmin,password,http://www.cisco.com/c/en/us/td/docs/cloud_services/cisco_modeling_labs/v100/installation/guide/administrator/b_cml_install_sys_admin/b_cml_install_sys_admin_chapter_0111.html +Cisco,wlse,wlsedb, +Cisco,wlseuser,wlsepassword, +Cisco-Arrowpoint,admin,system, +Citel,,citel, +Citel,citel,password, +"Citrix Systems, Inc.",nsroot,nsroot,http://support.citrix.com/proddocs/topic/access-gateway-hig-appliances/ag-using-setup-wizard-tsk.html +"Citrix Systems, Inc.",root,rootadmin, +Claris,,familymacintosh, +ClearOne Communications,ClearOne,RAV,http://www.clearone.com/uploads/resource/800_153_560_Rev1_0_Converge560_590Man-0.pdf +ClearOne Communications,clearone,converge,"http://pdf.textfiles.com/manuals/STARINMANUALS/ClearOne/Manuals/Archive/ConvergePro%208i,%20840T,%20880,%20TH20%20v0.91.pdf" +Cnet,Admin,epicrouter, +Cnet,admin,password, +Cobalt,admin,admin, +Colubris Networks,admin,admin, +Colubris,admin,admin, +Comcast Home Networking,comcast,, +Comcast SMC,cusadmin,highspeed, +Comcast SMC,cusadmin,CantTouchThis, +Comersus,admin,dmr99, +"Comodo Group, Inc",mydlp,mydlp,https://www.mydlp.com/wp-content/uploads/Comodo_MyDLP_Admin_guide.pdf +Compaq,,,use ALT+G at boot to reset config +Compaq,,Compaq, +Compaq,,Compaq, +Compaq,PFCUser,240653C9467E45, +Compaq,administrator,administrator, +Compaq,anonymous,, +Compaq,operator,operator, +Compaq,root,manager, +Compaq,root,rootme, +Compaq,user,public, +Compaq,user,user, +Compualynx,administrator,asecret, +Comtrend,admin,1234, +Comtrend,admin,, +Comtrend,admin,admin, +Conceptronic,admin,1234, +Conceptronic,admin,password, +Conceptronic,anonymous,password, +Concord,,last, +Concord,,last, +Conexant,,admin, 
+Conexant,,epicrouter, +Conexant,Administrator,admin, +Conexant,admin,amigosw1, +Conexant,admin,conexant, +Conexant,admin,epicrouter, +Conexant,admin,password, +Conitec,Adam,29111991, +Control4,,ducati900ss,"http://www.digitalmunition.com/_/Blog/Entries/2010/10/13_Control4_gear_is_umm..._backdoored,_I_guess.html" +Control4,root,t0talc0ntr0l4!,"http://www.digitalmunition.com/_/Blog/Entries/2010/10/13_Control4_gear_is_umm..._backdoored,_I_guess.html" +Corecess,Administrator,admin, +Corecess,admin,, +Corecess,corecess,corecess, +CoronaMatrix,admin,admin, +Covertix,Admin,Admin,http://www.kagoon.com/smartcipher-installation-guide-docx/main +Creative,,, +Crossbeam,,x40rocks,At the LILO boot prompt type CTC +Crystalview,,Crystal, +CyberMax,,Congress, +CyberMax,,Congress, +Cyberguard,cgadmin,cgadmin, +Cyclades,root,, +Cyclades,root,tslinux, +Cyclades,super,surt, +D-Link,,admin, +D-Link,,private, +D-Link,,public, +D-Link,,admin, +D-Link,Admin,, +D-Link,Alphanetworks,wrgg15_di524, +D-Link,D-Link,D-Link, +D-Link,admin,, +D-Link,admin,admin, +D-Link,admin,gvt12345, +D-Link,admin,none, +D-Link,admin,password,hardcoded for Verizon FiOS +D-Link,admin,public, +D-Link,admin,year2000, +D-Link,dont need one,admin, +D-Link,none,none, +D-Link,none,private, +D-Link,root,admin, +D-Link,user,,by rootkid +D-Link,user,none, +D9287ar,Clarissa,, +DIGICOM,root,admin, +DVB,dvstation,dvst10n, +DVB,root,pixmet2003, +Daewoo,,Daewuu, +Daewoo,,Daewuu, +Dallas Semiconductors,root,tini, +Dassault Systemes,Test Everything,,http://public.dhe.ibm.com/partnerworld/pub/whitepaper/193d6.pdf +Data General,op,op, +Data General,op,operator, +Data General,operator,operator, +DataWizard Technologies Inc.,anonymous,, +DataWizard Technologies Inc.,test,test, +Datacom,,letmein, +Datacom,,letmein, +Datacom,sysadm,sysadm, +Datawizard.net,anonymous,any, +Datawizard.net,anonymous,any@, +Davolink,user,user, +Davox,admin,admin, +Davox,davox,davox, +Davox,root,davox, +Davox,sa,, +Daytek,,Daytec, +Daytek,,Daytec, 
+Debian,,tatercounter2000, +Deerfield,MDaemon,MServer,web interface to manage MDaemon. fixed June 2002 +Dell,,Dell, +Dell,,Fireport,http://www.vennercorp.com/blog/2014/09/08/what-are-the-default-wyse-admin-passwords/ +Dell,,nz0u4bbe, +Dell,,1RRWTTOOI, +Dell,,Dell, +Dell,,admin, +Dell,Admin,,case sensitive username +Dell,Administrator,storageserver, +Dell,VNC,winterm,http://www.vennercorp.com/blog/2014/09/08/what-are-the-default-wyse-admin-passwords/ +Dell,admin,admin, +Dell,admin,password, +Dell,rapport,r@p8p0r+,http://www.vennercorp.com/blog/2014/09/08/what-are-the-default-wyse-admin-passwords/ +Dell,root,calvin, +Dell,root,wyse,http://www.vennercorp.com/blog/2014/09/08/what-are-the-default-wyse-admin-passwords/ +Demarc,admin,my_DEMARC, +Deutsche Telekom,,0, +Deutsche Telekom,admin,, +Develcon,,BRIDGE, +Develcon,,password, +Develcon,,BRIDGE, +Develcon,,password, +Dictaphone,NETOP,, +Dictaphone,NETWORK,NETWORK, +Dictaphone,PBX,PBX, +Digicom,admin,michelangelo, +Digicom,user,password, +Digicorp,,BRIDGE, +Digicorp,,password, +Digicorp,,BRIDGE, +Digicorp,,password, +Digicraft Software,Yak,asd123, +Digital Equipment,1,manager, +Digital Equipment,1,operator, +Digital Equipment,1,syslib, +Digital Equipment,1.1,SYSTEM, +Digital Equipment,2,maintain, +Digital Equipment,2,manager, +Digital Equipment,2,operator, +Digital Equipment,2,syslib, +Digital Equipment,30,games, +Digital Equipment,5,games, +Digital Equipment,7,maintain, +Digital Equipment,,1, +Digital Equipment,,ACCESS, +Digital Equipment,,SYSTEM, +Digital Equipment,,access, +Digital Equipment,,komprie, +Digital Equipment,,system, +Digital Equipment,,ACCESS, +Digital Equipment,,SYSTEM, +Digital Equipment,,komprie, +Digital Equipment,ALLIN1,ALLIN1, +Digital Equipment,ALLIN1MAIL,ALLIN1MAIL, +Digital Equipment,ALLINONE,ALLINONE, +Digital Equipment,BACKUP,BACKUP, +Digital Equipment,BATCH,BATCH, +Digital Equipment,DCL,DCL, +Digital Equipment,DECMAIL,DECMAIL, +Digital Equipment,DECNET,DECNET, +Digital 
Equipment,DECNET,NONPRIV, +Digital Equipment,DEFAULT,DEFAULT, +Digital Equipment,DEFAULT,USER, +Digital Equipment,DEMO,DEMO, +Digital Equipment,FIELD,DIGITAL, +Digital Equipment,FIELD,FIELD, +Digital Equipment,FIELD,SERVICE, +Digital Equipment,FIELD,TEST, +Digital Equipment,GUEST,GUEST, +Digital Equipment,HELP,HELP, +Digital Equipment,HELPDESK,HELPDESK, +Digital Equipment,HOST,HOST, +Digital Equipment,INFO,INFO, +Digital Equipment,INGRES,INGRES, +Digital Equipment,LINK,LINK, +Digital Equipment,MAILER,MAILER, +Digital Equipment,MBMANAGER,MBMANAGER, +Digital Equipment,MBWATCH,MBWATCH, +Digital Equipment,NETCON,NETCON, +Digital Equipment,NETMGR,NETMGR, +Digital Equipment,NETNONPRIV,NETNONPRIV, +Digital Equipment,NETPRIV,NETPRIV, +Digital Equipment,NETSERVER,NETSERVER, +Digital Equipment,NETWORK,NETWORK, +Digital Equipment,NEWINGRES,NEWINGRES, +Digital Equipment,NEWS,NEWS, +Digital Equipment,OPERVAX,OPERVAX, +Digital Equipment,PDP11,PDP11, +Digital Equipment,PDP8,PDP8, +Digital Equipment,POSTMASTER,POSTMASTER, +Digital Equipment,PRIV,PRIV, +Digital Equipment,REPORT,REPORT, +Digital Equipment,RJE,RJE, +Digital Equipment,STUDENT,STUDENT, +Digital Equipment,SYS,SYS, +Digital Equipment,SYSMAINT,DIGITAL, +Digital Equipment,SYSMAINT,SERVICE, +Digital Equipment,SYSMAINT,SYSMAINT, +Digital Equipment,SYSTEM,MANAGER, +Digital Equipment,SYSTEM,OPERATOR, +Digital Equipment,SYSTEM,SYSLIB, +Digital Equipment,SYSTEM,SYSTEM, +Digital Equipment,SYSTEST,UETP, +Digital Equipment,SYSTEST_CLIG,SYSTEST, +Digital Equipment,SYSTEST_CLIG,SYSTEST_CLIG, +Digital Equipment,TELEDEMO,TELEDEMO, +Digital Equipment,TEST,TEST, +Digital Equipment,UETP,UETP, +Digital Equipment,USER,PASSWORD, +Digital Equipment,USER,USER, +Digital Equipment,USERP,USERP, +Digital Equipment,VAX,VAX, +Digital Equipment,VMS,VMS, +Digital Equipment,accounting,accounting, +Digital Equipment,boss,boss, +Digital Equipment,demo,demo, +Digital Equipment,manager,manager, +Digital Equipment,software,software, +"Digium, 
Inc.",admin,password,ftp://ftp.gtlib.gatech.edu/pub/asterisknow/quickstart_asterisknow.pdf +Divar,admin,,http://www.google.com/url?sa=t&source=web&cd=3&ved=0CBsQFjAC&url=http%3A%2F%2Fresource.boschsecurity.com%2Fdocuments%2FEX85MegapixelIP_TechnicalServiceNote_enUS_T6355740427.doc&ei=h55lTLeXCcT_lgfgkqyTDg&usg=AFQjCNGPOLSf4n0L9PAyJ8Jv7FFN0IDIVw +Divar,viewer,,http://www.google.com/url?sa=t&source=web&cd=3&ved=0CBsQFjAC&url=http%3A%2F%2Fresource.boschsecurity.com%2Fdocuments%2FEX85MegapixelIP_TechnicalServiceNote_enUS_T6355740427.doc&ei=h55lTLeXCcT_lgfgkqyTDg&usg=AFQjCNGPOLSf4n0L9PAyJ8Jv7FFN0IDIVw +Dlink,admin,, +Dlink,admin,admin, +Dlink,admin,public, +DotNetNuke Corporation,admin,dnnadmin, +DotNetNuke Corporation,host,dnnhost, +Draytek Corp,admin,, +Draytek,Draytek,1234, +Draytek,admin,, +Draytek,admin,admin, +Draytek,draytek,1234, +DuPont,root,par0t, +Ducati Motor Holding,,Last 4 digits of VIN,http://www.laresblog.com/2011/04/why-cant-i-just-buy-motorcycle-without.html +Dynalink,admin,admin, +Dynalink,admin,private, +Dynalink,userNotUsed,userNotU, +Dynix Library Systems,LIBRARY,, +Dynix Library Systems,SETUP,, +Dynix Library Systems,circ,, +E-Con,admin,epicrouter, +E-Tech,,admin, +E-Tech,admin,epicrouter, +E-Tech,admin,password, +E-Tech,none,admin, +EMC,MCUser,MCUser1,https://community.emc.com/message/525389 +EMC,admin,,EMC Fiber Switch +EMC,admin,changeme,https://community.emc.com/message/525389 +EMC,backuponly,backuponly1,https://community.emc.com/message/525389 +EMC,backuprestore,backuprestore1,https://community.emc.com/message/525389 +EMC,dpn,changeme,https://community.emc.com/message/525389 +EMC,restoreonly,restoreonly1,https://community.emc.com/message/525389 +EMC,root,8RttoTriz,https://community.emc.com/message/525389 +EMC,root,changeme,https://community.emc.com/message/525389 +EMC,viewuser,viewuser1,https://community.emc.com/message/525389 +EPISD,computer,repair, +EPiServer 
AB,admin,store,http://world.episerver.com/Documentation/Items/Installation-Instructions/EPiServer-Commerce/Installation-Instructions---EPiServer-Commerce/#First%20Time%20Login +EZPhotoSales,admin,admin, +Eaton,admin,admin, +Echelon Corporation,ilon,ilon, +Edimax,admin,123, +Edimax,admin,1234, +Edimax,admin,, +Edimax,admin,epicrouter, +Edimax,admin,password, +Edimax,admin,su@psir, +Edimax,edimax,software01,for most Edimax HW???? +Edimax,guest,1234, +Edimax,guest,, +Efficient,,, +Efficient,,admin, +Efficient,,admin, +Efficient Networks,,hs7mwxkk, +Efficient Networks,,4getme2, +Efficient Networks,login,admin, +Efficient,login,admin, +Efficient,login,password, +Efficient,superuser,admin, +Efficinet Networks,login,admin, +"Ektron, Inc.",builtin,builtin,http://dev.ektron.com/kb_article.aspx?id=1818 +"Ektron, Inc.",sa,Ektron,http://downloads2.ektron.com/software/released/CMS400/v48/SetupManual.pdf +Elron,(hostname/ipaddress),sysadmin, +Elsa,,, +Elsa,,cisco, +Elsa,,, +Elsa,,cisco, +Emerson,Admin,Emerson1, +Eminent,admin,admin, +EnGenius,admin,admin, +Enhydra ,admin,enhydra, +Enox,,xo11nE, +Enox,,xo11nE, +Enterasys,,netadmin, +Enterasys,admin,, +Enterasys,admin,netadmin, +Enterasys,tiger,tiger123, +Entrust,admin,admin, +Entrust,websecadm,changeme,Access to Admin Gui via /sek-bin/ +Epox,,central, +Epox,,central, +Ericsson ACC,public,, +Ericsson,MD110,help, +Ericsson,admin,default, +Ericsson,expert,expert, +Ericsson,netman,netman, +EverFocus,admin,admin, +EverFocus,operator,operator, +EverFocus,supervisor,supervisor, +Exabyte,anonymous,Exabyte, +Exacq Technologies,admin,admin256,http://d1ni7hpbick8ut.cloudfront.net/ev-desktop-IP-quickstart-0309.pdf +Exacq Technologies,user,user5710,http://d1ni7hpbick8ut.cloudfront.net/ev-desktop-IP-quickstart-0309.pdf +Exinda Networks,admin,exinda, +Extended Systems,admin,admin, +Extended Systems,admin,extendnet, +Extreme Networks,admin,, +F5,admin,admin, +F5,root,default, +F5,support,, +F5-Networks,,, +Fastream Technologies,root,, 
+Fastwire,fastwire,fw, +FatWire,firstsite,firstsite,http://www.vvgr.demon.co.uk/FatWire_Analytics.pdf +FatWire,fwadmin,xceladmin,http://www.vvgr.demon.co.uk/FatWire_Analytics.pdf +Firebird Project,SYSDBA,masterkey, +Firebird,SYSDBA,masterkey, +Flowpoint,,, +Flowpoint,,password, +Flowpoint,,, +Flowpoint,,password, +Flowpoint,admin,admin, +Fortigate,admin,, +Fortinet,,bcpb(serial number of the firewall), +Fortinet,admin,, +Fortinet,maintainer,admin, +Fortinet,maintainer,bcpb[SERIAL NO.], +Fortinet,maintainer,pbcpbn(add-serial-number), +Foscam,admin,,http://foscam.com/Private/ProductFiles/FI8918W%20user%20manual-v36.00.pdf +Foundry Networks,,, +Foundry Networks,admin,admin, +Freetech,,Posterie, +Freetech,,Posterie, +FrontRange Solutions,master,access, +Fujitsu Siemens,,connect, +Fujitsu Siemens,manage,!manage, +Funk Software,admin,radius, +"GE Security, Inc.",install,install,http://www.qdigital.us/soporte/CasiRusco/Casirusco-documentation/controllers/460972001B.pdf +GE,museadmin,Muse!Admin, +GVC,Administrator,admin, +Galacticomm,Sysop,Sysop, +Gandalf,,console, +Gandalf,,gandalf, +Gandalf,,system, +Gandalf,,xmux, +Gateway,admin,admin, +Geeklog,username,password, +General Instruments,test,test, +Gericom,Administrator,, +Gigabyte,admin,admin, +Globespan Virata,DSL,DSL, +GlobespanVirata,root,root, +Google,admin,urchin, +Gossamer Threads Inc.,admin,admin, +Gossamer Threads Inc.,author,author, +Gossamer Threads Inc.,guest,guest, +GrandStream,,admin, +GrandStream,Administrator,admin, +GrandStream,End User,123 (or blank), +"Grandstream Networks, Inc",End User,123,http://www.grandstream.com/user_manuals/HandyTone-486UserManual.pdf +"Grandstream Networks, Inc",admin,admin,http://www.grandstream.com/user_manuals/HandyTone-486UserManual.pdf +Grandstream,admin,1234, +Greatspeed,admin,broadband, +"Groupee, Inc.",Admin5,4tugboat, +GuardOne,,guardone, +GuardOne,n.a,guardone, +Guru,admin,admin, +H2O Project,admin,admin, +HP,,, +HP,,AUTORAID, +HP,Administrator,admin, +HP,Factory,56789, 
+HP,admin,!admin, +HP,admin,, +HP,admin,admin, +HP,admin,isee, +HP,root,password, +Hayes,system,isp, +Hemoco Software,lansweeperuser,mysecretpassword0*,http://www.lansweeper.com/documentation.pdf +Hewlett Packard,admin,admin, +Hewlett-Packard,,, +Hewlett-Packard,,hewlpack, +Hewlett-Packard,,hewlpack, +Hewlett-Packard,ADVMAIL,, +Hewlett-Packard,ADVMAIL,HP, +Hewlett-Packard,ADVMAIL,HPOFFICE DATA, +Hewlett-Packard,Admin,Admin, +Hewlett-Packard,Administrator,The last eight digits of the serial number,http://bizsupport1.austin.hp.com/bizsupport/TechSupport/Document.jsp?objectID=c01141537&lang=en&cc=us&taskId=101&prodSeriesId=428936&prodTypeId=15351 +Hewlett-Packard,Anonymous,, +Hewlett-Packard,FIELD,, +Hewlett-Packard,FIELD,HPONLY, +Hewlett-Packard,FIELD,HPP187 SYS, +Hewlett-Packard,FIELD,HPWORD PUB, +Hewlett-Packard,FIELD,LOTUS, +Hewlett-Packard,FIELD,MANAGER, +Hewlett-Packard,FIELD,MGR, +Hewlett-Packard,FIELD,SERVICE, +Hewlett-Packard,FIELD,SUPPORT, +Hewlett-Packard,HELLO,FIELD.SUPPORT, +Hewlett-Packard,HELLO,MANAGER.SYS, +Hewlett-Packard,HELLO,MGR.SYS, +Hewlett-Packard,HELLO,OP.OPERATOR, +Hewlett-Packard,HPSupport,badg3r5,http://www.lolware.net/hpstorage.html +Hewlett-Packard,MAIL,HPOFFICE, +Hewlett-Packard,MAIL,MAIL, +Hewlett-Packard,MAIL,MPE, +Hewlett-Packard,MAIL,REMOTE, +Hewlett-Packard,MAIL,TELESUP, +Hewlett-Packard,MANAGER,COGNOS, +Hewlett-Packard,MANAGER,HPOFFICE, +Hewlett-Packard,MANAGER,ITF3000, +Hewlett-Packard,MANAGER,SECURITY, +Hewlett-Packard,MANAGER,SYS, +Hewlett-Packard,MANAGER,TCH, +Hewlett-Packard,MANAGER,TELESUP, +Hewlett-Packard,MGE,VESOFT, +Hewlett-Packard,MGR,CAROLIAN, +Hewlett-Packard,MGR,CCC, +Hewlett-Packard,MGR,CNAS, +Hewlett-Packard,MGR,COGNOS, +Hewlett-Packard,MGR,CONV, +Hewlett-Packard,MGR,HPDESK, +Hewlett-Packard,MGR,HPOFFICE, +Hewlett-Packard,MGR,HPONLY, +Hewlett-Packard,MGR,HPP187, +Hewlett-Packard,MGR,HPP189, +Hewlett-Packard,MGR,HPP196, +Hewlett-Packard,MGR,INTX3, +Hewlett-Packard,MGR,ITF3000, +Hewlett-Packard,MGR,NETBASE, 
+Hewlett-Packard,MGR,REGO, +Hewlett-Packard,MGR,RJE, +Hewlett-Packard,MGR,ROBELLE, +Hewlett-Packard,MGR,SECURITY, +Hewlett-Packard,MGR,SYS, +Hewlett-Packard,MGR,TELESUP, +Hewlett-Packard,MGR,VESOFT, +Hewlett-Packard,MGR,WORD, +Hewlett-Packard,MGR,XLSERVER, +Hewlett-Packard,OPERATOR,COGNOS, +Hewlett-Packard,OPERATOR,DISC, +Hewlett-Packard,OPERATOR,SUPPORT, +Hewlett-Packard,OPERATOR,SYS, +Hewlett-Packard,OPERATOR,SYSTEM, +Hewlett-Packard,Oper,Oper, +Hewlett-Packard,PCUSER,SYS, +Hewlett-Packard,RSBCMON,SYS, +Hewlett-Packard,SPOOLMAN,HPOFFICE, +Hewlett-Packard,WP,HPOFFICE, +Hewlett-Packard,admin,admin,http://www.google.com/url?sa=t&source=web&cd=1&sqi=2&ved=0CBgQFjAA&url=http%3A%2F%2Fcdn.procurve.com%2Ftraining%2FManuals%2Fr52%2F42-10-5100-02_QS_MSC-5100_en.pdf&ei=b1DkTZ7HBInQgAfwzPDCBg&usg=AFQjCNEPTe_QUv1bf17RCW-0wYwrJmS24g +Hewlett-Packard,admin,hp.com, +Hewlett-Packard,admin,isee, +HighPOint,RAID,hpt,http://www.hptmac.com/image/PDF/RAID_Managment_Software_Download.pdf +Honeynet Project,roo,honey, +Honeynet Project,root,honey, +Honeywell,LocalComServer,LCS pwd 03, +Honeywell,TPSLocalServer,TLS pwd 03, +Horizon DataSys,,foolproof, +Hosting Controller,AdvWebadmin,advcomm500349, +Huawei,TMAR#HWMT8007079,, +Huawei Technologies Co,TMAR#HWMT8007079,, +Huawei Technologies Co,admin,admin, +Huawei,admin,admin, +"Hyperic, Inc.",hqadmin,hqadmin,http://support.hyperic.com/display/DOC/QuickStart+Installation +IBM,$ALOC$,, +IBM,$SRV,$SRV, +IBM,11111111,11111111, +IBM,22222222,22222222, +IBM,,IBM, +IBM,,MBIU0, +IBM,,ascend, +IBM,,sertafu, +IBM,,, +IBM,,IBM, +IBM,,MBIU0, +IBM,,R1QTPS,submitted by FX +IBM,,admin, +IBM,,ascend, +IBM,,sertafu, +IBM,ADMIN,, +IBM,AP2SVP,, +IBM,APL2PP,, +IBM,AUTOLOG1,, +IBM,Administrator,admin, +IBM,BATCH,, +IBM,BATCH1,, +IBM,BATCH2,, +IBM,CCC,, +IBM,CICSUSER,CISSUS, +IBM,CMSBATCH,, +IBM,CMSBATCH,CMSBATCH, +IBM,CMSUSER,, +IBM,CPNUC,, +IBM,CPRM,, +IBM,CSPUSER,, +IBM,CVIEW,, +IBM,DATAMOVE,, +IBM,DBDCCICS,DBDCCIC, +IBM,DEMO1,, +IBM,DEMO2,, +IBM,DEMO3,, 
+IBM,DEMO4,, +IBM,DIRECT,, +IBM,DIRMAINT,, +IBM,DISKCNT,, +IBM,EREP,, +IBM,ESSEX,, +IBM,FORSE,FORSE, +IBM,FSFADMIN,, +IBM,FSFTASK1,, +IBM,FSFTASK2,, +IBM,GCS,, +IBM,IBMUSER,SYS1, +IBM,IDMS,, +IBM,IDMSSE,, +IBM,IIPS,, +IBM,IPC,, +IBM,IPFSERV,, +IBM,ISPVM,, +IBM,IVPM1,, +IBM,IVPM2,, +IBM,MAINT,, +IBM,MAINT,MAINT, +IBM,MOESERV,, +IBM,NEVIEW,, +IBM,OLTSEP,, +IBM,OP1,, +IBM,OPER,OPER, +IBM,OPERATIONS,OPERATIONS, +IBM,OPERATNS,, +IBM,OPERATNS,OPERATNS, +IBM,OPERATOR,, +IBM,Operator,Operator, +IBM,PDMREMI,, +IBM,PENG,, +IBM,POST,BASE, +IBM,PROCAL,, +IBM,PRODBM,, +IBM,PRODCICS,PRODCICS, +IBM,PROG,PROG, +IBM,PROMAIL,, +IBM,PSFMAINT,, +IBM,PVM,, +IBM,QSRV,11111111, +IBM,QSRV,22222222, +IBM,QSRV,QSRV, +IBM,RDM470,, +IBM,ROUTER,, +IBM,RSCS,, +IBM,RSCSV2,, +IBM,SAVSYS,, +IBM,SFCMI,, +IBM,SFCNTRL,, +IBM,SMART,, +IBM,SQLDBA,, +IBM,SQLUSER,, +IBM,SYSA,SYSA, +IBM,SYSADMIN,, +IBM,SYSCKP,, +IBM,SYSDUMP1,, +IBM,SYSERR,, +IBM,SYSWRM,, +IBM,TDISK,, +IBM,TEMP,, +IBM,TSAFVM,, +IBM,USERID,PASSW0RD, +IBM,USERID,PASSWORD,http://pic.dhe.ibm.com/infocenter/sonasic/sonas1ic/index.jsp?topic=%2Fcom.ibm.sonas.doc%2Fcreate_sol.html +IBM,VASTEST,, +IBM,VCSRV,VCSRV, +IBM,VM3812,, +IBM,VMARCH,, +IBM,VMASMON,, +IBM,VMASSYS,, +IBM,VMBACKUP,, +IBM,VMBSYSAD,, +IBM,VMMAP,, +IBM,VMTAPE,, +IBM,VMTLIBR,, +IBM,VMUTIL,, +IBM,VSEIPO,, +IBM,VSEMAINT,, +IBM,VSEMAN,, +IBM,VTAM,, +IBM,VTAM,VTAM, +IBM,VTAMUSER,, +IBM,admin,, +IBM,admin,admin, +IBM,admin,password, +IBM,admin,secure, +IBM,db2fenc1,db2fenc1, +IBM,db2inst1,db2inst1, +IBM,fg_sysadmin,password,http://webcache.googleusercontent.com/search?q=cache:tjiEP5dFc6IJ:help.sterlingcommerce.com/SFG20/topic/T94571/SFGCreateUserAccts_5101_127332_22331.html+sterling+fg_sysadmin+password&cd=1&hl=en&ct=clnk&gl=us&client=ubuntu&source=www.google.com +IBM,guest,, +IBM,guest,guest, +IBM,hscroot,abc123, +IBM,ibm,2222, +IBM,ibm,password, +IBM,ibm,service, +IBM,qpgmr,qpgmr, +IBM,qsecofr,11111111, +IBM,qsecofr,22222222, +IBM,qsecofr,qsecofr, +IBM,qserv,qserv, +IBM,qsrv,qsrv, 
+IBM,qsrvbas,qsrvbas, +IBM,qsvr,ibmcel, +IBM,qsvr,qsvr, +IBM,qsysopr,qsysopr, +IBM,quser,quser, +IBM,root,,Also works for older 4400 printers +IBM,root,passw0rd,http://pic.dhe.ibm.com/infocenter/powersys/v3r1m5/index.jsp?topic=/iphai_p5/whychgprepwds.htm +IBM,secofr,secofr, +IBM,sedacm,secacm, +IBM,storwatch,specialist,By Nicolas Gregoire +IBM,superadmin,secret,Documented in Web Administration Guide +IBM,sysopr,sysopr, +IBM,user,USERP, +IBM,vt100,public,Swap MAC address chip from +IBM,webadmin,webibm, +IBM,wpsadmin,wpsadmin, +INOVA,iclock,timely, +IQinVision,root,system,http://www.iqeye.com/iqeye/images/uploads/File/manuals/Quick-Install.pdf +IRC,,FOOBAR, +Inedo,Admin,Admin,http://inedo.com/support/tutorials/getting-started-with-proget-private-npm-registries +Infoblox,admin,, +Informix,informix,informix, +Infosmart,admin,0, +Infrant,admin,infrant1, +Innovaphone,admin,ip20, +Innovaphone,admin,ip21,http://www.annexnet.cz/New_PDF/innovaphone/gwe_manual.pdf +Innovaphone,admin,ip3000, +Innovaphone,admin,ip305Beheer, +Innovaphone,admin,ip400, +Inova,iclock,timely, +Integral,Administrator,letmein, +Integrated Networks,Administrator,1234, +Integrated Networks,Administrator,12345678, +Integrated Networks,Administrator,19750407, +Intel,,Intel, +Intel,,isolation, +Intel,,shiva, +Intel,Guest,, +Intel,NICONEX,NICONEX, +Intel,intel,intel, +Intel,root,, +Intel,setup,setup, +Intel/Shiva,admin,hello, +IntelliTouch,administrator,1234, +Interbase,SYSDBA,masterkey, +Intermec,,intermec, +Intermec,intermec,intermec, +Internet Archive,admin,letmein, +Intershop,operator,$chwarzepumpe, +Intersystems,system,sys, +Intracom,admin,admin, +Inventel Wanadoo,Admin,Admin, +Inventel,admin,admin, +Ipswitch,XXSESS_MGRYY,X#1833, +Ipswitch,admin,admin,http://www.ipswitch.com/support/whatsup/guide/v700/7browsera3.html +Ipswitch,guest,,http://www.ipswitch.com/support/whatsup/guide/v700/7browsera3.html +IronPort,admin,ironport, +Irongate,admin,NetSurvibox, +Iwill,,iwill, +Iwill,,iwill, 
+JAHT,admin,epicrouter, +JAMF Software,jamfsoftware,jamfsw03,http://resources.jamfsoftware.com/archive/Casper_Suite_Administrators_Guide.pdf +JD Edwards,JDE,JDE, +JDE,PRODDTA,PRODDTA, +JDS Microprocessing,hydrasna,, +JDS,hydrasna,, +Janitza,Homepage Password,0th,https://wiki.janitza.de/display/GRIDVIS40/UMG+604+-+Passwort +Janitza,admin,Janitza,https://wiki.janitza.de/display/GRIDVIS40/UMG+604+-+Passwort +Janitza,guest,Janitza,https://wiki.janitza.de/display/GRIDVIS40/UMG+604+-+Passwort +Janitza,user,Janitza,https://wiki.janitza.de/display/GRIDVIS40/UMG+604+-+Passwort +Jaspersoft Corporation,demo,demo,http://community.jaspersoft.com/documentation/jasperreports-server-install-guide-community-edition +Jaspersoft Corporation,jasperadmin,jasperadmin,http://community.jaspersoft.com/documentation/jasperreports-server-install-guide-community-edition +Jaspersoft Corporation,joeuser,joeuser,http://community.jaspersoft.com/documentation/jasperreports-server-install-guide-community-edition +Jaspersoft Corporation,superuser,superuser,http://community.jaspersoft.com/documentation/jasperreports-server-install-guide-community-edition +Jean-Philippe Lang,admin,admin,http://www.redmine.org/projects/redmine/wiki/RedmineInstall +Jeebles Technology,,admin, +JetWay,,spooml, +JetWay,,spooml, +Jetform,Jetform,, +Johnson Controls,johnson,control, +Joss Technology,,57gbzb, +Joss Technology,,technolgi, +Joss Technology,,57gbzb, +Joss Technology,,technolgi, +Juniper,admin,abc123,https://kb.juniper.net/InfoCenter/index?page=content&id=KB26220&actp=search +Juniper,admin,netscreen, +Juniper,admin,peribit, +Juniper,netscreen,netscreen, +Juniper,redline,redline, +Juniper,serial#,serial#,Resets to factory settings +Juniper,super,juniper123,https://kb.juniper.net/InfoCenter/index?page=content&id=KB26220&actp=search +Justin Hagstrom,admin,admin,http://autoindex.sourceforge.net/ +Justin Hagstrom,test,test,http://autoindex.sourceforge.net/ +KASDA,admin,adslroot, +KTI,admin,123, +KTI,admin,123456, 
+KTI,superuser,123456, +Kalatel,,3477, +Kalatel,,8111, +Kentico Software,administrator,,http://www.kentico.com/downloads/kenticocms_quickguide.pdf +Kethinov,root,password,http://freshmeat.net/projects/kboardforum/ +Keyscan,keyscan,KEYSCAN, +Kodak,PACSLinkIP,NetServer,http://yeec.com/uploadimages1/forum/yingxiang/kodak/mim_service_manual_v6_1.pdf +Kodak,PLMIMService,NetServer,http://yeec.com/uploadimages1/forum/yingxiang/kodak/mim_service_manual_v6_1.pdf +Kodak,RNIServiceManager,NetServer,http://yeec.com/uploadimages1/forum/yingxiang/kodak/mim_service_manual_v6_1.pdf +Kodak,SA,PASSWORD,http://yeec.com/uploadimages1/forum/yingxiang/kodak/mim_service_manual_v6_1.pdf +Kodak,Service,Service,http://yeec.com/uploadimages1/forum/yingxiang/kodak/mim_service_manual_v6_1.pdf +Kodi,kodi,kodi, +Konica Minolta,,0000, +Konica Minolta,,1234, +Konica Minolta,,, +Konica Minolta,,MagiMFP,http://www.kn.nl/data/handleiding/182185/Reference%20Guide%20Magicolor%202490MF.pdf +Konica Minolta,,sysAdmin, +Konica Minolta,,0,Printer configuration interface +Konica Minolta,,sysadm, +Konica Minolta,admin,administrator, +Kronos,SuperUser,kronites,http://www.scribd.com/doc/56930986/System-Administrators-Guide#outer_page_29 +Kyocera,2800,2800, +Kyocera,,admin00, +Kyocera,,PASSWORD, +Kyocera,,admin00, +Kyocera,admin,, +Kyocera,admin,admin, +Kyocera,root,root, +LANCOM,,, +LANSA,WEBADM,password, +LANSA,admin,admin,http://support.lansa.com/download/temp18/aXes_Rapid_Install_Instructions.pdf +LANSA,dev,dev,http://support.lansa.com/download/temp18/aXes_Rapid_Install_Instructions.pdf +LAXO,admin,admin, +LG,,jannie, +LG,admin,epicrouter, +LG,vikram,singh, +LGIC,LR-ISDN,LR-ISDN, +LaCie,admin,admin, +Lanier,,sysadm, +Lanier,admin,, +Lanier,supervisor,, +Lantronics,,access, +Lantronics,,system, +Lantronix,,access, +Lantronix,,,secondary priv. password: system +Lantronix,,access, +Lantronix,,admin,secondary priv. 
password: +Lantronix,,lantronix, +Lantronix,,system, +Lantronix,login,access, +Lantronix,sysadmin,PASS,9600/N/8/1 XON/XOFF +Leading Edge,,MASTER, +Leading Edge,,MASTER, +Lenel,admin,admin, +Level1,admin,admin, +Leviton,admin,leviton, +Liebert,Liebert,Liebert,http://www.emersonnetworkpower.com/documentation/en-us/products/acpower/rackpdu/documents/sl-20826.pdf +Lindsay Electronics,ADMINISTRATOR,SENTINEL, +Lindsay Electronics,SENTINEL,SENTINEL, +Linksys,,admin, +Linksys,,epicrouter, +Linksys,,, +Linksys,,admin, +Linksys,Administrator,admin, +Linksys,admin,, +Linksys,admin,admin, +Linksys,comcast,1234, +Linksys,root,orion99, +Linksys,user,tivonpw, +Linux,gonzo,, +Linux,root,uClinux, +Linux,satan,, +Linux,snake,, +"Liquidware Labs, Inc.",ssadmin,sspassword,http://www.liquidwarelabs.com/content/pdf/documents/support/Liquidware-Labs-Stratusphere-UX-Quick-Start-Guide.pdf +Livingston,!root,, +Livingstone,root,, +Lockdown,setup,changeme!, +LogiLink,admin,1234, +Logitech,,0, +Loglogic,root,logapp, +Loglogic,toor,logapp, +Longshine,admin,0,http://www.tenable.com/4917.html +Lucent,(any 3 characters),cascade, +Lucent,(any 3 chars),cascade, +Lucent,,admin, +Lucent,,cascade, +Lucent,Administrator,, +Lucent,LUCENT01,UI-PSWD-01,requires GSI software +Lucent,LUCENT02,UI-PSWD-02,requires GSI software +Lucent,admin,AitbISP4eCiG, +Lucent,admin,Ascend, +Lucent,bciim,bciimpw, +Lucent,bcim,bcimpw, +Lucent,bcms,bcmspw, +Lucent,bcnas,bcnaspw, +Lucent,blue,bluepw, +Lucent,browse,browsepw, +Lucent,browse,looker, +Lucent,craft,craft, +Lucent,craft,craftpw, +Lucent,cust,custpw, +Lucent,enquiry,enquirypw, +Lucent,field,support, +Lucent,inads,inads, +Lucent,inads,indspw, +Lucent,init,initpw, +Lucent,locate,locatepw, +Lucent,maint,maintpw, +Lucent,maint,rwmaint, +Lucent,nms,nmspw, +Lucent,pw,pwpw, +Lucent,rcust,rcustpw, +Lucent,readonly,lucenttech2, +Lucent,readwrite,lucenttech1, +Lucent,root,ascend, +Lucent,super,super, +Lucent,support,supportpw, +Lucent,sysadm,admpw, +Lucent,sysadm,sysadmpw, 
+Lucent,sysadm,syspw, +Lucent,tech,field, +Luxon Communications,administrator,19750407, +M Technology,,mMmM, +M Technology,,mMmM, +MERCURY,Administrator,admin, +MP3Mystic,admin,mp3mystic, +MRV,admin,admin, +MTNL,admin,admin, +MachSpeed,,sp99dd, +MachSpeed,,sp99dd, +Macromedia,,admin, +Macsense,admin,admin, +Magento,admin,123123,http://www.magentocommerce.com/wiki/recover/resetting-admin-password +Magic-Pro,,prost, +Magic-Pro,,prost, +Main Street Softworks,MCVEADMIN,password, +Mambo,admin,admin,http://sourceforge.org/projects/mambo +ManageEngine,admin,admin,http://www.manageengine.com/products/self-service-password/help/admin-guide/index.html +Mandarin Library Automation,admin,boca raton,http://www.nassauboces.org/cms/lib5/NY18000988/Centricity/Domain/31/Automation_handouts/M3_Users_Guide_1.6_SP1.pdf +Mantis,administrator,root, +Marconi,ami,, +McAfee,admin,admin123, +McAfee,scmadmin,scmchangeme, +McAfee,webshield,webshieldchangeme, +McData,Administrator,password, +McData,McdataSE,redips, +Mediatrix,admin,1234, +Mediatrix,administrator,, +Megastar,,star, +Megastar,,star, +Memotec,memotec,supervisor, +Mentec,MICRO,RSX, +Mercury Interactive,admin,admin, +Meridian,service,smile, +Michiel,admin,phplist,http://freshmeat.net/projects/phplist/ +Microcom,admin,epicrouter, +Microcom,admin,superuser, +Microcom,user,password, +Micron,,sldkj754, +Micron,,xyzall, +Micron,,sldkj754, +Micron,,xyzall, +Micronet,admin,admin,http://archives.neohapsis.com/archives/bugtraq/2004-10/0078.html +Micronet,admin,epicrouter, +Micronet,mac,, +Micronet,root,default, +Micronics,,dn_04rjc, +Micronics,,dn_04rjc, +Microplex,root,root, +Microsoft,,, +Microsoft,,admin, +Microsoft,,sa, +Microsoft,Administrator,, +Microsoft,Administrator,Administrator, +Microsoft,Guest,, +Microsoft,Guest,Guest, +Microsoft,IS_$hostname,IS_$hostname, +Microsoft,LDAP_Anonymous,LdapPassword_1, +Microsoft,LessonUser1,, +Microsoft,LessonUser2,, +Microsoft,MSHOME,MSHOME, +Microsoft,User,User, +Microsoft,sa,, +Mike 
Peters,bsxuser,bsxpass,http://freshmeat.net/projects/basilix/ +MikroTik,admin,, +Mikrotik,admin,,also for SSH and Web access +Milan,root,root, +Minolta PagrPro,,sysadm, +Minolta QMS,admin,, +Minolta QMS,operator,, +Mintel,,SYSTEM, +Mintel,,SYSTEM, +Mitel,,, +Mitel Networks,1nstaller,5X2000,http://www.tek-tips.com/viewthread.cfm?qid=1036643 +Mitel Networks,installer,sx2000,http://www.tek-tips.com/viewthread.cfm?qid=1036643 +Mitel Networks,maint1,sx2000,http://www.tek-tips.com/viewthread.cfm?qid=1036643 +Mitel Networks,maint2,sx2000,http://www.tek-tips.com/viewthread.cfm?qid=1036643 +Mitel Networks,s1stem,5X2000,http://www.tek-tips.com/viewthread.cfm?qid=1036643 +Mitel Networks,system,sx2000,http://www.tek-tips.com/viewthread.cfm?qid=1036643 +Mitel,installer,1000, +Mitel,system,mnet, +Mitel,system,password, +Mobotix,admin,meinsm, +Mole,admin,admin, +Motive,admin,isee, +Motorola,,0000,http://rarara77.googlepages.com/ +Motorola,admin,motorola, +Motorola,admin,password, +Motorola,cablecom,router, +Motorola,service,smile, +Motorola,setup,, +Motorola,technician,yZgO8Bvj,https://community.rapid7.com/Rapid7_BlogPostDetail?id=a111400000AanBsAAJ +Mutare,,admin, +Muze,admin,muze, +MySQL,admin@example.com,admin, +MySQL,root,, +MySQL,superdba,admin, +MyioSoft,demo,demo, +NAI,GlobalAdmin,GlobalAdmin,By Nicolas Gregoire +NAI,admin,admin123,By Nicolas Gregoire +NCR,ncrm,ncrm, +NEC,,, +NEC,admin,password, +NGSEcure,admin,admin, +NGSec,admin,, +NGSec,admin,asd, +NICE Systems Ltd.,Administrator,nicecti, +NICE Systems Ltd.,Nice-admin,nicecti, +NOMADIX,admin,, +NRG or RICOH,,password, +NSI,root,nsi,http://www.nsi.com.au/vmpfw-install.htm +Nanoteq,admin,NetSeq, +NeXT,me,, +NeXT,root,NeXT, +NeXT,signa,signa, +NetApp,admin,NetCache, +NetApp,admin,admin123, +NetBotz,netbotz,netbotz, +NetGenesis,naadmin,naadmin, +Netasq,admin,admin, +Netcomm,,admin, +Netcomm,admin,password, +Netcomm,user,password, +Netcordia,admin,admin, +Netgear,,1234, +Netgear,,admin, +Netgear,,private, +Netgear,,zebra, 
+Netgear,,, +Netgear,,admin, +Netgear,,password, +Netgear,Admin,password, +Netgear,Gearguy,Geardog, +Netgear,admin,1234, +Netgear,admin,, +Netgear,admin,admin, +Netgear,admin,draadloos, +Netgear,admin,infrant1,Upto v3 firmware +Netgear,admin,netgear1,v4 firmware onwards +Netgear,admin,password, +Netgear,admin,setup, +Netgear,comcast,1234, +Netgear,cusadmin,highspeed, +Netgear,super,5777364, +Netgear,superman,21241036, +Netopia,,, +Netopia,,, +Netopia,admin,, +Netopia,admin,noway, +Netopia,factory,(see note),http://packetstormsecurity.org/files/91948/Netopia-Routers-Factory-Password-Generator.html +Netopia,netopia,netopia, +Netport,setup,setup, +Netscape,admin,admin, +Netscreen,,, +Netscreen,Administrator,, +Netscreen,admin,, +Netscreen,admin,netscreen, +Netscreen,netscreen,netscreen, +Netscreen,operator,, +Netstar,admin,password, +Network Appliance,admin,NetCache, +Network Associates,e250,e250changeme, +Network Associates,e500,e500changeme, +Network Everywhere,,admin, +NetworkICE,iceman,, +NewMedia-NET GmbH,root,admin,http://www.dd-wrt.com/wiki/index.php/Index:FAQ#What.27s_the_default_username_and_password.3F +Nexsan,ADMIN,PASSWORD, +Niksun,vcr,NetVCR,su after login with empty password +Nimble,,xdfk9874t3, +Nimble,,xdfk9874t3, +Nokia,,9999, +Nokia,,Telecom, +Nokia,,nokai, +Nokia,,nokia, +Nokia,Security Code,12345, +Nokia,Telecom,Telecom, +Nokia,client,client, +Nokia,m1122,m1122, +Nokia,nop,12345, +Nokia,nop,123454, +Nokia,root,nokia, +Nokia,root,rootme, +Nokia,telecom,telecom, +Norstar,**23646,23646, +Norstar,**266344,266344, +Nortel,266344,266344, +Nortel,,0, +Nortel,,, +Nortel,,l1, +Nortel,,l2, +Nortel,,ro, +Nortel,,rw, +Nortel,,rwa, +Nortel,,secure, +Nortel,,266344, +Nortel,,, +Nortel,,secure, +Nortel,Manager,, +Nortel,admin,000000,http://support.avaya.com/css/P8/documents/100097575 +Nortel,admin,,http://support.avaya.com/css/P8/documents/100097575 +Nortel,admin,admin, +Nortel,admin,admin000, +Nortel,admin,root, +Nortel,admin,setup, 
+Nortel,administrator,PlsChgMe!, +Nortel,ccrusr,ccrusr, +Nortel,conferencing,admin,http://support.avaya.com/css/P8/documents/100097575 +Nortel,debug,gubed,http://support.avaya.com/css/P8/documents/100097575 +Nortel,distrib,distrib0, +Nortel,disttech,4tas, +Nortel,disttech,disttech, +Nortel,disttech,etas, +Nortel,l2,l2, +Nortel,l3,l3, +Nortel,login,0,AUTH codes in LD 8 +Nortel,login,0000, +Nortel,login,1111,AUTH codes in LD 8 +Nortel,login,8429,AUTH codes in LD 8 +Nortel,maint,maint, +Nortel,maint,ntacdmax, +Nortel,mlusr,mlusr, +Nortel,ro,ro, +Nortel,root,3ep5w2u, +Nortel,rw,rw, +Nortel,rwa,rwa, +Nortel,service,smile, +Nortel,spcl,0,AUTH codes in LD 8 +Nortel,spcl,0000, +Nortel,supervisor,PlsChgMe!, +Nortel,supervisor,visor, +Nortel,sysadmin,nortel, +Nortel,system,adminpwd, +Nortel,tasman,tasmannet, +Nortel,trmcnfg,trmcnfg, +Nortel,user,, +Nortel,user,user, +Nortel,user,user0000, +Novell,,cr0wmt 911, +Novell,,root, +Novell,,san fran 8, +Novell,ADMIN,, +Novell,ADMIN,ADMIN, +Novell,ADMIN,admin, +Novell,ARCHIVIST,, +Novell,ARCHIVIST,ARCHIVIST, +Novell,BACKUP,, +Novell,BACKUP,BACKUP, +Novell,CHEY_ARCHSVR,, +Novell,CHEY_ARCHSVR,CHEY_ARCHSVR, +Novell,FAX,, +Novell,FAX,FAX, +Novell,FAXUSER,, +Novell,FAXUSER,FAXUSER, +Novell,FAXWORKS,, +Novell,FAXWORKS,FAXWORKS, +Novell,GATEWAY,, +Novell,GATEWAY,GATEWAY, +Novell,GUEST,, +Novell,GUEST,GUEST, +Novell,GUEST,GUESTGUE, +Novell,GUEST,GUESTGUEST, +Novell,GUEST,TSEUG, +Novell,HPLASER,, +Novell,HPLASER,HPLASER, +Novell,LASER,, +Novell,LASER,LASER, +Novell,LASERWRITER,, +Novell,LASERWRITER,LASERWRITER, +Novell,MAIL,, +Novell,MAIL,MAIL, +Novell,POST,, +Novell,POST,POST, +Novell,PRINT,, +Novell,PRINT,PRINT, +Novell,PRINTER,, +Novell,PRINTER,PRINTER, +Novell,ROOT,, +Novell,ROOT,ROOT, +Novell,ROUTER,, +Novell,SABRE,, +Novell,SUPERVISOR,, +Novell,SUPERVISOR,HARRIS, +Novell,SUPERVISOR,NETFRAME, +Novell,SUPERVISOR,NF, +Novell,SUPERVISOR,NFI, +Novell,SUPERVISOR,SUPERVISOR, +Novell,SUPERVISOR,SYSTEM, +Novell,TEST,, +Novell,TEST,TEST, 
+Novell,USER_TEMPLATE,, +Novell,USER_TEMPLATE,USER_TEMPLATE, +Novell,WANGTEK,, +Novell,WANGTEK,WANGTEK, +Novell,WINDOWS_PASSTHRU,, +Novell,WINDOWS_PASSTHRU,WINDOWS_PASSTHRU, +Novell,WINSABRE,SABRE, +Novell,WINSABRE,WINSABRE, +Novell,admin,admin,https://www.novell.com/documentation/vibe32/vibe32_admin/data/brchh4k.html +Novell,admin,novell, +Novell,sadmin,, +Novell,servlet,manager, +Nullsoft,admin,changeme, +Nurit,$system,, +OCE,,0 and the number of OCE printer, +ODS,ods,ods, +OMRON,,, +OPEN Networks,root,0P3N, +OSMC,osmc,osmc, +OTRS Inc.,root@localhost,root,http://doc.otrs.org/2.4/en/html/c444.html +Oki,admin,,Last 6 characters of the MAC address in uppercase +Oki,admin,OkiLAN, +Oki,root,,Last 6 characters of the MAC address in uppercase +Oleg Khabarov,username,password,https://github.com/comfy/comfortable-mexican-sofa/wiki/Installation-and-Quick-Start-Guide#quick-start-guide +Olicom,,AaBbCcDd, +Omnitronix,,SMDR, +Omnitronix,,SUPER, +Open-Xchange Inc.,mailadmin,secret, +OpenConnect,admin,OCS, +OpenConnect,adminstat,OCS, +OpenConnect,adminuser,OCS, +OpenConnect,adminview,OCS, +OpenConnect,helpdesk,OCS, +OpenMarket,Bobo,hello, +OpenMarket,Coco,hello, +OpenMarket,Flo,hello, +OpenMarket,Joe,hello, +OpenMarket,Moe,hello, +OpenMarket,admin,demo, +OpenMarket,user_analyst,demo, +OpenMarket,user_approver,demo, +OpenMarket,user_author,demo, +OpenMarket,user_checker,demo, +OpenMarket,user_designer,demo, +OpenMarket,user_editor,demo, +OpenMarket,user_expert,demo, +OpenMarket,user_marketer,demo, +OpenMarket,user_pricer,demo, +OpenMarket,user_publisher,demo, +Openlink,admin,admin, +Openwave,cac_admin,cacadmin, +Openwave,sys,uplink, +Optivision,root,mpegvideo, +Oracle,,, +Oracle,ADAMS,WOOD, +Oracle,ADLDEMO,ADLDEMO, +Oracle,ADMIN,JETSPEED, +Oracle,ADMIN,WELCOME, +Oracle,ADMINISTRATOR,ADMINISTRATOR, +Oracle,ADMINISTRATOR,admin, +Oracle,ANDY,SWORDFISH, +Oracle,AP,AP, +Oracle,APPLSYS,APPLSYS, +Oracle,APPLSYS,FND, +Oracle,APPLSYSPUB,FNDPUB, +Oracle,APPS,APPS, +Oracle,APPUSER,APPUSER, 
+Oracle,AQ,AQ, +Oracle,AQDEMO,AQDEMO, +Oracle,AQJAVA,AQJAVA, +Oracle,AQUSER,AQUSER, +Oracle,AUDIOUSER,AUDIOUSER, +Oracle,AURORA$JIS$UTILITY$,, +Oracle,AURORA$ORB$UNAUTHENTICATED,INVALID, +Oracle,AURORA@ORB@UNAUTHENTICATED,INVALID, +Oracle,BC4J,BC4J, +Oracle,BLAKE,PAPER, +Oracle,BRIO_ADMIN,BRIO_ADMIN, +Oracle,CATALOG,CATALOG, +Oracle,CDEMO82,CDEMO82, +Oracle,CDEMOCOR,CDEMOCOR, +Oracle,CDEMORID,CDEMORID, +Oracle,CDEMOUCB,CDEMOUCB, +Oracle,CENTRA,CENTRA, +Oracle,CIDS,CIDS, +Oracle,CIS,CIS, +Oracle,CISINFO,CISINFO, +Oracle,CLARK,CLOTH, +Oracle,COMPANY,COMPANY, +Oracle,COMPIERE,COMPIERE, +Oracle,CQSCHEMAUSER,PASSWORD, +Oracle,CSMIG,CSMIG, +Oracle,CTXDEMO,CTXDEMO, +Oracle,CTXSYS,, +Oracle,CTXSYS,CTXSYS, +Oracle,DBI,MUMBLEFRATZ, +Oracle,DBSNMP,DBSNMP, +Oracle,DEMO,DEMO, +Oracle,DEMO8,DEMO8, +Oracle,DEMO9,DEMO9, +Oracle,DES,DES, +Oracle,DEV2000_DEMOS,DEV2000_DEMOS, +Oracle,DIP,DIP, +Oracle,DISCOVERER_ADMIN,DISCOVERER_ADMIN, +Oracle,DSGATEWAY,DSGATEWAY, +Oracle,DSSYS,DSSYS, +Oracle,EJSADMIN,EJSADMIN, +Oracle,EMP,EMP, +Oracle,ESTOREUSER,ESTORE, +Oracle,EVENT,EVENT, +Oracle,EXFSYS,EXFSYS, +Oracle,FINANCE,FINANCE, +Oracle,FND,FND, +Oracle,FROSTY,SNOWMAN, +Oracle,GL,GL, +Oracle,GPFD,GPFD, +Oracle,GPLD,GPLD, +Oracle,HCPARK,HCPARK, +Oracle,HLW,HLW, +Oracle,HR,HR, +Oracle,IMAGEUSER,IMAGEUSER, +Oracle,IMEDIA,IMEDIA, +Oracle,JMUSER,JMUSER, +Oracle,JONES,STEEL, +Oracle,JWARD,AIROPLANE, +Oracle,L2LDEMO,L2LDEMO, +Oracle,LBACSYS,LBACSYS, +Oracle,LIBRARIAN,SHELVES, +Oracle,MASTER,PASSWORD, +Oracle,MDDEMO,MDDEMO, +Oracle,MDDEMO_CLERK,CLERK, +Oracle,MDDEMO_MGR,MGR, +Oracle,MDSYS,MDSYS, +Oracle,MFG,MFG, +Oracle,MGWUSER,MGWUSER, +Oracle,MIGRATE,MIGRATE, +Oracle,MILLER,MILLER, +Oracle,MMO2,MMO2, +Oracle,MODTEST,YES, +Oracle,MOREAU,MOREAU, +Oracle,MTSSYS,MTSSYS, +Oracle,MTS_USER,MTS_PASSWORD, +Oracle,MTYSYS,MTYSYS, +Oracle,MXAGENT,MXAGENT, +Oracle,NAMES,NAMES, +Oracle,OAS_PUBLIC,OAS_PUBLIC, +Oracle,OCITEST,OCITEST, +Oracle,ODM,ODM, +Oracle,ODM_MTR,MTRPW, +Oracle,ODS,ODS, 
+Oracle,ODSCOMMON,ODSCOMMON, +Oracle,OE,OE, +Oracle,OEMADM,OEMADM, +Oracle,OEMREP,OEMREP, +Oracle,OLAPDBA,OLAPDBA, +Oracle,OLAPSVR,INSTANCE, +Oracle,OLAPSYS,MANAGER, +Oracle,OMWB_EMULATION,ORACLE, +Oracle,OO,OO, +Oracle,OPENSPIRIT,OPENSPIRIT, +Oracle,ORACACHE,(random password), +Oracle,ORAREGSYS,ORAREGSYS, +Oracle,ORASSO,ORASSO, +Oracle,ORDPLUGINS,ORDPLUGINS, +Oracle,ORDSYS,ORDSYS, +Oracle,OSE$HTTP$ADMIN,(random password), +Oracle,OSP22,OSP22, +Oracle,OUTLN,OUTLN, +Oracle,OWA,OWA, +Oracle,OWA_PUBLIC,OWA_PUBLIC, +Oracle,OWNER,OWNER, +Oracle,PANAMA,PANAMA, +Oracle,PATROL,PATROL, +Oracle,PERFSTAT,PERFSTAT, +Oracle,PLEX,PLEX, +Oracle,PLSQL,SUPERSECRET, +Oracle,PM,PM, +Oracle,PO,PO, +Oracle,PO7,PO7, +Oracle,PO8,PO8, +Oracle,PORTAL30,PORTAL30, +Oracle,PORTAL30,PORTAL31, +Oracle,PORTAL30_DEMO,PORTAL30_DEMO, +Oracle,PORTAL30_PUBLIC,PORTAL30_PUBLIC, +Oracle,PORTAL30_SSO,PORTAL30_SSO, +Oracle,PORTAL30_SSO_PS,PORTAL30_SSO_PS, +Oracle,PORTAL30_SSO_PUBLIC,PORTAL30_SSO_PUBLIC, +Oracle,POWERCARTUSER,POWERCARTUSER, +Oracle,PRIMARY,PRIMARY, +Oracle,PUBSUB,PUBSUB, +Oracle,PUBSUB1,PUBSUB1, +Oracle,QDBA,QDBA, +Oracle,QS,QS, +Oracle,QS_ADM,QS_ADM, +Oracle,QS_CB,QS_CB, +Oracle,QS_CBADM,QS_CBADM, +Oracle,QS_CS,QS_CS, +Oracle,QS_ES,QS_ES, +Oracle,QS_OS,QS_OS, +Oracle,QS_WS,QS_WS, +Oracle,RE,RE, +Oracle,REPADMIN,REPADMIN, +Oracle,REPORTS_USER,OEM_TEMP, +Oracle,REP_MANAGER,DEMO, +Oracle,REP_OWNER,DEMO, +Oracle,REP_OWNER,REP_OWNER, +Oracle,RMAIL,RMAIL, +Oracle,RMAN,RMAN, +Oracle,SAMPLE,SAMPLE, +Oracle,SAP,SAPR3, +Oracle,SCOTT,TIGER, +Oracle,SDOS_ICSAP,SDOS_ICSAP, +Oracle,SECDEMO,SECDEMO, +Oracle,SERVICECONSUMER1,SERVICECONSUMER1, +Oracle,SH,SH, +Oracle,SITEMINDER,SITEMINDER, +Oracle,SLIDE,SLIDEPW, +Oracle,STARTER,STARTER, +Oracle,STRAT_USER,STRAT_PASSWD, +Oracle,SWPRO,SWPRO, +Oracle,SWUSER,SWUSER, +Oracle,SYMPA,SYMPA, +Oracle,SYS,CHANGE_ON_INSTALL, +Oracle,SYS,D_SYSPW, +Oracle,SYSADM,SYSADM, +Oracle,SYSMAN,OEM_TEMP, +Oracle,SYSMAN,oem_temp, +Oracle,SYSTEM,D_SYSTPW, +Oracle,SYSTEM,MANAGER, 
+Oracle,TAHITI,TAHITI, +Oracle,TDOS_ICSAP,TDOS_ICSAP, +Oracle,TESTPILOT,TESTPILOT, +Oracle,TRACESRV,TRACE, +Oracle,TRACESVR,TRACE, +Oracle,TRAVEL,TRAVEL, +Oracle,TSDEV,TSDEV, +Oracle,TSUSER,TSUSER, +Oracle,TURBINE,TURBINE, +Oracle,ULTIMATE,ULTIMATE, +Oracle,USER,USER, +Oracle,USER0,USER0, +Oracle,USER1,USER1, +Oracle,USER2,USER2, +Oracle,USER3,USER3, +Oracle,USER4,USER4, +Oracle,USER5,USER5, +Oracle,USER6,USER6, +Oracle,USER7,USER7, +Oracle,USER8,USER8, +Oracle,USER9,USER9, +Oracle,UTLBSTATU,UTLESTAT, +Oracle,VIDEOUSER,VIDEO USER, +Oracle,VIF_DEVELOPER,VIF_DEV_PWD, +Oracle,VIRUSER,VIRUSER, +Oracle,VRR1,VRR1, +Oracle,WEBCAL01,WEBCAL01, +Oracle,WEBDB,WEBDB, +Oracle,WEBREAD,WEBREAD, +Oracle,WKSYS,WKSYS, +Oracle,WWW,WWW, +Oracle,WWWUSER,WWWUSER, +Oracle,XPRT,XPRT, +Oracle,admin,admin,http://docs.oracle.com/cd/E22630_01/Platform.1002/pdf/ATGInstallGuide.pdf +Oracle,admin,adminadmin,http://www.oracle.com/technetwork/java/install-139173.html +Oracle,admin,security, +Oracle,admin,welcome, +Oracle,bpel,bpel, +Oracle,cn=orcladmin,welcome, +Oracle,demo,demo, +Oracle,ilom-admin,ilom-admin,http://seclists.org/fulldisclosure/2012/Nov/229 +Oracle,ilom-operator,ilom-operator,http://seclists.org/fulldisclosure/2012/Nov/229 +Oracle,internal,oracle, +Oracle,joe,password, +Oracle,mary,password, +Oracle,nm2user,nm2user,http://seclists.org/fulldisclosure/2012/Nov/229 +Oracle,oracle,oracle, +Oracle,scott,tiger or tigger, +Oracle,siteadmin,siteadmin,http://docs.oracle.com/cd/E24152_01/Platform.10-1/ATGMultisiteAdminGuide/html/s1505accesscontrol01.html +Oracle,sys,change_on_install, +Oracle,sys,sys, +Oracle,system,manager, +Oracle,system,password, +Oracle,system,security, +Oracle,system/manager,sys/change_on_install, +Oracle,webdb,webdb, +Oracle,weblogic,weblogic, +Oracle,wlcsystem,wlcsystem, +Oracle,wlpisystem,wlpisystem, +Orange,admin,admin, +Orange,root,1234, +Osicom,Manager,Admin, +Osicom,Manager,Manager, +Osicom,d.e.b.u.g,User, +Osicom,debug,d.e.b.u.g, +Osicom,echo,User, 
+Osicom,echo,echo, +Osicom,guest,User, +Osicom,guest,guest, +Osicom,sysadm,Admin, +Osicom,sysadm,sysadm, +Osicom,write,private, +Overland,Factory,56789, +Overland Storage,root,Password, +OvisLink Canada Inc.,root,root, +OvisLink Canada Inc.,user,user, +PBX,tech,nician, +PHPReactor,core,phpreactor,http://freshmeat.net/projects/phpreactor/ +PLANET Technology Corp.,admin,ISPMODE, +PLANET Technology Corp.,admin,[^_^], +POWERLOGIC,Administrator,Gateway,http://www.merlingerin.com/C12570CB00504485/all/83A902802C60F942412570D9003ABDC1/$File/63230-319-204.pdf +PRTG,prtgadmin,prtgadmin, +Pacific Micro Data,pmd,, +Packard Bell,,bell9, +Packard Bell,,bell9, +Packeteer,,touchpwd=, +Panasonic,,1234, +Panasonic,,, +Panasonic,admin,1234, +Panasonic,admin,12345,ftp://ftp.panasonic.com/pub/Panasonic/cctv/OperatingInstructions/WV-NS202A-Operating-Instructions.pdf +Pandatel,admin,admin, +Parallels,admin,setup, +Parrot,,0000, +Patton,monitor,monitor, +Patton,superuser,superuser, +PentaSafe,PSEAdmin,$secure$, +Pentagram,admin,password, +Pentaoffice,,pento, +Perle,admin,superuser, +Philips,admin,admin, +Phoenix v1.14,Administrator,admin, +Pikatel,DSL,DSL, +Pirelli,admin,admin, +Pirelli,admin,microbusiness, +Pirelli,admin,mu, +Pirelli,admin,smallbusiness, +Pirelli,user,password, +"Pivotal Software, Inc. 
",guest,guest,https://www.rabbitmq.com/management.html +PlainTree,,default.password, +Planet,,default, +Planet,admin,1234, +Planet,admin,epicrouter, +Planex,admin,0, +PokerTracker Software,postgres,dbpass,http://www.pokertracker.com/products/PT3/docs/PokerTracker3_Quick_Start_Guide.pdf +PokerTracker Software,postgres,svcPASS83,http://www.pokertracker.com/products/PT3/docs/PokerTracker3_Quick_Start_Guide.pdf +Pollsafe,SMDR,SECONDARY, +Polycom,,, +Polycom,,ACCORD,"http://www.polycom.com/common/pw_cmp_updateDocKeywords/0,1687,6312,00.pdf" +Polycom,,admin, +Polycom,,x6zynd56, +Polycom,Polycom,456,username is case sensitive +Polycom,Polycom,SpIp, +Polycom,administrator,* * #, +PostgreSQL,postgres,, +Powerchute,pwrchute,pwrchute, +Prestige,admin,1234, +Prestigio,,, +Prime,dos,dos, +Prime,fam,fam, +Prime,guest,guest, +Prime,guest1,guest, +Prime,guest1,guest1, +Prime,mail,mail, +Prime,maint,maint, +Prime,mfd,mfd, +Prime,netlink,netlink, +Prime,prime,prime, +Prime,prime,primeos, +Prime,primenet,primenet, +Prime,primenet,primeos, +Prime,primeos,prime, +Prime,primeos,primeos, +Prime,primos_cs,prime, +Prime,primos_cs,primos, +Prime,system,prime, +Prime,system,system, +Prime,tele,tele, +Prime,test,test, +PrimeBase,Administrator,, +Prolink,admin,password, +"Promise Technology, Inc.",administrator,password,http://www.promise.com/media_bank/Download%20Bank/Manual/1_WebPAM%20User%20Manual%20v1.4.pdf +Promise,admin,admin, +Promise,engmode,hawk201, +Prostar,none,4321, +Protocraft,musi1921,Musi%1921, +Proxicast,,1234,http://www.proxicast.com/support/files/GPRS-QuickStartGuide.pdf +Proxim,,, +Proxim,,public, +Psionteklogix,admin,admin, +Psionteklogix,support,h179350, +Pyramid Computer,admin,admin, +Pyramid Computer,admin,gnumpf, +Q-Tec,Admin,, +QDI,,QDI, +QDI,,lesarotl, +QDI,,password, +QDI,,QDI, +QDI,,lesarotl, +QLogic,admin,password, +QLogic,images,images, +QualiTeam,master,master, +Quantex,,teX1, +Quantex,,xljlbj, +Quantex,,teX1, +Quantex,,xljlbj, +Quantum,,, +Quest 
Software,TOAD,TOAD, +Questra Corporation,guest,guest, +Questra Corporation,questra,questra, +Quintum Technologies Inc.,admin,admin, +RCA,,admin, +RM,,RM, +RM,,RM, +RM,RMUser1,password, +RM,admin,rmnetlm, +RM,admin2,changeme, +RM,adminstrator,changeme, +RM,deskalt,password, +RM,deskman,changeme, +RM,desknorm,password, +RM,deskres,password, +RM,guest,, +RM,replicator,replicator, +RM,setup,changeme, +RM,teacher,password, +RM,temp1,password, +RM,topicalt,password, +RM,topicnorm,password, +RM,topicres,password, +RNN,admin,demo, +RObiGVqUbQt,wVQxyQec,eomjbOBLLwbZeiKV, +RSA,admin,admin1234, +RSA,administrator,RSAAppliance, +RSA,master,themaster01, +Radio Shack,,744, +Radio Shack,[MULTIPLE],744, +Radvision,,MCUrv, +Radvision,admin,, +Radware,lp,lp, +Radware,radware,radware, +Raidzone,,raidzone, +Raidzone,,raidzone, +Rainbow,,PASSWORD, +Rainbow,,rainbow, +Ramp Networks,wradmin,trancell, +RapidStream,rsadmin,, +Raritan Inc.,admin,raritan,http://seclists.org/fulldisclosure/2015/Sep/33 +Raritan Inc.,epiq_api,raritan,http://seclists.org/fulldisclosure/2015/Sep/33 +Raritan Inc.,web_api,sl33p30F00dumass!,http://seclists.org/fulldisclosure/2015/Sep/33 +Raritan,admin,raritan, +RayTalk,root,root, +"Red Hat, Inc",,AMIAMI, +"Red Hat, Inc",,AMIDECOD, +"Red Hat, Inc",admin,admin,http://docs.jboss.org/jbossas/guides/installguide/r1/en/html_single/ +"Red Hat, Inc",piranha,piranha, +"Red Hat, Inc",piranha,q, +RedHat,piranha,piranha, +RedHat,piranha,q, +Redcreek Communications,,1234, +Redcreek Communications,,private, +Remedy,ARAdmin,AR#Admin#, +Remedy,Demo,, +Research,,Col2ogro2, +Research,,Col2ogro2, +Research Machines,manager,changeme, +Resumix,root,resumix, +Ricoh,,password, +Ricoh,,sysadm, +Ricoh,,password, +Ricoh,,sysadm, +Ricoh,admin,, +Ricoh,admin,no password, +Ricoh,admin,password, +Ricoh,sysadm,sysadm, +Ricoh,sysadmin,password, +Riverbed,Admin,password, +Rizen,Admin,123qwe,http://freshmeat.net/projects/webgui/ +RoamAbout,admin,password, +Rodopi,Rodopi,Rodopi, 
+RuggedCom,Admin,admin, +SAF Tehnika,administrator,d1scovery, +SAF Tehnika,integrator,p1nacate, +SAF Tehnika,monitor,monitor, +SAF Tehnika,operator,col1ma, +SAGEM,admin,epicrouter, +SAP,Administrator,manage, +SAP,DDIC,19920706, +SAP,Developer,isdev, +SAP,EARLYWATCH,SUPPORT, +SAP,Replicator,iscopy, +SAP,SAP*,06071992, +SAP,SAP*,7061992, +SAP,SAP*,PASS, +SAP,SAPCPIC,ADMIN, +SAP,SAPCPIC,admin, +SAP,SAPR3,SAP, +SAP,TMSADM,, +SAP,admin,axis2,https://spl0it.wordpress.com/2010/11/23/axis2-deployer-metasploit-modules-upload-exec-via-soap/ +SAP client EARLYWATCH,admin,Support, +SAP,ctb_admin,sap123, +SAP,itsadmin,init, +SAP,xmi_demo,sap123, +SMA America,,sma,http://files.sma.de/dl/4253/SWebBox-BUS-eng-111033.pdf +SMC,,0000, +SMC,,, +SMC,,smcadmin, +SMC,,smcadmin, +SMC,Admin,Barricade, +SMC,Administrator,smcadmin, +SMC,admin,, +SMC,admin,admin, +SMC,admin,barricade, +SMC,admin,smcadmin, +SMC,cusadmin,highspeed, +SMC,default,WLAN_AP, +SMC,mso,w0rkplac3rul3s, +SMC,none,none, +SMC,smc,smcadmin, +SOPHIA (Schweiz),admin,Protector, +SOPHIA (Schweiz),root,root, +SSA,SSA,SSA,rarely changed/used for +SUN,root,sun123, +SWEEX,sweex,mysweex, +Saba,admin,admin, +Safecom,admin,epicrouter, +Sagem,Menara,Menara, +Sagem,admin,admin, +Sagem,root,1234, +Samba,Any,Any, +Sambar Technologies,admin,, +Sambar Technologies,anonymous,, +Sambar Technologies,billy-bob,, +Sambar Technologies,ftp,, +Sambar Technologies,guest,guest, +Samsung,,s!a@m#n$p%c,http://l8security.com/post/36715280176/uv-281284-samsung-printer-snmp-backdoor +Samsung,,, +Samsung,admin,password,after reset +Samsung,public,public,def. 
WEP keys: 0123456789 +Samuel Abels,user,password, +Schneider Electric,,admin, +Schneider Electric,USER,USER, +Schneider Electric,ntpupdate,ntpupdate, +Scientific Atlanta,admin,w2402, +Seagate,admin,admin, +Seagull Scientific,ADMIN,admin, +Seagull Scientific,USER,USER, +Seclore,root,changeonfirstlogin, +Seclore,sa,changeonfirstlogin, +Secure Computing,admin,, +Securicor3NET,manager,friend, +Semaphore,DESQUETOP,, +Semaphore,DS,, +Semaphore,DSA,, +Semaphore,PHANTOM,, +Sempre,admin,admin, +Senao,admin,, +Sercom,admin,admin, +Server Technology,ADMN,admn,Telnet port 2001 +Server Technology,GEN1,gen1,Telnet port 2001 +Server Technology,GEN2,gen2,Telnet port 2001 +"Seyeon Technology Co., Ltd",root,root,http://www.flexwatch.com/pro_down/fw_all/M4028%20-%20FlexWATCH%20User%20Manual%20Part2.pdf +Sharp,,sysadm, +Sharp,Administrator,admin, +Sharp,admin,Sharp, +Sharp,admin,admin,Different to other Sharp units +Sharp,none,sysadm, +Shiva,admin,hello, +Shiva,guest,, +Shiva,hello,hello, +Shiva,root,, +ShoreTel,Admin,admin1, +Shoretel,admin,changeme, +Shuttle,,Spacve, +Shuttle,,Spacve, +Siemens,31994,31994, +Siemens,,0, +Siemens,,123456,http://wiki.unify.com/wiki/OpenStage_SIP_FAQ#What_are_the_default_passwords.3F +Siemens,,admin, +Siemens,,123456, +Siemens,,, +Siemens,,SKY_FOX, +Siemens,,admin, +Siemens,,gubed, +Siemens Corp,18140815,18140815, +Siemens Corp,31994,31994, +Siemens Corp,,SKY_FOX, +Siemens Corp,,uboot, +Siemens Corp,WinCCAdmin,2WSXcde,http://iadt.siemens.ru/forum/viewtopic.php?p=2974&sid=58cedcf3a0fc7a0b6c61c7bc46530928 +Siemens Corp,WinCCConnect,2WSXcder,http://iadt.siemens.ru/forum/viewtopic.php?p=2974&sid=58cedcf3a0fc7a0b6c61c7bc46530928 +Siemens Corp,admin,, +Siemens Corp,admin,pwp, +Siemens Corp,eng,engineer, +Siemens Corp,op,op, +Siemens Corp,op,operator, +Siemens Corp,poll,poll, +Siemens Corp,poll,tech, +Siemens Corp,su,super, +Siemens Corp,sysadmin,sysadmin, +Siemens Corp,system,field, +Siemens Corp,system,system, +Siemens Corp,tech,tech, +Siemens,admin,, 
+Siemens,admin,, +Siemens,admin,admin,Also has an account with: +Siemens,admin,hagpolm1, +Siemens,admin,pwp, +Siemens,basisk,basisk,http://m.itworld.com/data-centerservers/190269/power-plant-hack-anybody-could-use +Siemens,eng,engineer, +Siemens,op,op, +Siemens,op,operator, +Siemens,poll,tech, +Siemens,su,super, +Siemens,superuser,admin, +Siemens,sysadmin,sysadmin, +Siemens,tech,field, +Siemens,tech,tech, +Sierra Wireless,user,12345,http://mycusthelp.net/SIERRAWIRELESS/_cs/AnswerDetail.aspx?sSessionID=&aid=468 +Sigma,admin,admin, +Signamax,admin,admin, +Siips,Administrator,ganteng,Thx silex +Silex Technology,root,,http://www.silexeurope.com/en/home/support/faq/usb-device-server.html#faq25 +Silicon Graphics,4Dgifts,4Dgifts, +Silicon Graphics,4Dgifts,, +Silicon Graphics,6.x,, +Silicon Graphics,Ezsetup,, +Silicon Graphics,OutOfBox,, +Silicon Graphics,demos,, +Silicon Graphics,field,field, +Silicon Graphics,guest,, +Silicon Graphics,lp,, +Silicon Graphics,tour,tour, +Silicon Graphics,tutor,, +Silicon Graphics,tutor,tutor, +Silvercrest,admin,admin, +Site Interactive,admin,pass, +Sitecom,,damin, +Sitecom,,sitecom, +Sitecom,admin,admin, +Sitecom,admin,password, +Sitecore Corporation,Audrey,a,http://www.procheckup.com/media/176566/pentesting_sitecore.pdf +Sitecore Corporation,Bill,b,http://www.procheckup.com/media/176566/pentesting_sitecore.pdf +Sitecore Corporation,Denny,d,http://www.procheckup.com/media/176566/pentesting_sitecore.pdf +Sitecore Corporation,Lonnie,l,http://www.procheckup.com/media/176566/pentesting_sitecore.pdf +Sitecore Corporation,Minnie,m,http://www.procheckup.com/media/176566/pentesting_sitecore.pdf +Sitecore Corporation,admin,b,http://sdn.sitecore.net/upload/sdn5/tools/v53_to_v6/sitecore_cms_53_to_cms_6_database_conversion_tool-a4.pdf +SmartSwitch,admin,, +Snap Appliance,admin,admin, +SnapGear,root,default, +Snapgear,root,admin, +Snom,Administrator,0000, +Software AG,Administrator,manage, +Softwarehouse,manager,manager, 
+SolarWinds,LocalAdministrator,#l@$ak#.lk;0@P,http://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2004-2532 +SolarWinds,whd,whd,http://www.solarwinds.com/documentation/WebHelpDesk/docs/WHDAdminGuide.pdf +Solution 6,aaa,often blank, +Solwise,root,same as webui pwd, +Sonic-X,root,admin, +SonicWALL,admin,password, +Sonicwall,admin,password, +Sony Ericsson,,0000, +Sony,admin,admin, +Sorenson,,admin, +Sourcefire,admin,password, +Sourcefire,root,password, +Sovereign Hill,Admin,shs, +Sparklan,admin,admin, +Spectra Logic,administrator,, +Spectra Logic,operator,, +SpeedStream,,admin, +SpeedStream,,adminttd, +SpeedStream,Administrator,admin, +SpeedStream,admin,admin, +SpeedXess,,speedxess, +Sphairon,admin,passwort, +Spider Systems,,hello, +Spike,enable,, +Splunk,admin,changeme, +Ssangyoung,,2501, +Stan Ozier,admin,,http://taskfreak.com/ +Stratitec,root,ahetzip8, +SuSE GmbH,root,root, +Sun Microsystems,root,changeme, +Sun,admin,admin,submit by Nabil Ouchn +Sun,root,changeme, +Sun,root,t00lk1t, +Sun,ssp,ssp, +"Super Micro Computer, Inc.",ADMIN,ADMIN,http://www.supermicro.com/manuals/other/Onboard_BMC_IPMI.pdf +SuperMicro,,ksdjfg934t, +SuperMicro,,ksdjfg934t, +Supercook,admin,AlpheusDigital1010, +Supercook,super,super, +Supermicro,ADMIN,admin, +Surecom,admin,admin, +Surecom,admin,surecom, +Sweex,,, +Sweex,,admin, +Sweex,,mysweex, +Sweex,admin,1234, +Sweex,admin,epicrouter, +Sweex,none,blank, +Sweex,rdc123,rdc123, +Sweex,sweex,mysweex, +Swissvoice,target,password, +Syabas Technology,ftpuser,1234, +Syabas Technology,nmt,1234, +Sybase,12.x,, +Sybase,DBA,SQL, +Sybase,jagadmin,, +Sybase,sa,, +Sybase,sa,sasasa, +Symantec,,symantec, +Symantec,,symantec, +Symantec,admin,, +Symantec,admin,symantec, +Symantec,root,brightmail, +Symbol,,Symbol, +Symbol,Symbol,Symbol, +"Symbol Technologies, Inc",admin,superuser, +Symbol,admin,symbol, +Symmetricom,guest,truetime, +Symmetricom,operator,mercury, +Synology 
Inc,admin,,http://forum.synology.com/wiki/index.php/Quick_Install_Guide:_Installing_the_Synology_Server +SysKonnect,default.password,, +SysMaster,admin,12345, +System/32,install,secret, +T-Com,,0, +T-Com,,123456, +T-Com,,0, +T-Comfort,Administrator,, +T-com,veda,12871, +TELTRONIC S.A.U.,admin,tetra, +TIBCO,admin,admin,https://docs.tibco.com/pub/managed-file-transfer-internet-server/7.2.0-may-2012/MFT%20IS%20v7.2.0%20Installation%20Guide.pdf +TIBCO,admin,changeit,https://docs.tibco.com/pub/slingshot/1.9.3/doc/pdf/TIB_slingshot_1.9.3_installation.pdf +TMC,,BIGO, +TMC,,BIGO, +TOTOLINK,onlime_r,12345,https://pierrekim.github.io/advisories/2015-totolink-0x03.txt +TOTOLINK,root,12345,https://pierrekim.github.io/advisories/2015-totolink-0x03.txt +TP Link,admin,admin, +TVT System,,enter, +TVT System,craft,, +TYPO3,,joh316, +TYPO3,admin,password, +Tandberg,,GWrv, +Tandberg,,TANDBERG, +Tandberg,,10023, +Tandberg,Admin,, +Tandberg,admin,, +Tandberg,admin,TANDBERG, +Tandberg,root,TANDBERG, +Tandem,super.super,, +Tandem,super.super,master, +Tasman,Tasman,Tasmannet, +Team Xodus,xbox,xbox, +Tegile,admin,tegile,https://40centcoffee.wordpress.com/2014/09/17/register-vcenter-server-on-tegile/ +Teklogix,Administrator,, +Telappliant,admin,1234, +Telco Systems,telco,telco, +Telebit,setup,setup, +Telebit,snmp,nopasswd, +Teledat,admin,1234, +Telelec,eagle,eagle, +Teletronics,admin,1234, +Telewell,admin,admin, +Telewell,admin,password, +Telindus,,, +Telindus,admin,admin, +Tellabs,root,admin_1,telnet on port 3083 +Tellabs,tellabs,tellabs#1, +Telus,(created),telus00, +Telus,(created),telus99, +Terayon,,, +Terayon,admin,password, +TexBox,,123, +TextPortal,god1,12345, +TextPortal,god2,12345, +Thecus Tech,admin,admin,http://www.google.com/url?sa=t&source=web&cd=77&ved=0CC4QFjAGOEY&url=http%3A%2F%2Fwww.thecus.com%2Fdownload%2Fother%2FN5200_FAQ_2006-11-22.pdf&ei=ayJcTKXXEMOC8gb1qImLAg&usg=AFQjCNE0RnP34sEcHyg382gRoxnnKcNRKw +Thomson,,admin, +Thomson,,admin,SSID : SpeedTouch180 
+Thomson,admin,admin, +Thomson,admin,password, +Tiara Networks,,tiara, +Tiara,tiara,tiaranet,also known as Tasman Networks +Tim Schaab,theman,changeit,http://freshmeat.net/projects/madthought/ +TimeTools,admin,admin, +Tiny,,Tiny, +Tinys,,tiny, +Tinys,,Tiny, +TopLayer,siteadmin,toplayer, +Topcom,admin,admin, +Toshiba,,24Banc81, +Toshiba,,Toshiba, +Toshiba,,toshy99, +Toshiba,,24Banc81, +Toshiba,,, +Toshiba,,Toshiba, +Toshiba,,toshy99, +Toshiba,Admin,123456, +Toshiba,admin,123456, +Toshiba,super,superpass, +Trend Micro,admin,admin, +Trend Micro,admin,imsa7.0,http://www.trendmicro.com/ftp/documentation/guides/IMSA-QSG.pdf +Trend Micro,root,trendimsa1.0, +TrendMicro,admin,imss7.0, +TrendNET,admin,password, +Trintech,t3admin,Trintech, +Tripp Lite,root,TrippLite, +Triumph-Adler,admin,0, +Troy,admin,extendnet, +Tsunami,managers,managers, +Tumbleweed,Admin,SECRET123, +Typo3 Association,admin,password, +U.S. Robotics,,12345, +U.S. Robotics,,admin, +U.S. Robotics,Any,12345, +U.S. Robotics,admin,, +U.S. Robotics,admin,admin, +U.S. Robotics,none,amber, +U.S. Robotics,root,12345, +U.S. Robotics,root,admin, +U.S. 
Robotics,support,support, +UNEX,,password, +UNIX,adm,, +UNIX,adm,adm, +UNIX,admin,admin, +UNIX,administrator,, +UNIX,administrator,administrator, +UNIX,anon,anon, +UNIX,bbs,, +UNIX,bbs,bbs, +UNIX,bin,sys, +UNIX,checkfs,checkfs, +UNIX,checkfsys,checkfsys, +UNIX,checksys,checksys, +UNIX,daemon,, +UNIX,daemon,daemon, +UNIX,demo,, +UNIX,demo,demo, +UNIX,demos,, +UNIX,demos,demos, +UNIX,dni,, +UNIX,dni,dni, +UNIX,fal,, +UNIX,fal,fal, +UNIX,fax,, +UNIX,fax,fax, +UNIX,ftp,, +UNIX,ftp,ftp, +UNIX,games,, +UNIX,games,games, +UNIX,gopher,gopher, +UNIX,gropher,, +UNIX,guest,, +UNIX,guest,guest, +UNIX,guest,guestgue, +UNIX,halt,, +UNIX,halt,halt, +UNIX,informix,informix, +UNIX,install,install, +UNIX,lp,, +UNIX,lp,bin, +UNIX,lp,lineprin, +UNIX,lp,lp, +UNIX,lpadm,lpadm, +UNIX,lpadmin,lpadmin, +UNIX,lynx,, +UNIX,lynx,lynx, +UNIX,mail,, +UNIX,mail,mail, +UNIX,man,, +UNIX,man,man, +UNIX,me,, +UNIX,me,me, +UNIX,mountfs,mountfs, +UNIX,mountfsys,mountfsys, +UNIX,mountsys,mountsys, +UNIX,news,, +UNIX,news,news, +UNIX,nobody,, +UNIX,nobody,nobody, +UNIX,nuucp,, +UNIX,operator,, +UNIX,operator,operator, +UNIX,oracle,, +UNIX,postmaster,, +UNIX,postmaster,postmast, +UNIX,powerdown,powerdown, +UNIX,rje,rje, +UNIX,root,, +UNIX,root,hp, +UNIX,root,root, +UNIX,service,smile, +UNIX,setup,, +UNIX,setup,setup, +UNIX,shutdown,, +UNIX,shutdown,shutdown, +UNIX,sync,, +UNIX,sync,sync, +UNIX,sys,bin, +UNIX,sys,sys, +UNIX,sys,system, +UNIX,sysadm,admin, +UNIX,sysadm,sysadm, +UNIX,sysadmin,sysadmin, +UNIX,sysbin,sysbin, +UNIX,system_admin,, +UNIX,system_admin,system_admin, +UNIX,trouble,trouble, +UNIX,umountfs,umountfs, +UNIX,umountfsys,umountfsys, +UNIX,umountsys,umountsys, +UNIX,unix,unix, +UNIX,user,user, +UNIX,uucp,uucp, +UNIX,uucpadm,uucpadm, +UNIX,web,, +UNIX,web,web, +UNIX,webmaster,, +UNIX,webmaster,webmaster, +UNIX,www,, +UNIX,www,www, +USRobotics,admin,admin,http://www.usr.com/support/doc-popup-template.asp?url=faqs/networking/wireless-security-07/wireless-security-07.htm&loc=unst +UT 
Lexar,lexar,, +UTStarcom,dbase,dbase, +UTStarcom,field,field, +UTStarcom,guru,*3noguru, +UTStarcom,snmp,snmp, +Unex,,password, +Unidesk,Administrator,Unidesk1,http://www.unidesk.com/support/learn/2.7.0/administer/manage_appliances/admin_ma_config +Unify,,123456,http://wiki.unify.com/wiki/OpenStage_SIP_FAQ#What_are_the_default_passwords.3F +Union,root,root, +Unisys,ADMINISTRATOR,ADMINISTRATOR, +Unisys,HTTP,HTTP, +Unisys,NAU,NAU, +United Technologies Corporation,admin,1234,http://www.interlogix.com/_/assets/library/1072627A%20TruVision%20IP%20Camera%20Configuration%20Manual.pdf +Unknown,,password, +Unknown,operator,operator, +Unknown,overseer,overseer, +Unknown,test,test, +UsRobotics,Any,12345, +Utstar,admin,utstar, +VASCO,admin,, +VBrick Systems,admin,admin, +VPASP,admin,admin, +VPASP,vpasp,vpasp, +Various,root,admin,Alternative firmware +Veramark,admin,password, +Verifone,,166816, +Verilink,,, +Veritas,admin,password, +Verity,admin,admin, +Verizon,admin,password,http://www22.verizon.com/Support/Residential/Internet/fiosinternet/networking/setup/vz9100em/124043.htm +Vextrec Technology,,Vextrex, +Vextrec Technology,,Vextrex, +VieNuke,admin,admin, +Vina Technologies,,, +Virtual Programming,admin,admin, +Virtual Programming,vpasp,vpasp, +Visa VAP,root,QNX, +Visual Networks,admin,visual, +Vobis,,merlin, +Vobis,,merlin, +VoiceGenie Technologies,pw,pw, +VoiceObjects Germany,voadmin,manager,http://developers.voiceobjects.com/docs/en/VO9/024-voiceobjectspreferences.htm +Vonage,user,user, +VxWorks,admin,admin, +VxWorks,guest,guest, +WAAV,admin,waav, +WLAN_3D,Administrator,admin, +WWWBoard,WebAdmin,WebBoard, +Wanadoo,admin,admin, +"Wanco, Inc.",,ABCD,http://www.scribd.com/doc/68216572/Wanco-Users-Guide-Ntcip-Message-Sign-Software-Dec-2004 +"Wanco, Inc.",,Guest,http://www.scribd.com/doc/68216572/Wanco-Users-Guide-Ntcip-Message-Sign-Software-Dec-2004 +"Wanco, Inc.",,NTCIP,http://www.scribd.com/doc/68216572/Wanco-Users-Guide-Ntcip-Message-Sign-Software-Dec-2004 +"Wanco, 
Inc.",,Public,http://www.scribd.com/doc/68216572/Wanco-Users-Guide-Ntcip-Message-Sign-Software-Dec-2004 +Wang,CSG,SESAME, +WatchGuard,,wg, +WatchGuard,admin,admin,http://www.watchguard.com/help/docs/v70FireboxXEdge_QS.pdf +WatchGuard,admin,readwrite,http://www.watchguard.com/help/docs/wsm/xtm_11/en-us/content/en-us/basicadmin/factory_default_about_c.html +WatchGuard,status,readonly,http://www.watchguard.com/help/docs/wsm/xtm_11/en-us/content/en-us/basicadmin/factory_default_about_c.html +Watchguard,,wg, +Watchguard,admin,, +Watchguard,user,pass,works only from the inside LAN +Web Wiz,Administrator,letmein,http://www.webwizguide.info/web_wiz_forums/default.asp +Webmin,admin,hp.com, +Webramp,wradmin,trancell, +Weidm�eller,admin,detmond, +Westell,CSG,SESAME, +Westell,admin,, +Westell,admin,password, +Westell,admin,password1,Verizon cable router (Model +Westell,admin,sysAdmin, +Wim Bervoets,,Compleri, +Wim Bervoets,,Compleri, +"Wireless, Inc.",root,rootpass, +WorldClient,WebAdmin,Admin, +Wyse,,Fireport, +Wyse,,password,by satadru +Wyse,VNC,winterm, +Wyse,rapport,r@p8p0r+, +Wyse,root,, +Wyse,root,wyse, +X-Micro,1502,1502,From BUGTRAQ +X-Micro,super,super,From BUGTRAQ +XAMPP,newuser,wampp, +Xavi,,, +Xavi,admin,admin, +Xerox,11111,x-admin, +Xerox,,11111, +Xerox,,0, +Xerox,Administrator,Fiery.1, +Xerox,NSA,nsa,http://download.support.xerox.com/pub/docs/FlowPort2/userdocs/any-os/en/fp_dc_setup_guide.pdf +Xerox,admin,1111, +Xerox,admin,2222, +Xerox,admin,22222,works for access panel 2 +Xerox,admin,, +Xerox,admin,admin, +Xerox,admin,x-admin, +Xerox,savelogs,crash, +Xinit Systems Ltd.,openfiler,password, +Xylan,admin,switch, +Xylan,diag,switch, +Xyplex,,access, +Xyplex,,system, +Xyplex,,, +Xyplex,,access, +Xyplex,,system, +Xyplex,setpriv,system, +Yakumo,admin,admin, +Yokogawa,,727,For model codes ending in E +Yokogawa,admin,!admin,TFGW410 ISA100 gateway +Yuxin,User,1234, +Yuxin,User,19750407, +ZEOS,,zeosx, +ZEOS,,zeosx, +ZTE,ADSL,expert03,Default Password if user does 
+Zcom,root,admin, +Zcomax,admin,password, +Zebra Technologies,admin,1234, +Zebra,admin,1234, +Zenith,,3098z, +Zenith,,Zenith, +Zenith,,3098z, +Zenith,,Zenith, +Zeus,admin,, +Zoom,admin,zoomadsl, +ZyWALL Series,,admin, +Zyxel,1234,1234, +Zyxel,192.168.1.1 60020,@dsl_xilno, +Zyxel,,1234, +Zyxel,,1234, +Zyxel,,, +Zyxel,,admin, +Zyxel,Admin,atc456, +Zyxel,admin,0000,Password is 4 zeros. Gray router +Zyxel,admin,1234, +Zyxel,admin,,terra +Zyxel,admin,admin, +Zyxel,root,1234, +Zyxel,webadmin,1234, +accton t-online,,0, +acer,acer,acer, +actiontec,admin,admin, +adtran,,, +adtran,,Password,CTRL-L +adtran,,adtran, +allied,,, +ast,,, +backtrack,root,toor, +boson,,, +canyon,Administrator,admin, +crt,egcr,ergc, +cuproplus,,, +cyberguard,cgadmin,cgadmin, +darkman,ioFTPD,ioFTPD, +decnet,operator,admin, +digicom,Admin,, +drupal.org,admin,admin, +eMachines,emaq,4133, +eQ-3,root,MuZhlo9n%8!G, +eSeSIX Computer GmbH,root,jstwo, +eZ Systems,admin,publish,https://doc.ez.no/eZ-Publish/User-manual/4.x/The-administration-interface/The-login-page +enCAD,,, +ericsson,,help, +ericsson,,, +fon,admin,admin, +giga,Administrator,admin, +glFtpD,glftpd,glftpd, +glftpd,glftpd,glftpd, +greatspeed,netadmin,nimdaten,ETB Colombia +haier,ucenik23,ucenik, +iDirect,admin,P@55w0rd!,to enable ssh connections to the +iDirect,root,iDirect,first enable sshd telnet to router: +iNTERFACEWARE Inc.,admn,password,http://help.interfaceware.com/kb/767 +iPSTAR,admin,operator,For CSLoxInfo and iPSTAR Customers +iblitzz,admin,admin, +inchon,admin,admin, +infacta,Administrator,, +intel,,, +intel,admin,, +intel,khan,kahn, +intel,root,admin, +intex,,, +ion,,admin,vreau ceva +ion,Administrator,admin,vreau ceva +iso sistemi,,, +kaptest,admin,, +latis network,,, +longshine,admin,0, +m0n0wall,admin,mono, +maxdata,,, +medion,,medion, +metro,client,client, +microRouter,,letmein, +mklencke,root,blablabla, +motorola,,, +mro software,SYSADM,sysadm, +nCircle,root,ciwuxe, +olitec (Trendchip),admin,admin, +olitec,admin,adslolitec, 
+oodie.com,admin,admin, +ovislink,root,, +penril datability,,system, +pfSense,admin,pfsense, +phoenix,,admin, +phpLiteAdmin,,admin,https://bitbucket.org/phpliteadmin/public/wiki/Configuration +phpMyAdmin,root,, +phpTest,admin,1234, +phpTest,guest,guest, +planet,admin,admin, +ptcl,admin,admin, +rPath,admin,password, +redline,admin,admin, +reg.pnu.ac.ir,880175445,11223344, +remote-exploit,root,toor, +rm,administrator,password/changeme or secret, +schneider,USER,USER, +securstar,admin,rainbow, +seninleyimben,admin,admin, +sharp,,, +sitara,root,, +smartBridges,admin,public, +sprint,self,system, +stratacom,stratacom,stratauser, +stuccoboy,stuccoboy,100198, +technology,root,,for telnet / HTTP +telecom,operator,, +tert,james,james, +topsec,superman,talent, +vertex,root,vertex25, +warraCorp,pepino,pepino, +weblogic,system,weblogic, +winwork,operator,, +wline,admin,1234, +xavi,admin,admin, +xd,xd,xd, +xerox,,admin, +xerox,admin,1111, +zenitel,admin,alphaadmin,https://wiki.zenitel.com/wiki/Password_(IP_Stations) +zenitel,ADMIN,alphacom +zenitel,,1851 +zenitel,,1234 +zoom,admin,zoomadsl, diff --git a/subset/security/password/resources/raw/ssh.txt b/subset/security/password/resources/raw/ssh.txt new file mode 100644 index 0000000000..89513db0fd --- /dev/null +++ b/subset/security/password/resources/raw/ssh.txt @@ -0,0 +1,121 @@ +root:calvin +root:root +root:toor +administrator:password +NetLinx:password +administrator:Amx1234! +amx:password +amx:Amx1234! 
+admin:1988 +admin:admin +Administrator:Vision2 +cisco:cisco +c-comatic:xrtwk318 +root:qwasyx21 +admin:insecure +pi:raspberry +user:user +root:default +root:leostream +leo:leo +localadmin:localadmin +fwupgrade:fwupgrade +root:rootpasswd +admin:password +root:timeserver +admin:password +admin:motorola +cloudera:cloudera +root:p@ck3tf3nc3 +apc:apc +device:apc +eurek:eurek +netscreen:netscreen +admin:avocent +root:linux +sconsole:12345 +root:5up +cirros:cubswin:) +root:uClinux +root:alpine +root:dottie +root:arcsight +root:unitrends1 +vagrant:vagrant +root:vagrant +m202:m202 +demo:fai +root:fai +root:ceadmin +maint:password +root:palosanto +root:ubuntu1404 +root:cubox-i +debian:debian +root:debian +root:xoa +root:sipwise +debian:temppwd +root:sixaola +debian:sixaola +myshake:shakeme +stackato:stackato +root:screencast +root:stxadmin +root:nosoup4u +root:indigo +root:video +default:video +default: +ftp:video +nexthink:123456 +ubnt:ubnt +root:ubnt +sansforensics:forensics +elk_user:forensics +osboxes:osboxes.org +root:osboxes.org +sans:training +user:password +misp:Password1234 +hxeadm:HXEHana1 +acitoolkit:acitoolkit +osbash:osbash +enisa:enisa +geosolutions:Geos +pyimagesearch:deeplearning +root:NM1$88 +remnux:malware +hunter:hunter +plexuser:rasplex +root:openelec +root:rasplex +root:plex +root:openmediavault +root:ys123456 +root:libreelec +openhabian:openhabian +admin:ManagementConsole2015 +public:publicpass +admin:hipchat +nao:nao +support:symantec +root:max2play +admin:pfsense +root:root01 +root:nas4free +USERID:PASSW0RD +Administrator:p@ssw0rd +root:freenas +root:cxlinux +admin:symbol +admin:Symbol +admin:superuser +admin:admin123 +root:D13HH[ +root:blackarch +root:dasdec1 +root:7ujMko0admin +root:7ujMko0vizxv +root:Zte521 +root:zlxx. 
diff --git a/subset/security/password/resources/raw/telnet.txt b/subset/security/password/resources/raw/telnet.txt new file mode 100644 index 0000000000..6ffa3a3c9c --- /dev/null +++ b/subset/security/password/resources/raw/telnet.txt @@ -0,0 +1,146 @@ +root:calvin +administrator:password +NetLinx:password +administrator:Amx1234! +amx:password +amx:Amx1234! +admin:1988 +admin:admin +Administrator:Vision2 +cisco:cisco +root:fidel123 +user:user +root:default +localadmin:localadmin +Root:wago +Admin:wago +User:user +Guest:guest +root:rootpasswd +admin:password +adtec:none +root:timeserver +root:password +Admin:Su +root:admin +admin:motorola +Admin:5001 +User:1001 +GE:GE +Admin:Pass +device:apc +apc:apc +root:anni2013 +root:xc3511 +root:dreambox +root:vizxv +admin:1111111 +admin:smcadmin +admin:4321 +888888:888888 +666666:666666 +ubnt:ubnt +admin:22222 +adminttd:adminttd +root:!root +admin:epicrouter +tech:tech +manager:manager +smc:smcadmin +netscreen:netscreen +netopia:netopia +root:888888 +root:xmhdipc +root:juantech +root:123456 +root:54321 +support:support +root:root +root:12345 +root:pass +admin:admin1234 +root:1111 +admin:1111 +root:666666 +root:1234 +root:klv123 +Administrator:admin +service:service +guest:guest +guest:12345 +admin1:password +administrator:1234 +root:klv1234 +root:Zte521 +root:hi3518 +root:jvbzd +root:anko +root:zlxx. 
+root:7ujMko0vizxv +root:7ujMko0admin +root:system +root:ikwb +root:dreambox +root:user +root:realtek +root:00000000 +admin:1234 +admin:12345 +default:OxhlwSG8 +admin:tlJwpbo6 +default:S2fGqNFs +admin:meinsm +supervisor:supervisor +admin:123456 +root:zlxx +dm:telnet +webguest:1 +Liebert:Liebert +User:User +admin:avocent +root:linux +admin:system +user:public +admin:private +guest:guest +admin:admin +root:root +qbf77101:hexakisoctahedron +ftpuser:password +USER:USER +Basisk:Basisk +sconsole:12345 +root:5up +root:cat1029 +MayGion:maygion.com +admin:cat1029 +admin:ZmqVfoSIP +default:antslq +admin:microbusiness +admin:jvc +root:GM8182 +root:uClinux +Alphanetworks:wrgg19_c_dlwbr_dir300 +Alphanetworks:wrgn49_dlob_dir600b +Alphanetworks:wrgn23_dlwbr_dir600b +Alphanetworks:wrgn22_dlwbr_dir615 +Alphanetworks:wrgnd08_dlob_dir815 +Alphanetworks:wrgg15_di524 +Alphanetworks:wrgn39_dlob.hans_dir645 +Alphanetworks:wapnd03cm_dkbs_dap2555 +Alphanetworks:wapnd04cm_dkbs_dap3525 +Alphanetworks:wapnd15_dlob_dap1522b +Alphanetworks:wrgac01_dlob.hans_dir865 +Alphanetworks:wrgn23_dlwbr_dir300b +Alphanetworks:wrgn28_dlob_dir412 +Alphanetworks:wrgn39_dlob.hans_dir645_V1 +root:oelinux123 +mg3500:merlin +root:cxlinux +root:1001chin +root:china123 +admin:symbol +admin:Symbol +admin:superuser +admin:admin123 +root:20080826 diff --git a/subset/security/password/run_password_test_for_protocol b/subset/security/password/run_password_test_for_protocol new file mode 100755 index 0000000000..5b4d0c4099 --- /dev/null +++ b/subset/security/password/run_password_test_for_protocol @@ -0,0 +1,165 @@ +#!/bin/bash + +# A test which runs a dictionary brute force attack using ncrack/medusa, saving the result into a file afterwards. +# +# Supports http, https, telnet and ssh protocols on their specified ports. 
#
# Assumes the following files are in the dictionary directory:
# - dictionary.txt (File with format ":username:password")
# - usernames.txt (File with a list of usernames, one on each line)
# - passwords.txt (File with a list of passwords, one on each line)
#
# Usage: ./run_password_test_for_protocol TARGET_IP PROTOCOL PORT DICTIONARY_DIR RESULT_DIR

TARGET_IP=$1
PROTOCOL=$2
PORT=$3
DICTIONARY_DIR=$4
RESULT_DIR=$5

DICTIONARY="$DICTIONARY_DIR/dictionary.txt"
USERNAMES="$DICTIONARY_DIR/usernames.txt"
PASSWORDS="$DICTIONARY_DIR/passwords.txt"
RESULT_FILE="${RESULT_DIR}/security_password_${PROTOCOL}.result"

# Runs nmap against a target host and port. Echoes the command output.
# $1 Target IP
# $2 Target Port
function run_nmap_and_get_command() {
  local nmap_output
  nmap_output=$(nmap -p "$2" "$1")
  echo "$nmap_output"
}

# Returns success if the nmap output reports the port open for the protocol,
# matching a line such as "80/tcp open http".
# $1 Nmap command output
# $2 Target protocol
# $3 Target port
function is_specified_port_open() {
  echo "$1" | grep -q "$3.*open.*$2"
}

# Returns success if grep can find the host-down message in the nmap output.
# $1 Nmap command output
function is_host_down() {
  echo "$1" | grep -q "Host seems down"
}

# For http/https targets, succeeds only when a HEAD request is answered with
# "401 Unauthorized", i.e. the server actually challenges for credentials.
# For every other protocol the body is skipped and the function returns success.
# $1 Target protocol (http or https)
# $2 Target IP
# $3 Target port
function does_http_or_https_have_authentication() {
  if [[ $1 == "http" || $1 == "https" ]]; then
    # Pipe curl straight into grep; no need to re-echo a command substitution.
    curl -k -s -I "$1://$2:$3" | grep -q "401 Unauthorized"
  fi
}

# Runs the brute force using ncrack/medusa, and returns the output. Redirect stderr into stdout also.
+# $1 Target IP +# $2 Target protocol +# $3 Target port +# $4 Colon separated brute force dictionary for medusa +# $5 Username list for ncrack +# $6 Password list for ncrack +function run_brute_force_and_get_command_new() { + if [ "$2" == "http" ]; then + ncrack_output=$(ncrack --pairwise -v -U $5 -P $6 http://$1:$3 2>&1) + echo "$ncrack_output" + elif [ "$2" == "https" ]; then + ncrack_output=$(ncrack --pairwise -v -U $5 -P $6 https://$1:$3 2>&1) + echo "$ncrack_output" + elif [ "$2" == "ssh" ]; then + medusa_output=$(medusa -C $4 -h $1 -M ssh -n $3 -v 4 2>&1) + echo "$medusa_output" + elif [ "$2" == "telnet" ]; then + ncrack_output=$(ncrack --pairwise -v -U $5 -P $6 -T2 telnet://$1:$3 2>&1) + echo "$ncrack_output" + fi +} + +# Returns true if grep can find the success message in the brute force output. +# $1 ncrack/medusa output +function brute_force_successful() { + echo "$1" | grep -qE "Discovered credentials|\[SUCCESS\]" +} + +# True if "timed-out/prematurely-closed" is present and their values are not zero. +# $1 ncrack/medusa output +function ncrack_brute_force_skip() { + echo "$1" | grep -oE "timed-out: [0-9]+ \| prematurely-closed: [0-9]+" | grep -qvE "timed-out: 0 \| prematurely-closed: 0" +} + +# True if ALERT or NOTICE are in the output. +# $1 ncrack/medusa output +function medusa_brute_force_skip() { + echo "$1" | grep -qE "NOTICE:|ALERT:|ERROR:|FATAL:" +} + +# Writes the test result into a file. +# $1 Target protocol +# $2 Target port +# $3 Result code +function write_to_result_file() { + mkdir -p $RESULT_DIR + + if [ -f $RESULT_FILE ]; then + rm $RESULT_FILE + fi + touch $RESULT_FILE + + if [ "$3" == "pass" ]; then + echo "RESULT pass security.passwords.$1 Was not able to brute force using dictionary." > $RESULT_FILE + elif [ "$3" == "fail" ]; then + echo "RESULT fail security.passwords.$1 Was able to brute force using dictionary." 
> $RESULT_FILE + elif [ "$3" == "skip_no_host" ]; then + echo "RESULT skip security.passwords.$1 Unable to connect to host." > $RESULT_FILE + elif [ "$3" == "skip_no_port" ]; then + echo "RESULT skip security.passwords.$1 Port $2 not open on target device." > $RESULT_FILE + elif [ "$3" == "skip_ncrack_error" ]; then + echo "RESULT skip security.passwords.$1 Skipping due to brute force issue with ncrack. Please see log." > $RESULT_FILE + elif [ "$3" == "skip_medusa_error" ]; then + echo "RESULT skip security.passwords.$1 Skipping due to brute force issue with medusa. Please see log." > $RESULT_FILE + elif [ "$3" == "skip_http_error" ]; then + echo "RESULT skip security.passwords.$1 Skipping due to http(s) server not having authentication method." > $RESULT_FILE + fi +} + +# Main function: + +NMAP_OUTPUT="$(run_nmap_and_get_command $TARGET_IP $PORT)" +echo "$NMAP_OUTPUT" + +if ! is_host_down "$NMAP_OUTPUT"; then + + if is_specified_port_open "$NMAP_OUTPUT" $PROTOCOL $PORT; then + BRUTE_FORCE_OUTPUT="$(run_brute_force_and_get_command_new $TARGET_IP $PROTOCOL $PORT $DICTIONARY $USERNAMES $PASSWORDS)" + echo "$BRUTE_FORCE_OUTPUT" + + if ! does_http_or_https_have_authentication $PROTOCOL $TARGET_IP $PORT; then + echo "Will not start brute force as http(s) server has no authentication." + RESULT="skip_http_error" + elif ncrack_brute_force_skip "$BRUTE_FORCE_OUTPUT"; then + echo "Could not brute force due to an issue with ncrack at runtime. Please check log." + RESULT="skip_ncrack_error" + elif medusa_brute_force_skip "$BRUTE_FORCE_OUTPUT"; then + echo "Could not brute force due to an issue with medusa at runtime. Please check log." + RESULT="skip_medusa_error" + elif brute_force_successful "$BRUTE_FORCE_OUTPUT"; then + echo "Was able to brute force using dictionary." + RESULT="fail" + else + echo "Could not brute force using dictionary." + RESULT="pass" + fi + + else + echo "Could not connect to specified port on host." 
+ RESULT="skip_no_port" + fi + +else + echo "Could not connect to host." + RESULT="skip_no_host" +fi + +write_to_result_file $PROTOCOL $PORT $RESULT diff --git a/subset/security/password/test_password b/subset/security/password/test_password new file mode 100755 index 0000000000..9fe18b5bda --- /dev/null +++ b/subset/security/password/test_password @@ -0,0 +1,144 @@ +#!/bin/bash -e + +# Entry point for the security.admin.password test - Runs the test on all protocols in parallel +# and writes the output into reports file to be used by DAQ. + +source reporting.sh + +# Hard coded paths. +DAQ_REPORT="/tmp/report.txt" +RESULTS_DIR="/tmp/results" +LOG_DIR="/tmp/logs" +MODULE_CONFIG="/tmp/module_config.json" + +# Hard coded files and names. +HTTP_LOG="$LOG_DIR/security_password_http.log" +HTTPS_LOG="$LOG_DIR/security_password_https.log" +SSH_LOG="$LOG_DIR/security_password_ssh.log" +TELNET_LOG="$LOG_DIR/security_password_telnet.log" + +HTTP_RESULT="$RESULTS_DIR/security_password_http.result" +HTTPS_RESULT="$RESULTS_DIR/security_password_https.result" +SSH_RESULT="$RESULTS_DIR/security_password_ssh.result" +TELNET_RESULT="$RESULTS_DIR/security_password_telnet.result" + +# Hard coded json keys for jq +DICTIONARY_DIR_KEY=".modules.password.dictionary_dir" +HTTP_PORT_KEY=".modules.password.http_port" +HTTPS_PORT_KEY=".modules.password.https_port" +SSH_PORT_KEY=".modules.password.ssh_port" +TELNET_PORT_KEY=".modules.password.telnet_port" +USER_SPECIFIED_USERNAME_KEY=".device_info.default_username" +USER_SPECIFIED_PASSWORD_KEY=".device_info.default_password" + +# Default configuration values. 
+DICTIONARY_DIR="resources/default" +DEFAULT_PLACE_HOLDER="*** (optional) ***" +USER_SPECIFIED_DICTIONARY_DIR="resources/user_specified" +USER_SPECIFIED_DICTIONARY_FILE="resources/user_specified/dictionary.txt" +USER_SPECIFIED_USERNAME_FILE="resources/user_specified/usernames.txt" +USER_SPECIFIED_PASSWORD_FILE="resources/user_specified/passwords.txt" +HTTP_PORT=80 +HTTPS_PORT=443 +SSH_PORT=22 +TELNET_PORT=23 + +# Retrieves the value specified in specified key from the module config. +# $1 Module config file path +# $2 jq JSON key string +function get_module_config_value_from_key() { + cat $1 | jq $2 | tr -d '"' +} + +# Retrieve a modified version of the test description for a protocol. +# $1 Protocol name +function get_test_description() { + echo "Verify all device manufacturer default passwords are changed for protocol: $1, and new passwords are set." +} + +# Retrieve a modified version of the test name for a protocol. +# $1 Protocol name +function get_test_name() { + echo "security.admin.password.$1" +} + +# Removes whitespace and colon from user specified string to avoid format issues with the brute force tools. +# $1 String to clean +function clean_credentials() { + echo "$(echo $1 | sed -E 's/\s//' | tr -d ':')" +} + +# Create a new dictionary directory in resources for the user specified credentials. +# $1 Username +# $2 Password +function create_dictionary_for_user_specified_credentials() { + mkdir -p $USER_SPECIFIED_DICTIONARY_DIR + + username="$(clean_credentials "$1")" + password="$(clean_credentials "$2")" + + echo ":$username:$password" > $USER_SPECIFIED_DICTIONARY_FILE + echo "$username" > $USER_SPECIFIED_USERNAME_FILE + echo "$password" > $USER_SPECIFIED_PASSWORD_FILE +} + +# Main function + +echo "Password test starting on docker container: $TARGET_IP..." + +echo "Checking module_config.json for any default configurations to overwrite..." 
+NEW_DICTIONARY_DIR="$(get_module_config_value_from_key $MODULE_CONFIG $DICTIONARY_DIR_KEY)" +NEW_HTTP_PORT="$(get_module_config_value_from_key $MODULE_CONFIG $HTTP_PORT_KEY)" +NEW_HTTPS_PORT="$(get_module_config_value_from_key $MODULE_CONFIG $HTTPS_PORT_KEY)" +NEW_SSH_PORT="$(get_module_config_value_from_key $MODULE_CONFIG $SSH_PORT_KEY)" +NEW_TELNET_PORT="$(get_module_config_value_from_key $MODULE_CONFIG $TELNET_PORT_KEY)" +USER_SPECIFIED_USERNAME="$(get_module_config_value_from_key $MODULE_CONFIG $USER_SPECIFIED_USERNAME_KEY)" +USER_SPECIFIED_PASSWORD="$(get_module_config_value_from_key $MODULE_CONFIG $USER_SPECIFIED_PASSWORD_KEY)" + +echo "Overwriting default configurations with user specified values... (If any). User specified credentials takes precedence over user specified dictionaries." + +if [[ $USER_SPECIFIED_USERNAME != "null" && \ + $USER_SPECIFIED_USERNAME != $DEFAULT_PLACE_HOLDER && \ + $USER_SPECIFIED_PASSWORD != "null" && \ + $USER_SPECIFIED_PASSWORD != $DEFAULT_PLACE_HOLDER ]]; then + + create_dictionary_for_user_specified_credentials "$USER_SPECIFIED_USERNAME" "$USER_SPECIFIED_PASSWORD" + DICTIONARY_DIR=$USER_SPECIFIED_DICTIONARY_DIR + echo "User specified username/password pair found! -> $(clean_credentials "$USER_SPECIFIED_USERNAME") $(clean_credentials "$USER_SPECIFIED_PASSWORD")" + echo "Proceeding to use these credentials! Dictionary directory is now -> $DICTIONARY_DIR" + +elif [[ $NEW_DICTIONARY_DIR != "null" ]]; then + + DICTIONARY_DIR=$NEW_DICTIONARY_DIR + echo "User specified username/password pair not found!" + echo "Proceeding to use default/user specifed dictionary_dir -> $DICTIONARY_DIR" + +fi + +[ $NEW_HTTP_PORT != "null" ] && HTTP_PORT=$NEW_HTTP_PORT +[ $NEW_HTTPS_PORT != "null" ] && HTTPS_PORT=$NEW_HTTPS_PORT +[ $NEW_SSH_PORT != "null" ] && SSH_PORT=$NEW_SSH_PORT +[ $NEW_TELNET_PORT != "null" ] && TELNET_PORT=$NEW_TELNET_PORT + +echo "Print out configurations set..." 
+echo "DICTIONARY_DIR: $DICTIONARY_DIR" +echo "HTTP_PORT: $HTTP_PORT" +echo "HTTPS_PORT: $HTTPS_PORT" +echo "SSH_PORT: $SSH_PORT" +echo "TELNET_PORT: $TELNET_PORT" + +echo "Running password test for each protocol..." +mkdir -p $LOG_DIR +./run_password_test_for_protocol $TARGET_IP http $HTTP_PORT $DICTIONARY_DIR $RESULTS_DIR &> $HTTP_LOG & +./run_password_test_for_protocol $TARGET_IP https $HTTPS_PORT $DICTIONARY_DIR $RESULTS_DIR &> $HTTPS_LOG & +./run_password_test_for_protocol $TARGET_IP ssh $SSH_PORT $DICTIONARY_DIR $RESULTS_DIR &> $SSH_LOG & +./run_password_test_for_protocol $TARGET_IP telnet $TELNET_PORT $DICTIONARY_DIR $RESULTS_DIR &> $TELNET_LOG & +wait + +echo "Writing test results and logs to DAQ report..." +write_out_result $DAQ_REPORT "$(get_test_name "http")" "$(get_test_description "http")" "$(cat $HTTP_LOG)" "$(cat $HTTP_RESULT)" +write_out_result $DAQ_REPORT "$(get_test_name "https")" "$(get_test_description "https")" "$(cat $HTTPS_LOG)" "$(cat $HTTPS_RESULT)" +write_out_result $DAQ_REPORT "$(get_test_name "ssh")" "$(get_test_description "ssh")" "$(cat $SSH_LOG)" "$(cat $SSH_RESULT)" +write_out_result $DAQ_REPORT "$(get_test_name "telnet")" "$(get_test_description "telnet")" "$(cat $TELNET_LOG)" "$(cat $TELNET_RESULT)" + +echo "Done!" diff --git a/subset/security/readme.md b/subset/security/readme.md index a5f65cf277..fc4653e5e6 100644 --- a/subset/security/readme.md +++ b/subset/security/readme.md @@ -3,9 +3,9 @@ ## test_tls The TLS test attempts to verify various versions of TLS support. Separate connections will be attempted with different SSL context with the associated TLS support, 1.0, 1.2 and 1.3. - ### Testing procedure - After establishing connections to devices, the test will proceed to validate the available certificates with the following criteria: - 1. 
The certificat is in the x509 format
+### Testing procedure
+After establishing connections to devices, the test will proceed to validate the available certificates with the following criteria:
+ 1. The certificate is in the x509 format
 2. The public key length is at least 2048. Currently handles both RSA and DSA public key formats.
 3. The certificate is not expired and active for the current date.
 The cipher suite used is also checked but does not currently affect the outcome of the results. Currently the expected cipher suites are ECDH and ECSA. If these are not present, a warning message will be logged in the activate.log of the switch node.
@@ -43,3 +43,54 @@ The functional test code is included in the `tlstest/src/main/java` folder.
 - pass -> If the device responds to a connection with TLS 1.3 support and provides a valid certificate.
 - fail -> If the device responds to a connection with TLS 1.3 support and provides an invalid certificate.
 - skip -> If no connection to the device can be established.
+
+## test_password
+The password test runs a dictionary brute force on protocols HTTP, HTTPS, SSH and Telnet to check if the device has changed login credentials from defaults to a more secure combination.
+
+### Testing Procedure:
+1. Use Nmap tool to check if needed port is open, and whether the target host is down.
+2. If target port is open, and target host is not down, then start the brute force.
+3. Run the brute force command for ncrack/medusa as appropriate, and collect the output.
+4. Depending on the messages read on the command output, the test will return a specific result case.
+   - PASS: Test was able to run the brute force but not find the username/password(s).
+   - FAIL: Test was able to run the brute force and find the username/password(s).
+   - SKIP: Test was not able to run a brute force successfully due to a variety of issues. In this case:
+     - Target host is down.
+     - Target port is not open on the device.
+     - HTTP(S) server does not have an authentication method.
+ - Brute force tool related issues such as disconnect, missing parameters etc. + +### Available Configurations: +The password test can be run from DAQ without specifying any further configurations, but it is possible to tweak these to your needs by modifying the password field in your local copy of module_config.json to have the following, for example: + +Note the examples shown are the available configurations and the default values used by the password test. +``` +# local/module_config.json +{ + "device_info": { + # Both must be specified for these to be used. Should not be the default value/null/empty string. Spaces and colons are automatically removed. + "default_username": "*** (optional) ***", # If specified, only uses this to brute force. + "default_password": "*** (optional) ***" # If specified, only uses this to brute force. + }, + "modules": { + "password": { + "enabled": true, + "dictionary_dir": "resources/default", # Default are resources/default (full), or resources/faux (debug), user can also create their own custom version. + "http_port": 80, # Custom port to use when brute forcing HTTP + "https_port": 443, # Custom port to use when brute forcing HTTPS + "ssh_port": 22, # Custom port to use when brute forcing SSH + "telnet_port": 23 # Custom port to use when brute forcing Telnet + } + } +} +``` + +Ideally one should specify only either "default_username" and "default_password" OR "dictionary_dir" - If both are specified, the default username/passwords will take precedence and dictionary_dir will be used as a backup if they are considered invalid. 
+ +## test_ssh +The SSH test will check that if a device has an SSH server, this only supports SSHv2 + +### Conditions for seucrity.ssh.version +- pass -> If the device runs an SSH server which only supports SSHv2 +- fail -> If the device runs an SSH server which supports SSHv1 +- skip -> If the device does not run an SSH server diff --git a/subset/security/security_passwords/.classpath b/subset/security/security_passwords/.classpath deleted file mode 100755 index 540156aa7c..0000000000 --- a/subset/security/security_passwords/.classpath +++ /dev/null @@ -1,30 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/subset/security/security_passwords/.gitignore b/subset/security/security_passwords/.gitignore deleted file mode 100755 index 10e776c937..0000000000 --- a/subset/security/security_passwords/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -.idea/* -.settings/* -/bin/* -/reports/* diff --git a/subset/security/security_passwords/build.gradle b/subset/security/security_passwords/build.gradle deleted file mode 100755 index 942946cd45..0000000000 --- a/subset/security/security_passwords/build.gradle +++ /dev/null @@ -1,38 +0,0 @@ -buildscript { - repositories { - jcenter() - } - dependencies { - classpath "com.github.jengelman.gradle.plugins:shadow:5.2.0" - } -} -apply plugin: 'com.github.johnrengelman.shadow' -apply plugin: 'idea' -apply plugin: 'java-library' -apply plugin: 'java' -apply plugin: 'application' - -group 'com.redstone' -version '1.0-SNAPSHOT' - -sourceCompatibility = 1.8 - -apply plugin: 'application' -mainClassName = 'TestPassword' - -repositories { - mavenCentral() -} - -dependencies { - compile group: 'com.google.code.gson', name: 'gson', version: '2.7' - testCompile group: 'junit', name: 'junit', version: '4.13' -} - -jar { - manifest { - attributes( - 'TestPassword-Class': 'TestPassword' - ) - } -} \ No newline at end of file diff --git a/subset/security/security_passwords/gradle/wrapper/gradle-wrapper.properties 
b/subset/security/security_passwords/gradle/wrapper/gradle-wrapper.properties deleted file mode 100755 index b727a41d73..0000000000 --- a/subset/security/security_passwords/gradle/wrapper/gradle-wrapper.properties +++ /dev/null @@ -1,6 +0,0 @@ -#Fri Sep 13 15:17:11 BST 2019 -distributionBase=GRADLE_USER_HOME -distributionPath=wrapper/dists -zipStoreBase=GRADLE_USER_HOME -zipStorePath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-all.zip diff --git a/subset/security/security_passwords/settings.gradle b/subset/security/security_passwords/settings.gradle deleted file mode 100755 index d454a26df8..0000000000 --- a/subset/security/security_passwords/settings.gradle +++ /dev/null @@ -1,2 +0,0 @@ -rootProject.name = 'security_passwords' - diff --git a/subset/security/security_passwords/src/main/java/BruteForceTester.java b/subset/security/security_passwords/src/main/java/BruteForceTester.java deleted file mode 100755 index 5ded435909..0000000000 --- a/subset/security/security_passwords/src/main/java/BruteForceTester.java +++ /dev/null @@ -1,71 +0,0 @@ -/* Runs pentesting tools ncrack and hydra to crack the passwords of the device. 
*/ - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStreamReader; - -public class BruteForceTester { - - private static final String SSH = "ssh"; - private static final String TELNET = "telnet"; - private static final String HYDRA_SUCCESS_MESSAGE = "successfully completed"; - private static final String NCRACK_SUCCESS_MESSAGE = "Discovered credentials"; - private static final String NCRACK_COMMAND = "ncrack %s %s://%s:%s -U %s -P %s"; - private static final String HYDRA_COMMAND = "hydra -L %s -P %s %s %s -s %s"; - - private static String getCommand( - final String domain, - final String protocol, - final String host, - final String port, - final String usersFile, - final String passwordsFile - ) { - if (protocol.equals(SSH) || protocol.equals(TELNET)) { - return String.format(HYDRA_COMMAND, usersFile, passwordsFile, host, protocol, port); - } - else { - return String.format(NCRACK_COMMAND, domain, protocol, host, port, usersFile, passwordsFile); - } - } - - private static BufferedReader runCommandGetReader(final String commandToRun) throws IOException { - final Process process = Runtime.getRuntime().exec(commandToRun); - return new BufferedReader(new InputStreamReader(process.getInputStream())); - } - - private static boolean lineIndicatesCredentialsFound(final String protocol, final String line) { - if (protocol.equals(SSH) || protocol.equals(TELNET)) { - return line.contains(HYDRA_SUCCESS_MESSAGE); - } - else { - return line.contains(NCRACK_SUCCESS_MESSAGE); - } - } - - public static String start( - final String domain, - final String protocol, - final String host, - final String port, - final String usernamesFile, - final String passwordsFile - ) throws IOException { - final String command = getCommand(domain, protocol, host, port, usernamesFile, passwordsFile); - final BufferedReader outputReader = runCommandGetReader(command); - - ReportHandler.printMessage(command); - String result = ReportHandler.PASS; - String currentLine; 
- - while ((currentLine = outputReader.readLine()) != null) { - ReportHandler.printMessage(currentLine); - if (lineIndicatesCredentialsFound(protocol, currentLine)) { - result = ReportHandler.FAIL; - } - } - - return result; - } - -} diff --git a/subset/security/security_passwords/src/main/java/DefaultCredentials.java b/subset/security/security_passwords/src/main/java/DefaultCredentials.java deleted file mode 100755 index 05fc3dafd1..0000000000 --- a/subset/security/security_passwords/src/main/java/DefaultCredentials.java +++ /dev/null @@ -1,88 +0,0 @@ -/* Reads from defaultPasswords.json to retrieve default username and password data for a mac. */ - -import com.google.gson.Gson; -import com.google.gson.JsonArray; -import com.google.gson.JsonElement; -import com.google.gson.JsonObject; - -import java.io.*; - -public class DefaultCredentials { - - public static final String DEFAULT_PASSWORDS_FILE = "/tmp/%s_passwords.txt"; - public static final String DEFAULT_USERNAMES_FILE = "/tmp/%s_usernames.txt"; - - private static final String DEFAULT_PASSWORDS = "/defaultPasswords.json"; - private static final String JSON_USERNAME_ELEMENT = "Usernames"; - private static final String JSON_PASSWORD_ELEMENT = "Passwords"; - private static final String WRITE_ELEMENT_LINE = "%s\n"; - private static final String MAC_SEPARATOR = ":"; - private static final String EMPTY = ""; - private static final int MAC_ADDRESS_START = 0; - private static final int MAC_ADDRESS_END = 6; - - private static String getFormattedMacAddress(final String macAddress) { - final String simpleMacAddress = macAddress.replace(MAC_SEPARATOR, EMPTY); - final String macAddressOUI = simpleMacAddress.substring(MAC_ADDRESS_START, MAC_ADDRESS_END); - return macAddressOUI.toUpperCase(); - } - - private static JsonObject getJsonFileContentsAsObject() { - final Gson gsonController = new Gson(); - final InputStream jsonStream = DefaultCredentials.class.getResourceAsStream(DEFAULT_PASSWORDS); - return 
gsonController.fromJson(new InputStreamReader(jsonStream), JsonObject.class); - } - - private static void writeArrayToCredentialFiles( - final JsonArray array, - final String filePath - ) throws IOException { - final File file = new File(filePath); - final BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(file)); - - for (final JsonElement element : array) { - bufferedWriter.write(String.format(WRITE_ELEMENT_LINE, element.getAsString())); - } - - bufferedWriter.close(); - } - - public static String getUsernameFilePath(final String protocol) { - return String.format(DEFAULT_USERNAMES_FILE, protocol); - } - - public static String getPasswordFilePath(final String protocol) { - return String.format(DEFAULT_PASSWORDS_FILE, protocol); - } - - public static void writeUsernamesToFile( - final String macAddress, - final String protocol - ) throws IOException { - final String formattedMac = getFormattedMacAddress(macAddress); - final String formattedUsernameFile = getUsernameFilePath(protocol); - final JsonObject fileContents = getJsonFileContentsAsObject(); - final JsonObject credentialsForMac = fileContents.getAsJsonObject(formattedMac); - final JsonArray usernameJsonArray = credentialsForMac.getAsJsonArray(JSON_USERNAME_ELEMENT); - - writeArrayToCredentialFiles(usernameJsonArray, formattedUsernameFile); - } - - public static void writePasswordsToFile( - final String macAddress, - final String protocol - ) throws IOException { - final String formattedMac = getFormattedMacAddress(macAddress); - final String formattedPasswordFile = getPasswordFilePath(protocol); - final JsonObject fileContents = getJsonFileContentsAsObject(); - final JsonObject credentialsForMac = fileContents.getAsJsonObject(formattedMac); - final JsonArray passwordJsonArray = credentialsForMac.getAsJsonArray(JSON_PASSWORD_ELEMENT); - - writeArrayToCredentialFiles(passwordJsonArray, formattedPasswordFile); - } - - public static boolean credentialsFileHasMacAddress(final String macAddress) { 
- final String formattedMac = getFormattedMacAddress(macAddress); - return getJsonFileContentsAsObject().has(formattedMac); - } -} diff --git a/subset/security/security_passwords/src/main/java/PortChecker.java b/subset/security/security_passwords/src/main/java/PortChecker.java deleted file mode 100755 index ae95a0d4fe..0000000000 --- a/subset/security/security_passwords/src/main/java/PortChecker.java +++ /dev/null @@ -1,62 +0,0 @@ -/* Runs nmap to check if HTTP, HTTPS, Telnet and SSH ports are open. */ - -import java.io.BufferedReader; -import java.io.IOException; -import java.io.InputStreamReader; - -public class PortChecker { - - private static final String NMAP_COMMAND_STRING = "nmap %s"; - private static final String PORT_CHECK_STRING = "%s/tcp"; - private static final String OPEN_CHECK_STRING = "open"; - - private static String getCommand(final String host) { - return String.format(NMAP_COMMAND_STRING, host); - } - - private static BufferedReader runCommandGetReader(final String command) throws IOException { - final Process process = Runtime.getRuntime().exec(command); - return new BufferedReader(new InputStreamReader(process.getInputStream())); - } - - private static boolean checkIfDesiredPortOpen( - final BufferedReader bufferedReader, - final String port, - final String protocol - ) throws IOException { - boolean isPortOpen = false; - - String currentLine; - while ((currentLine = bufferedReader.readLine()) != null) { - ReportHandler.printMessage(currentLine); - if (currentLine.contains(String.format(PORT_CHECK_STRING, port)) && - currentLine.contains(protocol) && - currentLine.contains(OPEN_CHECK_STRING)) { - isPortOpen = true; - } - } - - return isPortOpen; - } - - private static void closeBufferedReader(final BufferedReader bufferedReader) throws IOException { - bufferedReader.close(); - } - - public static boolean checkDesiredPortOpen( - final String hostAddress, - final String port, - final String protocol - ) throws IOException { - final String command 
= getCommand(hostAddress); - final BufferedReader bufferedReader = runCommandGetReader(command); - final boolean desiredPortIsOpen = checkIfDesiredPortOpen(bufferedReader, port, protocol); - - ReportHandler.printMessage(command); - - closeBufferedReader(bufferedReader); - - return desiredPortIsOpen; - } - -} diff --git a/subset/security/security_passwords/src/main/java/ReportHandler.java b/subset/security/security_passwords/src/main/java/ReportHandler.java deleted file mode 100755 index 9d47a0dbf9..0000000000 --- a/subset/security/security_passwords/src/main/java/ReportHandler.java +++ /dev/null @@ -1,108 +0,0 @@ -/* ReportHandler writes test results for the current protocol, and also does console output. */ - -import java.io.BufferedWriter; -import java.io.File; -import java.io.FileWriter; -import java.io.IOException; - -public class ReportHandler { - - public final static String PASS = "pass"; - public final static String FAIL = "fail"; - public final static String SKIP_NOPORT = "skip_noport"; - public final static String SKIP_NOMAC = "skip_mac"; - public final static String SKIP_NOMAC_NOPORT = "skip_nomac_noport"; - - private final static String PRINT_MESSAGE_STRING = "%s"; - private final static String UNABLE_TO_WRITE_REPORT_MESSAGE = "Unable to write message."; - private final static String REPORT_FILE_PATH = "reports/%s_result.txt"; - - private final static String SKIP_MESSAGE_NOMAC = - "RESULT skip security.passwords.%s Could not lookup password info for mac-key: %s\n"; - private final static String SKIP_MESSAGE_NOPORT = - "RESULT skip security.passwords.%s Port %s is not open on target device.\n"; - private final static String SKIP_MESSAGE_NOMAC_NOPORT = - "RESULT skip security.passwords.%s Port %s is not open, %s not in password file.\n"; - private final static String FAIL_MESSAGE = - "RESULT fail security.passwords.%s Default passwords have not been changed.\n"; - private final static String PASS_MESSAGE = - "RESULT pass security.passwords.%s Default 
passwords have been changed.\n"; - private final static String NO_MESSAGE = - "RESULT Unable to get message."; - - private static String getReportFilePath(final String protocol) { - return String.format(REPORT_FILE_PATH, protocol); - } - - private static File setupReportFile(final String reportFilePath) { - final File reportFile = new File(reportFilePath); - reportFile.getParentFile().mkdirs(); - return reportFile; - } - - private static BufferedWriter getFileWriter(final File reportFile) throws IOException { - return new BufferedWriter(new FileWriter(reportFile)); - } - - private static String getReportMessage( - final String result, - final String protocol, - final String port, - final String mac - ) { - String reportMessage; - - switch (result) { - case PASS: { - reportMessage = String.format(PASS_MESSAGE, protocol); - break; - } - case FAIL: { - reportMessage = String.format(FAIL_MESSAGE, protocol); - break; - } - case SKIP_NOMAC: { - reportMessage = String.format(SKIP_MESSAGE_NOMAC, protocol, mac); - break; - } - case SKIP_NOPORT: { - reportMessage = String.format(SKIP_MESSAGE_NOPORT, protocol, port); - break; - } - case SKIP_NOMAC_NOPORT: { - reportMessage = String.format(SKIP_MESSAGE_NOMAC_NOPORT, protocol, port, mac); - break; - } - default: { - reportMessage = NO_MESSAGE; - } - } - - return reportMessage; - } - - public static void writeReportMessage( - final String result, - final String protocol, - final String port, - final String mac - ) { - final String reportFilePath = getReportFilePath(protocol); - final File reportFile = setupReportFile(reportFilePath); - final String reportMessage = getReportMessage(result, protocol, port, mac); - - try { - final BufferedWriter reportWriter = getFileWriter(reportFile); - reportWriter.write(reportMessage); - reportWriter.close(); - } catch (final IOException e) { - printMessage(UNABLE_TO_WRITE_REPORT_MESSAGE); - printMessage(e.getMessage()); - } - } - - public static void printMessage(final String message) { - 
System.out.println(String.format(PRINT_MESSAGE_STRING, message)); - } - -} diff --git a/subset/security/security_passwords/src/main/java/TestPassword.java b/subset/security/security_passwords/src/main/java/TestPassword.java deleted file mode 100755 index 332d1394b6..0000000000 --- a/subset/security/security_passwords/src/main/java/TestPassword.java +++ /dev/null @@ -1,76 +0,0 @@ -/* Main entry point for test. */ - -import java.io.IOException; - -public class TestPassword { - - private static final int REQUIRED_PARAMETERS = 5; - private static final String NMAP_MESSAGE = "Starting NMAP check..."; - private static final String BRUTE_FORCE_MESSAGE = "Starting brute force..."; - private static final String FINISH_MESSAGE = "Done."; - - private static final String HELP_STRING = - "Usage: target_ip protocol(http(s)/ssh/telnet) target_port target_mac domain"; - private static final String STARTUP_MESSAGE = - "[STARTING WITH IP:%s, MAC:%s, PROTOCOL: %s]"; - - private String host; - private String protocol; - private String port; - private String mac; - private String domain; - - public TestPassword(final String[] args) { - if (args.length != REQUIRED_PARAMETERS) { - throw new IllegalArgumentException(HELP_STRING); - } - else { - host = args[0]; - protocol = args[1]; - port = args[2]; - mac = args[3]; - domain = args[4]; - } - } - - public void runPasswordTest() { - try { - ReportHandler.printMessage(String.format(STARTUP_MESSAGE, host, mac, protocol)); - ReportHandler.printMessage(NMAP_MESSAGE); - final boolean desiredPortOpen = PortChecker.checkDesiredPortOpen(host, port, protocol); - final boolean macIsInCredentialsFile = DefaultCredentials.credentialsFileHasMacAddress(mac); - - if (macIsInCredentialsFile && desiredPortOpen) { - DefaultCredentials.writeUsernamesToFile(mac, protocol); - DefaultCredentials.writePasswordsToFile(mac, protocol); - - final String users = DefaultCredentials.getUsernameFilePath(protocol); - final String passwords = 
DefaultCredentials.getPasswordFilePath(protocol); - - ReportHandler.printMessage(BRUTE_FORCE_MESSAGE); - final String result = BruteForceTester.start(domain, protocol, host, port, users, passwords); - ReportHandler.writeReportMessage(result, protocol, port, mac); - } - else if (!macIsInCredentialsFile && !desiredPortOpen) { - ReportHandler.writeReportMessage(ReportHandler.SKIP_NOMAC_NOPORT, protocol, port, mac); - } - else if (!macIsInCredentialsFile) { - ReportHandler.writeReportMessage(ReportHandler.SKIP_NOMAC, protocol, port, mac); - } - else { - ReportHandler.writeReportMessage(ReportHandler.SKIP_NOPORT, protocol, port, mac); - } - - ReportHandler.printMessage(FINISH_MESSAGE); - } - catch (final IOException e) { - ReportHandler.printMessage(e.getMessage()); - } - } - - public static void main(final String[] args) { - final TestPassword testPassword = new TestPassword(args); - testPassword.runPasswordTest(); - } - -} diff --git a/subset/security/security_passwords/src/main/resources/defaultPasswords.json b/subset/security/security_passwords/src/main/resources/defaultPasswords.json deleted file mode 100755 index 957ca01e69..0000000000 --- a/subset/security/security_passwords/src/main/resources/defaultPasswords.json +++ /dev/null @@ -1,131 +0,0 @@ -{ - "Axis2":{ - "Usernames":["admin","root" - ], - "Passwords":["axis2","1988" - ] - }, - "DB2":{ - "Usernames":["admin","dasusr1","db2admin","db2fenc1","db2inst1"], - "Passwords":[ - "admin","dasusr1","db2admin","db2fenc1","db2inst1","db2pass","db2password","db2pw" - ] - }, - "FTP":{ - "Usernames":["admin","anonymous","ftp","ftp_admi","ftp_inst","ftp_nmc","ftp_oper", - "ftpuser","login","rapport","root","user","xbox" - ], - "Passwords":["1234","access","chrome@example.com","Exabyte","ftp","help1954","IEUser@", - "kilo1987","mozilla@example.com","pass","password","pbxk1064","r@p8p0r","tuxalize","xbox" - ] - }, - "HTTP":{ - "Usernames":["admin","apc","axis2","cisco","connect","manager","newuser","pass","private", - 
"root","security","sitecom","sys","system","tomcat","user","wampp","xampp","xampp-dav-unsecure" - ], - "Passwords":["1234","admin","apc","cisco","connect","default","letmein","manager","none","pass", - "password","ppmax2011","root","sanfran","security","sitecom","sys","system","tomcat", - "turnkey","user","wampp","xampp" - ] - }, - "MSSQL":{ - "Usernames":["Administrator","ARAdmin","entldbdbo","entldbreader","mon_user","probe", - "repl_publisher","repl_subscriber","sa","WinCCConnect" - ], - "Passwords":["2WSXcder","AR#Admin#","blank","dbopswd","pass","pass1","password", - "rdrpswd" - ] - }, - "MySQL":{ - "Usernames":["admin","mysql","root" - ], - "Passwords":["blank","pass","pass1","password","vicidia1now" - ] - }, - "PostgreSQL":{ - "Usernames":["admin","postgres","scott","tom"], - "Passwords":["admin","password","postgres","tiger" - ] - }, - "SMB":{ - "Usernames":["backuphelpdesk"], - "Passwords":["backuphpinvent" - ] - }, - "SNMP":{ - "Usernames":["" - ], - "Passwords":["0392a0","1234","2read","4changes","access.adm","Admin","admin", - "agent","agent_steal","all","allprivate","allpublic","ANYCOM","apc","bintec", - "blue","c","C0de","cable-d","canon_admin","cc","CISCO","cisco" - ] - }, - "SSH":{ - "Usernames":["admin","administrator","root" - ], - "Passwords":["1234","admin","changeme123","password","password1","password123","password123!", - "toor" - ] - }, - "Telnet":{ - "Usernames":["admin","administrator","Alphanetworks","cisco","helpdesk","pix","root" - ], - "Passwords":["100","admin","changeme123","cisco","password","password1","password123", - "password123!","sanfran","root","wrgg15_di524" - ] - }, - "VNC":{ - "Usernames":["admin","administrator","root" - ], - "Passwords":["100","1234","admin","changeme123","password","password1","password123","password123!","toor"] - }, - "WinRM":{ - "Usernames":["admin","administrator","root" - ], - "Passwords":["1234","admin","changeme123","password","password1","password123", - "password123!","toor" - ] - }, - 
"EC1127":{ - "Usernames":["cpt-dev","admin","administrator","root" - ], - "Passwords":["hellocpt","admin","root","Admin" - ] - }, - "38D135":{ - "Usernames":["cpt-dev","admin","administrator","root" - ], - "Passwords":["hellocpt,,admin,root,Admin" - ] - }, - "001AEB":{ - "Usernames":["admin","root","manager" - ], - "Passwords":["root","fire","friend" - ] - }, - "40BD32":{ - "Usernames":["root","Admin","admin" - ], - "Passwords":["root","Adminreds#1","admin123","admin" - ] - }, - "0242AC":{ - "Usernames":["root","admin" - ], - "Passwords":["root","admin","default" - ] - }, - "3C5AB4":{ - "Usernames":["root","admin" - ], - "Passwords":["root","admin","default" - ] - }, - "9A0257":{ - "Usernames":["root","admin" - ], - "Passwords":["root","admin","default" - ] - } -} diff --git a/subset/security/ssh_additions.config b/subset/security/ssh_additions.config deleted file mode 100644 index 7e8895f7c9..0000000000 --- a/subset/security/ssh_additions.config +++ /dev/null @@ -1,5 +0,0 @@ -Port 22 -ListenAddress 0.0.0.0 -PermitRootLogin yes -PasswordAuthentication yes -KexAlgorithms diffie-hellman-group1-sha1,diffie-hellman-group-exchange-sha1 diff --git a/subset/security/test_password b/subset/security/test_password deleted file mode 100755 index 3a5a3c6c0d..0000000000 --- a/subset/security/test_password +++ /dev/null @@ -1,59 +0,0 @@ -#!/bin/bash -e - -source reporting.sh - -REPORT=reports/report.txt -TEST_DESC="Verify all default passwords are updated and new Google provided passwords are set." 
- -make_log_files() { - touch /tmp/http_report.txt - touch /tmp/https_report.txt - touch /tmp/telnet_report.txt - touch /tmp/ssh_report.txt -} - -run_password_test_all_protocols () { - java -jar security_passwords/build/libs/security_passwords-1.0-SNAPSHOT-all.jar $1 http 80 $2 nginx-site > /tmp/http_report.txt & - - java -jar security_passwords/build/libs/security_passwords-1.0-SNAPSHOT-all.jar $1 https 443 $2 nginx-site > /tmp/https_report.txt & - - java -jar security_passwords/build/libs/security_passwords-1.0-SNAPSHOT-all.jar $1 telnet 23 $2 nginx-site > /tmp/telnet_report.txt & - - java -jar security_passwords/build/libs/security_passwords-1.0-SNAPSHOT-all.jar $1 ssh 22 $2 nginx-site > /tmp/ssh_report.txt & - - wait -} - -if [ -n "$TARGET_IP" ]; then - run_password_test_all_protocols $TARGET_IP $TARGET_MAC - - make_log_files - - write_out_result $REPORT \ - "security.passwords.http" \ - "$TEST_DESC" \ - "$(cat /tmp/http_report.txt)" \ - "$(cat reports/http_result.txt)" - - write_out_result $REPORT \ - "security.passwords.https" \ - "$TEST_DESC" \ - "$(cat /tmp/https_report.txt)" \ - "$(cat reports/https_result.txt)" - - write_out_result $REPORT \ - "security.passwords.telnet" \ - "$TEST_DESC" \ - "$(cat /tmp/telnet_report.txt)" \ - "$(cat reports/telnet_result.txt)" - - write_out_result $REPORT \ - "security.passwords.ssh" \ - "$TEST_DESC" \ - "$(cat /tmp/ssh_report.txt)" \ - "$(cat reports/ssh_result.txt)" - - cp -r $REPORT /tmp/report.txt -else - echo Problem with target IP, password test cannot continue. 
| tee /tmp/report.txt -fi diff --git a/subset/security/test_ssh b/subset/security/test_ssh new file mode 100755 index 0000000000..e1af63282c --- /dev/null +++ b/subset/security/test_ssh @@ -0,0 +1,44 @@ +#!/bin/bash +# +# Checks if a device only support SSHv2 +# Runs NMAP to check if SSH is available +# Uses the 'sshv_1' nmap script to check if the server supports SSHv1 + +source reporting.sh + +TEST_NAME="security.ssh.version" +TEST_DESCRIPTION="Check that device only support SSHv2" +REPORT=/tmp/report.txt +LOG=/tmp/nmap_log.txt + +nmap -sV -sC $TARGET_IP > $LOG + +nmap_log=$(cat $LOG ) + +sshv1=$(grep 'sshv1: Server supports SSHv1' $LOG) + +if [[ -z "${sshv1}" ]]; then + #No SSHv1, but is there an SSHv2 server running ? + sshv2=$(grep -P '^\d+\/tcp\s+open ssh.*protocol 2.0\)$' $LOG) + + if [[ -z "${sshv2}" ]]; then + test_outcome="skip" + test_summary="Device is not running an SSH server" + else + test_outcome="pass" + test_summary="Device only supports SSHv2" + fi + +else + test_outcome="fail" + test_summary="Device supports SSHv1" +fi + +result_and_summary="RESULT ${test_outcome} ${TEST_NAME} ${test_summary}" + +write_out_result $REPORT \ + "$TEST_NAME" \ + "$TEST_DESCRIPTION" \ + "$sshv2" \ + "$result_and_summary" + \ No newline at end of file diff --git a/subset/security/tlstest/gradle/wrapper/gradle-wrapper.properties b/subset/security/tlstest/gradle/wrapper/gradle-wrapper.properties index 622ab64a3c..12d38de6a4 100644 --- a/subset/security/tlstest/gradle/wrapper/gradle-wrapper.properties +++ b/subset/security/tlstest/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,5 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-6.6.1-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/subset/switches/Dockerfile.test_switch b/subset/switches/Dockerfile.test_switch index 
9acd5cb4d6..64cba8f29d 100644 --- a/subset/switches/Dockerfile.test_switch +++ b/subset/switches/Dockerfile.test_switch @@ -1,13 +1,13 @@ FROM daqf/aardvark:latest # Do this alone first so it can be re-used by other build files. -RUN $AG update && $AG install openjdk-8-jre - -RUN $AG update && $AG install openjdk-8-jdk git +RUN $AG update && $AG install openjdk-11-jdk git RUN $AG update && $AG install maven tcpdump COPY subset/switches/ switches/ +RUN mkdir -p switches/src/main/proto +COPY usi/src/main/proto/* switches/src/main/proto/ COPY subset/switches/module_manifest.json . RUN cd switches && mvn clean compile assembly:single diff --git a/subset/switches/module_manifest.json b/subset/switches/module_manifest.json index 25fe268a28..2794ab49e1 100644 --- a/subset/switches/module_manifest.json +++ b/subset/switches/module_manifest.json @@ -8,13 +8,8 @@ "connection.port_duplex" : { "description" : "Verify the device supports full duplex" }, - "poe.power" : { + "poe.switch.power" : { "description" : "Verify that the device draws less than the maximum power allocated by the port. 
This is 15.4W for 802.3af and 30W for 802.3at" - }, - "poe.negotiation" : { - "description" : "Verify the device autonegotiates power requirements" - }, - "poe.support" : { - "description" : "Verify if the device supports PoE" } } + diff --git a/subset/switches/pom.xml b/subset/switches/pom.xml index 7c90249145..93ac0b31ee 100644 --- a/subset/switches/pom.xml +++ b/subset/switches/pom.xml @@ -1,15 +1,28 @@ - 4.0.0 - com.redstone - switchtest - 0.0.1 -jar + 4.0.0 + com.redstone + switchtest + 0.0.1 + jar switchtest UTF-8 1.8 1.8 + + + + + io.grpc + grpc-bom + 1.31.0 + pom + import + + + + junit @@ -22,54 +35,112 @@ commons-net 3.6 + + io.grpc + grpc-netty-shaded + 1.31.0 + + + io.grpc + grpc-protobuf + 1.31.0 + + + io.grpc + grpc-stub + 1.31.0 + + + org.apache.tomcat + annotations-api + 6.0.53 + provided + + + org.junit.jupiter + junit-jupiter + 5.6.2 + compile + - - - - - maven-assembly-plugin - - - - switchtest.Main - - - - jar-with-dependencies - - - - - maven-clean-plugin - 3.1.0 - - - - maven-resources-plugin - 3.1.0 - - - maven-compiler-plugin - 3.8.1 - - - maven-surefire-plugin - 2.22.2 - - - maven-jar-plugin - 3.2.0 - - - maven-install-plugin - 2.5.2 - - - maven-deploy-plugin - 2.8.2 - - - + + + kr.motd.maven + os-maven-plugin + 1.6.2 + + + + + + org.xolstice.maven.plugins + protobuf-maven-plugin + 0.6.1 + + com.google.protobuf:protoc:3.12.0:exe:${os.detected.classifier} + grpc-java + io.grpc:protoc-gen-grpc-java:1.30.0:exe:${os.detected.classifier} + + ${basedir}/src/main/proto + + + + + + compile + compile-custom + + + + + + maven-assembly-plugin + + + + switchtest.Main + + + + jar-with-dependencies + + + + + maven-clean-plugin + 3.1.0 + + + + maven-resources-plugin + 3.1.0 + + + maven-surefire-plugin + 2.22.2 + + + maven-jar-plugin + 3.2.0 + + + maven-install-plugin + 2.5.2 + + + maven-deploy-plugin + 2.8.2 + + + org.apache.maven.plugins + maven-compiler-plugin + 3.8.1 + + 9 + 9 + + + \ No newline at end of file diff --git a/subset/switches/readme.md 
b/subset/switches/readme.md index 547afd0f45..af54891a89 100644 --- a/subset/switches/readme.md +++ b/subset/switches/readme.md @@ -62,12 +62,17 @@ Example of all necessary parameters in the system.conf related to physical switc # Define the model of the switch to use. This parameter is required. switch_setup.model=CISCO_9300 - #Define the username for the switch. This parameter is optional. + # Define the username for the switch. This parameter is optional. switch_setup.username=tester - #Define the password for the switch. This parameter is optional. + # Define the password for the switch. This parameter is optional. switch_setup.password=switch_p@55 + # If you're using a custom docker network bridge or hosting USI somewhere other than the DAQ machine, do the following: + # Define the usi url using your docker0's ip and port 5000, and re-run bin/setup_base if you're upgrading from versions before 1.9.0 + # Make sure docker's ip range doesn't conflict with that of the switch. Default docker ip range is 172.17.0.0/16. Default switch ip range is 192.168.0.0/16 + usi_setup.url=172.17.0.1:5000 + ## Conditions for connection.port_duplex - pass -> If the duplex mode is detected as full - fail -> If the duplex mode is detected but not full or if the duplex mode cannot be detected @@ -80,17 +85,8 @@ Example of all necessary parameters in the system.conf related to physical switc - pass -> If the speed of the port is auto-negotiated and determiend to be higher than 10 MBPS - fail ->If the speed of the port is determined to be <= 10MBPS -## Conditions for poe.negotiation - - pass -> If the PoE is able to be auto-negotiated with the device and PoE is enabled for the device. - - fail ->If the PoE fails to be auto-negotiated with the device and is enabled for the device. This can also fail if associated power data fails to resolve correctly during switch interrogation. 
- - skip -> If the PoE option is disabled in the device module_config.json or if the switch reports no PoE support. - ## Conditions for poe.power - pass -> If the a PoE device is connected and power has been detected and supplied to the device. - fail -> If the a PoE device is connected and *NO* power has been detected and supplied to the device. Failure also occurs if the switch reports either a faulty PoE state or is denying power to the device. This can also fail if associated power data fails to resolve correctly during switch interrogation. - skip -> If the PoE option is disabled in the device module_config.json or if the switch reports no PoE support. - ## Conditions for poe.support - - pass -> If the port the device is connected to supports PoE and it is enabled on that port. - - fail -> If the a PoE device is connected to does not support PoE or it is disabled on that port. - - skip -> If the PoE option is disabled in the device module_config.json or if the switch reports no PoE support. diff --git a/subset/switches/src/main/java/switchtest/Main.java b/subset/switches/src/main/java/switchtest/Main.java index f074805ad2..f53b24a6b1 100644 --- a/subset/switches/src/main/java/switchtest/Main.java +++ b/subset/switches/src/main/java/switchtest/Main.java @@ -1,50 +1,47 @@ package switchtest; -import switchtest.allied.AlliedTelesisX230; -import switchtest.cisco.Cisco9300; +import grpc.SwitchInfo; +import grpc.SwitchModel; +import io.grpc.ManagedChannel; +import io.grpc.ManagedChannelBuilder; public class Main { - public static void main(String[] args) throws Exception { + /** + * Switch test runner. 
+ * @param args args + */ + public static void main(String[] args) { - if (args.length < 4) { + if (args.length < 6) { throw new IllegalArgumentException( - "Expected ipAddress && port && supportPOE && switchModel as arguments"); + "args: usiUrl rpcTimeoutSec switchIpAddress port supportPOE switchModel" + + " [username] [password]"); } - String ipAddress = args[0]; - - int interfacePort = Integer.parseInt(args[1]); - - boolean supportsPOE = args[2].equals("true"); - - SupportedSwitchModelsEnum switchModel = null; - try { - switchModel = SupportedSwitchModelsEnum.valueOf(args[3]); - } catch (Exception e) { - System.out.println("Unknown Switch Model: " + args[3]); - throw e; + String usiUrl = args[0]; + int rpcTimeoutSec = Integer.parseInt(args[1]); + String ipAddress = args[2]; + + int interfacePort = Integer.parseInt(args[3]); + boolean supportsPoe = args[4].equals("true"); + SwitchModel switchModel = SwitchModel.valueOf(args[5]); + String username = ""; + String password = ""; + if (args.length > 6) { + username = args[6]; } - - String user = null; - if (args.length >= 5) { - user = args[4]; - } - String password = null; - if (args.length >= 6) { - password = args[5]; - } - - SwitchInterrogator switchInterrogator = null; - switch (switchModel) { - case CISCO_9300: - switchInterrogator = new Cisco9300(ipAddress, interfacePort, supportsPOE, user, password); - break; - case ALLIED_TELESIS_X230: - switchInterrogator = - new AlliedTelesisX230(ipAddress, interfacePort, supportsPOE, user, password); + if (args.length > 7) { + password = args[7]; } - Thread switchInterrogatorThread = new Thread(switchInterrogator); - switchInterrogatorThread.start(); + SwitchInfo switchInfo = SwitchInfo.newBuilder() + .setDevicePort(interfacePort) + .setIpAddr(ipAddress) + .setModel(switchModel) + .setUsername(username) + .setPassword(password).build(); + ManagedChannel channel = ManagedChannelBuilder.forTarget(usiUrl).usePlaintext().build(); + SwitchTest switchTest = new 
SwitchTest(channel, rpcTimeoutSec, supportsPoe, false); + switchTest.test(switchInfo); } } diff --git a/subset/switches/src/main/java/switchtest/SupportedSwitchModelsEnum.java b/subset/switches/src/main/java/switchtest/SupportedSwitchModelsEnum.java deleted file mode 100644 index fb074b2d8c..0000000000 --- a/subset/switches/src/main/java/switchtest/SupportedSwitchModelsEnum.java +++ /dev/null @@ -1,6 +0,0 @@ -package switchtest; - -public enum SupportedSwitchModelsEnum { - ALLIED_TELESIS_X230, - CISCO_9300 -} diff --git a/subset/switches/src/main/java/switchtest/SwitchInterrogator.java b/subset/switches/src/main/java/switchtest/SwitchInterrogator.java deleted file mode 100644 index 9114767afa..0000000000 --- a/subset/switches/src/main/java/switchtest/SwitchInterrogator.java +++ /dev/null @@ -1,172 +0,0 @@ -package switchtest; - -/* - * Licensed to the Google under one or more contributor license agreements. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import java.io.BufferedWriter; -import java.io.File; -import java.io.FileWriter; -import java.io.IOException; -import java.util.HashMap; - -public abstract class SwitchInterrogator implements Runnable { - - // Define Common Variables Required for All Switch Interrogators - protected SwitchTelnetClientSocket telnetClientSocket; - protected Thread telnetClientSocketThread; - - protected String remoteIpAddress; - protected int interfacePort = 12; - protected boolean deviceConfigPoeEnabled = false; - protected int remotePort = 23; - - protected boolean switchSupportsPoe = false; - private boolean userAuthorised = false; - private boolean userEnabled = false; - - private String device_hostname = ""; - protected String reportFilename = "tmp/report.txt"; - protected String login_report = ""; - - protected boolean debug = true; - - // TODO: enabled the user to input their own username and password - protected String username = "admin"; - protected String password = "password"; - - public SwitchInterrogator( - String remoteIpAddress, int interfacePort, boolean deviceConfigPoeEnabled) { - this.remoteIpAddress = remoteIpAddress; - this.interfacePort = interfacePort; - this.deviceConfigPoeEnabled = deviceConfigPoeEnabled; - // Load all the switch specific variables - this.command = commands(); - this.commandToggle = commandToggle(); - this.expected = expected(); - this.interface_expected = interfaceExpected(); - this.login_expected = loginExpected(); - this.platform_expected = platformExpected(); - this.power_expected = powerExpected(); - this.show_interface_expected = showInterfaceExpected(); - this.show_interface_port_expected = showInterfacePortExpected(); - this.show_platform_expected = showPlatformExpected(); - this.show_platform_port_expected = showPlatformPortExpected(); - this.show_power_expected = showPowerExpected(); - this.stack_expected = stackExpected(); - this.show_stack_expected = showStackExpected(); - } - - protected String[] command; - protected 
String[] commandToggle; - protected String[] expected; - protected String[] interface_expected; - public String[] login_expected; - protected String[] platform_expected; - protected String[] power_expected; - protected String[] show_platform_expected; - protected String[] show_power_expected; - protected String[] show_interface_expected; - protected String[] show_interface_port_expected; - protected String[] show_platform_port_expected; - protected String[] stack_expected; - protected String[] show_stack_expected; - - protected abstract String[] commands(); - - protected abstract String[] commandToggle(); - - protected abstract String[] expected(); - - protected abstract String[] interfaceExpected(); - - protected abstract String[] loginExpected(); - - protected abstract String[] platformExpected(); - - protected abstract String[] powerExpected(); - - protected abstract String[] showInterfaceExpected(); - - protected abstract String[] showInterfacePortExpected(); - - protected abstract String[] showPlatformExpected(); - - protected abstract String[] showPlatformPortExpected(); - - protected abstract String[] showPowerExpected(); - - protected abstract String[] stackExpected(); - - protected abstract String[] showStackExpected(); - - protected HashMap interface_map = new HashMap(); - - protected HashMap platform_map = new HashMap(); - - protected HashMap power_map = new HashMap(); - - protected HashMap stack_map = new HashMap(); - - public void setHostname(String device_hostname) { - this.device_hostname = device_hostname; - } - - public String getHostname() { - return device_hostname; - } - - public boolean getUserAuthorised() { - return userAuthorised; - } - - public void setUserAuthorised(boolean userAuthorised) { - this.userAuthorised = userAuthorised; - } - - public boolean getUserEnabled() { - return userEnabled; - } - - public void setUserEnabled(boolean userEnabled) { - this.userEnabled = userEnabled; - } - - public abstract void receiveData(String data); - 
- @Override - public void run() { - telnetClientSocketThread = new Thread(telnetClientSocket); - telnetClientSocketThread.start(); - } - - protected void writeReport() { - try { - if (debug) { - System.out.println("login_report:" + login_report); - } - - String[] directory = reportFilename.split("/"); - - File dir = new File(directory[directory.length - 2]); - if (!dir.exists()) dir.mkdirs(); - - BufferedWriter writer = new BufferedWriter(new FileWriter(reportFilename)); - writer.write(login_report); - writer.close(); - } catch (IOException e) { - System.err.println("Exception writeReport:" + e.getMessage()); - } - } -} diff --git a/subset/switches/src/main/java/switchtest/SwitchTest.java b/subset/switches/src/main/java/switchtest/SwitchTest.java new file mode 100644 index 0000000000..991027026f --- /dev/null +++ b/subset/switches/src/main/java/switchtest/SwitchTest.java @@ -0,0 +1,155 @@ +package switchtest; + +import grpc.InterfaceResponse; +import grpc.LinkStatus; +import grpc.POENegotiation; +import grpc.POEStatus; +import grpc.POESupport; +import grpc.PowerResponse; +import grpc.SwitchInfo; +import grpc.USIServiceGrpc; +import io.grpc.Channel; +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.TimeUnit; + +public class SwitchTest { + + enum Result { + PASS, + FAIL, + SKIP + } + + private final USIServiceGrpc.USIServiceBlockingStub blockingStub; + protected String reportFilename = "tmp/report.txt"; + protected boolean debug; + protected boolean deviceConfigPoeEnabled; + protected int rpcTimeoutSec; + protected List results = new ArrayList<>(); + /** + * Generic switch test. 
+ * @param channel GRPC channel + * @param rpcTimeoutSec Timeout in seconds for rpc calls + * @param deviceConfigPoeEnabled poe config from module_config + * @param debug print debug output + */ + + public SwitchTest(Channel channel, int rpcTimeoutSec, boolean deviceConfigPoeEnabled, + boolean debug) { + this.debug = debug; + blockingStub = USIServiceGrpc.newBlockingStub(channel); + this.deviceConfigPoeEnabled = deviceConfigPoeEnabled; + this.rpcTimeoutSec = rpcTimeoutSec; + } + + protected void captureResult(String test, Result result, String additional) { + results.add("RESULT " + result.name().toLowerCase() + " " + test + " " + additional); + } + + protected void testLink(InterfaceResponse interfaceResponse) { + final String testName = "connection.port_link"; + if (interfaceResponse.getLinkStatus() == LinkStatus.State.UP) { + captureResult(testName, Result.PASS, "Link is up"); + } else { + captureResult(testName, Result.FAIL, "Link is down"); + } + } + + protected void testSpeed(InterfaceResponse interfaceResponse) { + final String testName = "connection.port_speed"; + int linkSpeed = interfaceResponse.getLinkSpeed(); + if (linkSpeed > 0) { + if (linkSpeed >= 10) { + captureResult(testName, Result.PASS, + "Speed auto-negotiated successfully. Speed is greater than 10 MBPS"); + } else { + captureResult(testName, Result.FAIL, + "Speed is too slow. 
Speed is less than or equal to 10 mbps"); + } + } else { + captureResult(testName, Result.FAIL, "Cannot detect current speed"); + } + } + + protected void testDuplex(InterfaceResponse interfaceResponse) { + final String testName = "connection.port_duplex"; + String duplex = interfaceResponse.getDuplex(); + if (duplex != null) { + if (duplex.equals("full")) { + captureResult(testName, Result.PASS, "Full duplex mode detected"); + } else { + captureResult(testName, Result.FAIL, "Incorrect duplex mode set"); + } + } else { + captureResult(testName, Result.FAIL, " Cannot detect duplex mode"); + } + } + + protected void testPower(PowerResponse powerResponse) { + if (!deviceConfigPoeEnabled) { + captureResult("poe.switch.power", Result.SKIP, "This test is disabled"); + return; + } + + POEStatus.State poeStatus = powerResponse.getPoeStatus(); + // Determine PoE power test result + if (poeStatus == POEStatus.State.ON) { + if (powerResponse.getMaxPowerConsumption() >= powerResponse.getCurrentPowerConsumption()) { + captureResult("poe.switch.power", Result.PASS, "PoE is applied to device"); + } else { + captureResult("poe.switch.power", Result.FAIL, "device wattage exceeds the max wattage"); + } + } else if (poeStatus == POEStatus.State.OFF) { + captureResult("poe.switch.power", Result.FAIL, "No PoE is applied"); + } else if (poeStatus == POEStatus.State.FAULT) { + captureResult("poe.switch.power", Result.FAIL, + "Device detection or a powered device is in a faulty state"); + } else { + captureResult("poe.switch.power", Result.FAIL, "A powered device is detected, " + + "but no PoE is available, or the maximum wattage exceeds the " + + "detected powered-device maximum."); + } + } + + protected void writeReport() { + try { + String report = String.join("\n", results); + if (debug) { + System.out.println("report:" + report); + } + + String[] directory = reportFilename.split("/"); + + File dir = new File(directory[directory.length - 2]); + dir.mkdirs(); + + BufferedWriter writer 
= new BufferedWriter(new FileWriter(reportFilename)); + writer.write(report); + writer.close(); + } catch (IOException e) { + System.err.println("Exception when writing report:" + e.getMessage()); + } + } + + /** + * Run a switch test with the specified switch info. + * @param switchInfo SwitchInfo from the USI proto file + */ + public void test(SwitchInfo switchInfo) { + final PowerResponse powerResponse = blockingStub + .withDeadlineAfter(rpcTimeoutSec, TimeUnit.SECONDS).getPower(switchInfo); + final InterfaceResponse interfaceResponse = blockingStub + .withDeadlineAfter(rpcTimeoutSec, TimeUnit.SECONDS).getInterface(switchInfo); + testLink(interfaceResponse); + testSpeed(interfaceResponse); + testDuplex(interfaceResponse); + testPower(powerResponse); + writeReport(); + } +} + diff --git a/subset/switches/src/main/java/switchtest/allied/AlliedSwitchTelnetClientSocket.java b/subset/switches/src/main/java/switchtest/allied/AlliedSwitchTelnetClientSocket.java deleted file mode 100644 index 79e3cfa609..0000000000 --- a/subset/switches/src/main/java/switchtest/allied/AlliedSwitchTelnetClientSocket.java +++ /dev/null @@ -1,189 +0,0 @@ -package switchtest.allied; - -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import switchtest.SwitchInterrogator; -import switchtest.SwitchTelnetClientSocket; - -public class AlliedSwitchTelnetClientSocket extends SwitchTelnetClientSocket { - public AlliedSwitchTelnetClientSocket( - String remoteIpAddress, int remotePort, SwitchInterrogator interrogator, boolean debug) { - super(remoteIpAddress, remotePort, interrogator, debug); - } - - protected void gatherData() { - StringBuilder rxData = new StringBuilder(); - String rxGathered = ""; - - boolean parseFlag = false; - - int count = 0; - int flush = 0; - int rxQueueCount = 0; - int rxTempCount = 0; - int expectedLength = 1000; - int requestFlag = 0; - - while (telnetClient.isConnected()) { - try { - - if (rxQueue.isEmpty()) { - Thread.sleep(100); - rxQueueCount++; - if (debug) { - System.out.println("rxQueue.isEmpty:" + rxQueueCount); - System.out.println("expectedLength:" + expectedLength); - System.out.println("requestFlag:" + requestFlag); - } - if (rxQueueCount > 70) { - rxQueueCount = 0; - writeData("\n"); - } - } else { - rxQueueCount = 0; - String rxTemp = rxQueue.poll(); - if (rxTemp.equals("")) { - Thread.sleep(100); - rxTempCount++; - if (debug) { - System.out.println("rxTemp.equals:" + rxTempCount); - } - } else if (rxTemp.indexOf("--More--") > 0) { - Thread.sleep(20); - writeData("\n"); - - if (debug) { - System.out.println("more position:" + rxTemp.indexOf("--More--")); - System.out.println("rxTemp.length" + rxTemp.length() + "rxTemp pre:" + rxTemp); - // Useful for debugging - // char[] tempChar = rxTemp.toCharArray(); - // for(char temp:tempChar) { - // System.out.println("tempChar:"+(byte)temp); - // } - } - - rxTemp = rxTemp.substring(0, rxTemp.length() - 9); - - if (debug) { - System.out.println("rxTemp.length" + rxTemp.length() + "rxTemp post:" + rxTemp); - } - - rxData.append(rxTemp); - } else { - rxQueueCount = 0; - rxTempCount = 0; - rxData.append(rxTemp); - rxGathered = rxData.toString(); - System.out.println( - java.time.LocalTime.now() - + "rxDataLen:" - + 
rxGathered.length() - + "rxData:" - + rxGathered); - - int position = -1; - - int charLength = 1; - int beginPosition = 0; - System.out.println("count is:" + count); - - String[] loginExpected = interrogator.login_expected; - int[] loginExpectedLength = {5, 5, 40}; - - String hostname = interrogator.getHostname(); - requestFlag = ((AlliedTelesisX230) interrogator).getRequestFlag() - 1; - - boolean[] requestFlagIndexOf = {false, false, false, true, false}; - String[] requestFlagExpected = {hostname, hostname, hostname, "end", hostname}; - int[] requestFlagExpectedLength = {600, 600, 50, 1000, 290}; - int[] requestFlagCharLength = { - hostname.length() + 1, hostname.length() + 1, hostname.length() + 1, 3, -1 - }; - int[] requestFlagFlush = {0, 0, 0, 15, 0}; - - // login & enable process - if (count < 3) { - position = rxGathered.indexOf(loginExpected[count]); - if (position >= 0) { - expectedLength = loginExpectedLength[count]; - if (count == 2) { - interrogator.setUserAuthorised(true); - } - count++; - } - } else if ((count % 2) != 0) { - position = rxGathered.indexOf(interrogator.getHostname()); - if (position >= 0) { - interrogator.setUserEnabled(true); - expectedLength = 2; - charLength = interrogator.getHostname().length() + 1; - flush = 0; - beginPosition = 0; - count++; - } - } else { - position = - findPosition( - rxGathered, - requestFlagExpected[requestFlag], - requestFlagIndexOf[requestFlag]); - if (position >= 0) { - expectedLength = requestFlagExpectedLength[requestFlag]; - charLength = requestFlagCharLength[requestFlag]; - flush = requestFlagFlush[requestFlag]; - if (rxGathered.length() >= expectedLength) { - beginPosition = 4; - count++; - } - } - } - System.out.println("Position: " + position); - System.out.println("RxGathered: " + rxGathered.length()); - System.out.println("Expected Length: " + expectedLength); - System.out.println("rxGathered: " + rxGathered); - - if (position >= 0 && rxGathered.length() >= expectedLength) { - rxGathered = 
rxGathered.substring(beginPosition, position + charLength); - System.out.println( - java.time.LocalTime.now() - + "rxGatheredLen:" - + rxGathered.length() - + "rxGathered:" - + rxGathered); - rxData.delete(0, position + charLength + flush); - - interrogator.receiveData(rxGathered); - } - } - } - } catch (InterruptedException e) { - System.err.println("InterruptedException gatherData:" + e.getMessage()); - } - } - } - - protected int findPosition(String rxGathered, String value, boolean indexOf) { - int position = -1; - if (indexOf) { - position = rxGathered.indexOf(value); - } else { - position = rxGathered.lastIndexOf(value); - } - return position; - } -} diff --git a/subset/switches/src/main/java/switchtest/allied/AlliedTelesisX230.java b/subset/switches/src/main/java/switchtest/allied/AlliedTelesisX230.java deleted file mode 100644 index 306eb8ba64..0000000000 --- a/subset/switches/src/main/java/switchtest/allied/AlliedTelesisX230.java +++ /dev/null @@ -1,676 +0,0 @@ -package switchtest.allied; - -/* - * Licensed to the Google under one or more contributor license agreements. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import switchtest.SwitchInterrogator; - -import java.util.HashMap; - -public class AlliedTelesisX230 extends SwitchInterrogator { - - protected int interfacePos = 1; - protected int platformPos = 2; - protected int powerinlinePos = 3; - - protected int shortPacketLength = 20; - protected int requestFlag = 0; - protected int number_switch_ports = 1; // 48 - protected boolean extendedTests = false; - - protected int[] show_platform_pointers; - protected String[] show_platform_data; - protected int[] show_platform_port_pointers; - protected int[] show_interface_pointers; - protected String[] show_interface_data; - protected int[] show_interface_port_pointers; - protected int[] show_power_pointers; - protected String[] show_power_data; - protected int[] show_stack_pointers; - protected String[] show_stack_data; - - public AlliedTelesisX230( - String remoteIpAddress, - int interfacePort, - boolean deviceConfigPoeEnabled, - String user, - String password) { - super(remoteIpAddress, interfacePort, deviceConfigPoeEnabled); - telnetClientSocket = - new AlliedSwitchTelnetClientSocket(remoteIpAddress, remotePort, this, debug); - this.username = user == null ? "manager" : user; - this.password = password == null ? 
"friend" : password; - // Adjust commands to active switch configuration - command[interfacePos] = command[interfacePos] + interfacePort; - command[platformPos] = command[platformPos] + interfacePort; - command[powerinlinePos] = command[powerinlinePos] + interfacePort; - - // Initialize data arrays based on switch specific command sets - show_interface_pointers = new int[show_interface_expected.length]; - show_interface_data = new String[show_interface_expected.length / 2]; - show_interface_port_pointers = new int[show_interface_port_expected.length]; - - show_platform_pointers = new int[show_platform_expected.length]; - show_platform_data = new String[show_platform_expected.length / 2]; - show_platform_port_pointers = new int[show_platform_port_expected.length]; - - show_power_pointers = new int[show_power_expected.length]; - show_power_data = new String[show_power_expected.length - 1]; - - show_stack_pointers = new int[show_stack_expected.length]; - show_stack_data = new String[show_stack_expected.length - 1]; - } - - public int getRequestFlag() { - return requestFlag - 1; - } - - public void receiveData(String data) { - if (debug) { - System.out.println( - java.time.LocalTime.now() + "receiveDataLen:" + data.length() + "receiveData:" + data); - } - if (data != null) { - Thread parseThread = new Thread(() -> parseData(data)); - parseThread.start(); - } - } - - private String cleanShowRunData(String data) { - data = data.replace("\n\n\n\n", "\n"); - data = data.replace("\n\n\n", "\n"); - data = data.replace("\n\n", "\n"); - data = data.replace("end", ""); - return data; - } - - private String poeNormalizeData(String data) { - data = trashLagLines(data, data.indexOf("Interface"), 0); - data = trashLagLines(data, data.indexOf("port"), 1); - return data; - } - - protected void parseData(String data) { - try { - if (data.length() > 0) { - - if (!getUserAuthorised()) { - data = data.substring(0, data.indexOf(":") + 1); - System.out.println("decoded_data:" + data); - - 
// login procedure - if (data.indexOf(expected[0]) >= 0) { - // username request - String[] data_array = data.split(" "); - setHostname(data_array[0]); - telnetClientSocket.writeData(username + "\n"); - } else if (data.indexOf(expected[1]) >= 0) { - // password request - telnetClientSocket.writeData(password + "\n"); - } - } else { - if (!getUserEnabled()) { - data = data.substring(0, data.indexOf(":") + 1); - if (data.indexOf(expected[2]) >= 0) { - // login success - telnetClientSocket.writeData(command[requestFlag] + "\n"); - requestFlag = 1; - } - } else { - // running configuration requests - if (data.indexOf(getHostname()) >= 0 && data.length() < shortPacketLength) { - int requestFinish = powerinlinePos; - if (extendedTests) { - requestFinish = command.length; - } - if (requestFlag <= requestFinish) { - telnetClientSocket.writeData(command[requestFlag] + "\n"); - System.out.println( - "command:" + command[requestFlag] + " request_flag:" + requestFlag); - requestFlag += 1; - } else { - System.out.println("finished running configuration requests"); - validateTests(); - telnetClientSocket.disposeConnection(); - } - } else { - parseRequestFlag(data, requestFlag); - } - } - } - } - } catch (Exception e) { - System.err.println("Exception parseData:" + e.getMessage()); - } - } - - private void parseRequestFlag(String data, int requestFlag) { - try { - switch (requestFlag) { - case 2: - // parse show interface - login_report += "show interface:\n"; - parse_packet(data, show_interface_port_expected, show_interface_port_pointers); - interface_map = dataToMap(interface_expected, show_interface_data); - writeReport(); - telnetClientSocket.writeData("\n"); - break; - case 3: - // parse show platform - login_report += "\nshow platform:\n"; - parse_packet(data, show_platform_port_expected, show_platform_port_pointers); - platform_map = dataToMap(platform_expected, show_platform_data); - writeReport(); - telnetClientSocket.writeData("\n"); - break; - case 4: - // parse show 
power-inline - login_report += "\nshow power-inline:\n"; - if (data.contains("Power-inline is disabled")) { - login_report += "Power-inline is disabled\n"; - } else { - switchSupportsPoe = true; - data = poeNormalizeData(data); - parse_inline(data, show_power_expected, show_power_pointers, show_power_data); - power_map = dataToMap(power_expected, show_power_data); - } - writeReport(); - telnetClientSocket.writeData("\n"); - break; - case 5: - // parse show run - data = cleanShowRunData(data); - login_report += "\n" + data; - writeReport(); - telnetClientSocket.writeData("\n"); - break; - case 6: - // parse show stack - login_report += "\nshow stack:\n"; - parse_inline(data, show_stack_expected, show_stack_pointers, show_stack_data); - stack_map = dataToMap(stack_expected, show_stack_data); - writeReport(); - telnetClientSocket.writeData("\n"); - break; - } - } catch (Exception e) { - System.err.println("Exception parseRequestFlag:" + e.getMessage()); - System.exit(1); - } - } - - private void parse_packet(String raw_data, String[] show_expected, int[] show_pointers) { - try { - int start = 0; - for (int port = 0; port < number_switch_ports; port++) { - for (int x = 0; x < show_expected.length; x++) { - start = recursive_data(raw_data, show_expected[x], start); - show_pointers[x] = start; - } - - int chunk_s = show_pointers[0]; - int chunk_e = show_pointers[3]; - - if (chunk_e == -1) chunk_e = raw_data.length(); - - String temp_data = raw_data.substring(chunk_s, chunk_e); - - if (debug) System.out.println("length" + temp_data.length() + "temp_data:" + temp_data); - - if (requestFlag == (interfacePos + 1)) { - parse_single( - temp_data, show_interface_expected, show_interface_pointers, show_interface_data); - } else if (requestFlag == (platformPos + 1)) { - parse_single( - temp_data, show_platform_expected, show_platform_pointers, show_platform_data); - } - } - } catch (Exception e) { - System.err.println("Exception parse_packet:" + e.getMessage()); - System.exit(1); 
- } - } - - private void parse_single( - String raw_data, String[] expected_array, int[] pointers_array, String[] data_array) { - try { - int start = 0; - for (int x = 0; x < expected_array.length; x++) { - start = recursive_data(raw_data, expected_array[x], start); - if (start == -1) { - start = pointers_array[x - 1]; - pointers_array[x] = -1; - } else { - pointers_array[x] = start; - } - } - - for (int x = 0; x < data_array.length; x++) { - int chunk_start = x * 2; - int chunk_end = 1 + (x * 2); - - int chunk_s = pointers_array[chunk_start]; - int chunk_e = pointers_array[chunk_end]; - - if (chunk_s > 0) { - if (chunk_e == -1) chunk_e = raw_data.length(); - - data_array[x] = raw_data.substring(chunk_s, chunk_e); - - data_array[x] = data_array[x].substring(expected_array[chunk_start].length()); - - String extracted_data = expected_array[chunk_start] + data_array[x]; - - if (debug) System.out.println(extracted_data); - - login_report += extracted_data + "\n"; - } - } - } catch (Exception e) { - System.err.println("Exception parse_single:" + e.getMessage()); - System.exit(1); - } - } - - protected void parse_inline( - String raw_data, String[] expected, int[] pointers, String[] data_array) { - try { - int start = 0; - - for (int x = 0; x < expected.length; x++) { - start = recursive_data(raw_data, expected[x], start); - pointers[x] = start; - if (x > 0) { - pointers[x - 1] = pointers[x] - pointers[x - 1]; - } - } - int chunk_start = pointers[pointers.length - 1]; - int chunk_end = pointers[pointers.length - 1]; - - for (int x = 0; x < data_array.length; x++) { - chunk_start = chunk_end; - - if (x > 0) { - chunk_start += 1; - } - - chunk_end += pointers[x]; - - if (x != data_array.length - 1) { - data_array[x] = - raw_data.substring(chunk_start, chunk_end).replace("\n", "").replace(" ", ""); - } else { - chunk_end = recursive_data(raw_data, "\n", chunk_start); - data_array[x] = raw_data.substring(chunk_start, chunk_end).replace("\n", ""); - } - login_report += 
expected[x] + ":" + data_array[x] + "\n"; - } - } catch (Exception e) { - System.err.println("Exception parse_inline:" + e.getMessage()); - System.exit(1); - } - } - - private int recursive_data(String data, String search, int start) { - int pointer = data.indexOf(search, start); - if (debug) { - System.out.println("pointer:" + pointer); - } - return pointer; - } - - protected HashMap dataToMap(String[] expected_key, String[] data_array) { - System.out.println("Data To mape"); - HashMap hashMap = new HashMap(); - for (int i = 0; i < expected_key.length; i++) { - System.out.println("expectedKey: " + expected_key[i]); - hashMap.put(expected_key[i], data_array[i]); - } - return hashMap; - } - - protected String trashLagLines(String data, int index, int lineIndex) { - byte[] dataBytes = data.getBytes(); - int counter = 0; - for (int i = 0; i < index; i++) { - if (dataBytes[i] == '\n') { - counter++; - } - } - if (counter > 0) { - counter -= lineIndex; - } - for (int i = counter; i > 0; i--) { - data = trash_line(data, lineIndex); - } - return data; - } - - private String trash_line(String data, int trash_line) { - String[] lineArray = data.split("\n"); - String tempData = ""; - for (int i = 0; i < lineArray.length; i++) { - if (i != trash_line) { - tempData += lineArray[i] + "\n"; - } - } - return tempData; - } - - private void validateTests() { - try { - login_report += "\n"; - - if (interface_map.get("link_status").equals("UP")) { - login_report += "RESULT pass connection.port_link Link is up\n"; - } else { - login_report += "RESULT fail connection.port_link Link is down\n"; - } - - if (interface_map.get("current_speed") != null) { - if (interface_map.get("configured_speed").equals("auto") - && Integer.parseInt(interface_map.get("current_speed")) >= 10) { - login_report += - "RESULT pass connection.port_speed Speed auto-negotiated successfully. 
Speed is greater than 10 MBPS\n"; - } else { - login_report += "RESULT fail connection.port_speed Speed is too slow\n"; - } - } else { - login_report += "RESULT fail connection.port_speed Cannot detect current speed\n"; - } - - if (interface_map.get("current_duplex") != null) { - if (interface_map.get("configured_duplex").equals("auto") - && interface_map.get("current_duplex").equals("full")) { - login_report += "RESULT pass connection.port_duplex Full duplex mode detected\n"; - } else { - login_report += "RESULT fail connection.port_duplex Incorrect duplex mode set\n"; - } - } else { - login_report += "RESULT fail connection.port_duplex Cannot detect duplex mode\n"; - } - - if (switchSupportsPoe && deviceConfigPoeEnabled) { - String current_max_power = power_map.get("max").replaceAll("\\D+", ""); - String current_power = power_map.get("power").replaceAll("\\D+", ""); - String current_PoE_admin = power_map.get("admin"); - String current_oper = power_map.get("oper"); - - System.out.println( - "current_max_power:" - + current_max_power - + "current_power:" - + current_power - + "current_PoE_admin:" - + current_PoE_admin - + "current_oper:" - + current_oper); - - if (current_max_power.length() > 0 - && current_power.length() > 0 - && current_PoE_admin.length() > 0 - && current_oper.length() > 0) { - if (Integer.parseInt(current_max_power) > Integer.parseInt(current_power) - && !current_oper.equals("Fault")) { - login_report += "RESULT pass poe.power PoE is applied to device\n"; - } else { - login_report += - "RESULT fail poe.power The DUT is drawing too much current or there is a fault on the line\n"; - } - if (current_PoE_admin.equals("Enabled")) { - login_report += "RESULT pass poe.negotiation PoE auto-negotiated successfully\n"; - } else { - login_report += "RESULT fail poe.negotiation Incorrect privilege for negotiation\n"; - } - if (!current_oper.equals("Off")) { - login_report += "RESULT pass poe.support PoE supported and enabled\n"; - } else { - login_report += 
- "RESULT fail poe.support The AT switch does not support PoE or it is disabled\n"; - } - } else { - login_report += "RESULT fail poe.power Could not detect any current being drawn\n"; - login_report += "RESULT fail poe.negotiation Could not detect any current being drawn\n"; - login_report += "RESULT fail poe.support Could not detect any current being drawn\n"; - } - } else { - login_report += - "RESULT skip poe.power The AT switch does not support PoE or this test is disabled\n"; - login_report += - "RESULT skip poe.negotiation The AT switch does not support PoE or this test is disabled\n"; - login_report += - "RESULT skip poe.support The AT switch does not support PoE or this test is disabled\n"; - } - - writeReport(); - } catch (Exception e) { - System.err.println("Exception validateTests:" + e.getMessage()); - e.printStackTrace(); - } - } - - public String[] commands() { - return new String[] { - "enable", - "show interface port1.0.", - "show platform port port1.0.", - "show power-inline interface port1.0.", - "show run", - "show stack" - }; - } - - public String[] commandToggle() { - return new String[] {"interface ethernet port1.0.", "shutdown", "no shutdown"}; - } - - public String[] expected() { - return new String[] { - "login:", - "Password:", - "Last login:", - "#", - "Login incorrect", - "Connection closed by foreign host." 
- }; - } - - public String[] interfaceExpected() { - return new String[] { - "port_number", - "link_status", - "administrative_state", - "current_duplex", - "current_speed", - "current_polarity", - "configured_duplex", - "configured_speed", - "configured_polarity", - "left_chevron", - "input_packets", - "bytes", - "dropped", - "multicast_packets", - "output_packets", - "multicast_packets2", - "broadcast_packets", - "input_average_rate", - "output_average_rate", - "input_peak_rate", - "time_since_last_state_change" - }; - } - - public String[] loginExpected() { - return new String[] {":", ":", ">"}; - } - - public String[] platformExpected() { - return new String[] { - "port_number", - "enabled", - "loopback", - "link", - "speed", - "max_speed", - "duplex", - "linkscan", - "autonegotiate", - "master", - "tx_pause", - "rx_pause", - "untagged_vlan", - "vlan_filter", - "stp_state", - "learn", - "discard", - "jam", - "max_frame_size", - "mc_disable_sa", - "mc_disable_ttl", - "mc_egress_untag", - "mc_egress_vid", - "mc_ttl_threshold" - }; - } - - public String[] powerExpected() { - return new String[] { - "dev_interface", "admin", "pri", "oper", "power", "device", "dev_class", "max" - }; - } - - public String[] showInterfaceExpected() { - return new String[] { - "port1", - "\n", - "Link is ", - ",", - "administrative state is ", - "\n", - "current duplex ", - ",", - "current speed ", - ",", - "current polarity ", - "\n", - "configured duplex ", - ",", - "configured speed ", - ",", - "configured polarity ", - "\n", - "<", - "\n", - "input packets ", - ",", - "bytes ", - ",", - "dropped ", - ",", - "multicast packets ", - "\n", - "output packets ", - ",", - "multicast packets ", - ",", - "broadcast packets ", - "\n", - "input average rate : ", - "\n", - "output average rate: ", - "\n", - "input peak rate ", - "\n", - "Time since last state change: ", - "\n" - }; - } - - public String[] showInterfacePortExpected() { - return new String[] {"port1", "\n", "Time since last 
state change: ", "\n"}; - } - - public String[] showPlatformExpected() { - return new String[] { - "port1", - "\n", - "enabled:", - "\n", - "loopback:", - "\n", - "link:", - "\n", - "speed:", - "m", - "max speed:", - "\n", - "duplex:", - "\n", - "linkscan:", - "\n", - "autonegotiate:", - "\n", - "master:", - "\n", - "tx pause:", - "r", - "rx pause:", - "\n", - "untagged vlan:", - "\n", - "vlan filter:", - "\n", - "stp state:", - "\n", - "learn:", - "\n", - "discard:", - "\n", - "jam:", - "\n", - "max frame size:", - "\n", - "MC Disable SA:", - "\n", - "MC Disable TTL:", - "\n", - "MC egress untag:", - "\n", - "MC egress vid:", - "\n", - "MC TTL threshold:", - "\n" - }; - } - - public String[] showPlatformPortExpected() { - return new String[] {"port1", "\n", "MC TTL threshold:", "\n"}; - } - - public String[] showPowerExpected() { - return new String[] { - "Interface", "Admin", "Pri", "Oper", "Power", "Device", "Class", "Max", "\n" - }; - } - - public String[] stackExpected() { - return new String[] {"id", "pending_id", "mac_address", "priority", "status", "role"}; - } - - public String[] showStackExpected() { - return new String[] {"ID", "Pending ID", "MAC address", "Priority", "Status", "Role", "\n"}; - } -} diff --git a/subset/switches/src/main/java/switchtest/cisco/Cisco9300.java b/subset/switches/src/main/java/switchtest/cisco/Cisco9300.java deleted file mode 100644 index eaa420e402..0000000000 --- a/subset/switches/src/main/java/switchtest/cisco/Cisco9300.java +++ /dev/null @@ -1,523 +0,0 @@ -package switchtest.cisco; - -/* - * Licensed to the Google under one or more contributor license agreements. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import switchtest.SwitchInterrogator; - -import java.util.Arrays; -import java.util.HashMap; -import java.util.Map; - -public class Cisco9300 extends SwitchInterrogator { - - int commandIndex = 0; - boolean commandPending = false; - boolean promptReady = false; - StringBuilder rxData = new StringBuilder(); - - /** Cisco Terminal Prompt ends with # when enabled */ - String consolePromptEndingEnabled = "#"; - - String consolePromptEndingLogin = ">"; - - public Cisco9300( - String remoteIpAddress, - int interfacePort, - boolean deviceConfigPoeEnabled, - String user, - String password) { - super(remoteIpAddress, interfacePort, deviceConfigPoeEnabled); - telnetClientSocket = - new CiscoSwitchTelnetClientSocket(remoteIpAddress, remotePort, this, debug); - this.username = user == null ? "admin" : user; - this.password = password == null ? "password" : password; - } - - /** Generic Cisco Switch command to retrieve the Status of an interface. */ - private String showIfaceStatusCommand() { - return "show interface gigabitethernet1/0/" + interfacePort + " status"; - } - - /** - * Generic Cisco Switch command to retrieve the Power Status of an interface. Replace asterisk - * with actual port number for complete message - */ - private String showIfacePowerStatusCommand() { - return "show power inline gigabitethernet1/0/" + interfacePort + " detail"; - } - - /** - * Builds an array of currently supported commands to send to the Cisco Switch for the port - * specified. 
- * - * @return String array of commands to be submitted to the switch - */ - public String[] commands() { - return new String[] {showIfaceStatusCommand(), showIfacePowerStatusCommand()}; - } - - /** Run all current tests in order and create and store the results */ - public void generateTestResults() { - login_report += "\n"; - login_report += validateLinkTest(); - login_report += validateSpeedTests(); - login_report += validateDuplexTests(); - login_report += validatePowerTests(); - } - - public boolean handleCommandResponse(String consoleData) { - if (consoleData == null) return false; - if (consoleData.endsWith(getHostname() + consolePromptEndingEnabled)) { - // Strip trailing command prompt - String response = - consoleData.substring(0, consoleData.length() - (getHostname() + "#").length()); - // Strip leading command that was sent - response = response.substring(command[commandIndex].length()); - processCommandResponse(response); - promptReady = true; - commandPending = false; - ++commandIndex; - return true; - } - return false; - } - - /** - * Handles the process when using the enter command. Enable is a required step before commands can - * be sent to the switch. - * - * @param consoleData Raw console data received the the telnet connection. - * @return True if the data provided was understood and processed. False if the data is not an - * expected result or the enable process failed. 
- */ - public boolean handleEnableMessage(String consoleData) throws Exception { - if (consoleData == null) return false; - if (consoleData.indexOf("Password:") >= 0) { - telnetClientSocket.writeData(password + "\n"); - return true; - } else if (consoleData.endsWith(consolePromptEndingEnabled)) { - setUserEnabled(true); - return true; - } else if (consoleData.indexOf("% Bad passwords") >= 0) { - telnetClientSocket.disposeConnection(); - throw new Exception("Could not Enable the User, Bad Password"); - } - return false; - } - - /** - * Handles the process when logging into the switch. - * - * @param consoleData Raw console data received the the telnet connection. - * @return True if the data provided was understood and processed. False if the data is not an - * expected result or if the login failed. - */ - public boolean handleLoginMessage(String consoleData) throws Exception { - if (consoleData == null) return false; - if (consoleData.indexOf("Username:") >= 0) { - telnetClientSocket.writeData(username + "\n"); - return true; - } else if (consoleData.indexOf("Password:") >= 0) { - telnetClientSocket.writeData(password + "\n"); - return true; - } else if (consoleData.endsWith(consolePromptEndingLogin)) { - setUserAuthorised(true); - setHostname(consoleData.split(">")[0]); - telnetClientSocket.writeData("enable\n"); - return true; - } else if (consoleData.indexOf("% Login invalid") >= 0) { - telnetClientSocket.disposeConnection(); - throw new Exception("Failked to Login, Login Invalid"); - } else if (consoleData.indexOf("% Bad passwords") >= 0) { - telnetClientSocket.disposeConnection(); - throw new Exception("Failed to Login, Bad Password"); - } - return false; - } - - /** - * If the message --More-- is present in the current data packet, this indicates the message is - * incomplete. To complete the message, we need to tell the console to continue the response and - * strip the --More-- entry from the data packet as it is not actually part of the response. 
- * - * @param consoleData Current unprocessed data packet - */ - public void handleMore(String consoleData) { - consoleData = consoleData.substring(0, consoleData.length() - "--More--".length()); - telnetClientSocket.writeData("\n"); - rxData.append(consoleData); - } - - /** - * Receive the raw data packet from the telnet connection and process accordingly. - * - * @param data Most recent data read from the telnet socket buffer - */ - public void receiveData(String data) { - if (debug) { - System.out.println( - java.time.LocalTime.now() + "receiveDataLen:" + data.length() + "receiveData:" + data); - } - if (data != null) { - if (!data.isEmpty()) { - if (data.indexOf("--More--") > 0) { - handleMore(data); - return; - } else { - rxData.append(data); - } - } - try { - if (parseData(rxData.toString())) { - // If we have processed the current buffers data we will clear the buffer - rxData = new StringBuilder(); - } - } catch (Exception e) { - telnetClientSocket.disposeConnection(); - e.printStackTrace(); - } - } - } - - /** - * Handles current data in the buffer read from the telnet console InputStream and sends it to the - * appropriate process. - * - * @param consoleData Current unhandled data in the buffered reader - * @return true if the data was an expected value and appropriately processed and return false if - * the data is not-expected. 
- */ - public boolean parseData(String consoleData) throws Exception { - consoleData = consoleData.trim(); - if (!getUserAuthorised()) { - return handleLoginMessage(consoleData); - } else if (!getUserEnabled()) { - return handleEnableMessage(consoleData); - } else { - // Logged in and enabled - if (commandPending) { // Command has been sent and awaiting a response - if (handleCommandResponse(consoleData)) { - telnetClientSocket.writeData("\n"); - return true; - } - } else if (command.length > commandIndex) { - if (consoleData.endsWith(getHostname() + consolePromptEndingEnabled)) { - sendNextCommand(); - return true; - } - } else { - generateTestResults(); - writeReport(); - telnetClientSocket.disposeConnection(); - } - } - return false; - } - - public String validateLinkTest() { - String testResults = ""; - if (interface_map.get("status").equals("connected")) { - testResults += "RESULT pass connection.port_link Link is up\n"; - } else { - testResults += "RESULT fail connection.port_link Link is down\n"; - } - return testResults; - } - - public String validateSpeedTests() { - String testResults = ""; - if (interface_map.get("speed") != null) { - String speed = interface_map.get("speed"); - if (speed.startsWith("a-")) { // Interface in Auto Speed - speed = speed.replaceFirst("a-", ""); - } - if (Integer.parseInt(speed) >= 10) { - testResults += - "RESULT pass connection.port_speed Speed auto-negotiated successfully. Speed is greater than 10 MBPS\n"; - } else { - testResults += - "RESULT fail connection.port_speed Speed is too slow. 
Speed is less than or equal to 10 mbps\n"; - } - } else { - testResults += "RESULT fail connection.port_speed Cannot detect current speed\n"; - } - return testResults; - } - - public String validateDuplexTests() { - String testResults = ""; - if (interface_map.get("duplex") != null) { - String duplex = interface_map.get("duplex"); - if (duplex.startsWith("a-")) { // Interface in Auto Duplex - duplex = duplex.replaceFirst("a-", ""); - } - if (duplex.equals("full")) { - testResults += "RESULT pass connection.port_duplex Full duplex mode detected\n"; - } else { - testResults += "RESULT fail connection.port_duplex Incorrect duplex mode set\n"; - } - } else { - testResults += "RESULT fail connection.port_duplex Cannot detect duplex mode\n"; - } - return testResults; - } - - public String validatePowerTests() { - String testResults = ""; - double maxPower = 0; - double currentPower = 0; - boolean powerAuto = false; - boolean poeDisabled = false; - boolean poeOn = false; - boolean poeOff = false; - boolean poeFault = false; - boolean poeDeny = false; - try { - // Generate test data from mapped results - maxPower = Double.parseDouble(power_map.get("max")); - currentPower = Double.parseDouble(power_map.get("power")); - powerAuto = "auto".equals(power_map.get("admin")); - poeDisabled = "off".equals(power_map.get("admin")); - poeOn = "on".equals(power_map.get("oper")); - poeOff = "off".equals(power_map.get("oper")); - poeFault = "fault".equals(power_map.get("oper")); - poeDeny = "power-deny".equals(power_map.get("oper")); - } catch (Exception e) { - // ToDo: Make these failures specific to the data resolve errors instead of all or nothing - System.out.println("Power Tests Failed: " + e.getMessage()); - e.printStackTrace(); - testResults += "RESULT fail poe.power Could not detect any current being drawn\n"; - testResults += "RESULT fail poe.negotiation Could not detect any current being drawn\n"; - testResults += "RESULT fail poe.support Could not detect any current being 
drawn\n"; - return testResults; - } - - if (!deviceConfigPoeEnabled) { - testResults += "RESULT skip poe.power This test is disabled\n"; - testResults += "RESULT skip poe.negotiation This test is disabled\n"; - testResults += "RESULT skip poe.support This test is disabled\n"; - - } else if (poeDisabled) { - testResults += "RESULT skip poe.power The switch does not support PoE\n"; - testResults += "RESULT skip poe.negotiation The switch does not support PoE\n"; - testResults += "RESULT skip poe.support The switch does not support PoE\n"; - } else { - - // Determine PoE power test result - if (maxPower >= currentPower && poeOn) { - testResults += "RESULT pass poe.power PoE is applied to device\n"; - } else if (poeOff) { - testResults += "RESULT fail poe.power No poE is applied\n"; - } else if (poeFault) { - testResults += - "RESULT fail poe.power Device detection or a powered device is in a faulty state\n"; - } else if (poeDeny) { - testResults += - "RESULT fail poe.power A powered device is detected, but no PoE is available, or the maximum wattage exceeds the detected powered-device maximum.\n"; - } - - // Determine PoE auto negotiation result - if (powerAuto) { - testResults += "RESULT pass poe.negotiation PoE auto-negotiated successfully\n"; - } else { - testResults += "RESULT fail poe.negotiation Incorrect privilege for negotiation\n"; - } - - // Determine PoE support result - if (poeOn) { - testResults += "RESULT pass poe.support PoE supported and enabled\n"; - } else { - testResults += - "RESULT fail poe.support The switch does not support PoE or it is disabled\n"; - } - } - - return testResults; - } - - private void processCommandResponse(String response) { - response = response.trim(); - System.out.println("\nProcessing Command Response:\n" + response); - login_report += "\n\n" + response; - switch (commandIndex) { - case 0: // show interface status - processInterfaceStatus(response); - break; - case 1: // show power status - 
processPowerStatusInline(response); - } - } - - public HashMap processInterfaceStatus(String response) { - interface_map = mapSimpleTable(response, show_interface_expected, interface_expected); - return interface_map; - } - - public Map processPowerStatusInline(String response) { - Map inlineMap = powerInlineMap(); - Arrays.stream(response.split("\n")) - .forEach( - line -> { - String[] lineParts = line.trim().split(":"); - if (lineParts.length > 1) { - String powerMapKey = inlineMap.getOrDefault(lineParts[0], null); - if (powerMapKey != null) { - power_map.put(powerMapKey, lineParts[1].trim()); - } - } - }); - return power_map; - } - - /** - * Map a simple table containing a header and 1 row of data to a hashmap - * - *

    his method will also attempt ot correct for mis-aligned tabular data as well as empty - * columns values. - * - * @param rawPacket Raw table response from a switch command - * @param colNames Array containing the names of the columns in the response - * @param mapNames Array containing names key names to map values to - * @return A HashMap containing the values mapped to the key names provided in the mapNames array - */ - public HashMap mapSimpleTable( - String rawPacket, String[] colNames, String[] mapNames) { - HashMap colMap = new HashMap(); - String[] lines = rawPacket.split("\n"); - if (lines.length > 0) { - String header = lines[0].trim(); - String values = lines[1].trim(); - int lastSectionEnd = 0; - for (int i = 0; i < colNames.length; ++i) { - int secStart = lastSectionEnd; - int secEnd; - if ((i + 1) >= colNames.length) { - // Resolving last column - secEnd = values.length(); - } else { - // Tabular data is not always reported in perfectly alignment, we need to calculate the - // correct values based off of the sections in between white spaces - int firstWhiteSpace = - getFirstWhiteSpace(values.substring(lastSectionEnd)) + lastSectionEnd; - int lastWhiteSpace = - getIndexOfNonWhitespaceAfterWhitespace(values.substring(firstWhiteSpace)) - + firstWhiteSpace; - int nextHeaderStart = header.indexOf(colNames[i + 1]); - secEnd = Math.min(lastWhiteSpace, nextHeaderStart); - } - lastSectionEnd = secEnd; - String sectionRaw = values.substring(secStart, secEnd).trim(); - colMap.put(mapNames[i], sectionRaw); - } - } - return colMap; - } - - public static int getFirstWhiteSpace(String string) { - char[] characters = string.toCharArray(); - for (int i = 0; i < string.length(); i++) { - if (Character.isWhitespace(characters[i])) { - return i; - } - } - return -1; - } - - public static int getIndexOfNonWhitespaceAfterWhitespace(String string) { - char[] characters = string.toCharArray(); - boolean lastWhitespace = false; - for (int i = 0; i < string.length(); i++) { 
- if (Character.isWhitespace(characters[i])) { - lastWhitespace = true; - } else if (lastWhitespace) { - return i; - } - } - return -1; - } - - public void sendNextCommand() { - login_report += "\n" + command[commandIndex]; - telnetClientSocket.writeData(command[commandIndex] + "\n"); - commandPending = true; - promptReady = false; - } - - public String[] interfaceExpected() { - return new String[] {"interface", "name", "status", "vlan", "duplex", "speed", "type"}; - } - - public String[] powerExpected() { - return new String[] {"dev_interface", "admin", "oper", "power", "device", "dev_class", "max"}; - } - - public String[] showInterfaceExpected() { - return new String[] {"Port", "Name", "Status", "Vlan", "Duplex", "Speed", "Type"}; - } - - public String[] showPowerExpected() { - return new String[] {"Interface", "Admin", "Oper", "Power", "Device", "Class", "Max"}; - } - - // Unused methods implemented for compiling only - public String[] commandToggle() { - return new String[] {}; - } - - public String[] expected() { - return new String[] {}; - } - - public String[] loginExpected() { - return new String[] {}; - } - - public String[] platformExpected() { - return new String[] {}; - } - - public String[] showInterfacePortExpected() { - return new String[] {}; - } - - public String[] showPlatformExpected() { - return new String[] {}; - } - - public String[] showPlatformPortExpected() { - return new String[] {}; - } - - public String[] stackExpected() { - return new String[] {}; - } - - public String[] showStackExpected() { - return new String[] {}; - } - - private static HashMap powerInlineMap() { - HashMap map = new HashMap(); - map.put("Interface", "dev_interface"); - map.put("Inline Power Mode", "admin"); - map.put("Operational status", "oper"); - map.put("Measured at the port", "power"); - map.put("Device Type", "device"); - map.put("IEEE Class", "dev_class"); - map.put("Power available to the device", "max"); - return map; - } -} diff --git 
a/subset/switches/src/main/java/switchtest/cisco/CiscoSwitchTelnetClientSocket.java b/subset/switches/src/main/java/switchtest/cisco/CiscoSwitchTelnetClientSocket.java deleted file mode 100644 index 111b6071f5..0000000000 --- a/subset/switches/src/main/java/switchtest/cisco/CiscoSwitchTelnetClientSocket.java +++ /dev/null @@ -1,60 +0,0 @@ -package switchtest.cisco; - -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import switchtest.SwitchInterrogator; -import switchtest.SwitchTelnetClientSocket; - -public class CiscoSwitchTelnetClientSocket extends SwitchTelnetClientSocket { - public CiscoSwitchTelnetClientSocket( - String remoteIpAddress, int remotePort, SwitchInterrogator interrogator, boolean debug) { - super(remoteIpAddress, remotePort, interrogator, debug); - } - - /** - * Continuous scan of data in the rxQueue and send to SwitchInterrogator for processing. If no - * data can be read for 70 scans, send a new line to force something into the queue. 
- */ - protected void gatherData() { - int rxQueueCount = 0; - - int expectedLength = 1000; - - while (telnetClient.isConnected()) { - try { - if (rxQueue.isEmpty()) { - Thread.sleep(100); - rxQueueCount++; - if (debug) { - System.out.println("rxQueue.isEmpty:" + rxQueueCount); - System.out.println("expectedLength:" + expectedLength); - } - if (rxQueueCount > 70) { - rxQueueCount = 0; - writeData("\n"); - } - } else { - String rxGathered = rxQueue.poll(); - interrogator.receiveData(rxGathered); - } - } catch (InterruptedException e) { - System.err.println("InterruptedException gatherData:" + e.getMessage()); - } - } - } -} diff --git a/subset/switches/test_switch b/subset/switches/test_switch index 549c3fdccc..e20ff5728b 100755 --- a/subset/switches/test_switch +++ b/subset/switches/test_switch @@ -7,11 +7,15 @@ MONO_LOG=/tmp/monolog.switch.txt RESULT_LINES=/tmp/results.switch.txt MODULE_CONFIG=/config/device/module_config.json +route add default gw $GATEWAY_IP +ping -c 2 $GATEWAY_IP # Setup for accessing control plane switch. If LOCAL_IP is defined, which # is the intended local address for this node on the control plane then # SWITCH_IP will be the IP address of the OpenFlow switch. 
if [ -n "$LOCAL_IP" ]; then + USI_URL=`jq -r .run_info.usi.url $MODULE_CONFIG` + RPC_TIMEOUT=`jq -r .run_info.usi.rpc_timeout_sec $MODULE_CONFIG` || 10 SWITCH_IP=`jq -r .run_info.switch.ip $MODULE_CONFIG` SWITCH_MODEL=`jq -r .run_info.switch.model $MODULE_CONFIG` SWITCH_USERNAME=`jq -r .run_info.switch.username $MODULE_CONFIG` @@ -24,7 +28,7 @@ if [ -n "$LOCAL_IP" ]; then echo Switch test with username:password $SWITCH_USERNAME:$SWITCH_PASSWORD ping -n -c 10 $SWITCH_IP POE_ENABLED=`jq -r .modules.switch.poe.enabled $MODULE_CONFIG` - java -jar switches/target/switchtest-0.0.1-jar-with-dependencies.jar $SWITCH_IP $TARGET_PORT $POE_ENABLED $SWITCH_MODEL $SWITCH_USERNAME $SWITCH_PASSWORD + java -jar switches/target/switchtest-0.0.1-jar-with-dependencies.jar $USI_URL $RPC_TIMEOUT $SWITCH_IP $TARGET_PORT $POE_ENABLED $SWITCH_MODEL $SWITCH_USERNAME $SWITCH_PASSWORD grep -v "RESULT" $LOCAL_REPORT | tee -a $MONO_LOG grep "RESULT" $LOCAL_REPORT | tee -a $RESULT_LINES @@ -59,21 +63,9 @@ else "RESULT $RESULT connection.port_duplex $SUMMARY" write_out_result $REPORT \ - "poe.power" \ - "$(jq -r '.["poe.power"].description' $MANIFEST)" \ + "poe.switch.power" \ + "$(jq -r '.["poe.switch.power"].description' $MANIFEST)" \ "$SKIP_REASON" \ - "RESULT $RESULT poe.power $SUMMARY" - - write_out_result $REPORT \ - "poe.negotiation" \ - "$(jq -r '.["poe.negotiation"].description' $MANIFEST)" \ - "$SKIP_REASON" \ - "RESULT $RESULT poe.negotiation $SUMMARY" - - write_out_result $REPORT \ - "poe.support" \ - "$(jq -r '.["poe.support"].description' $MANIFEST)" \ - "$SKIP_REASON" \ - "RESULT $RESULT poe.support $SUMMARY" + "RESULT $RESULT poe.switch.power $SUMMARY" fi diff --git a/testing/run_unit_tests.sh b/testing/run_unit_tests.sh index b82892e384..a0705015a3 100755 --- a/testing/run_unit_tests.sh +++ b/testing/run_unit_tests.sh @@ -10,7 +10,7 @@ source venv/bin/activate coverage erase -export PYTHONPATH=$BASEDIR/daq:$BASEDIR/mininet:$BASEDIR/faucet:$BASEDIR/forch:$BASEDIR/bin/python 
+export PYTHONPATH=$BASEDIR/daq:$BASEDIR/mininet:$BASEDIR/faucet:$BASEDIR/forch:$BASEDIR/bin/python:$BASEDIR/libs:$BASEDIR/libs/proto coverage run \ --source $BASEDIR/daq,$BASEDIR/bin/python/ \ -m unittest discover \ diff --git a/testing/test_aux.gcp b/testing/test_aux.gcp index 0f08897abe..9c5cc28467 100644 --- a/testing/test_aux.gcp +++ b/testing/test_aux.gcp @@ -5,18 +5,21 @@ Running testing/test_aux.sh "AHU-22" : "True", "GAT-123" : "True", "SNS-4" : "True" + }, + "Version" : { + "main" : "1.1.0" } } -inst/test_site/devices/AHU-1/metadata_norm.json: "hash": "ddf813e3" -inst/test_site/devices/AHU-22/metadata_norm.json: "hash": "bf82176c" -inst/test_site/devices/GAT-123/metadata_norm.json: "hash": "030193c8" -inst/test_site/devices/SNS-4/metadata_norm.json: "hash": "f701f900" +inst/test_site/devices/AHU-1/metadata_norm.json: "hash": "175e704a" +inst/test_site/devices/AHU-22/metadata_norm.json: "hash": "bf0ba5fa" +inst/test_site/devices/GAT-123/metadata_norm.json: "hash": "cbcf045e" +inst/test_site/devices/SNS-4/metadata_norm.json: "hash": "879557b4" +RESULT pass cloud.udmi.state Payload successfully validated +RESULT fail cloud.udmi.pointset "Unrecognized field \"extraField\" (class com.google.daq.mqtt.registrar.UdmiSchema$PointsetMessage), not marked as ignorable (3 known properties: \"version\", \"points\", \"timestamp\"])\n at [Source: UNKNOWN; line: -1, column: -1] (through reference chain: com.google.daq.mqtt.registrar.UdmiSchema$PointsetMessage[\"extraField\"])" +RESULT pass cloud.udmi.system Payload successfully validated +RESULT pass cloud.udmi.state Payload successfully validated +RESULT pass cloud.udmi.pointset Payload successfully validated +RESULT pass cloud.udmi.system Payload successfully validated RESULT skip cloud.udmi.state No device id RESULT skip cloud.udmi.pointset No device id RESULT skip cloud.udmi.system No device id -RESULT fail cloud.udmi.state No result found -RESULT pass cloud.udmi.pointset Payload successfully validated -RESULT pass 
cloud.udmi.system Payload successfully validated -RESULT fail cloud.udmi.state No result found -RESULT fail cloud.udmi.pointset #: extraneous key [extraField] is not permitted -RESULT pass cloud.udmi.system Payload successfully validated diff --git a/testing/test_aux.out b/testing/test_aux.out index acf12f7aa7..8a6bd9e9cf 100644 --- a/testing/test_aux.out +++ b/testing/test_aux.out @@ -1,7 +1,5 @@ Running testing/test_aux.sh Aux Tests -Lint checks -check_style exit code 0 RESULT pass base.startup.dhcp RESULT skip base.switch.ping No local IP has been set, check system config RESULT pass base.target.ping target reached @@ -23,8 +21,6 @@ RESULT info protocol.bacnet.version Protocol version: 1 RESULT skip protocol.bacnet.pic BACnet device found, but pics.csv not found in device type directory. RESULT info protocol.bacnet.version Protocol version: 1 RESULT pass protocol.bacnet.pic The devices matches the PICS -RESULT fail connection.mac_oui Manufacturer prefix not found! -RESULT pass connection.mac_oui Manufacturer: Google found for address 3c:5a:b4:1e:8f:0a RESULT skip security.tls.v1 IOException unable to connect to server RESULT skip security.tls.v1.x509 IOException unable to connect to server RESULT skip security.tls.v1_2 IOException unable to connect to server @@ -43,24 +39,42 @@ RESULT pass security.tls.v1_2 Certificate active for current date and public key RESULT pass security.tls.v1_2.x509 Certificate active for current date and public key length > 2048. RESULT pass security.tls.v1_3 Certificate active for current date and public key length > 2048. RESULT pass security.tls.v1_3.x509 Certificate active for current date and public key length > 2048. -RESULT skip security.passwords.http Port 80 is not open on target device. -RESULT skip security.passwords.https Port 443 is not open on target device. -RESULT skip security.passwords.telnet Port 23 is not open on target device. -RESULT skip security.passwords.ssh Port 22 is not open on target device. 
-RESULT fail security.passwords.http Default passwords have not been changed. -RESULT fail security.passwords.https Default passwords have not been changed. -RESULT fail security.passwords.telnet Default passwords have not been changed. -RESULT fail security.passwords.ssh Default passwords have not been changed. -RESULT pass security.passwords.http Default passwords have been changed. -RESULT pass security.passwords.https Default passwords have been changed. -RESULT pass security.passwords.telnet Default passwords have been changed. -RESULT pass security.passwords.ssh Default passwords have been changed. +RESULT skip security.passwords.http Port 80 not open on target device. +RESULT skip security.passwords.https Port 443 not open on target device. +RESULT skip security.passwords.ssh Port 22 not open on target device. +RESULT skip security.passwords.telnet Port 23 not open on target device. +RESULT fail security.passwords.http Was able to brute force using dictionary. +RESULT fail security.passwords.https Was able to brute force using dictionary. +RESULT fail security.passwords.ssh Was able to brute force using dictionary. +RESULT fail security.passwords.telnet Was able to brute force using dictionary. +RESULT pass security.passwords.http Was not able to brute force using dictionary. +RESULT pass security.passwords.https Was not able to brute force using dictionary. +RESULT pass security.passwords.ssh Was not able to brute force using dictionary. +RESULT pass security.passwords.telnet Was not able to brute force using dictionary. RESULT skip security.firmware Could not retrieve a firmware version with nmap. Check bacnet port. RESULT pass security.firmware version found: ?\xFF\xFF\x19,>u\x08\x00no -dhcp requests 1 1 0 1 -01: [] -02: ['02:macoui:TimeoutError', '02:ping:TimeoutError'] -03: [] +RESULT pass connection.min_send ARP packets received. 
Data packets were sent at a frequency of less than 5 minutes +RESULT info communication.type.broadcast Broadcast packets received. Unicast packets received. +RESULT pass connection.network.ntp_support Using NTPv4. +RESULT pass connection.network.ntp_update Device clock synchronized. +RESULT fail connection.mac_oui Manufacturer prefix not found! +RESULT skip connection.dns.hostname_connect Device did not send any DNS requests +RESULT pass connection.min_send ARP packets received. Data packets were sent at a frequency of less than 5 minutes +RESULT info communication.type.broadcast Broadcast packets received. Unicast packets received. +RESULT fail connection.network.ntp_support Not using NTPv4. +RESULT fail connection.network.ntp_update Device clock not synchronized with local NTP server. +RESULT pass connection.mac_oui Manufacturer: Google found for address 3c:5a:b4:1e:8f:0b +RESULT fail connection.dns.hostname_connect Device sent DNS requests to servers other than the DHCP provided server +RESULT pass connection.min_send ARP packets received. Data packets were sent at a frequency of less than 5 minutes +RESULT info communication.type.broadcast Broadcast packets received. Unicast packets received. +RESULT skip connection.network.ntp_support No NTP packets received. +RESULT skip connection.network.ntp_update Not enough NTP packets received. 
+RESULT pass connection.mac_oui Manufacturer: Google found for address 3c:5a:b4:1e:8f:0a +RESULT pass connection.dns.hostname_connect Device sends DNS requests and resolves host names +dhcp requests 1 1 1 1 +3c5ab41e8f0a: [] +3c5ab41e8f0b: ['3c5ab41e8f0b:ping:TimeoutError'] +9a02571e8f01: [] arp.txt dp_port_acls.yaml dp_sec_port_1_acl.yaml @@ -91,10 +105,16 @@ port-01 module_config modules "enabled": false }, "ipaddr": { - "timeout_sec": 300 - }, - "macoui": { - "enabled": true + "dhcp_ranges": [ + { + "end": "192.168.255.254", + "prefix_length": 16, + "start": "192.168.0.1" + } + ], + "enabled": false, + "port_flap_timeout_sec": 20, + "timeout_sec": 900 }, "manual": { "enabled": true @@ -103,14 +123,19 @@ port-01 module_config modules "enabled": true }, "nmap": { - "enabled": true + "enabled": true, + "timeout_sec": 600 }, "pass": { "enabled": true }, "password": { + "dictionary_dir": "resources/faux", "enabled": true }, + "ssh": { + "enabled": false + }, "switch": { "enabled": true, "poe": { @@ -139,11 +164,16 @@ port-02 module_config modules "enabled": true }, "ipaddr": { - "timeout_sec": 300 - }, - "macoui": { - "enabled": true, - "timeout_sec": 1 + "dhcp_ranges": [ + { + "end": "192.168.255.254", + "prefix_length": 16, + "start": "192.168.0.1" + } + ], + "enabled": false, + "port_flap_timeout_sec": 20, + "timeout_sec": 900 }, "manual": { "enabled": true @@ -152,14 +182,19 @@ port-02 module_config modules "enabled": true }, "nmap": { - "enabled": true + "enabled": true, + "timeout_sec": 600 }, "pass": { "enabled": false }, "password": { + "dictionary_dir": "resources/faux", "enabled": true }, + "ssh": { + "enabled": false + }, "switch": { "enabled": true }, @@ -182,30 +217,15 @@ port-02 module_config modules `\.-''__.-' \ ( \_ `''' `\__ /\ ') -Host: X.X.X.X () Status: Up -Host: X.X.X.X () Ports: 47808/closed/udp//bacnet/// Ignored State: closed (3) +Host: X.X.X.X () Status: Up +Host: X.X.X.X () Ports: 47808/closed/udp//bacnet/// +Host: X.X.X.X () Status: Up 
+Host: X.X.X.X () Ports: 10000/open/tcp//snet-sensor-mgmt?/// Redacted docs diff No report diff -01: ['01:ping:Exception'] -02: ['02:hold:Exception'] -03: ['03:hold:DaqException', '03:ping:ValueError'] -inst/gw01/nodes/gw01/activate.log -inst/gw02/nodes/gw02/activate.log -inst/gw03/nodes/gw03/activate.log -inst/run-port-01/nodes/fail01/activate.log -inst/run-port-01/nodes/nmap01/activate.log -inst/run-port-01/nodes/pass01/activate.log -inst/run-port-01/nodes/ping01/activate.log -inst/run-port-02/nodes/fail02/activate.log -inst/run-port-02/nodes/hold02/activate.log -inst/run-port-02/nodes/nmap02/activate.log -inst/run-port-02/nodes/pass02/activate.log -inst/run-port-02/nodes/ping02/activate.log -inst/run-port-03/nodes/fail03/activate.log -inst/run-port-03/nodes/hold03/activate.log -inst/run-port-03/nodes/nmap03/activate.log -inst/run-port-03/nodes/pass03/activate.log -inst/run-port-03/nodes/ping03/activate.log +9a02571e8f01: ['9a02571e8f01:ping:Exception'] +9a02571e8f02: ['9a02571e8f02:hold:Exception'] +9a02571e8f03: ['9a02571e8f03:hold:DaqException', '9a02571e8f03:ping:ValueError'] Enough port disconnects: 1 -01: ['01:hold:DaqException'] +9a02571e8f00: ['9a02571e8f00:hold:DaqException'] Done with tests diff --git a/testing/test_aux.sh b/testing/test_aux.sh index ebf75ee79c..d00e8267aa 100755 --- a/testing/test_aux.sh +++ b/testing/test_aux.sh @@ -4,11 +4,6 @@ source testing/test_preamble.sh echo Aux Tests >> $TEST_RESULTS -# Runs lint checks and some similar things -echo Lint checks | tee -a $TEST_RESULTS -bin/check_style -echo check_style exit code $? 
| tee -a $TEST_RESULTS - # Function to create pubber config files (for use in cloud tests) function make_pubber { @@ -31,6 +26,7 @@ function make_pubber { "cloudRegion": $cloud_region, "registryId": $registry_id, "extraField": $fail, + "keyFile": "local/rsa_private.pkcs8", "gatewayId": $gateway, "deviceId": "$device" } @@ -40,7 +36,9 @@ EOF function capture_test_results { module_name=$1 - fgrep -h RESULT inst/run-port-*/nodes/$module_name*/tmp/report.txt | tee -a $TEST_RESULTS + for mac in 9a02571e8f01 3c5ab41e8f0b 3c5ab41e8f0a; do + fgrep -h RESULT inst/run-$mac/nodes/$module_name*/tmp/report.txt | tee -a $TEST_RESULTS + done } # Setup an instance test site @@ -53,22 +51,27 @@ cp -r resources/test_site/device_types/rocket local/site/device_types/ mkdir -p local/site/device_types/rocket/aux/ cp subset/bacnet/bacnetTests/src/main/resources/pics.csv local/site/device_types/rocket/aux/ cp -r resources/test_site/mac_addrs local/site/ + +# Add extra configs to a copy of the baseline module config for the password test to select which dictionaries to use. 
+cat resources/setups/baseline/module_config.json | jq '.modules.password += {"dictionary_dir":"resources/faux"}' > local/module_config.json + cat < local/system.yaml --- include: config/system/all.conf +base_conf: local/module_config.json finish_hook: bin/dump_network test_config: resources/runtime_configs/long_wait site_path: inst/test_site schema_path: schemas/udmi interfaces: faux-1: - opts: brute broadcast_client ntp_pass + opts: brute broadcast_client ntpv4 faux-2: - opts: nobrute expiredtls bacnetfail pubber passwordfail ntp_fail opendns + opts: nobrute expiredtls bacnetfail pubber passwordfail ntpv3 opendns ssh curl faux-3: - opts: tls macoui passwordpass bacnet pubber broadcast_client + opts: tls macoui passwordpass bacnet pubber broadcast_client ssh curl long_dhcp_response_sec: 0 -monitor_scan_sec: 0 +monitor_scan_sec: 20 EOF if [ -f "$gcp_cred" ]; then @@ -84,7 +87,7 @@ if [ -f "$gcp_cred" ]; then make_pubber AHU-1 daq-faux-2 null null make_pubber SNS-4 daq-faux-3 1234 \"GAT-123\" - GOOGLE_APPLICATION_CREDENTIALS=$gcp_cred bin/registrar $project_id + GOOGLE_APPLICATION_CREDENTIALS=$gcp_cred udmi/bin/registrar inst/test_site $project_id cat inst/test_site/registration_summary.json | tee -a $GCP_RESULTS echo | tee -a $GCP_RESULTS fgrep hash inst/test_site/devices/*/metadata_norm.json | tee -a $GCP_RESULTS @@ -104,9 +107,10 @@ echo %%%%%%%%%%%%%%%%%%%%%%%%% Starting aux test run cmd/run -s # Capture RESULT lines from ping activation logs (not generated report). -fgrep -h RESULT inst/run-port*/nodes/ping*/activate.log \ +for mac in 9a02571e8f01 3c5ab41e8f0b 3c5ab41e8f0a; do + fgrep -h RESULT inst/run-$mac/nodes/ping*/activate.log \ | sed -e 's/\s*\(%%.*\)*$//' | tee -a $TEST_RESULTS - +done # Add the RESULT lines from all aux test report files. 
capture_test_results bacext capture_test_results macoui @@ -116,32 +120,32 @@ capture_test_results discover capture_test_results network # Capture peripheral logs -more inst/run-port-*/scans/ip_triggers.txt | cat -dhcp_done=$(fgrep done inst/run-port-01/scans/ip_triggers.txt | wc -l) -dhcp_long=$(fgrep long inst/run-port-01/scans/ip_triggers.txt | wc -l) +more inst/run-*/scans/ip_triggers.txt | cat +dhcp_done=$(fgrep done inst/run-9a02571e8f01/scans/ip_triggers.txt | wc -l) +dhcp_long=$(fgrep long inst/run-9a02571e8f01/scans/ip_triggers.txt | wc -l) echo dhcp requests $((dhcp_done > 1)) $((dhcp_done < 3)) \ - $((dhcp_long > 1)) $((dhcp_long < 4)) | tee -a $TEST_RESULTS + $((dhcp_long >= 1)) $((dhcp_long < 4)) | tee -a $TEST_RESULTS sort inst/result.log | tee -a $TEST_RESULTS # Show partial logs from each test -head inst/gw*/nodes/gw*/activate.log -head inst/run-port-*/nodes/*/activate.log -head inst/run-port-*/nodes/*/tmp/report.txt -ls inst/run-port-01/finish/fail01/ | tee -a $TEST_RESULTS +head -20 inst/gw*/nodes/gw*/activate.log +head -20 inst/run-*/nodes/*/activate.log +head -20 inst/run-*/nodes/*/tmp/report.txt +ls inst/run-9a02571e8f01/finish/fail*/ | tee -a $TEST_RESULTS # Add the port-01 and port-02 module config into the file echo port-01 module_config modules | tee -a $TEST_RESULTS -jq .modules inst/run-port-01/nodes/ping01/tmp/module_config.json | tee -a $TEST_RESULTS +jq .modules inst/run-9a02571e8f01/nodes/ping*/tmp/module_config.json | tee -a $TEST_RESULTS echo port-02 module_config modules | tee -a $TEST_RESULTS -jq .modules inst/run-port-02/nodes/ping02/tmp/module_config.json | tee -a $TEST_RESULTS +jq .modules inst/run-3c5ab41e8f0b/nodes/ping*/tmp/module_config.json | tee -a $TEST_RESULTS # Add a lovely snake and a lizard into this file for testing device/type mappings. 
-cat inst/run-port-03/nodes/ping03/tmp/snake.txt | tee -a $TEST_RESULTS -cat inst/run-port-03/nodes/ping03/tmp/lizard.txt | tee -a $TEST_RESULTS +cat inst/run-3c5ab41e8f0a/nodes/ping*/tmp/snake.txt | tee -a $TEST_RESULTS +cat inst/run-3c5ab41e8f0a/nodes/ping*/tmp/lizard.txt | tee -a $TEST_RESULTS # Add the results for cloud tests into a different file, since cloud tests may not run if # our test environment isn't set up correctly. See bin/test_daq for more insight. -fgrep -h RESULT inst/run-port-*/nodes/udmi*/tmp/report.txt | tee -a $GCP_RESULTS +fgrep -h RESULT inst/run-*/nodes/udmi*/tmp/report.txt | tee -a $GCP_RESULTS for num in 1 2 3; do echo docker logs daq-faux-$num @@ -161,8 +165,12 @@ cat inst/reports/report_9a02571e8f01_*.md | redact > out/redacted_file.md fgrep Host: out/redacted_file.md | tee -a $TEST_RESULTS echo Redacted docs diff | tee -a $TEST_RESULTS -(diff out/redacted_docs.md out/redacted_file.md && echo No report diff) \ - | tee -a $TEST_RESULTS +diff out/redacted_docs.md out/redacted_file.md > out/redacted_file.diff +cat -vet out/redacted_file.diff | tee -a $TEST_RESULTS +diff_lines=`cat out/redacted_file.diff | wc -l` +if [ $diff_lines == 0 ]; then + echo No report diff | tee -a $TEST_RESULTS +fi # Make sure there's no file pollution from the test run. git status --porcelain | tee -a $TEST_RESULTS @@ -173,9 +181,9 @@ cat < local/system.yaml --- include: config/system/multi.conf fail_module: - ping_01: finalize - hold_02: initialize - ping_03: callback + ping_9a02571e8f01: finalize + hold_9a02571e8f02: initialize + ping_9a02571e8f03: callback EOF function kill_gateway { @@ -186,20 +194,20 @@ function kill_gateway { } # Check that killing the dhcp monitor aborts the run. 
-MARKER=inst/run-port-03/nodes/hold03/activate.log +MARKER=inst/run-9a02571e8f03/nodes/hold*/activate.log monitor_marker $MARKER "kill_gateway gw03" echo %%%%%%%%%%%%%%%%%%%%%%%%% Starting hold test run +rm -r inst/run-* cmd/run -k -s finish_hook=bin/dump_network cat inst/result.log | sort | tee -a $TEST_RESULTS -find inst/ -name activate.log | sort | tee -a $TEST_RESULTS -head inst/run-port-*/nodes/nmap*/activate.log -head inst/run-port-*/finish/nmap*/* - -tcpdump -en -r inst/run-port-01/scans/test_nmap.pcap icmp or arp +head inst/run-*/nodes/nmap*/activate.log +head inst/run-*/finish/nmap*/* +tcpdump -en -r inst/run-9a02571e8f01/scans/test_nmap.pcap icmp or arp +echo %%%%%%%%%%%%%%%%%%%%%%%%% Running port toggle test # Check port toggling does not cause a shutdown cat < local/system.yaml --- @@ -208,7 +216,8 @@ port_flap_timeout_sec: 10 port_debounce_sec: 0 EOF monitor_log "Port 1 dpid 2 is now active" "sudo ifconfig faux down;sleep 5; sudo ifconfig faux up" -monitor_log "Target port 1 test hold running" "sudo ifconfig faux down" +monitor_log "Target device 9a02571e8f00 test hold running" "sudo ifconfig faux down" +rm -r inst/run-* cmd/run -s -k disconnections=$(cat inst/cmdrun.log | grep "Port 1 dpid 2 is now inactive" | wc -l) echo Enough port disconnects: $((disconnections >= 2)) | tee -a $TEST_RESULTS diff --git a/testing/test_base.out b/testing/test_base.out index 86580802e2..c0208bd8de 100644 --- a/testing/test_base.out +++ b/testing/test_base.out @@ -2,7 +2,7 @@ Running testing/test_base.sh Base Tests %%%%%%%%%%%%%%%%%%%%%% Base tests DAQ result code 1 -01: ['01:hold:DaqException'] +9a02571e8f00: ['9a02571e8f00:hold:DaqException'] # Device 9a02571e8f00, XXX to XXX Sample device description. 
@@ -26,25 +26,17 @@ Overall device result PASS |Expectation|pass|skip| |---|---|---| -|Other|3|1| +|Other|4|1| |Result|Test|Category|Expectation|Notes| |---|---|---|---|---| |pass|base.startup.dhcp|Other|Other|| |skip|base.switch.ping|Other|Other|No local IP has been set, check system config| |pass|base.target.ping|Other|Other|target reached| -|pass|security.ports.nmap|Other|Other|Only allowed ports found open.| +|pass|security.nmap.http|Other|Other|No running http servers have been found.| +|pass|security.nmap.ports|Other|Other|Only allowed ports found open.| -## Module ipaddr - - -#### Module Config - -|Attribute|Value| -|---|---| -|timeout_sec|300| - ## Module pass @@ -103,16 +95,28 @@ RESULT pass base.target.ping target reached ``` -------------------- -security.ports.nmap +security.nmap.ports -------------------- Automatic TCP/UDP port scan using nmap -------------------- -# Nmap 7.60 scan initiated XXX as: nmap -v -n -T5 -sT --host-timeout=4m --open -p1-1024 -oG /tmp/nmap.log X.X.X.X -# Ports scanned: TCP(1024;1-1024) UDP(0;) SCTP(0;) PROTOCOLS(0;) +# Nmap 7.60 scan initiated XXX as: nmap -v -n -T5 -sT --host-timeout=4m --open -p1-65535 -oG /tmp/nmap.log X.X.X.X +# Ports scanned: TCP(65535;1-65535) UDP(0;) SCTP(0;) PROTOCOLS(0;) # Nmap done at XXX -- 1 IP address (1 host up) scanned in XXX No invalid ports found. -------------------- -RESULT pass security.ports.nmap Only allowed ports found open. +RESULT pass security.nmap.ports Only allowed ports found open. + +-------------------- +security.nmap.http +-------------------- +Check that the device does not have open ports exposing an unencrypted web interface using HTTP +-------------------- +# Nmap 7.60 scan initiated XXX as: nmap -v -n -T5 -A --script http-methods --host-timeout=4m --open -p- -oG /tmp/http.log X.X.X.X +# Ports scanned: TCP(65535;1-65535) UDP(0;) SCTP(0;) PROTOCOLS(0;) +# Nmap done at XXX -- 1 IP address (1 host up) scanned in XXX +No running http servers have been found. 
+-------------------- +RESULT pass security.nmap.http No running http servers have been found. ``` @@ -120,6 +124,7 @@ RESULT pass security.ports.nmap Only allowed ports found open. |Attribute|Value| |---|---| +|timeout_sec|600| |enabled|True| ## Module hold @@ -133,42 +138,45 @@ terminated %%%%%%%%%%%%%%%%%%%%%% Telnet fail DAQ result code 1 -01: ['01:hold:DaqException'] -|fail|security.ports.nmap|Other|Other|Some disallowed ports are open: 23| -security.ports.nmap -RESULT fail security.ports.nmap Some disallowed ports are open: 23 +9a02571e8f00: ['9a02571e8f00:hold:DaqException'] +|fail|security.nmap.ports|Other|Other|Some disallowed ports are open: 23.| +security.nmap.ports +RESULT fail security.nmap.ports Some disallowed ports are open: 23. %%%%%%%%%%%%%%%%%%%%%% Default MUD DAQ result code 0 -01: [] -|pass|security.ports.nmap|Other|Other|Only allowed ports found open.| -security.ports.nmap -RESULT pass security.ports.nmap Only allowed ports found open. +9a02571e8f00: [] +|pass|security.nmap.ports|Other|Other|Only allowed ports found open.| +security.nmap.ports +RESULT pass security.nmap.ports Only allowed ports found open. %%%%%%%%%%%%%%%%%%%%%% External switch tests -02: [] +9a02571e8f00: [] dp_id: 1 dp_id: 4886718345 -Switch test with target 192.0.2.138:2 -RESULT pass base.switch.ping target %% 192.0.2.138:2 -Switch test with target 192.0.2.138:2 -Monolog processing base.switch.ping... +Switch test with target 192.0.2.138:2 +RESULT pass base.switch.ping target %% 192.0.2.138:2 +Switch test with target 192.0.2.138:2 +Monolog processing base.switch.ping... 
switch ping 2 +%%%%%%%%%%%%%%%%%%%%%% Alt switch tests +XXX runner INFO 9a:02:57:1e:8f:00 learned on vid 1001 +9a02571e8f00: ['9a02571e8f00:ping:1'] %%%%%%%%%%%%%%%%%%%%%% Mud profile tests -result open 01: [] 02: [] 03: [] +result open 9a02571e8f01: [] 9a02571e8f02: [] 9a02571e8f03: [] device open 1 1 1 cntrlr open 1 1 1 -result base 01: [] 02: [] 03: [] +result base 9a02571e8f01: [] 9a02571e8f02: [] 9a02571e8f03: [] device base 1 1 0 cntrlr base 1 1 0 -result todev 01: ['01:ping:1'] 02: ['02:ping:1'] 03: [] +result todev 9a02571e8f01: ['9a02571e8f01:ping:1'] 9a02571e8f02: ['9a02571e8f02:ping:1'] 9a02571e8f03: [] device todev 0 0 0 cntrlr todev 0 0 0 -result frdev 01: [] 02: ['02:ping:1'] 03: [] +result frdev 9a02571e8f01: [] 9a02571e8f02: ['9a02571e8f02:ping:1'] 9a02571e8f03: [] device frdev 1 0 0 cntrlr frdev 1 0 0 -result none 01: ['01:ping:1'] 02: ['02:ping:1'] 03: [] +result none 9a02571e8f01: ['9a02571e8f01:ping:1'] 9a02571e8f02: ['9a02571e8f02:ping:1'] 9a02571e8f03: [] device none 0 0 0 cntrlr none 0 0 0 -result star 01: [] 02: [] 03: [] +result star 9a02571e8f01: [] 9a02571e8f02: [] 9a02571e8f03: [] device star 1 1 1 cntrlr star 1 1 0 %%%%%%%%%%%%%%%%%%%%%% Done with tests diff --git a/testing/test_base.sh b/testing/test_base.sh index d39e1c692c..1a0701144e 100755 --- a/testing/test_base.sh +++ b/testing/test_base.sh @@ -15,11 +15,11 @@ bin/build_proto check || exit 1 echo %%%%%%%%%%%%%%%%%%%%%% Base tests | tee -a $TEST_RESULTS rm -f local/system.yaml local/system.conf # Check that bringing down the trunk interface terminates DAQ. -MARKER=inst/run-port-01/nodes/hold01/activate.log +MARKER=inst/run-9a02571e8f00/nodes/hold*/activate.log monitor_marker $MARKER "sudo ip link set pri-eth1 down" cmd/run -b -k -s site_path=inst/tmp_site echo DAQ result code $? 
| tee -a $TEST_RESULTS -more inst/result.log | tee -a $TEST_RESULTS +cat inst/result.log | tee -a $TEST_RESULTS echo Redacted report for 9a02571e8f00: cat inst/reports/report_9a02571e8f00_*.md | redact | tee -a $TEST_RESULTS @@ -29,38 +29,44 @@ echo %%%%%%%%%%%%%%%%%%%%%% Telnet fail | tee -a $TEST_RESULTS docker rmi daqf/test_hold:latest # Check case of missing image cmd/run -s -k interfaces.faux.opts=telnet echo DAQ result code $? | tee -a $TEST_RESULTS -more inst/result.log | tee -a $TEST_RESULTS -cat inst/run-port-01/nodes/nmap01/activate.log -fgrep 'security.ports.nmap' inst/reports/report_9a02571e8f00_*.md | tee -a $TEST_RESULTS +cat inst/result.log | tee -a $TEST_RESULTS +cat inst/run-9a02571e8f00/nodes/nmap01/activate.log +fgrep 'security.nmap.ports' inst/reports/report_9a02571e8f00_*.md | tee -a $TEST_RESULTS DAQ_TARGETS=test_hold cmd/build # Except with a default MUD file that blocks the port. echo %%%%%%%%%%%%%%%%%%%%%% Default MUD | tee -a $TEST_RESULTS cmd/run -s interfaces.faux.opts=telnet device_specs=resources/device_specs/simple.json echo DAQ result code $? 
| tee -a $TEST_RESULTS -more inst/result.log | tee -a $TEST_RESULTS -fgrep 'security.ports.nmap' inst/reports/report_9a02571e8f00_*.md | tee -a $TEST_RESULTS -cat inst/run-port-01/nodes/nmap01/activate.log +cat inst/result.log | tee -a $TEST_RESULTS +fgrep 'security.nmap.ports' inst/reports/report_9a02571e8f00_*.md | tee -a $TEST_RESULTS +cat inst/run-9a02571e8f00/nodes/nmap01/activate.log echo %%%%%%%%%%%%%%%%%%%%%% External switch tests | tee -a $TEST_RESULTS cp config/system/ext.yaml local/system.yaml cmd/run -s cat inst/result.log | tee -a $TEST_RESULTS fgrep dp_id inst/faucet.yaml | tee -a $TEST_RESULTS -fgrep -i switch inst/run-port-02/nodes/ping02/activate.log | tee -a $TEST_RESULTS -cat -vet inst/run-port-02/nodes/ping02/activate.log -count=$(fgrep icmp_seq=5 inst/run-port-02/nodes/ping02/activate.log | wc -l) +fgrep -i switch inst/run-9a02571e8f00/nodes/ping*/activate.log | sed -e "s/\r//g" | tee -a $TEST_RESULTS +cat -vet inst/run-9a02571e8f00/nodes/ping*/activate.log +count=$(fgrep icmp_seq=5 inst/run-9a02571e8f00/nodes/ping*/activate.log | wc -l) echo switch ping $count | tee -a $TEST_RESULTS +echo %%%%%%%%%%%%%%%%%%%%%% Alt switch tests | tee -a $TEST_RESULTS +cp config/system/alt.yaml local/system.yaml +# TODO: Replace this with proper test once VLAN-triggers are added. 
+timeout 1200s cmd/run -s +fgrep '9a:02:57:1e:8f:00 learned on vid 1001' inst/cmdrun.log | head -1 | redact | tee -a $TEST_RESULTS +cat inst/result.log | tee -a $TEST_RESULTS # ping test should fail since there are no dhcp packets captured echo %%%%%%%%%%%%%%%%%%%%%% Mud profile tests | tee -a $TEST_RESULTS rm -f local/system.yaml cp config/system/muddy.conf local/system.conf -device_traffic="tcpdump -en -r inst/run-port-01/scans/monitor.pcap port 47808" +device_traffic="tcpdump -en -r inst/run-9a02571e8f01/scans/monitor.pcap port 47808" device_bcast="$device_traffic and ether broadcast" device_ucast="$device_traffic and ether dst 9a:02:57:1e:8f:02" device_xcast="$device_traffic and ether dst 9a:02:57:1e:8f:03" -cntrlr_traffic="tcpdump -en -r inst/run-port-02/scans/monitor.pcap port 47808" +cntrlr_traffic="tcpdump -en -r inst/run-9a02571e8f02/scans/monitor.pcap port 47808" cntrlr_bcast="$cntrlr_traffic and ether broadcast" cntrlr_ucast="$cntrlr_traffic and ether dst 9a:02:57:1e:8f:01" cntrlr_xcast="$cntrlr_traffic and ether dst 9a:02:57:1e:8f:03" @@ -78,7 +84,7 @@ function test_mud { ucast=$($cntrlr_ucast | wc -l) xcast=$($cntrlr_xcast | wc -l) echo cntrlr $type $(($bcast > 2)) $(($ucast > 2)) $(($xcast > 0)) | tee -a $TEST_RESULTS - more inst/run-port-*/nodes/*/activate.log | cat + more inst/run-*/nodes/*/activate.log | cat } test_mud open diff --git a/testing/test_dhcp.out b/testing/test_dhcp.out index 6d2dc2e822..06ba950d71 100644 --- a/testing/test_dhcp.out +++ b/testing/test_dhcp.out @@ -1,12 +1,15 @@ Running testing/test_dhcp.sh DHCP Tests -01: [] -02: ['02:ipaddr:TimeoutError'] -03: [] -04: [] +9a02571e8f01: [] +9a02571e8f02: ['9a02571e8f02:acquire:TimeoutError'] +9a02571e8f03: [] +9a02571e8f04: [] +9a02571e8f05: [] Device 1 ip triggers: 1 0 Device 2 ip triggers: 0 0 Device 3 long ip triggers: 1 Device 4 ip triggers: 1 -Number of ips: 2 +Device 4 subnet 1 ip: 1 subnet 2 ip: 1 subnet 3 ip: 2 +Device 5 ip triggers: 1 +Device 5 num of ips: 2 Done with tests 
diff --git a/testing/test_dhcp.sh b/testing/test_dhcp.sh index c979de2a98..30f15ec103 100755 --- a/testing/test_dhcp.sh +++ b/testing/test_dhcp.sh @@ -7,11 +7,12 @@ echo DHCP Tests >> $TEST_RESULTS cat < local/system.conf source config/system/default.yaml site_description="Multi-Device Configuration" -switch_setup.uplink_port=5 +switch_setup.uplink_port=6 interfaces.faux-1.opts= interfaces.faux-2.opts=xdhcp interfaces.faux-3.opts= interfaces.faux-4.opts= +interfaces.faux-5.opts= monitor_scan_sec=1 EOF @@ -29,14 +30,33 @@ cat < local/site/mac_addrs/$intf_mac/module_config.json } EOF +# Multi subnet multi subnet tests intf_mac="9a02571e8f04" mkdir -p local/site/mac_addrs/$intf_mac cat < local/site/mac_addrs/$intf_mac/module_config.json { "modules": { "ipaddr": { - "timeout_sec": 320, - "dhcp_mode": "ip_change" + "enabled": true, + "port_flap_timeout_sec": 20, + "dhcp_ranges": [{"start": "192.168.0.1", "end": "192.168.255.254", "prefix_length": 16}, + {"start": "10.255.255.1", "end": "10.255.255.255", "prefix_length": 24}, + {"start": "172.16.0.1", "end": "172.16.0.200", "prefix_length": 24}] + } + } +} +EOF + +# ip change test +intf_mac="9a02571e8f05" +mkdir -p local/site/mac_addrs/$intf_mac +cat < local/site/mac_addrs/$intf_mac/module_config.json +{ + "modules": { + "ipaddr": { + "enabled": true, + "port_flap_timeout_sec": 20, + "dhcp_ranges": [] } } } @@ -47,19 +67,25 @@ cmd/run -b -s settle_sec=0 dhcp_lease_time=120s cat inst/result.log | sort | tee -a $TEST_RESULTS -for iface in $(seq 1 4); do +for iface in $(seq 1 5); do intf_mac=9a:02:57:1e:8f:0$iface - ip_file=inst/run-port-0$iface/scans/ip_triggers.txt + ip_file=inst/run-9a02571e8f0$iface/scans/ip_triggers.txt cat $ip_file ip_triggers=$(fgrep done $ip_file | wc -l) long_triggers=$(fgrep long $ip_file | wc -l) num_ips=$(cat $ip_file | cut -d ' ' -f 1 | sort | uniq | wc -l) echo Found $ip_triggers ip triggers and $long_triggers long ip responses. 
- if [ $iface == 4 ]; then - echo "Device $iface ip triggers: $(((ip_triggers + long_triggers) >= 2))" | tee -a $TEST_RESULTS - echo "Number of ips: $num_ips" | tee -a $TEST_RESULTS + if [ $iface == 5 ]; then + echo "Device $iface ip triggers: $(((ip_triggers + long_triggers) >= 3))" | tee -a $TEST_RESULTS + echo "Device $iface num of ips: $num_ips" | tee -a $TEST_RESULTS + elif [ $iface == 4 ]; then + echo "Device $iface ip triggers: $(((ip_triggers + long_triggers) >= 4))" | tee -a $TEST_RESULTS + subnet_ip=$(fgrep "ip notification 192.168" inst/run-*/nodes/ipaddr*/activate.log | wc -l) + subnet2_ip=$(fgrep "ip notification 10.255.255" inst/run-*/nodes/ipaddr*/activate.log | wc -l) + subnet3_ip=$(fgrep "ip notification 172.16.0" inst/run-*/nodes/ipaddr*/activate.log | wc -l) + echo "Device $iface subnet 1 ip: $subnet_ip subnet 2 ip: $subnet2_ip subnet 3 ip: $subnet3_ip" | tee -a $TEST_RESULTS elif [ $iface == 3 ]; then - echo "Device $iface long ip triggers: $((long_triggers > 0))" | tee -a $TEST_RESULTS + echo "Device $iface long ip triggers: $((long_triggers > 0))" | tee -a $TEST_RESULTS else echo "Device $iface ip triggers: $((ip_triggers > 0)) $((long_triggers > 0))" | tee -a $TEST_RESULTS fi diff --git a/testing/test_many.out b/testing/test_many.out index fdef3cc356..7e9ff645b2 100644 --- a/testing/test_many.out +++ b/testing/test_many.out @@ -4,6 +4,10 @@ DAQ stress test Enough results: 1 Enough DHCP timeouts: 1 Enough static ips: 1 +Found NTP and DNS traffic for static ip devices: 1 1 +Enough ipaddr tests: 1 +Enough alternate subnet ips: 1 +Enough ipaddr timeouts: 1 Redacted soak diff No soak report diff Done with many diff --git a/testing/test_many.sh b/testing/test_many.sh index 2dabc2e0c0..e0699857e1 100755 --- a/testing/test_many.sh +++ b/testing/test_many.sh @@ -3,11 +3,16 @@ source testing/test_preamble.sh # num of devices need to less than 10 -NUM_DEVICES=8 +NUM_DEVICES=9 RUN_LIMIT=20 # num of timeout devices need to be less or equal to num dhcp 
devices NUM_NO_DHCP_DEVICES=4 NUM_TIMEOUT_DEVICES=2 + +# Extended DHCP tests +NUM_IPADDR_TEST_DEVICES=2 +NUM_IPADDR_TEST_TIMEOUT_DEVICES=1 + echo Many Tests >> $TEST_RESULTS echo source config/system/default.yaml > local/system.conf @@ -15,14 +20,15 @@ echo source config/system/default.yaml > local/system.conf echo monitor_scan_sec=5 >> local/system.conf echo switch_setup.uplink_port=$((NUM_DEVICES+1)) >> local/system.conf echo gcp_cred=$gcp_cred >> local/system.conf +echo dhcp_lease_time=120s >> local/system.conf for iface in $(seq 1 $NUM_DEVICES); do xdhcp="" + intf_mac="9a02571e8f0$iface" + mkdir -p local/site/mac_addrs/$intf_mac if [[ $iface -le $NUM_NO_DHCP_DEVICES ]]; then - ip="10.20.0.$((iface+5))" - intf_mac="9a02571e8f0$iface" - xdhcp="xdhcp=$ip" - mkdir -p local/site/mac_addrs/$intf_mac + ip="10.20.255.$((iface+5))" + xdhcp="xdhcp=$ip opendns ntp_fail" if [[ $iface -gt $NUM_TIMEOUT_DEVICES ]]; then #Install site specific configs for xdhcp ips cat < local/site/mac_addrs/$intf_mac/module_config.json @@ -39,6 +45,31 @@ EOF } } } +EOF + fi + elif [[ $iface -le $((NUM_NO_DHCP_DEVICES + NUM_IPADDR_TEST_DEVICES)) ]]; then + if [[ $iface -le $((NUM_NO_DHCP_DEVICES + NUM_IPADDR_TEST_TIMEOUT_DEVICES)) ]]; then + cat < local/site/mac_addrs/$intf_mac/module_config.json + { + "modules": { + "ipaddr": { + "enabled": true, + "port_flap_timeout_sec": 20, + "timeout_sec": 1 + } + } + } +EOF + else + cat < local/site/mac_addrs/$intf_mac/module_config.json + { + "modules": { + "ipaddr": { + "enabled": true, + "port_flap_timeout_sec": 20 + } + } + } EOF fi fi @@ -49,28 +80,41 @@ echo DAQ stress test | tee -a $TEST_RESULTS start_time=`date -u -Isec` cmd/run -b run_limit=$RUN_LIMIT settle_sec=0 dhcp_lease_time=120s -end_time=`date -u -Isec` +end_time=`date -u -Isec --date="+5min"` # Adding additional time to account for slower cloud function calls for updating timestamp. 
cat inst/result.log results=$(fgrep [] inst/result.log | wc -l) timeouts=$(fgrep "ipaddr:TimeoutError" inst/result.log | wc -l) +ipaddr_timeouts=$(fgrep "ipaddr:TimeoutError" inst/result.log | wc -l) +ip_notifications=$(fgrep "ip notification" inst/run-*/nodes/ipaddr*/activate.log | wc -l) +alternate_subnet_ip=$(fgrep "ip notification 192.168" inst/run-*/nodes/ipaddr*/activate.log | wc -l) -cat inst/run-port-*/scans/ip_triggers.txt -static_ips=$(fgrep nope inst/run-port-*/scans/ip_triggers.txt | wc -l) +cat inst/run-*/scans/ip_triggers.txt +static_ips=$(fgrep nope inst/run-*/scans/ip_triggers.txt | wc -l) +ntp_traffic=$(fgrep "RESULT fail base.startup.ntp" inst/run-*/nodes/ping*/tmp/result_lines.txt | wc -l) +dns_traffic=$(fgrep "RESULT fail base.startup.dns" inst/run-*/nodes/ping*/tmp/result_lines.txt | wc -l) -more inst/run-port-*/nodes/ping*/activate.log | cat +more inst/run-*/nodes/ping*/activate.log | cat +more inst/run-*/nodes/ipaddr*/activate.log | cat echo Found $results clean runs, $timeouts timeouts, and $static_ips static_ips. +echo ipaddr had $ip_notifications notifications and $ipaddr_timeouts timeouts. # This is broken -- should have many more results available! -echo Enough results: $((results >= 6*RUN_LIMIT/10)) | tee -a $TEST_RESULTS +echo Enough results: $((results >= 5*RUN_LIMIT/10)) | tee -a $TEST_RESULTS # $timeouts should strictly equal $NUM_TIMEOUT_DEVICES when dhcp step is fixed. 
echo Enough DHCP timeouts: $((timeouts >= NUM_TIMEOUT_DEVICES)) | tee -a $TEST_RESULTS echo Enough static ips: $((static_ips >= (NUM_NO_DHCP_DEVICES - NUM_TIMEOUT_DEVICES))) | tee -a $TEST_RESULTS +echo Found NTP and DNS traffic for static ip devices: $((ntp_traffic > 0)) $((dns_traffic > 0)) | tee -a $TEST_RESULTS + +echo Enough ipaddr tests: $((ip_notifications >= (NUM_IPADDR_TEST_DEVICES - NUM_IPADDR_TEST_TIMEOUT_DEVICES) * 2 )) | tee -a $TEST_RESULTS +echo Enough alternate subnet ips: $((alternate_subnet_ip >= (NUM_IPADDR_TEST_DEVICES - NUM_IPADDR_TEST_TIMEOUT_DEVICES) )) | tee -a $TEST_RESULTS +echo Enough ipaddr timeouts: $((ipaddr_timeouts >= NUM_IPADDR_TEST_TIMEOUT_DEVICES)) | tee -a $TEST_RESULTS -echo bin/combine_reports device=9a:02:57:1e:8f:05 from_time=$start_time to_time=$end_time count=2 -bin/combine_reports device=9a:02:57:1e:8f:05 from_time=$start_time to_time=$end_time count=2 +combine_cmd="bin/combine_reports device=9a:02:57:1e:8f:05 from_time=$start_time to_time=$end_time count=2" +echo $combine_cmd +$combine_cmd cat inst/reports/combo_*.md @@ -82,9 +126,15 @@ echo Redacted soak diff | tee -a $TEST_RESULTS if [ -f "$gcp_cred" ]; then mv inst/reports/combo_*.md out/report_local.md - echo Pulling reports from gcp... 
- bin/combine_reports device=9a:02:57:1e:8f:05 from_time=$start_time to_time=$end_time \ - count=2 from_gcp=true + echo '******Local reports******' + ls -l inst/reports/report_9a02571e8f05*.md + echo '*************************' + + daq_run_id=$(< inst/daq_run_id.txt) + echo Pulling reports from gcp for daq RUN id $daq_run_id + gcp_extras="daq_run_id=$daq_run_id from_gcp=true" + echo $combine_cmd $gcp_extras + $combine_cmd $gcp_extras echo GCP results diff | tee -a $GCP_RESULTS diff inst/reports/combo_*.md out/report_local.md | tee -a $GCP_RESULTS fi diff --git a/testing/test_modules.out b/testing/test_modules.out index ff067a6826..2e2dfadd4d 100644 --- a/testing/test_modules.out +++ b/testing/test_modules.out @@ -26,9 +26,21 @@ RESULT fail security.tls.v1_2.x509 Certificate is expired. RESULT fail security.tls.v1_3 Certificate could not be validated. RESULT fail security.tls.v1_3.x509 Certificate could not be validated. Testing nmap -RESULT pass security.ports.nmap Only allowed ports found open. +RESULT pass security.nmap.ports Only allowed ports found open. +RESULT pass security.nmap.http No running http servers have been found. Testing nmap bacnet -RESULT pass security.ports.nmap Only allowed ports found open. +RESULT pass security.nmap.ports Only allowed ports found open. +RESULT pass security.nmap.http No running http servers have been found. Testing nmap telnet -RESULT fail security.ports.nmap Some disallowed ports are open: 23 +RESULT fail security.nmap.ports Some disallowed ports are open: 23. +RESULT pass security.nmap.http No running http servers have been found. +Testing nmap ohttp +RESULT fail security.nmap.ports Some disallowed ports are open: 443,12345,54321. +RESULT fail security.nmap.http Some ports are running http servers: 12345,54321. 
+Testing ssh +RESULT skip security.ssh.version Device is not running an SSH server +Testing ssh ssh +RESULT pass security.ssh.version Device only supports SSHv2 +Testing ssh sshv1 +RESULT fail security.ssh.version Device supports SSHv1 Testing complete. diff --git a/testing/test_modules.sh b/testing/test_modules.sh index bc123230e1..711765a9e1 100755 --- a/testing/test_modules.sh +++ b/testing/test_modules.sh @@ -17,6 +17,10 @@ tls alt expiredtls nmap nmap bacnet nmap telnet +nmap ohttp +ssh +ssh ssh +ssh sshv1 EOF DAQ_TARGETS=aardvark,faux1,faux2 bin/docker_build force inline @@ -37,7 +41,5 @@ cat $TEST_LIST | while read module args; do fi done -testing/run_unit_tests.sh || exit 1 - echo echo Testing complete. | tee -a $TEST_RESULTS diff --git a/testing/test_preamble.sh b/testing/test_preamble.sh index 93ca247d89..f2028d089d 100644 --- a/testing/test_preamble.sh +++ b/testing/test_preamble.sh @@ -56,13 +56,20 @@ function redact { -e 's/[0-9]{4}-.*T.*Z/XXX/' \ -e 's/[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2} [A-Z]{3}/XXX/' \ -e 's/[a-zA-Z]{3} [a-zA-Z]{3}\s+[0-9]{1,2} [0-9]{1,2}:[0-9]{1,2}:[0-9]{1,2} [0-9]{4}/XXX/' \ + -e 's/[A-Za-z]{3} [0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}/XXX/' \ -e 's/[0-9]{4}-(0|1)[0-9]-(0|1|2|3)[0-9] [0-9]{2}:[0-9]{2}:[0-9]{2}(\+00:00)?/XXX/g' \ -e 's/[0-9]+\.[0-9]{2} seconds/XXX/' \ -e 's/0\.[0-9]+s latency/XXX/' \ -e 's/open\|filtered/closed/' \ -e 's/DAQ version.*//' \ + -e 's/Seq Index.*//' \ + -e 's/Ignored State.*//' \ -e 's/Not shown: .* ports//' \ + -e 's/[ \t]*$//' \ + -e 's/\t/ /g' \ -e 's/([0-9]{1,3}\.){3}[0-9]{1,3}/X.X.X.X/' + + # NOTE: Whitespace redaction (\t) is because many IDEs automatically strip/convert tabs to spaces. 
} function monitor_log { diff --git a/testing/test_topo.out b/testing/test_topo.out index da2f1ed03b..a2d2414c41 100644 --- a/testing/test_topo.out +++ b/testing/test_topo.out @@ -2,7 +2,6 @@ Running testing/test_topo.sh Topology Tests mudacl tests Mudacl exit code 0 -Validator exit code 0 Running open 3 check_socket 01 02 1 1 check_socket 02 01 1 1 diff --git a/testing/test_topo.sh b/testing/test_topo.sh index 82da4d6000..d802e73483 100755 --- a/testing/test_topo.sh +++ b/testing/test_topo.sh @@ -4,14 +4,9 @@ source testing/test_preamble.sh echo Topology Tests >> $TEST_RESULTS -# Test the mudacl config and the test_schema to make sure they -# make sense for tests that use them - echo mudacl tests | tee -a $TEST_RESULTS mudacl/bin/test.sh echo Mudacl exit code $? | tee -a $TEST_RESULTS -validator/bin/test_schema -echo Validator exit code $? | tee -a $TEST_RESULTS bacnet_file=/tmp/bacnet_result.txt socket_file=/tmp/socket_result.txt diff --git a/testing/test_utils.sh b/testing/test_utils.sh index 26d0e1331d..9d06feea09 100644 --- a/testing/test_utils.sh +++ b/testing/test_utils.sh @@ -120,9 +120,9 @@ function run_test { test -d $conf_dir || (mkdir -p $conf_dir; echo sleep 30 >> $cmd_file) done cmd/run -s - cat inst/run-port-*/nodes/ping*${socket_file} | tee -a $TEST_RESULTS - cat inst/run-port-*/nodes/ping*${bacnet_file} | tee -a $TEST_RESULTS - more inst/run-port-*/nodes/ping*/activate.log | cat + cat inst/run-*/nodes/ping*${socket_file} | tee -a $TEST_RESULTS + cat inst/run-*/nodes/ping*${bacnet_file} | tee -a $TEST_RESULTS + more inst/run-*/nodes/ping*/activate.log | cat more inst/gw0*/nodes/gw0*/activate.log | cat more inst/gw0*/dhcp_monitor.txt | cat } diff --git a/testing/unit/mock/test_combine_reports_from_date_range/report_1.json b/testing/unit/mock/test_combine_reports/report_1.json similarity index 100% rename from testing/unit/mock/test_combine_reports_from_date_range/report_1.json rename to testing/unit/mock/test_combine_reports/report_1.json diff --git 
a/testing/unit/mock/test_combine_reports_from_date_range/report_2.json b/testing/unit/mock/test_combine_reports/report_2.json similarity index 100% rename from testing/unit/mock/test_combine_reports_from_date_range/report_2.json rename to testing/unit/mock/test_combine_reports/report_2.json diff --git a/testing/unit/test_combine_reports_from_date_range.py b/testing/unit/test_combine_reports.py similarity index 91% rename from testing/unit/test_combine_reports_from_date_range.py rename to testing/unit/test_combine_reports.py index 4f62f6a872..d7b944de40 100644 --- a/testing/unit/test_combine_reports_from_date_range.py +++ b/testing/unit/test_combine_reports.py @@ -1,4 +1,4 @@ -"""Unit tests for combine_reports_from_date_range""" +"""Unit tests for combine_reports""" import unittest import sys @@ -8,8 +8,8 @@ from unittest.mock import MagicMock, mock_open, patch import daq -import combine_reports_from_date_range -from combine_reports_from_date_range import _render_results, main, os +import combine_reports +from combine_reports import _render_results, main, os from daq.report import MdTable @@ -18,7 +18,7 @@ class TestCombineReportsFromDateRange(unittest.TestCase): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.script_path = os.path.dirname(os.path.realpath(__file__)) - mock_path = os.path.join(self.script_path, 'mock', 'test_combine_reports_from_date_range') + mock_path = os.path.join(self.script_path, 'mock', 'test_combine_reports') self.mocks = {'empty': ''} for filename in os.listdir(mock_path): with open(os.path.join(mock_path, filename)) as f: @@ -93,7 +93,7 @@ def custom_open(report, mode=None): raise Exception(report + ' is not expected') return mock_open(read_data=self.mocks[mock_name]).return_value - combine_reports_from_date_range._render_results = MagicMock(return_value="fake results") + combine_reports._render_results = MagicMock(return_value="fake results") with patch("builtins.open", new=custom_open): main('device1', 
start=datetime.fromisoformat('2020-05-29')) expected_results = { @@ -110,8 +110,8 @@ def custom_open(report, mode=None): 'report_1_timestamp': True, 'report_2_timestamp': True }} - combine_reports_from_date_range._render_results.assert_called() - call_args = combine_reports_from_date_range._render_results.call_args[0][0] + combine_reports._render_results.assert_called() + call_args = combine_reports._render_results.call_args[0][0] assert self._dict_compare(call_args, expected_results) diff --git a/testing/unit/test_runner.py b/testing/unit/test_runner.py index bdc0f813a1..70e285816b 100644 --- a/testing/unit/test_runner.py +++ b/testing/unit/test_runner.py @@ -43,16 +43,14 @@ def setUp(self): def test_reap_stale_ports(self): """Test port flap timeout config override""" self.runner.target_set_error = MagicMock() - self.runner._port_info = {1: PortInfo()} + device = self.runner._devices.new_device("0000000000", None) self.runner._reap_stale_ports() self.runner.target_set_error.assert_not_called() ConnectedHost.__init__ = MagicMock(return_value=None) host = ConnectedHost() host.test_name = "test_test" - port_info = PortInfo() - port_info.flapping_start = time.time() - 1 - port_info.host = host - self.runner._port_info = {1: port_info} + device.port.flapping_start = time.time() - 1 + device.host = host host.get_port_flap_timeout = MagicMock(return_value=10000) self.runner._reap_stale_ports() diff --git a/topology/alta-dev/faucet.yaml b/topology/alta-dev/faucet.yaml deleted file mode 100644 index 0dcd804b7e..0000000000 --- a/topology/alta-dev/faucet.yaml +++ /dev/null @@ -1,92 +0,0 @@ -dps: - us-mtv-900-t1sw2-0-1: - dp_id: 147058200621 - faucet_dp_mac: 0e:00:00:00:01:01 - hardware: GenericTFM - interfaces: - 9: - lldp_beacon: {enable: true} - lldp_peer_mac: 0e:00:00:00:02:01 - tagged_vlans: [171] - receive_lldp: true - 10: - lldp_beacon: {enable: true} - lldp_peer_mac: 0e:00:00:00:02:02 - tagged_vlans: [171] - receive_lldp: true - 28: - description: Juniper-Uplink-1 - 
lacp: 3 - lacp_passthrough: [9, 10] - lldp_beacon: {enable: true} - native_vlan: 171 - receive_lldp: true - lldp_beacon: {max_per_interval: 5, send_interval: 5} - use_hard_timeout: true - us-mtv-900-t1sw2-0-2: - dp_id: 147058200561 - faucet_dp_mac: 0e:00:00:00:01:02 - hardware: GenericTFM - interfaces: - 9: - lldp_beacon: {enable: true} - lldp_peer_mac: 0e:00:00:00:02:01 - tagged_vlans: [171] - receive_lldp: true - 10: - lldp_beacon: {enable: true} - lldp_peer_mac: 0e:00:00:00:02:02 - tagged_vlans: [171] - receive_lldp: true - 28: - description: Juniper-Uplink-2 - lacp: 3 - lacp_passthrough: [9, 10] - lldp_beacon: {enable: true} - native_vlan: 171 - receive_lldp: true - lldp_beacon: {max_per_interval: 5, send_interval: 5} - use_hard_timeout: true - us-mtv-900-t2sw2-0-1: - dp_id: 246406200719452 - faucet_dp_mac: 0e:00:00:00:02:01 - hardware: Allied-Telesis - interface_ranges: - 1-46: {description: IoT Host, native_vlan: 171} - interfaces: - 47: - lldp_beacon: {enable: true} - lldp_failover: 48 - loop_protect_external: true - tagged_vlans: [171] - receive_lldp: true - 48: - lldp_beacon: {enable: true} - loop_protect_external: true - tagged_vlans: [171] - receive_lldp: true - lldp_beacon: {max_per_interval: 5, send_interval: 5} - use_hard_timeout: true - us-mtv-900-t2sw2-0-2: - dp_id: 246406200719346 - faucet_dp_mac: 0e:00:00:00:02:02 - hardware: Allied-Telesis - interface_ranges: - 1-46: {description: IoT Host, native_vlan: 171} - interfaces: - 47: - lldp_beacon: {enable: true} - loop_protect_external: true - tagged_vlans: [171] - receive_lldp: true - 48: - lldp_beacon: {enable: true} - lldp_failover: 47 - loop_protect_external: true - tagged_vlans: [171] - receive_lldp: true - lldp_beacon: {max_per_interval: 5, send_interval: 5} - use_hard_timeout: true -version: 2 -vlans: - 171: {description: BOS-IOT} diff --git a/topology/alta-dev/gauge.yaml b/topology/alta-dev/gauge.yaml deleted file mode 100644 index bf0f0e0f1e..0000000000 --- a/topology/alta-dev/gauge.yaml +++ 
/dev/null @@ -1,14 +0,0 @@ -dbs: - prometheus: {prometheus_addr: 0.0.0.0, prometheus_port: 9303, type: prometheus} -faucet_configs: [/etc/faucet/faucet.yaml] -watchers: - flow_table: - db: prometheus - dps: [us-mtv-900-t1sw2-0-1, us-mtv-900-t2sw2-0-1, us-mtv-900-t1sw2-0-2, us-mtv-900-t2sw2-0-2] - interval: 10 - type: flow_table - port_stats: - db: prometheus - dps: [us-mtv-900-t1sw2-0-1, us-mtv-900-t2sw2-0-1, us-mtv-900-t1sw2-0-2, us-mtv-900-t2sw2-0-2] - interval: 10 - type: port_stats diff --git a/topology/normalize.sh b/topology/normalize.sh deleted file mode 100755 index 71fb4da312..0000000000 --- a/topology/normalize.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash -e - -ROOT=$(dirname $0)/.. -if [ ! -d "$1" ]; then - echo $0 [topology dir] - false -fi - -TDIR=$(realpath $1) - -$ROOT/bin/generate_topology raw_topo=$TDIR topo_dir=$TDIR - diff --git a/topology/setup.json b/topology/setup.json deleted file mode 100644 index 7206b7cfdf..0000000000 --- a/topology/setup.json +++ /dev/null @@ -1,69 +0,0 @@ -{ - 'faucet_yaml': '/etc/faucet/faucet.yaml', - 'faucet_dp_mac_format': '0e:00:00:00:%02x:%02x', - 'lacp_timeout': 5, - 'default_hardware': 'GenericTFM', - 'egress_description': 'egress', - 'combinatorial_port_flood': true, - 'naming': { - 'tier1': '-t1sw', - 'tier2': '-t2sw', - 'control': '-ctr' - }, - 'device_description': 'IoT Device', - 'vlan': { - 'description': 'Faucet IoT', - 'name': 'Faucet_IoT' - }, - 'gauge': { - 'db_type': 'prometheus', - 'interval': 10 - }, - 'db_types': { - 'prometheus': { - 'prometheus_addr': '0.0.0.0', - 'prometheus_port': 9303, - 'type': 'prometheus' - } - }, - 'receive_lldp': true, - 'switch_lldp_beacon': { - 'max_per_interval': 5, - 'send_interval': 5 - }, - 'port_lldp_beacon': { - 'enable': true - }, - 'loop_protect_external': true, - "pre_acls": [ - { - "description": "ICMP Allow", - "nw_proto": 1 - }, - { - "description": "ARP Allow", - "dl_type": "0x0806" - }, - { - "description": "DHCP Allow", - "udp_src": 68, - "udp_dst": 67 - 
}, - { - "description": "DNS Allow", - "udp_dst": 53 - }, - { - "description": "DHCP Broadcast", - "dl_dst": "ff:ff:ff:ff:ff:ff", - "udp_src": 68, - "udp_dst": 67 - } - ], - "post_acls": [ - { - "description": "Default Deny", - "allow": false - } - ] -} diff --git a/usi/.gitignore b/usi/.gitignore new file mode 100644 index 0000000000..4b12f8ab84 --- /dev/null +++ b/usi/.gitignore @@ -0,0 +1,3 @@ +tmp/* +target/* +.idea/* diff --git a/usi/Dockerfile.usi b/usi/Dockerfile.usi new file mode 100644 index 0000000000..4fb6601310 --- /dev/null +++ b/usi/Dockerfile.usi @@ -0,0 +1,11 @@ +FROM daqf/aardvark:latest + +# Do this alone first so it can be re-used by other build files. + +RUN $AG update && $AG install openjdk-11-jdk git maven + +COPY usi/ usi/ + +RUN cd usi && mvn clean compile assembly:single + +CMD ["./usi/start"] diff --git a/usi/build.conf b/usi/build.conf new file mode 100644 index 0000000000..d469dd0503 --- /dev/null +++ b/usi/build.conf @@ -0,0 +1,2 @@ +build usi +add usi diff --git a/usi/pom.xml b/usi/pom.xml new file mode 100644 index 0000000000..0b3ff43238 --- /dev/null +++ b/usi/pom.xml @@ -0,0 +1,146 @@ + + 4.0.0 + com.redstone + usi + 0.0.1 +jar + usi + + UTF-8 + 1.8 + 1.8 + + + + + + io.grpc + grpc-bom + 1.31.1 + pom + import + + + + + + + junit + junit + 4.13 + test + + + commons-net + commons-net + 3.7 + + + io.grpc + grpc-netty-shaded + 1.31.1 + + + io.grpc + grpc-protobuf + 1.31.1 + + + io.grpc + grpc-stub + 1.31.1 + + + org.apache.tomcat + annotations-api + 6.0.53 + provided + + + org.junit.jupiter + junit-jupiter + 5.6.2 + compile + + + + + + kr.motd.maven + os-maven-plugin + 1.6.2 + + + + + + org.xolstice.maven.plugins + protobuf-maven-plugin + 0.6.1 + + com.google.protobuf:protoc:3.12.0:exe:${os.detected.classifier} + grpc-java + io.grpc:protoc-gen-grpc-java:1.30.0:exe:${os.detected.classifier} + + ${basedir}/src/main/proto + + + + + + compile + compile-custom + + + + + + maven-assembly-plugin + + + + daq.usi.UsiServer + + + + 
jar-with-dependencies + + + + + maven-clean-plugin + 3.1.0 + + + + maven-resources-plugin + 3.2.0 + + + maven-surefire-plugin + 2.22.2 + + + maven-jar-plugin + 3.2.0 + + + maven-install-plugin + 2.5.2 + + + maven-deploy-plugin + 2.8.2 + + + org.apache.maven.plugins + maven-compiler-plugin + 3.8.1 + + 9 + 9 + + + + + \ No newline at end of file diff --git a/usi/src/main/java/daq/usi/BaseSwitchController.java b/usi/src/main/java/daq/usi/BaseSwitchController.java new file mode 100644 index 0000000000..dc2f736d37 --- /dev/null +++ b/usi/src/main/java/daq/usi/BaseSwitchController.java @@ -0,0 +1,181 @@ +package daq.usi; + +import java.util.HashMap; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + + +public abstract class BaseSwitchController implements SwitchController { + /** + * Terminal Prompt ends with '#' when enabled, '>' when not enabled. + */ + public static final String CONSOLE_PROMPT_ENDING_ENABLED = "#"; + public static final String CONSOLE_PROMPT_ENDING_LOGIN = ">"; + public static final int TELNET_PORT = 23; + + // Define Common Variables Required for All Switch Interrogators + protected SwitchTelnetClientSocket telnetClientSocket; + protected Thread telnetClientSocketThread; + protected String remoteIpAddress; + protected boolean debug; + protected String username; + protected String password; + protected boolean userAuthorised = false; + protected boolean userEnabled = false; + protected String hostname = null; + protected boolean commandPending = false; + + public BaseSwitchController(String remoteIpAddress, String username, + String password) { + this(remoteIpAddress, username, password, false); + } + + /** + * Abstract Switch controller. 
Override this class for switch specific implementation + * + * @param remoteIpAddress switch ip address + * @param username switch username + * @param password switch password + * @param debug for verbose logging + */ + public BaseSwitchController( + String remoteIpAddress, String username, String password, boolean debug) { + this.remoteIpAddress = remoteIpAddress; + this.username = username; + this.password = password; + this.debug = debug; + telnetClientSocket = + new SwitchTelnetClientSocket(remoteIpAddress, TELNET_PORT, this, debug); + } + + /** + * Map a simple table containing a header and 1 row of data to a hashmap + * This method will also attempt to correct for mis-aligned tabular data as well as empty + * columns values. + * + * @param rawPacket Raw table response from a switch command + * @param colNames Array containing the names of the columns in the response + * @param mapNames Array containing names key names to map values to + * @return A HashMap containing the values mapped to the key names provided in the mapNames array + */ + protected static HashMap mapSimpleTable( + String rawPacket, String[] colNames, String[] mapNames) { + HashMap colMap = new HashMap<>(); + String[] lines = rawPacket.split("\n"); + int headerLine = 0; + if (lines.length >= headerLine + 2) { + String header = lines[headerLine].trim(); + String values = lines[headerLine + 1].trim(); + int lastSectionEnd = 0; + for (int i = 0; i < colNames.length; i++) { + int secStart = lastSectionEnd; + int secEnd; + if ((i + 1) >= colNames.length) { + // Resolving last column + secEnd = values.length(); + } else { + // Tabular data is not always reported in perfectly alignment, we need to calculate the + // correct values based off of the sections in between white spaces + int firstWhiteSpace = + getFirstWhiteSpace(values.substring(lastSectionEnd)) + lastSectionEnd; + // Wrong table header line + if (firstWhiteSpace < 0) { + headerLine++; + break; + } + int lastWhiteSpace = + 
getIndexOfNonWhitespaceAfterWhitespace(values.substring(firstWhiteSpace)) + + firstWhiteSpace; + int nextHeaderStart = header.indexOf(colNames[i + 1]); + if (nextHeaderStart >= firstWhiteSpace) { + secEnd = nextHeaderStart; + } else { + secEnd = Math.max(lastWhiteSpace, nextHeaderStart); + } + } + lastSectionEnd = secEnd; + // \u00A0 is non-breaking space which trim ignores. + String rawString = values.substring(secStart, secEnd) + .replace('\u00A0', ' ').trim(); + colMap.put(mapNames[i], rawString); + } + } + return colMap; + } + + private static int getFirstWhiteSpace(String string) { + char[] characters = string.toCharArray(); + for (int i = 0; i < string.length(); i++) { + if (Character.isWhitespace(characters[i])) { + return i; + } + } + return -1; + } + + private static int getIndexOfNonWhitespaceAfterWhitespace(String string) { + char[] characters = string.toCharArray(); + boolean lastWhitespace = false; + for (int i = 0; i < string.length(); i++) { + if (Character.isWhitespace(characters[i])) { + lastWhitespace = true; + } else if (lastWhitespace) { + return i; + } + } + return -1; + } + + protected boolean containsPrompt(String consoleData) { + // Prompts usually hostname# or hostname(config)# + Pattern r = Pattern.compile(hostname + "\\s*(\\(.+\\))?" + CONSOLE_PROMPT_ENDING_ENABLED, 'g'); + Matcher m = r.matcher(consoleData); + return m.find(); + } + + protected boolean promptReady(String consoleData) { + // Prompts usually hostname# or hostname(config)# + Pattern r = Pattern.compile(hostname + "\\s*(\\(.+\\))?" + CONSOLE_PROMPT_ENDING_ENABLED + "$"); + Matcher m = r.matcher(consoleData); + return m.find(); + } + + /** + * Receive the raw data packet from the telnet connection and process accordingly. 
+ * + * @param consoleData Most recent data read from the telnet socket buffer + */ + public void receiveData(String consoleData) { + if (debug) { + System.out.println( + java.time.LocalTime.now() + " receivedData:\t" + consoleData); + } + if (consoleData != null) { + try { + consoleData = consoleData.trim(); + if (!userAuthorised) { + handleLoginMessage(consoleData); + } else if (!userEnabled) { + handleEnableMessage(consoleData); + } else { + parseData(consoleData); + } + } catch (Exception e) { + telnetClientSocket.disposeConnection(); + e.printStackTrace(); + } + } + } + + protected abstract void parseData(String consoleData) throws Exception; + + protected abstract void handleLoginMessage(String consoleData) throws Exception; + + protected abstract void handleEnableMessage(String consoleData) throws Exception; + + @Override + public void start() { + telnetClientSocketThread = new Thread(telnetClientSocket); + telnetClientSocketThread.start(); + } +} diff --git a/usi/src/main/java/daq/usi/ResponseHandler.java b/usi/src/main/java/daq/usi/ResponseHandler.java new file mode 100644 index 0000000000..4fd96af577 --- /dev/null +++ b/usi/src/main/java/daq/usi/ResponseHandler.java @@ -0,0 +1,5 @@ +package daq.usi; + +public interface ResponseHandler { + void receiveData(T data) throws Exception; +} diff --git a/usi/src/main/java/daq/usi/SwitchController.java b/usi/src/main/java/daq/usi/SwitchController.java new file mode 100644 index 0000000000..82ae4ce663 --- /dev/null +++ b/usi/src/main/java/daq/usi/SwitchController.java @@ -0,0 +1,21 @@ +package daq.usi; + +import grpc.InterfaceResponse; +import grpc.PowerResponse; +import grpc.SwitchActionResponse; + +public interface SwitchController { + + void getPower(int devicePort, ResponseHandler handler) throws Exception; + + void getInterface(int devicePort, ResponseHandler handler) + throws Exception; + + void connect(int devicePort, ResponseHandler handler) + throws Exception; + + void disconnect(int devicePort, 
ResponseHandler handler) + throws Exception; + + void start(); +} diff --git a/subset/switches/src/main/java/switchtest/SwitchTelnetClientSocket.java b/usi/src/main/java/daq/usi/SwitchTelnetClientSocket.java similarity index 64% rename from subset/switches/src/main/java/switchtest/SwitchTelnetClientSocket.java rename to usi/src/main/java/daq/usi/SwitchTelnetClientSocket.java index 5ba215537a..0530f44022 100644 --- a/subset/switches/src/main/java/switchtest/SwitchTelnetClientSocket.java +++ b/usi/src/main/java/daq/usi/SwitchTelnetClientSocket.java @@ -1,21 +1,4 @@ -package switchtest; - -/* - * Licensed to the Apache Software Foundation (ASF) under one or more - * contributor license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright ownership. - * The ASF licenses this file to You under the Apache License, Version 2.0 - * (the "License"); you may not use this file except in compliance with - * the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ +package daq.usi; import java.io.IOException; import java.io.InputStream; @@ -31,9 +14,15 @@ import org.apache.commons.net.telnet.TelnetNotificationHandler; import org.apache.commons.net.telnet.TerminalTypeOptionHandler; -public abstract class SwitchTelnetClientSocket implements TelnetNotificationHandler, Runnable { - protected TelnetClient telnetClient = null; - protected SwitchInterrogator interrogator; +public class SwitchTelnetClientSocket implements TelnetNotificationHandler, Runnable { + public static String MORE_INDICATOR = "--More--"; + + protected static final int SLEEP_MS = 100; + // Rx empty space timeout before sending \n + protected static final int MAX_EMPTY_WAIT_COUNT = 70; + + protected TelnetClient telnetClient; + protected BaseSwitchController interrogator; protected String remoteIpAddress = ""; protected int remotePort = 23; @@ -46,10 +35,17 @@ public abstract class SwitchTelnetClientSocket implements TelnetNotificationHand protected Thread readerThread; protected Thread gatherThread; - protected boolean debug = false; + protected boolean debug; + /** + * Telnet Client. + * @param remoteIpAddress switch ip address + * @param remotePort telent port + * @param interrogator switch specific switch controller + * @param debug For more verbose output. 
+ */ public SwitchTelnetClientSocket( - String remoteIpAddress, int remotePort, SwitchInterrogator interrogator, boolean debug) { + String remoteIpAddress, int remotePort, BaseSwitchController interrogator, boolean debug) { this.remoteIpAddress = remoteIpAddress; this.remotePort = remotePort; this.interrogator = interrogator; @@ -71,7 +67,7 @@ protected void connectTelnetSocket() { attempts++; try { - Thread.sleep(100); + Thread.sleep(SLEEP_MS); } catch (InterruptedException e) { System.err.println("Exception while connecting:" + e.getMessage()); } @@ -101,18 +97,68 @@ public void run() { outputStream = telnetClient.getOutputStream(); } - protected abstract void gatherData(); + protected void gatherData() { + StringBuilder rxData = new StringBuilder(); + + int rxQueueCount = 0; + + while (telnetClient.isConnected()) { + try { + if (rxQueue.isEmpty()) { + Thread.sleep(SLEEP_MS); + rxQueueCount++; + if (!interrogator.commandPending && rxQueueCount > MAX_EMPTY_WAIT_COUNT) { + if (debug) { + System.out.println("rxQueue Empty. 
Sending new line."); + } + rxQueueCount = 0; + writeData("\n"); + } + continue; + } + rxQueueCount = 0; + while (rxQueue.peek().trim() == "") { + rxQueue.poll(); + } + String rxTemp = rxQueue.poll(); + if (rxTemp.indexOf(MORE_INDICATOR) > 0) { + writeData("\n"); + if (debug) { + System.out.println("more position:" + rxTemp.indexOf(MORE_INDICATOR)); + System.out.println("Data: " + rxTemp); + } + rxTemp = rxTemp.replace(MORE_INDICATOR, ""); + rxData.append(rxTemp); + } else if ((interrogator.userAuthorised && interrogator.userEnabled) + && !interrogator.promptReady((rxData.toString() + rxTemp).trim())) { + rxData.append(rxTemp); + if (debug) { + System.out.println("Waiting for more data till prompt ready: "); + System.out.println(rxData.toString().trim()); + } + } else { + rxQueueCount = 0; + rxData.append(rxTemp); + String rxGathered = rxData.toString().trim(); + rxData = new StringBuilder(); + interrogator.receiveData(rxGathered); + } + } catch (InterruptedException e) { + System.err.println("InterruptedException gatherData:" + e.getMessage()); + } + } + } /** * * Callback method called when TelnetClient receives an option negotiation command. 
* - * @param negotiation_code - type of negotiation command received (RECEIVED_DO, RECEIVED_DONT, - * RECEIVED_WILL, RECEIVED_WONT, RECEIVED_COMMAND) - * @param option_code - code of the option negotiated * + * @param negotiationCode - type of negotiation command received (RECEIVED_DO, RECEIVED_DONT, + * RECEIVED_WILL, RECEIVED_WONT, RECEIVED_COMMAND) + * @param optionCode - code of the option negotiated * */ - public void receivedNegotiation(int negotiation_code, int option_code) { + public void receivedNegotiation(int negotiationCode, int optionCode) { String command = null; - switch (negotiation_code) { + switch (negotiationCode) { case TelnetNotificationHandler.RECEIVED_DO: command = "DO"; break; @@ -129,10 +175,10 @@ public void receivedNegotiation(int negotiation_code, int option_code) { command = "COMMAND"; break; default: - command = Integer.toString(negotiation_code); // Should not happen + command = Integer.toString(negotiationCode); // Should not happen break; } - System.out.println("Received " + command + " for option code " + option_code); + System.out.println("Received " + command + " for option code " + optionCode); } private void addOptionHandlers() { @@ -141,13 +187,13 @@ private void addOptionHandlers() { EchoOptionHandler echoOptionHandler = new EchoOptionHandler(false, false, false, false); - SuppressGAOptionHandler suppressGAOptionHandler = + SuppressGAOptionHandler suppressGaOptionHandler = new SuppressGAOptionHandler(true, true, true, true); try { telnetClient.addOptionHandler(terminalTypeOptionHandler); telnetClient.addOptionHandler(echoOptionHandler); - telnetClient.addOptionHandler(suppressGAOptionHandler); + telnetClient.addOptionHandler(suppressGaOptionHandler); } catch (InvalidTelnetOptionException e) { System.err.println( "Error registering option handlers InvalidTelnetOptionException: " + e.getMessage()); @@ -157,12 +203,10 @@ private void addOptionHandlers() { } private String normalizeLineEnding(byte[] bytes, char endChar) { - 
String data = new String(bytes); - List bytesBuffer = new ArrayList(); int countBreak = 0; - int countESC = 0; + int countEsc = 0; for (int i = 0; i < bytes.length; i++) { if (bytes[i] != 0) { @@ -182,13 +226,13 @@ private String normalizeLineEnding(byte[] bytes, char endChar) { break; case 27: // escape \x1B - countESC = 2; + countEsc = 2; break; case 33: // character:! break; default: - if (countESC == 0) { + if (countEsc == 0) { if (countBreak > 1) { int size = bytesBuffer.size(); for (int x = 0; x < countBreak - 1; x++) { @@ -198,7 +242,7 @@ private String normalizeLineEnding(byte[] bytes, char endChar) { } bytesBuffer.add(bytes[i]); } else { - countESC--; + countEsc--; } break; } @@ -231,7 +275,7 @@ protected void readData() { // rxQueue.add(new String(buffer, 0, bytesRead, StandardCharsets.UTF_8)); } else { try { - Thread.sleep(100); + Thread.sleep(SLEEP_MS); } catch (InterruptedException e) { System.err.println("InterruptedException readData:" + e.getMessage()); } @@ -243,25 +287,21 @@ protected void readData() { } public void writeData(String data) { - Runnable runnable = - () -> { - writeOutputStream(data); - }; - Thread writeThread = new Thread(runnable); - writeThread.start(); + writeOutputStream(data); } private void writeOutputStream(String data) { try { outputStream.write(data.getBytes()); - // Useful for debugging - // outputStream.write(data.getBytes(StandardCharsets.UTF_8)); outputStream.flush(); } catch (IOException e) { System.err.println("Exception while writing socket:" + e.getMessage()); } } + /** + * Closes telnet connection. 
+ */ public void disposeConnection() { try { telnetClient.disconnect(); diff --git a/usi/src/main/java/daq/usi/UsiImpl.java b/usi/src/main/java/daq/usi/UsiImpl.java new file mode 100644 index 0000000000..a8dd1e87db --- /dev/null +++ b/usi/src/main/java/daq/usi/UsiImpl.java @@ -0,0 +1,138 @@ +package daq.usi; + +import daq.usi.allied.AlliedTelesisX230; +import daq.usi.cisco.Cisco9300; +import daq.usi.ovs.OpenVSwitch; +import grpc.InterfaceResponse; +import grpc.PowerResponse; +import grpc.SwitchActionResponse; +import grpc.SwitchInfo; +import grpc.USIServiceGrpc; +import io.grpc.stub.StreamObserver; +import java.util.HashMap; +import java.util.Map; + +public class UsiImpl extends USIServiceGrpc.USIServiceImplBase { + private final Map switchControllers; + private final boolean debug; + + /** + * UsiImpl. + * + * @param debug for verbose output + */ + public UsiImpl(boolean debug) { + super(); + switchControllers = new HashMap<>(); + this.debug = debug; + } + + private SwitchController createController(SwitchInfo switchInfo) { + SwitchController newController; + switch (switchInfo.getModel()) { + case ALLIED_TELESIS_X230: { + newController = new AlliedTelesisX230(switchInfo.getIpAddr(), switchInfo.getUsername(), + switchInfo.getPassword(), debug); + break; + } + case CISCO_9300: { + newController = new Cisco9300(switchInfo.getIpAddr(), switchInfo.getUsername(), + switchInfo.getPassword(), debug); + break; + } + case OVS_SWITCH: { + newController = new OpenVSwitch(); + break; + } + default: + throw new IllegalArgumentException("Unrecognized switch model " + switchInfo.getModel()); + } + newController.start(); + return newController; + } + + private SwitchController getSwitchController(SwitchInfo switchInfo) { + String repr = String.join(",", switchInfo.getModel().toString(), switchInfo.getIpAddr(), + switchInfo.getUsername(), + switchInfo.getPassword()); + return switchControllers.computeIfAbsent(repr, key -> createController(switchInfo)); + } + + @Override + public 
void getPower(SwitchInfo request, StreamObserver responseObserver) { + System.out.println("Received request in getPower"); + SwitchController sc = getSwitchController(request); + try { + sc.getPower(request.getDevicePort(), data -> { + System.out.println("Sent response in getPower"); + if (debug) { + System.out.println(data); + } + System.out.println("Received request in getPower"); + responseObserver.onNext(data); + responseObserver.onCompleted(); + }); + } catch (Exception e) { + e.printStackTrace(); + responseObserver.onError(e); + } + } + + @Override + public void getInterface(SwitchInfo request, StreamObserver responseObserver) { + System.out.println("Received request in getInterface"); + SwitchController sc = getSwitchController(request); + try { + sc.getInterface(request.getDevicePort(), data -> { + System.out.println("Sent response in getInterface"); + if (debug) { + System.out.println(data); + } + responseObserver.onNext(data); + responseObserver.onCompleted(); + }); + } catch (Exception e) { + e.printStackTrace(); + responseObserver.onError(e); + } + } + + @Override + public void connect(SwitchInfo request, StreamObserver responseObserver) { + System.out.println("Received request in connect"); + SwitchController sc = getSwitchController(request); + try { + sc.connect(request.getDevicePort(), data -> { + System.out.println("Sent response in connect"); + if (debug) { + System.out.println(data); + } + responseObserver.onNext(data); + responseObserver.onCompleted(); + }); + } catch (Exception e) { + e.printStackTrace(); + responseObserver.onError(e); + } + } + + @Override + public void disconnect(SwitchInfo request, + StreamObserver responseObserver) { + System.out.println("Received request in disconnect"); + SwitchController sc = getSwitchController(request); + try { + sc.disconnect(request.getDevicePort(), data -> { + System.out.println("Sent response in disconnect"); + if (debug) { + System.out.println(data); + } + responseObserver.onNext(data); + 
responseObserver.onCompleted(); + }); + } catch (Exception e) { + e.printStackTrace(); + responseObserver.onError(e); + } + } +} diff --git a/usi/src/main/java/daq/usi/UsiServer.java b/usi/src/main/java/daq/usi/UsiServer.java new file mode 100644 index 0000000000..83fd51325e --- /dev/null +++ b/usi/src/main/java/daq/usi/UsiServer.java @@ -0,0 +1,63 @@ +package daq.usi; + +import io.grpc.Server; +import io.grpc.ServerBuilder; +import java.io.IOException; +import java.util.concurrent.TimeUnit; + +public class UsiServer { + private Server server; + + private void start(boolean debug) throws IOException { + /* The port on which the server should run */ + int port = 5000; + server = ServerBuilder.forPort(port) + .addService(new UsiImpl(debug)) + .build() + .start(); + if (debug) { + System.out.println("************DEBUG MODE************"); + } + System.out.println("Server started, listening on " + port); + Runtime.getRuntime().addShutdownHook(new Thread() { + @Override + public void run() { + // Use stderr here since the logger may have been reset by its JVM shutdown hook. + System.err.println("*** shutting down gRPC server since JVM is shutting down"); + try { + UsiServer.this.stop(); + } catch (InterruptedException e) { + e.printStackTrace(System.err); + } + System.err.println("*** server shut down"); + } + }); + } + + private void stop() throws InterruptedException { + if (server != null) { + server.shutdown().awaitTermination(30, TimeUnit.SECONDS); + } + } + + /** + * Await termination on the main thread since the grpc library uses daemon threads. + */ + private void blockUntilShutdown() throws InterruptedException { + if (server != null) { + server.awaitTermination(); + } + } + + /** + * Main method. + * @param args "debug". + * @throws Exception Maybe a refactor is needed to throw more specific exceptions. + */ + public static void main(String[] args) throws Exception { + final boolean debug = args.length > 0 ? 
args[0].equals("debug") : false; + final UsiServer server = new UsiServer(); + server.start(debug); + server.blockUntilShutdown(); + } +} diff --git a/usi/src/main/java/daq/usi/allied/AlliedTelesisX230.java b/usi/src/main/java/daq/usi/allied/AlliedTelesisX230.java new file mode 100644 index 0000000000..affc02d2da --- /dev/null +++ b/usi/src/main/java/daq/usi/allied/AlliedTelesisX230.java @@ -0,0 +1,287 @@ +package daq.usi.allied; + +import daq.usi.BaseSwitchController; +import daq.usi.ResponseHandler; +import grpc.InterfaceResponse; +import grpc.LinkStatus; +import grpc.POENegotiation; +import grpc.POEStatus; +import grpc.POESupport; +import grpc.PowerResponse; +import grpc.SwitchActionResponse; +import java.util.Arrays; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.Map; +import java.util.Queue; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + + +public class AlliedTelesisX230 extends BaseSwitchController { + private static final String[] powerExpected = + {"dev_interface", "admin", "pri", "oper", "power", "device", "dev_class", "max"}; + private static final String[] showPowerExpected = + {"Interface", "Admin", "Pri", "Oper", "Power", "Device", "Class", "Max"}; + private static final Map poeStatusMap = Map.of("Powered", + POEStatus.State.ON, "Off", POEStatus.State.OFF, + "Fault", POEStatus.State.FAULT, "Deny", POEStatus.State.DENY); + // TODO Not certain about AT power "Deny" status string. 
Can't find a device to produce that state + private static final Map poeSupportMap = Map.of("Enabled", + POESupport.State.ENABLED, "Disabled", POESupport.State.DISABLED); + private static final Map poeNegotiationMap = Map.of("Enabled", + POENegotiation.State.ENABLED, "Disabled", POENegotiation.State.DISABLED); + private static final Map interfaceProcessMap = + Map.of(Pattern.compile("Link is (\\w+)"), "link", + Pattern.compile("current duplex (\\w+)"), "duplex", + Pattern.compile("current speed (\\w+)"), "speed"); + + private static final int WAIT_MS = 100; + private ResponseHandler responseHandler; + + /** + * ATX230 Switch Controller. + * + * @param remoteIpAddress switch ip address + * @param user switch username + * @param password switch password + */ + public AlliedTelesisX230( + String remoteIpAddress, + String user, + String password) { + this(remoteIpAddress, user, password, false); + } + + /** + * ATX230 Switch Controller. + * + * @param remoteIpAddress switch ip address + * @param user switch username + * @param password switch password + * @param debug for verbose output + */ + public AlliedTelesisX230( + String remoteIpAddress, + String user, + String password, boolean debug) { + super(remoteIpAddress, user, password, debug); + this.username = user == null ? "manager" : user; + this.password = password == null ? "friend" : password; + commandPending = true; + } + + @Override + protected void parseData(String consoleData) throws Exception { + if (commandPending) { + responseHandler.receiveData(consoleData); + } + } + + /** + * Generic ATX230 Switch command to retrieve the Status of an interface. + */ + private String showIfaceStatusCommand(int interfacePort) { + return "show interface port1.0." + interfacePort; + } + + /** + * Generic ATX230 Switch command to retrieve the Power Status of an interface. Replace asterisk + * with actual port number for complete message. 
+ */ + private String showIfacePowerStatusCommand(int interfacePort) { + return "show power-inline interface port1.0." + interfacePort; + } + + /** + * Port toggle commands. + * + * @param interfacePort port number + * @param enabled for bringing up/down interfacePort + * @return commands + */ + private String[] portManagementCommand(int interfacePort, boolean enabled) { + return new String[] { + "configure terminal", + "interface port1.0." + interfacePort, + (enabled ? "no " : "") + "shutdown", + "end" + }; + } + + + @Override + public void getPower(int devicePort, ResponseHandler handler) throws Exception { + while (commandPending) { + Thread.sleep(WAIT_MS); + } + String command = showIfacePowerStatusCommand(devicePort); + synchronized (this) { + commandPending = true; + responseHandler = data -> { + synchronized (this) { + commandPending = false; + } + Map powerMap = processPowerStatusInline(data); + handler.receiveData(buildPowerResponse(powerMap)); + }; + telnetClientSocket.writeData(command + "\n"); + } + } + + @Override + public void getInterface(int devicePort, ResponseHandler handler) + throws Exception { + while (commandPending) { + Thread.sleep(WAIT_MS); + } + String command = showIfaceStatusCommand(devicePort); + synchronized (this) { + commandPending = true; + responseHandler = data -> { + synchronized (this) { + commandPending = false; + } + Map interfaceMap = processInterfaceStatus(data); + handler.receiveData(buildInterfaceResponse(interfaceMap)); + }; + telnetClientSocket.writeData(command + "\n"); + } + } + + private void managePort(int devicePort, ResponseHandler handler, + boolean enabled) throws Exception { + while (commandPending) { + Thread.sleep(WAIT_MS); + } + Queue commands = + new LinkedList<>(Arrays.asList(portManagementCommand(devicePort, enabled))); + SwitchActionResponse.Builder response = SwitchActionResponse.newBuilder(); + synchronized (this) { + commandPending = true; + responseHandler = data -> { + if (!commands.isEmpty()) { + 
telnetClientSocket.writeData(commands.poll() + "\n"); + return; + } + synchronized (this) { + commandPending = false; + handler.receiveData(response.setSuccess(true).build()); + } + }; + telnetClientSocket.writeData(commands.poll() + "\n"); + } + } + + @Override + public void connect(int devicePort, ResponseHandler handler) + throws Exception { + managePort(devicePort, handler, true); + } + + @Override + public void disconnect(int devicePort, ResponseHandler handler) + throws Exception { + managePort(devicePort, handler, false); + } + + private InterfaceResponse buildInterfaceResponse(Map interfaceMap) { + InterfaceResponse.Builder response = InterfaceResponse.newBuilder(); + String duplex = interfaceMap.getOrDefault("duplex", ""); + int speed = 0; + try { + speed = Integer.parseInt(interfaceMap.getOrDefault("speed", "")); + } catch (NumberFormatException e) { + System.out.println("Could not parse int: " + interfaceMap.get("speed")); + return response.build(); + } + String linkStatus = interfaceMap.getOrDefault("link", ""); + return response + .setLinkStatus(linkStatus.equals("UP") ? LinkStatus.State.UP : LinkStatus.State.DOWN) + .setDuplex(duplex) + .setLinkSpeed(speed) + .build(); + } + + private PowerResponse buildPowerResponse(Map powerMap) { + PowerResponse.Builder response = PowerResponse.newBuilder(); + float maxPower = 0; + float currentPower = 0; + try { + // AT switch may add trailing "[C]" in power output. 
+ String maxPowerString = powerMap.getOrDefault("max", "") + .replaceAll("\\[.*\\]", ""); + maxPower = Float.parseFloat(maxPowerString); + currentPower = Float.parseFloat(powerMap.getOrDefault("power", "")); + } catch (NumberFormatException e) { + System.out.println( + "Could not parse float: " + powerMap.get("max") + " or " + powerMap.get("power")); + } + String poeSupport = powerMap.getOrDefault("admin", ""); + String poeStatus = powerMap.getOrDefault("oper", ""); + return response + .setPoeStatus(poeStatusMap.getOrDefault(poeStatus, POEStatus.State.UNKNOWN)) + .setPoeSupport(poeSupportMap.getOrDefault(poeSupport, POESupport.State.UNKNOWN)) + .setPoeNegotiation(poeNegotiationMap.getOrDefault(poeSupport, POENegotiation.State.UNKNOWN)) + .setMaxPowerConsumption(maxPower) + .setCurrentPowerConsumption(currentPower).build(); + } + + private Map processInterfaceStatus(String response) { + Map interfaceMap = new HashMap<>(); + Arrays.stream(response.split("\n")).filter(s -> !containsPrompt(s)).forEach(s -> { + for (Pattern pattern : interfaceProcessMap.keySet()) { + Matcher m = pattern.matcher(s); + if (m.find()) { + interfaceMap.put(interfaceProcessMap.get(pattern), m.group(1)); + } + } + }); + return interfaceMap; + } + + private Map processPowerStatusInline(String response) { + String filtered = Arrays.stream(response.split("\n")) + .filter(s -> s.trim().length() > 0 + && !s.contains("show power-inline") + && !containsPrompt(s) + && !s.contains("(mW)")) // AT shows mW in second line + .collect(Collectors.joining("\n")); + return mapSimpleTable(filtered, showPowerExpected, powerExpected); + } + + /** + * Handles the process when using the enter command. Enable is a required step before commands can + * be sent to the switch. + * + * @param consoleData Raw console data received the the telnet connection. 
+ */ + public void handleEnableMessage(String consoleData) throws Exception { + if (containsPrompt(consoleData)) { + userEnabled = true; + commandPending = false; + } + } + + /** + * Handles the process when logging into the switch. + * + * @param consoleData Raw console data received the the telnet connection. + */ + public void handleLoginMessage(String consoleData) throws Exception { + if (consoleData.endsWith("login:")) { + telnetClientSocket.writeData(username + "\n"); + } else if (consoleData.contains("Password:")) { + telnetClientSocket.writeData(password + "\n"); + } else if (consoleData.contains(CONSOLE_PROMPT_ENDING_LOGIN)) { + userAuthorised = true; + hostname = consoleData.split(CONSOLE_PROMPT_ENDING_LOGIN)[0]; + telnetClientSocket.writeData("enable\n"); + } else if (consoleData.contains("Login incorrect")) { + telnetClientSocket.disposeConnection(); + throw new Exception("Failed to Login, Bad Password"); + } + } + +} diff --git a/usi/src/main/java/daq/usi/cisco/Cisco9300.java b/usi/src/main/java/daq/usi/cisco/Cisco9300.java new file mode 100644 index 0000000000..20142c6e7a --- /dev/null +++ b/usi/src/main/java/daq/usi/cisco/Cisco9300.java @@ -0,0 +1,312 @@ +package daq.usi.cisco; + +import daq.usi.BaseSwitchController; +import daq.usi.ResponseHandler; +import grpc.InterfaceResponse; +import grpc.LinkStatus; +import grpc.POENegotiation; +import grpc.POEStatus; +import grpc.POESupport; +import grpc.PowerResponse; +import grpc.SwitchActionResponse; +import java.util.Arrays; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.Map; +import java.util.Queue; +import java.util.stream.Collectors; + + +public class Cisco9300 extends BaseSwitchController { + + private static final String[] interfaceExpected = + {"interface", "name", "status", "vlan", "duplex", "speed", "type"}; + private static final String[] showInterfaceExpected = + {"Port", "Name", "Status", "Vlan", "Duplex", "Speed", "Type"}; + private static final Map powerInlineMap = 
Map.of("Interface", "dev_interface", + "Inline Power Mode", "admin", + "Operational status", "oper", + "Measured at the port", "power", + "Device Type", "device", + "IEEE Class", "dev_class", + "Power available to the device", "max"); + private static final Map poeStatusMap = Map.of("on", + POEStatus.State.ON, "off", POEStatus.State.OFF, "fault", POEStatus.State.FAULT, + "power-deny", POEStatus.State.DENY); + private static final Map poeSupportMap = Map.of("auto", + POESupport.State.ENABLED, "off", POESupport.State.DISABLED); + private static final Map poeNegotiationtMap = Map.of("auto", + POENegotiation.State.ENABLED, "off", POENegotiation.State.DISABLED); + private static final int WAIT_MS = 100; + private ResponseHandler responseHandler; + + /** + * Cisco 9300 Switch Controller. + * + * @param remoteIpAddress switch ip + * @param user switch username + * @param password switch password + */ + public Cisco9300( + String remoteIpAddress, + String user, + String password) { + this(remoteIpAddress, user, password, false); + } + + /** + * Cisco 9300 Switch Controller. + * + * @param remoteIpAddress switch ip + * @param user switch username + * @param password switch password + * @param debug for verbose output + */ + public Cisco9300( + String remoteIpAddress, + String user, + String password, boolean debug) { + super(remoteIpAddress, user, password, debug); + this.username = user == null ? "admin" : user; + this.password = password == null ? "password" : password; + commandPending = true; + } + + /** + * Generic Cisco Switch command to retrieve the Status of an interface. + */ + private String showIfaceStatusCommand(int interfacePort) { + return "show interface gigabitethernet1/0/" + interfacePort + " status"; + } + + /** + * Generic Cisco Switch command to retrieve the Power Status of an interface. 
Replace asterisk + * with actual port number for complete message + */ + private String showIfacePowerStatusCommand(int interfacePort) { + return "show power inline gigabitethernet1/0/" + interfacePort + " detail"; + } + + /** + * Get port toggle commands. + * + * @param interfacePort port number + * @param enabled for bringing up/down interfacePort + * @return commands + */ + private String[] portManagementCommand(int interfacePort, boolean enabled) { + return new String[] { + "configure terminal", + "interface gigabitethernet1/0/" + interfacePort, + (enabled ? "no " : "") + "shutdown", + "end" + }; + } + + /** + * Handles the process when using the enter command. Enable is a required step before commands can + * be sent to the switch. + * + * @param consoleData Raw console data received the the telnet connection. + */ + @Override + public void handleEnableMessage(String consoleData) throws Exception { + if (consoleData.contains("Password:")) { + telnetClientSocket.writeData(password + "\n"); + } else if (containsPrompt(consoleData)) { + userEnabled = true; + commandPending = false; + } else if (consoleData.contains("% Bad passwords")) { + telnetClientSocket.disposeConnection(); + throw new Exception("Could not Enable the User, Bad Password"); + } + } + + /** + * Handles the process when logging into the switch. + * + * @param consoleData Raw console data received the the telnet connection. 
+ */ + @Override + public void handleLoginMessage(String consoleData) throws Exception { + if (consoleData.contains("Username:")) { + telnetClientSocket.writeData(username + "\n"); + } else if (consoleData.contains("Password:")) { + telnetClientSocket.writeData(password + "\n"); + } else if (consoleData.endsWith(CONSOLE_PROMPT_ENDING_LOGIN)) { + userAuthorised = true; + hostname = consoleData.split(CONSOLE_PROMPT_ENDING_LOGIN)[0]; + telnetClientSocket.writeData("enable\n"); + } else if (consoleData.contains("% Login invalid")) { + telnetClientSocket.disposeConnection(); + throw new Exception("Failed to Login, Login Invalid"); + } else if (consoleData.contains("% Bad passwords")) { + telnetClientSocket.disposeConnection(); + throw new Exception("Failed to Login, Bad Password"); + } + } + + /** + * Handles current data in the buffer read from the telnet console InputStream and sends it to the + * appropriate process. + * + * @param consoleData Current unhandled data in the buffered reader + */ + @Override + public void parseData(String consoleData) throws Exception { + if (commandPending) { + responseHandler.receiveData(consoleData); + } + } + + @Override + public void getPower(int devicePort, ResponseHandler powerResponseHandler) + throws Exception { + while (commandPending) { + Thread.sleep(WAIT_MS); + } + String command = showIfacePowerStatusCommand(devicePort); + synchronized (this) { + commandPending = true; + responseHandler = data -> { + synchronized (this) { + commandPending = false; + } + Map powerMap = processPowerStatusInline(data); + powerResponseHandler.receiveData(buildPowerResponse(powerMap)); + }; + telnetClientSocket.writeData(command + "\n"); + } + } + + @Override + public void getInterface(int devicePort, ResponseHandler handler) + throws Exception { + while (commandPending) { + Thread.sleep(WAIT_MS); + } + String command = showIfaceStatusCommand(devicePort); + synchronized (this) { + commandPending = true; + responseHandler = data -> { + 
synchronized (this) { + commandPending = false; + } + Map interfaceMap = processInterfaceStatus(data); + handler.receiveData(buildInterfaceResponse(interfaceMap)); + }; + telnetClientSocket.writeData(command + "\n"); + } + } + + private void managePort(int devicePort, ResponseHandler handler, + boolean enabled) throws Exception { + while (commandPending) { + Thread.sleep(WAIT_MS); + } + Queue commands = + new LinkedList<>(Arrays.asList(portManagementCommand(devicePort, enabled))); + SwitchActionResponse.Builder response = SwitchActionResponse.newBuilder(); + synchronized (this) { + commandPending = true; + responseHandler = data -> { + if (!commands.isEmpty()) { + telnetClientSocket.writeData(commands.poll() + "\n"); + return; + } + synchronized (this) { + commandPending = false; + handler.receiveData(response.setSuccess(true).build()); + } + }; + telnetClientSocket.writeData(commands.poll() + "\n"); + } + } + + @Override + public void connect(int devicePort, ResponseHandler handler) + throws Exception { + managePort(devicePort, handler, true); + } + + @Override + public void disconnect(int devicePort, ResponseHandler handler) + throws Exception { + managePort(devicePort, handler, false); + } + + private InterfaceResponse buildInterfaceResponse(Map interfaceMap) { + InterfaceResponse.Builder response = InterfaceResponse.newBuilder(); + String duplex = interfaceMap.getOrDefault("duplex", ""); + if (duplex.startsWith("a-")) { // Interface in Auto Duplex + duplex = duplex.replaceFirst("a-", ""); + } + + String speed = interfaceMap.getOrDefault("speed", ""); + if (speed.startsWith("a-")) { // Interface in Auto Speed + speed = speed.replaceFirst("a-", ""); + } + int speedNum = 0; + try { + speedNum = Integer.parseInt(speed); + } catch (NumberFormatException e) { + System.out.println("Could not parse int for interface speed: " + speed); + return response.build(); + } + + String linkStatus = interfaceMap.getOrDefault("status", ""); + return response + 
.setLinkStatus(linkStatus.equals("connected") ? LinkStatus.State.UP : LinkStatus.State.DOWN) + .setDuplex(duplex) + .setLinkSpeed(speedNum) + .build(); + } + + private PowerResponse buildPowerResponse(Map powerMap) { + PowerResponse.Builder response = PowerResponse.newBuilder(); + float maxPower = 0; + float currentPower = 0; + try { + maxPower = Float.parseFloat(powerMap.getOrDefault("max", "")); + currentPower = Float.parseFloat(powerMap.getOrDefault("power", "")); + } catch (NumberFormatException e) { + System.out.println( + "Could not parse float: " + powerMap.get("max") + " or " + powerMap.get("power")); + } + + String poeSupport = powerMap.getOrDefault("admin", ""); + String poeStatus = powerMap.getOrDefault("oper", ""); + return response + .setPoeStatus(poeStatusMap.getOrDefault(poeStatus, POEStatus.State.UNKNOWN)) + .setPoeSupport(poeSupportMap.getOrDefault(poeSupport, POESupport.State.UNKNOWN)) + .setPoeNegotiation( + poeNegotiationtMap.getOrDefault(poeSupport, POENegotiation.State.UNKNOWN)) + .setMaxPowerConsumption(maxPower) + .setCurrentPowerConsumption(currentPower).build(); + } + + private Map processInterfaceStatus(String response) { + String filtered = Arrays.stream(response.split("\n")) + .filter(s -> !containsPrompt(s) && !s.contains("show interface") && s.length() > 0) + .collect(Collectors.joining("\n")); + return mapSimpleTable(filtered, showInterfaceExpected, interfaceExpected); + } + + private Map processPowerStatusInline(String response) { + Map powerMap = new HashMap<>(); + Arrays.stream(response.split("\n")) + .forEach( + line -> { + String[] lineParts = line.trim().split(":"); + if (lineParts.length > 1) { + String powerMapKey = powerInlineMap.getOrDefault(lineParts[0], null); + if (powerMapKey != null) { + powerMap.put(powerMapKey, lineParts[1].trim()); + } + } + }); + return powerMap; + } + + +} diff --git a/usi/src/main/java/daq/usi/ovs/OpenVSwitch.java b/usi/src/main/java/daq/usi/ovs/OpenVSwitch.java new file mode 100644 index 
package daq.usi.ovs;

import daq.usi.ResponseHandler;
import daq.usi.SwitchController;
import grpc.InterfaceResponse;
import grpc.LinkStatus;
import grpc.POEStatus;
import grpc.POESupport;
import grpc.PowerResponse;
import grpc.SwitchActionResponse;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * SwitchController backed by a local Open vSwitch instance. Port-to-interface
 * mapping is read from a dumped "ovs-ofctl show" file on the classpath; ports are
 * enabled/disabled by shelling out to ifconfig. OVS ports carry no PoE, so power
 * queries always report OFF/DISABLED.
 */
public class OpenVSwitch implements SwitchController {

  private static final String OVS_OUTPUT_FILE = "sec.ofctl";
  /** Seconds to wait for the ifconfig subprocess before giving up. */
  private static final int IFCONFIG_TIMEOUT_SEC = 10;

  /**
   * Looks up the OVS interface name for a device port by scanning the dumped
   * ofctl output for lines of the form {@code " 1(faux): addr:..."}.
   *
   * @param devicePort OpenFlow port number to look up
   * @return the interface name captured between the parentheses
   * @throws IOException when the dump file is missing or the port is not listed
   */
  protected String getInterfaceByPort(int devicePort) throws IOException {
    URL file = OpenVSwitch.class.getClassLoader().getResource(OVS_OUTPUT_FILE);
    if (file == null) {
      throw new FileNotFoundException(OVS_OUTPUT_FILE + " is not found!");
    }
    // BUG FIX: the pattern was compiled with the char literal 'g' as the flags int
    // ('g' is a JavaScript regex flag, not Java). As the int 103 it silently enabled
    // UNIX_LINES, CASE_INSENSITIVE, COMMENTS, DOTALL and UNICODE_CASE; COMMENTS in
    // particular changes how whitespace in the pattern is interpreted.
    Pattern pattern = Pattern.compile("(^\\s*" + devicePort + ")(\\((.+)\\))(:.*)");
    FileReader reader = new FileReader(file.getFile(), StandardCharsets.UTF_8);
    try (BufferedReader bufferedReader = new BufferedReader(reader)) {
      return bufferedReader.lines()
          .map(pattern::matcher)
          .filter(Matcher::find)
          .findFirst()
          // Was a bare Optional.get(): an unknown port threw an uninformative
          // NoSuchElementException instead of a diagnosable IOException.
          .orElseThrow(
              () -> new IOException("No interface found for port " + devicePort))
          .group(3);
    }
  }

  /**
   * Reports PoE state for a port. OVS interfaces have no PoE, so the answer is
   * a constant OFF/DISABLED response.
   */
  @Override
  public void getPower(int devicePort, ResponseHandler<PowerResponse> handler)
      throws Exception {
    PowerResponse power = PowerResponse.newBuilder()
        .setPoeStatus(POEStatus.State.OFF)
        .setPoeSupport(POESupport.State.DISABLED)
        .build();
    handler.receiveData(power);
  }

  /**
   * Reports link state for a port. Always UP — presumably because DAQ only queries
   * ports it has already brought up; TODO confirm against callers.
   */
  @Override
  public void getInterface(int devicePort, ResponseHandler<InterfaceResponse> handler)
      throws Exception {
    InterfaceResponse iface =
        InterfaceResponse.newBuilder().setLinkStatus(LinkStatus.State.UP).build();
    handler.receiveData(iface);
  }

  /**
   * Brings the interface mapped to devicePort up or down via ifconfig and reports
   * success to the handler.
   */
  private void managePort(int devicePort, ResponseHandler<SwitchActionResponse> handler,
                          boolean enabled)
      throws Exception {
    String iface = getInterfaceByPort(devicePort);
    ProcessBuilder processBuilder = new ProcessBuilder();
    processBuilder.command("bash", "-c", "ifconfig " + iface + (enabled ? " up" : " down"))
        .inheritIO();
    Process process = processBuilder.start();
    boolean exited = process.waitFor(IFCONFIG_TIMEOUT_SEC, TimeUnit.SECONDS);
    if (!exited) {
      process.destroyForcibly(); // Don't leave a wedged ifconfig process behind.
    }
    // BUG FIX: exitValue() was called unconditionally; it throws
    // IllegalThreadStateException when the process is still alive after the timeout.
    boolean success = exited && process.exitValue() == 0;
    handler.receiveData(SwitchActionResponse.newBuilder().setSuccess(success).build());
  }

  @Override
  public void connect(int devicePort, ResponseHandler<SwitchActionResponse> handler)
      throws Exception {
    managePort(devicePort, handler, true);
  }

  @Override
  public void disconnect(int devicePort, ResponseHandler<SwitchActionResponse> handler)
      throws Exception {
    managePort(devicePort, handler, false);
  }

  /** No background work is needed for OVS; present to satisfy the controller API. */
  public void start() {
  }
}
+ */ +syntax = "proto3"; +package usi; + +option java_multiple_files = true; +option java_outer_classname = "USIProto"; +option java_package = "grpc"; + +service USIService { + rpc GetPower(SwitchInfo) returns (PowerResponse) {} + rpc GetInterface(SwitchInfo) returns (InterfaceResponse) {} + rpc disconnect(SwitchInfo) returns (SwitchActionResponse) {} + rpc connect(SwitchInfo) returns (SwitchActionResponse) {} +} + +message SwitchActionResponse { + bool success = 1; +} + +message PowerResponse { + float current_power_consumption = 1; + float max_power_consumption = 2; + POESupport.State poe_support = 3; + POEStatus.State poe_status = 4; + POENegotiation.State poe_negotiation = 5; +} + +message InterfaceResponse { + LinkStatus.State link_status = 1; + int32 link_speed = 2; + string duplex = 3; +} + +enum SwitchModel { + ALLIED_TELESIS_X230 = 0; + CISCO_9300 = 1; + OVS_SWITCH = 2; +} + +message LinkStatus { + enum State { + UNKNOWN = 0; + DOWN = 1; + UP = 2; + } +} +message POESupport { + enum State { + UNKNOWN = 0; + ENABLED = 1; + DISABLED = 2; + } +} + +message POEStatus { + enum State { + UNKNOWN = 0; + ON = 1; + OFF = 2; + FAULT = 3; + DENY = 4; + } +} + +message POENegotiation { + enum State { + UNKNOWN = 0; + ENABLED = 1; + DISABLED = 2; + } +} + +/* + * System configuraiton of the access switch. This is used by the system + * to setup and configure the switch itself. + */ +message SwitchInfo { + // IP address of external switch. 
+ string ip_addr = 1; + + // Device Port + int32 device_port = 3; + + // Switch model + SwitchModel model = 4; + + // Switch connect username + string username = 5; + + // Switch connect password + string password = 6; +} \ No newline at end of file diff --git a/usi/src/test/java/daq/usi/AlliedTelesisX230Test.java b/usi/src/test/java/daq/usi/AlliedTelesisX230Test.java new file mode 100644 index 0000000000..c19a767c3d --- /dev/null +++ b/usi/src/test/java/daq/usi/AlliedTelesisX230Test.java @@ -0,0 +1,45 @@ +package daq.usi; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import daq.usi.allied.AlliedTelesisX230; +import grpc.POENegotiation; +import grpc.POEStatus; +import grpc.POESupport; +import grpc.PowerResponse; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class AlliedTelesisX230Test { + + private AlliedTelesisX230 at; + + @BeforeEach + void setUp() { + at = new AlliedTelesisX230(null, null, null); + at.telnetClientSocket = new FakeSwitchTelnetClientSocket(null, 0, null, false); + } + + @AfterEach + void tearDown() { + } + + @Test + void testEmptyPower() throws Exception { + at.userAuthorised = true; + at.userEnabled = true; + at.commandPending = false; + at.getPower(1, new ResponseHandler() { + @Override + public void receiveData(PowerResponse data) throws Exception { + assertEquals(data.getPoeSupport(), POESupport.State.UNKNOWN); + assertEquals(data.getPoeNegotiation(), POENegotiation.State.UNKNOWN); + assertEquals(data.getPoeStatus(), POEStatus.State.UNKNOWN); + assertEquals(data.getCurrentPowerConsumption(), 0); + assertEquals(data.getMaxPowerConsumption(), 0); + } + }); + at.receiveData(""); + } +} diff --git a/usi/src/test/java/daq/usi/BaseSwitchControllerTest.java b/usi/src/test/java/daq/usi/BaseSwitchControllerTest.java new file mode 100644 index 0000000000..7fea888b3b --- /dev/null +++ b/usi/src/test/java/daq/usi/BaseSwitchControllerTest.java @@ -0,0 +1,91 @@ 
package daq.usi;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;

import java.util.Map;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

/** Unit tests for {@link BaseSwitchController#mapSimpleTable} column parsing. */
class BaseSwitchControllerTest {

  @BeforeEach
  void setUp() {
  }

  @AfterEach
  void tearDown() {
  }

  /**
   * Runs mapSimpleTable over {@code raw} and checks each key the parser produced
   * against the corresponding entry of {@code expected}.
   */
  private static void assertTableMatches(String raw, String[] colNames, String[] mapNames,
                                         Map<String, String> expected) {
    Map<String, String> response = BaseSwitchController.mapSimpleTable(raw, colNames, mapNames);
    response.keySet().forEach(key -> assertEquals(expected.get(key), response.get(key)));
  }

  /** Empty input: every value the parser maps must be null. */
  @Test
  void mapSimpleTableEmptyInput() {
    String[] colNames = {"a", "b"};
    String[] mapNames = {"a", "b"};
    Map<String, String> response = BaseSwitchController.mapSimpleTable("", colNames, mapNames);
    response.keySet().forEach(key -> assertNull(response.get(key)));
  }

  /** Allied Telesis style "show power inline" table row. */
  @Test
  void mapSimpleTableSampleInputAT() {
    String raw = "Interface Admin Pri Oper Power Device Class Max \n"
        + "port1.0.1 Enabled Low Powered 3337 n/a 0 15400 [C]";
    String[] colNames = {"Interface", "Admin", "Pri", "Oper", "Power", "Device", "Class", "Max"};
    String[] mapNames = {"interface", "admin", "pri", "oper", "power", "device", "class", "max"};
    assertTableMatches(raw, colNames, mapNames,
        Map.of("interface", "port1.0.1", "admin", "Enabled", "pri",
            "Low", "oper", "Powered", "power", "3337", "device", "n/a",
            "class", "0", "max", "15400 [C]"));
  }

  /** Cisco 9300 style "show interface status" row with an empty Name column. */
  @Test
  void mapSimpleTableSampleInputCisco9300() {
    String raw = "Port         Name               Status       Vlan       Duplex  Speed Type\n"
        + "Gi1/0/1                         connected    routed     a-full  a-100 10/100/1000BaseTX";
    String[] colNames = {"Port", "Name", "Status", "Vlan", "Duplex", "Speed", "Type"};
    String[] mapNames = {"interface", "name", "status", "vlan", "duplex", "speed", "type"};
    assertTableMatches(raw, colNames, mapNames,
        Map.of("interface", "Gi1/0/1", "name", "", "status",
            "connected", "vlan", "routed", "duplex", "a-full", "speed", "a-100",
            "type", "10/100/1000BaseTX"));
  }

  /** A row missing interior column values should yield empty strings for them. */
  @Test
  void mapSimpleTableMissingValues() {
    String raw = "Port         Name               Status       Vlan       Duplex  Speed Type\n"
        + "Gi1/0/1                             routed     a-full  a-100 10/100/1000BaseTX";
    String[] colNames = {"Port", "Name", "Status", "Vlan", "Duplex", "Speed", "Type"};
    String[] mapNames = {"interface", "name", "status", "vlan", "duplex", "speed", "type"};
    assertTableMatches(raw, colNames, mapNames,
        Map.of("interface", "Gi1/0/1", "name", "", "status",
            "", "vlan", "routed", "duplex", "a-full", "speed", "a-100",
            "type", "10/100/1000BaseTX"));
  }

  /** A row missing its leading column values should yield empty strings for them. */
  @Test
  void mapSimpleTableMissingValuesInFront() {
    String raw = "Port         Name               Status       Vlan       Duplex  Speed Type\n"
        + "                        connected routed     a-full  a-100 10/100/1000BaseTX";
    String[] colNames = {"Port", "Name", "Status", "Vlan", "Duplex", "Speed", "Type"};
    String[] mapNames = {"interface", "name", "status", "vlan", "duplex", "speed", "type"};
    assertTableMatches(raw, colNames, mapNames,
        Map.of("interface", "", "name", "", "status",
            "connected", "vlan", "routed", "duplex", "a-full", "speed", "a-100",
            "type", "10/100/1000BaseTX"));
  }
}
org.junit.jupiter.api.Assertions.assertEquals; + +import daq.usi.cisco.Cisco9300; +import grpc.InterfaceResponse; +import grpc.LinkStatus; +import grpc.POENegotiation; +import grpc.POEStatus; +import grpc.POESupport; +import grpc.PowerResponse; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +class Cisco9300Test { + + private Cisco9300 at; + + @BeforeEach + void setUp() { + at = new Cisco9300(null, null, null); + at.telnetClientSocket = new FakeSwitchTelnetClientSocket(null, 0, null, false); + } + + @AfterEach + void tearDown() { + } + + private void fakeLogin() { + at.userAuthorised = true; + at.userEnabled = true; + at.commandPending = false; + } + + @Test + void testEmptyPower() throws Exception { + fakeLogin(); + at.getPower(1, new ResponseHandler() { + @Override + public void receiveData(PowerResponse data) throws Exception { + assertEquals(data.getPoeSupport(), POESupport.State.UNKNOWN); + assertEquals(data.getPoeNegotiation(), POENegotiation.State.UNKNOWN); + assertEquals(data.getPoeStatus(), POEStatus.State.UNKNOWN); + assertEquals(data.getCurrentPowerConsumption(), 0); + assertEquals(data.getMaxPowerConsumption(), 0); + } + }); + at.receiveData(""); + } + + @Test + void testSampleInterfaceResponse() throws Exception { + final String output = "show interface gigabitethernet1/0/2 status\n\n\n" + + "Port Name Status Vlan Duplex Speed Type\n" + + "Gi1/0/4 connected trunk a-full a-100 10/100/1000BaseTX\n" + + "daq#\n"; + fakeLogin(); + at.getInterface(1, new ResponseHandler() { + @Override + public void receiveData(InterfaceResponse data) throws Exception { + assertEquals(data.getDuplex(), "full"); + assertEquals(data.getLinkSpeed(), 100); + assertEquals(data.getLinkStatus(), LinkStatus.State.UP); + } + }); + at.receiveData(output); + } +} diff --git a/usi/src/test/java/daq/usi/FakeSwitchTelnetClientSocket.java b/usi/src/test/java/daq/usi/FakeSwitchTelnetClientSocket.java new file mode 
// ---- usi/src/test/java/daq/usi/FakeSwitchTelnetClientSocket.java ----
package daq.usi;

/**
 * Telnet socket stand-in for unit tests: echoes outbound data to stdout and never
 * opens a real connection, letting tests drive a controller via receiveData().
 */
public class FakeSwitchTelnetClientSocket extends SwitchTelnetClientSocket {

  public FakeSwitchTelnetClientSocket(
      String remoteIpAddress, int remotePort, BaseSwitchController interrogator, boolean debug) {
    super(remoteIpAddress, remotePort, interrogator, debug);
  }

  /** Prints what would have been sent to the switch instead of writing a socket. */
  @Override
  public void writeData(String data) {
    System.out.println(data);
  }

  /** No real connection exists; just log the dispose call. */
  @Override
  public void disposeConnection() {
    System.out.println("disposing connection.");
  }
}

// ---- usi/src/test/java/daq/usi/ovs/OpenVSwitchTest.java ----
package daq.usi.ovs;

import static org.junit.jupiter.api.Assertions.assertEquals;

import java.io.IOException;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

/** Unit tests for {@link OpenVSwitch} port-to-interface resolution. */
class OpenVSwitchTest {
  OpenVSwitch ovs;

  @BeforeEach
  void setUp() {
    ovs = new OpenVSwitch();
  }

  @AfterEach
  void tearDown() {
  }

  /** Ports listed in the sec.ofctl test fixture resolve to their interface names. */
  @Test
  void getInterfaceByPort() throws IOException {
    // FIX: JUnit convention is assertEquals(expected, actual); the original had
    // the arguments reversed, which garbles failure messages.
    assertEquals("faux", ovs.getInterfaceByPort(1));
    assertEquals("faux-2", ovs.getInterfaceByPort(2));
    assertEquals("sec-eth7", ovs.getInterfaceByPort(7));
  }
}
mod_nw_dst mod_nw_tos mod_tp_src mod_tp_dst + 1(faux): addr:de:06:c6:06:73:bb + config: 0 + state: 0 + current: 10GB-FD COPPER + speed: 10000 Mbps now, 0 Mbps max + 2(faux-2): addr:de:06:c6:06:73:bc + config: 0 + state: 0 + current: 10GB-FD COPPER + speed: 10000 Mbps now, 0 Mbps max + 7(sec-eth7): addr:a2:f2:6f:01:84:d4 + config: 0 + state: 0 + current: 10GB-FD COPPER + speed: 10000 Mbps now, 0 Mbps max + LOCAL(sec): addr:72:87:94:b5:9c:48 + config: PORT_DOWN + state: LINK_DOWN + speed: 0 Mbps now, 0 Mbps max +OFPT_GET_CONFIG_REPLY (xid=0x4): frags=normal miss_send_len=0 diff --git a/usi/start b/usi/start new file mode 100755 index 0000000000..7e25c0035f --- /dev/null +++ b/usi/start @@ -0,0 +1,2 @@ +#!/bin/bash -e +java -cp /ovs:usi/target/usi-0.0.1-jar-with-dependencies.jar daq.usi.UsiServer $DEBUG 2>&1 | tee logs.txt diff --git a/validator/.idea/encodings.xml b/validator/.idea/encodings.xml deleted file mode 100644 index 15a15b218a..0000000000 --- a/validator/.idea/encodings.xml +++ /dev/null @@ -1,4 +0,0 @@ - - - - \ No newline at end of file diff --git a/validator/.idea/inspectionProfiles/Project_Default.xml b/validator/.idea/inspectionProfiles/Project_Default.xml deleted file mode 100644 index d7a7dfacf9..0000000000 --- a/validator/.idea/inspectionProfiles/Project_Default.xml +++ /dev/null @@ -1,9 +0,0 @@ - - - - \ No newline at end of file diff --git a/validator/.idea/jarRepositories.xml b/validator/.idea/jarRepositories.xml deleted file mode 100644 index f5a0c5d630..0000000000 --- a/validator/.idea/jarRepositories.xml +++ /dev/null @@ -1,25 +0,0 @@ - - - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_damnhandy_handy_uri_templates_2_1_6.xml b/validator/.idea/libraries/Gradle__com_damnhandy_handy_uri_templates_2_1_6.xml deleted file mode 100644 index 06fc55732d..0000000000 --- a/validator/.idea/libraries/Gradle__com_damnhandy_handy_uri_templates_2_1_6.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - 
\ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_annotations_2_10_3.xml b/validator/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_annotations_2_10_3.xml deleted file mode 100644 index 940abc9cd6..0000000000 --- a/validator/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_annotations_2_10_3.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_core_2_10_3.xml b/validator/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_core_2_10_3.xml deleted file mode 100644 index c39a1aad89..0000000000 --- a/validator/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_core_2_10_3.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_databind_2_10_3.xml b/validator/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_databind_2_10_3.xml deleted file mode 100644 index 401e4470cc..0000000000 --- a/validator/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_databind_2_10_3.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_fasterxml_jackson_dataformat_jackson_dataformat_yaml_2_10_3.xml b/validator/.idea/libraries/Gradle__com_fasterxml_jackson_dataformat_jackson_dataformat_yaml_2_10_3.xml deleted file mode 100644 index eeaf4be6fe..0000000000 --- a/validator/.idea/libraries/Gradle__com_fasterxml_jackson_dataformat_jackson_dataformat_yaml_2_10_3.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_github_everit_org_json_schema_org_everit_json_schema_1_9_1.xml b/validator/.idea/libraries/Gradle__com_github_everit_org_json_schema_org_everit_json_schema_1_9_1.xml deleted file mode 100644 
index b593493a54..0000000000 --- a/validator/.idea/libraries/Gradle__com_github_everit_org_json_schema_org_everit_json_schema_1_9_1.xml +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_api_api_common_1_7_0.xml b/validator/.idea/libraries/Gradle__com_google_api_api_common_1_7_0.xml deleted file mode 100644 index 375d39d770..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_api_api_common_1_7_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_api_client_google_api_client_1_27_0.xml b/validator/.idea/libraries/Gradle__com_google_api_client_google_api_client_1_27_0.xml deleted file mode 100644 index 5e8f46fd94..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_api_client_google_api_client_1_27_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_api_gax_1_42_0.xml b/validator/.idea/libraries/Gradle__com_google_api_gax_1_42_0.xml deleted file mode 100644 index 5e68c48029..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_api_gax_1_42_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_api_gax_grpc_1_42_0.xml b/validator/.idea/libraries/Gradle__com_google_api_gax_grpc_1_42_0.xml deleted file mode 100644 index f0d0844afe..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_api_gax_grpc_1_42_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_api_grpc_grpc_google_cloud_pubsub_v1_1_48_0.xml b/validator/.idea/libraries/Gradle__com_google_api_grpc_grpc_google_cloud_pubsub_v1_1_48_0.xml deleted file mode 100644 index 6827686b9c..0000000000 --- 
a/validator/.idea/libraries/Gradle__com_google_api_grpc_grpc_google_cloud_pubsub_v1_1_48_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_api_grpc_proto_google_cloud_firestore_v1_0_49_0.xml b/validator/.idea/libraries/Gradle__com_google_api_grpc_proto_google_cloud_firestore_v1_0_49_0.xml deleted file mode 100644 index 4a56a72303..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_api_grpc_proto_google_cloud_firestore_v1_0_49_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_api_grpc_proto_google_cloud_firestore_v1beta1_0_49_0.xml b/validator/.idea/libraries/Gradle__com_google_api_grpc_proto_google_cloud_firestore_v1beta1_0_49_0.xml deleted file mode 100644 index 3ae4446a46..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_api_grpc_proto_google_cloud_firestore_v1beta1_0_49_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_api_grpc_proto_google_cloud_logging_v2_0_49_0.xml b/validator/.idea/libraries/Gradle__com_google_api_grpc_proto_google_cloud_logging_v2_0_49_0.xml deleted file mode 100644 index daff23e355..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_api_grpc_proto_google_cloud_logging_v2_0_49_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_api_grpc_proto_google_cloud_pubsub_v1_1_48_0.xml b/validator/.idea/libraries/Gradle__com_google_api_grpc_proto_google_cloud_pubsub_v1_1_48_0.xml deleted file mode 100644 index 7ecc7143d6..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_api_grpc_proto_google_cloud_pubsub_v1_1_48_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git 
a/validator/.idea/libraries/Gradle__com_google_api_grpc_proto_google_common_protos_1_14_0.xml b/validator/.idea/libraries/Gradle__com_google_api_grpc_proto_google_common_protos_1_14_0.xml deleted file mode 100644 index 940dcdf6fb..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_api_grpc_proto_google_common_protos_1_14_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_api_grpc_proto_google_iam_v1_0_12_0.xml b/validator/.idea/libraries/Gradle__com_google_api_grpc_proto_google_iam_v1_0_12_0.xml deleted file mode 100644 index 3255c67202..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_api_grpc_proto_google_iam_v1_0_12_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_apis_google_api_services_cloudiot_v1_rev20181120_1_27_0.xml b/validator/.idea/libraries/Gradle__com_google_apis_google_api_services_cloudiot_v1_rev20181120_1_27_0.xml deleted file mode 100644 index 976f5bad3f..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_apis_google_api_services_cloudiot_v1_rev20181120_1_27_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_auth_google_auth_library_credentials_0_13_0.xml b/validator/.idea/libraries/Gradle__com_google_auth_google_auth_library_credentials_0_13_0.xml deleted file mode 100644 index 0f00e2cf53..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_auth_google_auth_library_credentials_0_13_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_auth_google_auth_library_oauth2_http_0_13_0.xml b/validator/.idea/libraries/Gradle__com_google_auth_google_auth_library_oauth2_http_0_13_0.xml deleted file mode 100644 index 5899b266d7..0000000000 --- 
a/validator/.idea/libraries/Gradle__com_google_auth_google_auth_library_oauth2_http_0_13_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_cloud_google_cloud_core_1_66_0.xml b/validator/.idea/libraries/Gradle__com_google_cloud_google_cloud_core_1_66_0.xml deleted file mode 100644 index 1b348bd118..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_cloud_google_cloud_core_1_66_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_cloud_google_cloud_core_grpc_1_66_0.xml b/validator/.idea/libraries/Gradle__com_google_cloud_google_cloud_core_grpc_1_66_0.xml deleted file mode 100644 index cc313478fc..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_cloud_google_cloud_core_grpc_1_66_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_cloud_google_cloud_firestore_0_84_0_beta.xml b/validator/.idea/libraries/Gradle__com_google_cloud_google_cloud_firestore_0_84_0_beta.xml deleted file mode 100644 index 7314479e1f..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_cloud_google_cloud_firestore_0_84_0_beta.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_cloud_google_cloud_logging_1_66_0.xml b/validator/.idea/libraries/Gradle__com_google_cloud_google_cloud_logging_1_66_0.xml deleted file mode 100644 index c61dc8841a..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_cloud_google_cloud_logging_1_66_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_cloud_google_cloud_pubsub_1_66_0.xml b/validator/.idea/libraries/Gradle__com_google_cloud_google_cloud_pubsub_1_66_0.xml 
deleted file mode 100644 index 5a5df67856..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_cloud_google_cloud_pubsub_1_66_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_code_findbugs_jsr305_3_0_2.xml b/validator/.idea/libraries/Gradle__com_google_code_findbugs_jsr305_3_0_2.xml deleted file mode 100644 index 122552e3fe..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_code_findbugs_jsr305_3_0_2.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_code_gson_gson_2_7.xml b/validator/.idea/libraries/Gradle__com_google_code_gson_gson_2_7.xml deleted file mode 100644 index cbe1b3266b..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_code_gson_gson_2_7.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_errorprone_error_prone_annotations_2_2_0.xml b/validator/.idea/libraries/Gradle__com_google_errorprone_error_prone_annotations_2_2_0.xml deleted file mode 100644 index 9b2f90767c..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_errorprone_error_prone_annotations_2_2_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_guava_guava_26_0_android.xml b/validator/.idea/libraries/Gradle__com_google_guava_guava_26_0_android.xml deleted file mode 100644 index 85d219b6b1..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_guava_guava_26_0_android.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_http_client_google_http_client_1_28_0.xml b/validator/.idea/libraries/Gradle__com_google_http_client_google_http_client_1_28_0.xml deleted file mode 100644 index 
c5f9810e75..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_http_client_google_http_client_1_28_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_http_client_google_http_client_jackson2_1_28_0.xml b/validator/.idea/libraries/Gradle__com_google_http_client_google_http_client_jackson2_1_28_0.xml deleted file mode 100644 index 553c5c252c..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_http_client_google_http_client_jackson2_1_28_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_j2objc_j2objc_annotations_1_1.xml b/validator/.idea/libraries/Gradle__com_google_j2objc_j2objc_annotations_1_1.xml deleted file mode 100644 index ab45264c2d..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_j2objc_j2objc_annotations_1_1.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_oauth_client_google_oauth_client_1_27_0.xml b/validator/.idea/libraries/Gradle__com_google_oauth_client_google_oauth_client_1_27_0.xml deleted file mode 100644 index 6aa514b846..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_oauth_client_google_oauth_client_1_27_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_protobuf_protobuf_java_3_6_1.xml b/validator/.idea/libraries/Gradle__com_google_protobuf_protobuf_java_3_6_1.xml deleted file mode 100644 index ba08aca4c9..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_protobuf_protobuf_java_3_6_1.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_protobuf_protobuf_java_util_3_6_1.xml 
b/validator/.idea/libraries/Gradle__com_google_protobuf_protobuf_java_util_3_6_1.xml deleted file mode 100644 index b13413867f..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_protobuf_protobuf_java_util_3_6_1.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__com_google_re2j_re2j_1_1.xml b/validator/.idea/libraries/Gradle__com_google_re2j_re2j_1_1.xml deleted file mode 100644 index c30869dedd..0000000000 --- a/validator/.idea/libraries/Gradle__com_google_re2j_re2j_1_1.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__commons_beanutils_commons_beanutils_1_9_2.xml b/validator/.idea/libraries/Gradle__commons_beanutils_commons_beanutils_1_9_2.xml deleted file mode 100644 index f564ac0fad..0000000000 --- a/validator/.idea/libraries/Gradle__commons_beanutils_commons_beanutils_1_9_2.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__commons_collections_commons_collections_3_2_2.xml b/validator/.idea/libraries/Gradle__commons_collections_commons_collections_3_2_2.xml deleted file mode 100644 index 03b4c932e9..0000000000 --- a/validator/.idea/libraries/Gradle__commons_collections_commons_collections_3_2_2.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__commons_digester_commons_digester_1_8_1.xml b/validator/.idea/libraries/Gradle__commons_digester_commons_digester_1_8_1.xml deleted file mode 100644 index 0482c8e86a..0000000000 --- a/validator/.idea/libraries/Gradle__commons_digester_commons_digester_1_8_1.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__commons_io_commons_io_2_5.xml b/validator/.idea/libraries/Gradle__commons_io_commons_io_2_5.xml deleted 
file mode 100644 index 12ab8e6678..0000000000 --- a/validator/.idea/libraries/Gradle__commons_io_commons_io_2_5.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__commons_logging_commons_logging_1_2.xml b/validator/.idea/libraries/Gradle__commons_logging_commons_logging_1_2.xml deleted file mode 100644 index d26e0617f1..0000000000 --- a/validator/.idea/libraries/Gradle__commons_logging_commons_logging_1_2.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__commons_validator_commons_validator_1_6.xml b/validator/.idea/libraries/Gradle__commons_validator_commons_validator_1_6.xml deleted file mode 100644 index 06fde17888..0000000000 --- a/validator/.idea/libraries/Gradle__commons_validator_commons_validator_1_6.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__io_grpc_grpc_alts_1_18_0.xml b/validator/.idea/libraries/Gradle__io_grpc_grpc_alts_1_18_0.xml deleted file mode 100644 index 845906af4d..0000000000 --- a/validator/.idea/libraries/Gradle__io_grpc_grpc_alts_1_18_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__io_grpc_grpc_auth_1_18_0.xml b/validator/.idea/libraries/Gradle__io_grpc_grpc_auth_1_18_0.xml deleted file mode 100644 index f4059f435c..0000000000 --- a/validator/.idea/libraries/Gradle__io_grpc_grpc_auth_1_18_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__io_grpc_grpc_context_1_18_0.xml b/validator/.idea/libraries/Gradle__io_grpc_grpc_context_1_18_0.xml deleted file mode 100644 index ffde830105..0000000000 --- a/validator/.idea/libraries/Gradle__io_grpc_grpc_context_1_18_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at 
end of file diff --git a/validator/.idea/libraries/Gradle__io_grpc_grpc_core_1_18_0.xml b/validator/.idea/libraries/Gradle__io_grpc_grpc_core_1_18_0.xml deleted file mode 100644 index 443667d791..0000000000 --- a/validator/.idea/libraries/Gradle__io_grpc_grpc_core_1_18_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__io_grpc_grpc_grpclb_1_18_0.xml b/validator/.idea/libraries/Gradle__io_grpc_grpc_grpclb_1_18_0.xml deleted file mode 100644 index 1d97b75a72..0000000000 --- a/validator/.idea/libraries/Gradle__io_grpc_grpc_grpclb_1_18_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__io_grpc_grpc_netty_shaded_1_18_0.xml b/validator/.idea/libraries/Gradle__io_grpc_grpc_netty_shaded_1_18_0.xml deleted file mode 100644 index b6d9ed29f8..0000000000 --- a/validator/.idea/libraries/Gradle__io_grpc_grpc_netty_shaded_1_18_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__io_grpc_grpc_protobuf_1_18_0.xml b/validator/.idea/libraries/Gradle__io_grpc_grpc_protobuf_1_18_0.xml deleted file mode 100644 index 43a745faad..0000000000 --- a/validator/.idea/libraries/Gradle__io_grpc_grpc_protobuf_1_18_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__io_grpc_grpc_protobuf_lite_1_18_0.xml b/validator/.idea/libraries/Gradle__io_grpc_grpc_protobuf_lite_1_18_0.xml deleted file mode 100644 index f140dac0ed..0000000000 --- a/validator/.idea/libraries/Gradle__io_grpc_grpc_protobuf_lite_1_18_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__io_grpc_grpc_stub_1_18_0.xml b/validator/.idea/libraries/Gradle__io_grpc_grpc_stub_1_18_0.xml deleted file mode 100644 index 
658340cd48..0000000000 --- a/validator/.idea/libraries/Gradle__io_grpc_grpc_stub_1_18_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__io_opencensus_opencensus_api_0_18_0.xml b/validator/.idea/libraries/Gradle__io_opencensus_opencensus_api_0_18_0.xml deleted file mode 100644 index 5e6f89071d..0000000000 --- a/validator/.idea/libraries/Gradle__io_opencensus_opencensus_api_0_18_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__io_opencensus_opencensus_contrib_grpc_metrics_0_18_0.xml b/validator/.idea/libraries/Gradle__io_opencensus_opencensus_contrib_grpc_metrics_0_18_0.xml deleted file mode 100644 index 2090ef845c..0000000000 --- a/validator/.idea/libraries/Gradle__io_opencensus_opencensus_contrib_grpc_metrics_0_18_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__io_opencensus_opencensus_contrib_grpc_util_0_18_0.xml b/validator/.idea/libraries/Gradle__io_opencensus_opencensus_contrib_grpc_util_0_18_0.xml deleted file mode 100644 index 069dfadd6f..0000000000 --- a/validator/.idea/libraries/Gradle__io_opencensus_opencensus_contrib_grpc_util_0_18_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__io_opencensus_opencensus_contrib_http_util_0_18_0.xml b/validator/.idea/libraries/Gradle__io_opencensus_opencensus_contrib_http_util_0_18_0.xml deleted file mode 100644 index 22d05e51e8..0000000000 --- a/validator/.idea/libraries/Gradle__io_opencensus_opencensus_contrib_http_util_0_18_0.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__javax_annotation_javax_annotation_api_1_3_2.xml b/validator/.idea/libraries/Gradle__javax_annotation_javax_annotation_api_1_3_2.xml 
deleted file mode 100644 index c9125edcd6..0000000000 --- a/validator/.idea/libraries/Gradle__javax_annotation_javax_annotation_api_1_3_2.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__joda_time_joda_time_2_9_4.xml b/validator/.idea/libraries/Gradle__joda_time_joda_time_2_9_4.xml deleted file mode 100644 index 1bab911479..0000000000 --- a/validator/.idea/libraries/Gradle__joda_time_joda_time_2_9_4.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__junit_junit_4_13.xml b/validator/.idea/libraries/Gradle__junit_junit_4_13.xml deleted file mode 100644 index 0cef6bc81e..0000000000 --- a/validator/.idea/libraries/Gradle__junit_junit_4_13.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__org_apache_commons_commons_lang3_3_5.xml b/validator/.idea/libraries/Gradle__org_apache_commons_commons_lang3_3_5.xml deleted file mode 100644 index fed9b5690b..0000000000 --- a/validator/.idea/libraries/Gradle__org_apache_commons_commons_lang3_3_5.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__org_checkerframework_checker_compat_qual_2_5_2.xml b/validator/.idea/libraries/Gradle__org_checkerframework_checker_compat_qual_2_5_2.xml deleted file mode 100644 index b87180fb05..0000000000 --- a/validator/.idea/libraries/Gradle__org_checkerframework_checker_compat_qual_2_5_2.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__org_codehaus_mojo_animal_sniffer_annotations_1_17.xml b/validator/.idea/libraries/Gradle__org_codehaus_mojo_animal_sniffer_annotations_1_17.xml deleted file mode 100644 index cbfbdf1d12..0000000000 --- 
a/validator/.idea/libraries/Gradle__org_codehaus_mojo_animal_sniffer_annotations_1_17.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__org_hamcrest_hamcrest_core_1_3.xml b/validator/.idea/libraries/Gradle__org_hamcrest_hamcrest_core_1_3.xml deleted file mode 100644 index 8262f729c2..0000000000 --- a/validator/.idea/libraries/Gradle__org_hamcrest_hamcrest_core_1_3.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__org_json_json_20180130.xml b/validator/.idea/libraries/Gradle__org_json_json_20180130.xml deleted file mode 100644 index 83318a7eed..0000000000 --- a/validator/.idea/libraries/Gradle__org_json_json_20180130.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__org_threeten_threetenbp_1_3_3.xml b/validator/.idea/libraries/Gradle__org_threeten_threetenbp_1_3_3.xml deleted file mode 100644 index 0fcafe29d0..0000000000 --- a/validator/.idea/libraries/Gradle__org_threeten_threetenbp_1_3_3.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/Gradle__org_yaml_snakeyaml_1_24.xml b/validator/.idea/libraries/Gradle__org_yaml_snakeyaml_1_24.xml deleted file mode 100644 index 6d98003d93..0000000000 --- a/validator/.idea/libraries/Gradle__org_yaml_snakeyaml_1_24.xml +++ /dev/null @@ -1,11 +0,0 @@ - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/libraries/com_fasterxml_jackson_core_jackson_databind_2_10_2.xml b/validator/.idea/libraries/com_fasterxml_jackson_core_jackson_databind_2_10_2.xml deleted file mode 100644 index 0eaf3101e7..0000000000 --- a/validator/.idea/libraries/com_fasterxml_jackson_core_jackson_databind_2_10_2.xml +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - - - - - - - \ No newline at end of file diff --git 
a/validator/.idea/misc.xml b/validator/.idea/misc.xml deleted file mode 100644 index 50b0ab60d9..0000000000 --- a/validator/.idea/misc.xml +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - \ No newline at end of file diff --git a/validator/.idea/modules.xml b/validator/.idea/modules.xml deleted file mode 100644 index 2b55d713a9..0000000000 --- a/validator/.idea/modules.xml +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/modules/daq-validator.validator.iml b/validator/.idea/modules/daq-validator.validator.iml deleted file mode 100644 index c07f3da9ef..0000000000 --- a/validator/.idea/modules/daq-validator.validator.iml +++ /dev/null @@ -1,82 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/uiDesigner.xml b/validator/.idea/uiDesigner.xml deleted file mode 100644 index e96534fb27..0000000000 --- a/validator/.idea/uiDesigner.xml +++ /dev/null @@ -1,124 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/validator.iml b/validator/.idea/validator.iml deleted file mode 100644 index d6ebd48059..0000000000 --- a/validator/.idea/validator.iml +++ /dev/null @@ -1,9 +0,0 @@ - - - - - - - - - \ No newline at end of file diff --git a/validator/.idea/vcs.xml b/validator/.idea/vcs.xml deleted file mode 100644 index 63ec3e9c8b..0000000000 --- a/validator/.idea/vcs.xml +++ /dev/null @@ -1,80 +0,0 @@ - - - - - - - - - \ No newline at end of file diff --git a/validator/bin/build b/validator/bin/build deleted file mode 100755 index 603b77548b..0000000000 --- a/validator/bin/build +++ /dev/null @@ -1,11 +0,0 
@@ -#!/bin/bash -e - -ROOT=$(dirname $0)/.. - -cd $ROOT -rm -rf build -./gradlew shadow - -ls -l build/libs/validator-1.0-SNAPSHOT-all.jar - -echo Done with validator build. diff --git a/validator/bin/config b/validator/bin/config deleted file mode 100755 index a340ba9d87..0000000000 --- a/validator/bin/config +++ /dev/null @@ -1,36 +0,0 @@ -#!/bin/bash -e - -ROOT=$(dirname $0)/../.. -cd $ROOT - -CONFIG=local/pubber.json - -DEVICE=$1 -DATA=$2 -PROJECT=`jq -r .projectId $CONFIG` -REGION=`jq -r .cloudRegion $CONFIG` -REGISTRY=`jq -r .registryId $CONFIG` -TOPIC=target -SUBFOLDER=config - -if [ ! -f "$DATA" ]; then - echo Missing device or config file $DATA - echo Usage: $0 [device] [config_message] - false -fi - -echo Configuring $PROJECT:$REGION:$REGISTRY:$DEVICE from $DATA - -ATTRIBUTES="subFolder=$SUBFOLDER,deviceId=$DEVICE,deviceRegistryId=$REGISTRY" -ATTRIBUTES+=",deviceNumId=$RANDOM,projectId=$PROJECT" - -gcloud pubsub topics publish $TOPIC --project=$PROJECT \ - --attribute=$ATTRIBUTES \ - --message "$(< $DATA)" - -gcloud iot devices configs update \ - --project=$PROJECT \ - --region=$REGION \ - --registry=$REGISTRY \ - --device=$DEVICE \ - --config-file=$DATA diff --git a/validator/bin/keygen b/validator/bin/keygen deleted file mode 100755 index a544c87578..0000000000 --- a/validator/bin/keygen +++ /dev/null @@ -1,21 +0,0 @@ -#!/bin/bash -e - -if [ "$#" != 2 ]; then - echo $0 [type] [out_dir] - false -fi - -type=$1 -cd $2 - -if [ $type == RS256 ]; then - openssl genrsa -out rsa_private.pem 2048 - openssl rsa -in rsa_private.pem -pubout -out rsa_public.pem -elif [ $type == RS256_X509 ]; then - openssl req -x509 -nodes -newkey rsa:2048 -keyout rsa_private.pem -days 1000000 -out rsa_cert.pem -subj "/CN=unused" -else - echo Unknown key type $type. 
Try one of { RS256, RS256_X509 } - false -fi - -openssl pkcs8 -topk8 -inform PEM -outform DER -in rsa_private.pem -nocrypt > rsa_private.pkcs8 diff --git a/validator/bin/registrar b/validator/bin/registrar deleted file mode 100755 index c302518d21..0000000000 --- a/validator/bin/registrar +++ /dev/null @@ -1,30 +0,0 @@ -#!/bin/bash -e - -ROOT=$(dirname $0)/../.. -cd $ROOT - -jarfile=validator/build/libs/validator-1.0-SNAPSHOT-all.jar -mainclass=com.google.daq.mqtt.registrar.Registrar - -if [ -z "$1" -o -z "$2" ]; then - echo Usage: $0 [project_id] [devices_dir] [schema_dir] '(device_filter)' - false -fi - -project_id=$1 -devices_dir=$2 -schema_dir=$3 -device_filter=$4 - -echo Using cloud project $project_id -echo Using site config dir $devices_dir -echo Using schema root dir $schema_dir -echo Using device filter $device_filter - -JAVA=/usr/lib/jvm/java-11-openjdk-amd64/bin/java - -error=0 -$JAVA -cp $jarfile $mainclass $project_id $devices_dir $schema_dir $device_filter || error=$? - -echo Registrar complete, exit $error -exit $error diff --git a/validator/bin/test_schema b/validator/bin/test_schema deleted file mode 100755 index c73505aa2e..0000000000 --- a/validator/bin/test_schema +++ /dev/null @@ -1,81 +0,0 @@ -#!/bin/bash -e - -ROOT=$(dirname $0)/.. -cd $ROOT - -schema_root=../schemas - -errorfile=`mktemp` -rm -f $errorfile - -build=y -force=n -ignoreset=. - -while getopts ":fn" opt; do - case $opt in - f) force=y - ;; - n) build=n - ;; - \?) echo "Usage: $0 [-f] [-n]" - exit -1 - ;; - esac -done - -outroot=out -rm -rf $outroot - -if [ "$build" == y ]; then - bin/build -fi - -jarfile=$(realpath build/libs/validator-1.0-SNAPSHOT-all.jar) - -schemas=$(cd $schema_root && ls) -if [ -z "$schemas" ]; then - echo No schemas found. - false -fi -for schema in $schemas; do - rootdir=$schema_root/$schema - subsets=$(cd $rootdir; ls -d *.tests) - for subset in $subsets; do - if [ -z "$subset" ]; then - echo Schema $schema has no .tests dirs. 
- false - fi - schemaname=${subset%.tests}.json - testfiles=$(cd $rootdir/$subset; ls *.json) - for testfile in $testfiles; do - outfile=${testfile%.json}.out - testdir=$rootdir/$subset - testpath=$testdir/$testfile - expected=$testdir/$outfile - outdir=$outroot/${testdir#${schema_root}/} - mkdir -p $outdir - output=$outdir/$outfile - - error=0 - reltest=${testpath#$rootdir/} - (cd $rootdir; java -jar $jarfile $schemaname $reltest $ignoreset) 2> $output || error=$? - if [ $force == y ]; then - diff $expected $output || echo Updating $expected && cp $output $expected - else - diff -b $expected $output || (echo ' ' cp $output $expected | tee -a $errorfile) - fi - done - done -done - -echo - -if [ -f $errorfile ]; then - echo Validation errors found in $(pwd): - cat $errorfile - false -fi - -echo Done with validation. - diff --git a/validator/bin/validate b/validator/bin/validate deleted file mode 100755 index 910021f982..0000000000 --- a/validator/bin/validate +++ /dev/null @@ -1,34 +0,0 @@ -#!/bin/bash -e - -ROOT=$(dirname $0)/../.. -cd $ROOT - -jarfile=validator/build/libs/validator-1.0-SNAPSHOT-all.jar - -if [ "$#" -lt 2 ]; then - echo Usage: $0 [schema] [target] [devset] [sitepath] - false -fi - -schema=$1 -target=$2 -devset=${3:-$USER} -sitepath=$4 - -if [ ! -f $jarfile ]; then - echo Building validator... - validator/bin/build -fi - -echo Executing validator $schema $target... - -echo Validating against schema $schemafile into validations/ -rm -rf validations - -echo java -jar $jarfile $schema $target $devset $sitepath - -error=0 -java -jar $jarfile $schema $target $devset $sitepath || error=$? 
- -echo Validation complete, exit $error -exit $error diff --git a/validator/build.gradle b/validator/build.gradle deleted file mode 100644 index 1248f8d10d..0000000000 --- a/validator/build.gradle +++ /dev/null @@ -1,51 +0,0 @@ -buildscript { - repositories { - maven { - url "https://plugins.gradle.org/m2/" - } - } - dependencies { - classpath "com.github.jengelman.gradle.plugins:shadow:5.2.0" - } -} - -plugins { - id 'com.github.johnrengelman.shadow' version '5.2.0' - id 'java' - id 'maven' -} - -group 'daq-validator' -version '1.0-SNAPSHOT' - -sourceCompatibility = 1.8 - -jar { - manifest { - attributes 'Main-Class': 'com.google.daq.mqtt.validator.Validator' - } -} - -repositories { - mavenCentral() - maven { - url "https://jitpack.io" - } -} - -dependencies { - compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.11.0' - compile group: 'com.fasterxml.jackson.dataformat', name: 'jackson-dataformat-yaml', version: '2.11.0' - compile group: 'com.github.everit-org.json-schema', name: 'org.everit.json.schema', version: '1.9.1' - - compile 'com.google.guava:guava:22.0' - compile 'com.google.cloud:google-cloud-logging:1.66.0' - compile ('com.google.apis:google-api-services-cloudiot:v1-rev20181120-1.27.0') { - exclude group: 'com.google.guava', module: 'guava-jdk5' - } - compile 'commons-io:commons-io:2.5' - - compile 'com.google.cloud:google-cloud-pubsub:1.66.0' - compile 'com.google.cloud:google-cloud-firestore:0.84.0-beta' - testCompile group: 'junit', name: 'junit', version: '4.13' -} diff --git a/validator/gradle/wrapper/gradle-wrapper.jar b/validator/gradle/wrapper/gradle-wrapper.jar deleted file mode 100644 index 01b8bf6b1f..0000000000 Binary files a/validator/gradle/wrapper/gradle-wrapper.jar and /dev/null differ diff --git a/validator/gradlew b/validator/gradlew deleted file mode 100755 index cccdd3d517..0000000000 --- a/validator/gradlew +++ /dev/null @@ -1,172 +0,0 @@ -#!/usr/bin/env sh - 
-############################################################################## -## -## Gradle start up script for UN*X -## -############################################################################## - -# Attempt to set APP_HOME -# Resolve links: $0 may be a link -PRG="$0" -# Need this for relative symlinks. -while [ -h "$PRG" ] ; do - ls=`ls -ld "$PRG"` - link=`expr "$ls" : '.*-> \(.*\)$'` - if expr "$link" : '/.*' > /dev/null; then - PRG="$link" - else - PRG=`dirname "$PRG"`"/$link" - fi -done -SAVED="`pwd`" -cd "`dirname \"$PRG\"`/" >/dev/null -APP_HOME="`pwd -P`" -cd "$SAVED" >/dev/null - -APP_NAME="Gradle" -APP_BASE_NAME=`basename "$0"` - -# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS="" - -# Use the maximum available, or set MAX_FD != -1 to use that value. -MAX_FD="maximum" - -warn () { - echo "$*" -} - -die () { - echo - echo "$*" - echo - exit 1 -} - -# OS specific support (must be 'true' or 'false'). -cygwin=false -msys=false -darwin=false -nonstop=false -case "`uname`" in - CYGWIN* ) - cygwin=true - ;; - Darwin* ) - darwin=true - ;; - MINGW* ) - msys=true - ;; - NONSTOP* ) - nonstop=true - ;; -esac - -CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar - -# Determine the Java command to use to start the JVM. -if [ -n "$JAVA_HOME" ] ; then - if [ -x "$JAVA_HOME/jre/sh/java" ] ; then - # IBM's JDK on AIX uses strange locations for the executables - JAVACMD="$JAVA_HOME/jre/sh/java" - else - JAVACMD="$JAVA_HOME/bin/java" - fi - if [ ! -x "$JAVACMD" ] ; then - die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME - -Please set the JAVA_HOME variable in your environment to match the -location of your Java installation." - fi -else - JAVACMD="java" - which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 
- -Please set the JAVA_HOME variable in your environment to match the -location of your Java installation." -fi - -# Increase the maximum file descriptors if we can. -if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then - MAX_FD_LIMIT=`ulimit -H -n` - if [ $? -eq 0 ] ; then - if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then - MAX_FD="$MAX_FD_LIMIT" - fi - ulimit -n $MAX_FD - if [ $? -ne 0 ] ; then - warn "Could not set maximum file descriptor limit: $MAX_FD" - fi - else - warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" - fi -fi - -# For Darwin, add options to specify how the application appears in the dock -if $darwin; then - GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" -fi - -# For Cygwin, switch paths to Windows format before running java -if $cygwin ; then - APP_HOME=`cygpath --path --mixed "$APP_HOME"` - CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` - JAVACMD=`cygpath --unix "$JAVACMD"` - - # We build the pattern for arguments to be converted via cygpath - ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` - SEP="" - for dir in $ROOTDIRSRAW ; do - ROOTDIRS="$ROOTDIRS$SEP$dir" - SEP="|" - done - OURCYGPATTERN="(^($ROOTDIRS))" - # Add a user-defined pattern to the cygpath arguments - if [ "$GRADLE_CYGPATTERN" != "" ] ; then - OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" - fi - # Now convert the arguments - kludge to limit ourselves to /bin/sh - i=0 - for arg in "$@" ; do - CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` - CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option - - if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition - eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` - else - eval `echo args$i`="\"$arg\"" - fi - i=$((i+1)) - done - case $i in - (0) set -- ;; - (1) set -- "$args0" ;; - (2) set -- "$args0" "$args1" ;; - (3) set -- "$args0" "$args1" "$args2" ;; - (4) set -- "$args0" 
"$args1" "$args2" "$args3" ;; - (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; - (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; - (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; - (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; - (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; - esac -fi - -# Escape application args -save () { - for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done - echo " " -} -APP_ARGS=$(save "$@") - -# Collect all arguments for the java command, following the shell quoting and substitution rules -eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" - -# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong -if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then - cd "$(dirname "$0")" -fi - -exec "$JAVACMD" "$@" diff --git a/validator/gradlew.bat b/validator/gradlew.bat deleted file mode 100644 index e95643d6a2..0000000000 --- a/validator/gradlew.bat +++ /dev/null @@ -1,84 +0,0 @@ -@if "%DEBUG%" == "" @echo off -@rem ########################################################################## -@rem -@rem Gradle startup script for Windows -@rem -@rem ########################################################################## - -@rem Set local scope for the variables with windows NT shell -if "%OS%"=="Windows_NT" setlocal - -set DIRNAME=%~dp0 -if "%DIRNAME%" == "" set DIRNAME=. -set APP_BASE_NAME=%~n0 -set APP_HOME=%DIRNAME% - -@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 
-set DEFAULT_JVM_OPTS= - -@rem Find java.exe -if defined JAVA_HOME goto findJavaFromJavaHome - -set JAVA_EXE=java.exe -%JAVA_EXE% -version >NUL 2>&1 -if "%ERRORLEVEL%" == "0" goto init - -echo. -echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. - -goto fail - -:findJavaFromJavaHome -set JAVA_HOME=%JAVA_HOME:"=% -set JAVA_EXE=%JAVA_HOME%/bin/java.exe - -if exist "%JAVA_EXE%" goto init - -echo. -echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% -echo. -echo Please set the JAVA_HOME variable in your environment to match the -echo location of your Java installation. - -goto fail - -:init -@rem Get command-line arguments, handling Windows variants - -if not "%OS%" == "Windows_NT" goto win9xME_args - -:win9xME_args -@rem Slurp the command line arguments. -set CMD_LINE_ARGS= -set _SKIP=2 - -:win9xME_args_slurp -if "x%~1" == "x" goto execute - -set CMD_LINE_ARGS=%* - -:execute -@rem Setup the command line - -set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar - -@rem Execute Gradle -"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% - -:end -@rem End local scope for the variables with windows NT shell -if "%ERRORLEVEL%"=="0" goto mainEnd - -:fail -rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of -rem the _cmd.exe /c_ return code! 
-if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 -exit /b 1 - -:mainEnd -if "%OS%"=="Windows_NT" endlocal - -:omega diff --git a/validator/settings.gradle b/validator/settings.gradle deleted file mode 100644 index 9ad209fbe9..0000000000 --- a/validator/settings.gradle +++ /dev/null @@ -1,2 +0,0 @@ -rootProject.name = 'validator' - diff --git a/validator/src/main/java/com/google/daq/mqtt/registrar/LocalDevice.java b/validator/src/main/java/com/google/daq/mqtt/registrar/LocalDevice.java deleted file mode 100644 index f36e338b85..0000000000 --- a/validator/src/main/java/com/google/daq/mqtt/registrar/LocalDevice.java +++ /dev/null @@ -1,412 +0,0 @@ -package com.google.daq.mqtt.registrar; - -import com.fasterxml.jackson.annotation.JsonInclude.Include; -import com.fasterxml.jackson.core.JsonGenerator; -import com.fasterxml.jackson.core.JsonParser.Feature; -import com.fasterxml.jackson.core.PrettyPrinter; -import com.fasterxml.jackson.core.util.DefaultPrettyPrinter; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.SerializationFeature; -import com.fasterxml.jackson.databind.DeserializationFeature; -import com.fasterxml.jackson.databind.util.ISO8601DateFormat; -import com.google.api.services.cloudiot.v1.model.DeviceCredential; -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableSet; -import com.google.common.collect.Sets; -import com.google.common.collect.Sets.SetView; -import com.google.daq.mqtt.util.CloudDeviceSettings; -import com.google.daq.mqtt.util.CloudIotManager; -import com.google.daq.mqtt.util.ExceptionMap; -import org.apache.commons.io.IOUtils; -import org.everit.json.schema.Schema; -import org.json.JSONObject; -import org.json.JSONTokener; - -import java.io.*; -import java.nio.charset.Charset; -import java.util.*; - -import static com.google.daq.mqtt.registrar.Registrar.*; - -class LocalDevice { - - private static final PrettyPrinter PROPER_PRETTY_PRINTER_POLICY = new 
ProperPrettyPrinterPolicy(); - - private static final ObjectMapper OBJECT_MAPPER_RAW = new ObjectMapper() - .enable(SerializationFeature.ORDER_MAP_ENTRIES_BY_KEYS) - .enable(Feature.ALLOW_TRAILING_COMMA) - .enable(Feature.STRICT_DUPLICATE_DETECTION) - .disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS) - .setDateFormat(new ISO8601DateFormat()) - .setSerializationInclusion(Include.NON_NULL); - - private static final ObjectMapper OBJECT_MAPPER = OBJECT_MAPPER_RAW.copy() - .disable(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES) - .enable(SerializationFeature.INDENT_OUTPUT); - - private static final String RSA_CERT_TYPE = "RS256_X509"; - private static final String RSA_KEY_FILE = "RSA_PEM"; - private static final String RSA_CERT_FILE = "RSA_X509_PEM"; - private static final String RSA_PUBLIC_PEM = "rsa_public.pem"; - private static final String RSA_CERT_PEM = "rsa_cert.pem"; - private static final String RSA_PRIVATE_PEM = "rsa_private.pem"; - private static final String RSA_PRIVATE_PKCS8 = "rsa_private.pkcs8"; - private static final String PHYSICAL_TAG_ERROR = "Physical tag %s %s does not match expected %s"; - - private static final Set DEVICE_FILES = ImmutableSet.of(METADATA_JSON); - private static final Set KEY_FILES = ImmutableSet.of(RSA_PUBLIC_PEM, RSA_PRIVATE_PEM, RSA_PRIVATE_PKCS8); - private static final Set OPTIONAL_FILES = ImmutableSet.of( - GENERATED_CONFIG_JSON, DEVICE_ERRORS_JSON, NORMALIZED_JSON); - - private static final String KEYGEN_EXEC_FORMAT = "validator/bin/keygen %s %s"; - public static final String METADATA_SUBFOLDER = "metadata"; - private static final String ERROR_FORMAT_INDENT = " "; - private static final int MAX_METADATA_LENGTH = 32767; - public static final String INVALID_METADATA_HASH = "INVALID"; - - private final String deviceId; - private final Map schemas; - private final File deviceDir; - private final UdmiSchema.Metadata metadata; - private final File devicesDir; - private final ExceptionMap exceptionMap; - - private String 
deviceNumId; - - private CloudDeviceSettings settings; - - LocalDevice(File devicesDir, String deviceId, Map schemas) { - try { - this.deviceId = deviceId; - this.schemas = schemas; - this.devicesDir = devicesDir; - exceptionMap = new ExceptionMap("Exceptions for " + deviceId); - deviceDir = new File(devicesDir, deviceId); - metadata = readMetadata(); - } catch (Exception e) { - throw new RuntimeException("While loading local device " + deviceId, e); - } - } - - static boolean deviceExists(File devicesDir, String deviceName) { - return new File(new File(devicesDir, deviceName), METADATA_JSON).isFile(); - } - - public void validatedDeviceDir() { - try { - String[] files = deviceDir.list(); - Preconditions.checkNotNull(files, "No files found in " + deviceDir.getAbsolutePath()); - Set actualFiles = ImmutableSet.copyOf(files); - Set expectedFiles = Sets.union(DEVICE_FILES, keyFiles()); - SetView missing = Sets.difference(expectedFiles, actualFiles); - if (!missing.isEmpty()) { - throw new RuntimeException("Missing files: " + missing); - } - SetView extra = Sets.difference(Sets.difference(actualFiles, expectedFiles), OPTIONAL_FILES); - if (!extra.isEmpty()) { - throw new RuntimeException("Extra files: " + extra); - } - } catch (Exception e) { - throw new RuntimeException("While validating device directory " + deviceId, e); - } - } - - private UdmiSchema.Metadata readMetadata() { - File metadataFile = new File(deviceDir, METADATA_JSON); - try (InputStream targetStream = new FileInputStream(metadataFile)) { - schemas.get(METADATA_JSON).validate(new JSONObject(new JSONTokener(targetStream))); - } catch (Exception metadata_exception) { - exceptionMap.put("Validating", metadata_exception); - } - try { - return OBJECT_MAPPER.readValue(metadataFile, UdmiSchema.Metadata.class); - } catch (Exception mapping_exception) { - exceptionMap.put("Reading", mapping_exception); - } - return null; - } - - private UdmiSchema.Metadata readNormalized() { - try { - File metadataFile = new 
File(deviceDir, NORMALIZED_JSON); - return OBJECT_MAPPER.readValue(metadataFile, UdmiSchema.Metadata.class); - } catch (Exception mapping_exception) { - return new UdmiSchema.Metadata(); - } - } - - private String metadataHash() { - if (metadata == null) { - return INVALID_METADATA_HASH; - } - String savedHash = metadata.hash; - Date savedTimestamp = metadata.timestamp; - try { - metadata.hash = null; - metadata.timestamp = null; - String json = metadataString(); - return String.format("%08x", Objects.hash(json)); - } catch (Exception e) { - throw new RuntimeException("Converting object to string", e); - } finally { - metadata.hash = savedHash; - metadata.timestamp = savedTimestamp; - } - } - - private String getAuthType() { - return metadata.cloud == null ? null : metadata.cloud.auth_type; - } - - private String getAuthFileType() { - return RSA_CERT_TYPE.equals(getAuthType()) ? RSA_CERT_FILE : RSA_KEY_FILE; - } - - private DeviceCredential loadCredential() { - try { - if (hasGateway() && getAuthType() != null) { - throw new RuntimeException("Proxied devices should not have auth_type defined"); - } - if (!isDirectConnect()) { - return null; - } - if (getAuthType() == null) { - throw new RuntimeException("Credential auth_type definition missing"); - } - File deviceKeyFile = new File(deviceDir, publicKeyFile()); - if (!deviceKeyFile.exists()) { - generateNewKey(); - } - return CloudIotManager.makeCredentials(getAuthFileType(), - IOUtils.toString(new FileInputStream(deviceKeyFile), Charset.defaultCharset())); - } catch (Exception e) { - throw new RuntimeException("While loading credential for local device " + deviceId, e); - } - } - - private Set keyFiles() { - if (!isDirectConnect()) { - return ImmutableSet.of(); - } - return Sets.union(Sets.union(DEVICE_FILES, KEY_FILES), Set.of(publicKeyFile())); - } - - private String publicKeyFile() { - return RSA_CERT_TYPE.equals(getAuthType()) ? 
RSA_CERT_PEM : RSA_PUBLIC_PEM; - } - - private void generateNewKey() { - String absolutePath = deviceDir.getAbsolutePath(); - try { - String command = String.format(KEYGEN_EXEC_FORMAT, getAuthType(), absolutePath); - System.err.println(command); - int exitCode = Runtime.getRuntime().exec(command).waitFor(); - if (exitCode != 0) { - throw new RuntimeException("Keygen exit code " + exitCode); - } - } catch (Exception e) { - throw new RuntimeException("While generating new credential for " + deviceId, e); - } - } - - boolean isGateway() { - return metadata != null && metadata.gateway != null && - metadata.gateway.proxy_ids != null; - } - - boolean hasGateway() { - return metadata != null && metadata.gateway != null && - metadata.gateway.gateway_id != null; - } - - boolean isDirectConnect() { - return isGateway() || !hasGateway(); - } - - String getGatewayId() { - return hasGateway() ? metadata.gateway.gateway_id : null; - } - - CloudDeviceSettings getSettings() { - try { - if (settings != null) { - return settings; - } - settings = new CloudDeviceSettings(); - if (metadata == null) { - return settings; - } - settings.credential = loadCredential(); - settings.metadata = metadataString(); - settings.config = deviceConfigString(); - settings.proxyDevices = getProxyDevicesList(); - return settings; - } catch (Exception e) { - throw new RuntimeException("While getting settings for device " + deviceId, e); - } - } - - private List getProxyDevicesList() { - return isGateway() ? 
metadata.gateway.proxy_ids : null; - } - - private String deviceConfigString() { - try { - UdmiSchema.Config config = new UdmiSchema.Config(); - config.timestamp = metadata.timestamp; - if (isGateway()) { - config.gateway = new UdmiSchema.GatewayConfig(); - config.gateway.proxy_ids = getProxyDevicesList(); - } - if (metadata.pointset != null) { - config.pointset = getDevicePointsetConfig(); - } - if (metadata.localnet != null) { - config.localnet = getDeviceLocalnetConfig(); - } - return OBJECT_MAPPER.writeValueAsString(config); - } catch (Exception e) { - throw new RuntimeException("While converting device config to string", e); - } - } - - private UdmiSchema.LocalnetConfig getDeviceLocalnetConfig() { - UdmiSchema.LocalnetConfig localnetConfig = new UdmiSchema.LocalnetConfig(); - localnetConfig.subsystems = metadata.localnet.subsystem; - return localnetConfig; - } - - private UdmiSchema.PointsetConfig getDevicePointsetConfig() { - UdmiSchema.PointsetConfig pointsetConfig = new UdmiSchema.PointsetConfig(); - metadata.pointset.points.forEach((metadataKey, value) -> - pointsetConfig.points.computeIfAbsent(metadataKey, configKey -> - UdmiSchema.PointConfig.fromRef(value.ref))); - return pointsetConfig; - } - - private String metadataString() { - try { - String prettyString = OBJECT_MAPPER.writeValueAsString(metadata); - if (prettyString.length() <= MAX_METADATA_LENGTH) { - return prettyString; - } - return OBJECT_MAPPER_RAW.writeValueAsString(metadata); - } catch (Exception e) { - throw new RuntimeException("While converting metadata to string", e); - } - } - - public void validateEnvelope(String registryId, String siteName) { - try { - UdmiSchema.Envelope envelope = new UdmiSchema.Envelope(); - envelope.deviceId = deviceId; - envelope.deviceRegistryId = registryId; - // Don't use actual project id because it should be abstracted away. 
- envelope.projectId = fakeProjectId(); - envelope.deviceNumId = makeNumId(envelope); - String envelopeJson = OBJECT_MAPPER.writeValueAsString(envelope); - schemas.get(ENVELOPE_JSON).validate(new JSONObject(new JSONTokener(envelopeJson))); - } catch (Exception e) { - throw new IllegalStateException("Validating envelope " + deviceId, e); - } - checkConsistency(siteName); - } - - private String fakeProjectId() { - return metadata.system.location.site.toLowerCase(); - } - - private void checkConsistency(String expectedSite) { - String siteName = metadata.system.location.site; - String assetSite = metadata.system.physical_tag.asset.site; - String assetName = metadata.system.physical_tag.asset.name; - Preconditions.checkState(expectedSite.equals(siteName), - String.format(PHYSICAL_TAG_ERROR, "location", siteName, expectedSite)); - Preconditions.checkState(expectedSite.equals(assetSite), - String.format(PHYSICAL_TAG_ERROR, "site", assetSite, expectedSite)); - Preconditions.checkState(deviceId.equals(assetName), - String.format(PHYSICAL_TAG_ERROR, "name", assetName, deviceId)); - } - - private String makeNumId(UdmiSchema.Envelope envelope) { - int hash = Objects.hash(deviceId, envelope.deviceRegistryId, envelope.projectId); - return Integer.toString(hash < 0 ? 
-hash : hash); - } - - public void writeErrors() { - File errorsFile = new File(deviceDir, DEVICE_ERRORS_JSON); - System.err.println("Updating " + errorsFile); - if (exceptionMap.isEmpty()) { - errorsFile.delete(); - return; - } - try (PrintStream printStream = new PrintStream(new FileOutputStream(errorsFile))) { - ExceptionMap.ErrorTree errorTree = ExceptionMap.format(exceptionMap, ERROR_FORMAT_INDENT); - errorTree.write(printStream); - } catch (Exception e) { - throw new RuntimeException("While writing "+ errorsFile.getAbsolutePath(), e); - } - } - - void writeNormalized() { - File metadataFile = new File(deviceDir, NORMALIZED_JSON); - if (metadata == null) { - System.err.println("Deleting (invalid) " + metadataFile.getAbsolutePath()); - metadataFile.delete(); - return; - } - UdmiSchema.Metadata normalized = readNormalized(); - String writeHash = metadataHash(); - if (normalized.hash != null && normalized.hash.equals(writeHash)) { - return; - } - metadata.timestamp = new Date(); - metadata.hash = writeHash; - System.err.println("Writing normalized " + metadataFile.getAbsolutePath()); - try (OutputStream outputStream = new FileOutputStream(metadataFile)) { - // Super annoying, but can't set this on the global static instance. 
- JsonGenerator generator = OBJECT_MAPPER.getFactory() - .createGenerator(outputStream) - .setPrettyPrinter(PROPER_PRETTY_PRINTER_POLICY); - OBJECT_MAPPER.writeValue(generator, metadata); - } catch (Exception e) { - exceptionMap.put("Writing", e); - } - } - - public void writeConfigFile() { - File configFile = new File(deviceDir, GENERATED_CONFIG_JSON); - try (OutputStream outputStream = new FileOutputStream(configFile)) { - outputStream.write(settings.config.getBytes()); - } catch (Exception e) { - throw new RuntimeException("While writing "+ configFile.getAbsolutePath(), e); - } - } - - public String getDeviceId() { - return deviceId; - } - - public String getDeviceNumId() { - return Preconditions.checkNotNull(deviceNumId, "deviceNumId not set"); - } - - public void setDeviceNumId(String numId) { - deviceNumId = numId; - } - - public ExceptionMap getErrors() { - return exceptionMap; - } - - public boolean hasValidMetadata() { - return metadata != null; - } - - private static class ProperPrettyPrinterPolicy extends DefaultPrettyPrinter { - @Override - public void writeObjectFieldValueSeparator(JsonGenerator jg) throws IOException { - jg.writeRaw(": "); - } - } -} diff --git a/validator/src/main/java/com/google/daq/mqtt/registrar/Registrar.java b/validator/src/main/java/com/google/daq/mqtt/registrar/Registrar.java deleted file mode 100644 index 6b991bca48..0000000000 --- a/validator/src/main/java/com/google/daq/mqtt/registrar/Registrar.java +++ /dev/null @@ -1,317 +0,0 @@ -package com.google.daq.mqtt.registrar; - -import com.fasterxml.jackson.annotation.JsonInclude.Include; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.SerializationFeature; -import com.fasterxml.jackson.databind.util.ISO8601DateFormat; -import com.google.api.services.cloudiot.v1.model.Device; -import com.google.api.services.cloudiot.v1.model.DeviceCredential; -import com.google.common.base.Preconditions; -import com.google.daq.mqtt.util.*; -import 
com.google.daq.mqtt.util.ExceptionMap.ErrorTree; -import org.everit.json.schema.Schema; -import org.everit.json.schema.loader.SchemaClient; -import org.everit.json.schema.loader.SchemaLoader; -import org.json.JSONObject; -import org.json.JSONTokener; - -import java.io.File; -import java.io.FileInputStream; -import java.io.InputStream; -import java.math.BigInteger; -import java.util.*; -import java.util.regex.Matcher; -import java.util.regex.Pattern; -import java.util.stream.Collectors; - -import static java.util.stream.Collectors.toSet; - -public class Registrar { - - static final String METADATA_JSON = "metadata.json"; - static final String NORMALIZED_JSON = "metadata_norm.json"; - static final String DEVICE_ERRORS_JSON = "errors.json"; - static final String ENVELOPE_JSON = "envelope.json"; - static final String GENERATED_CONFIG_JSON = "generated_config.json"; - - private static final String DEVICES_DIR = "devices"; - private static final String ERROR_FORMAT_INDENT = " "; - - private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper() - .enable(SerializationFeature.INDENT_OUTPUT) - .disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS) - .setDateFormat(new ISO8601DateFormat()) - .setSerializationInclusion(Include.NON_NULL); - public static final String ALL_MATCH = ""; - - private CloudIotManager cloudIotManager; - private File siteConfig; - private final Map schemas = new HashMap<>(); - private File schemaBase; - private String schemaName; - private PubSubPusher pubSubPusher; - private Map localDevices; - private File summaryFile; - private ExceptionMap blockErrors; - private String projectId; - - public static void main(String[] args) { - Registrar registrar = new Registrar(); - try { - if (args.length < 3 || args.length > 4) { - throw new IllegalArgumentException("Args: [project_id] [site_dir] [schema_file] (device_regex)"); - } - registrar.setProjectId(args[0]); - registrar.setSchemaBase(args[2]); - registrar.setSiteConfigPath(args[1]); - 
registrar.processDevices(args.length > 3 ? args[3] : ALL_MATCH); - registrar.writeErrors(); - registrar.shutdown(); - } catch (ExceptionMap em) { - ErrorTree errorTree = ExceptionMap.format(em, ERROR_FORMAT_INDENT); - errorTree.write(System.err); - System.exit(2); - } catch (Exception ex) { - ex.printStackTrace(); - System.exit(-1); - } - System.exit(0); - } - - private void writeErrors() throws Exception { - Map> errorSummary = new TreeMap<>(); - localDevices.values().forEach(LocalDevice::writeErrors); - localDevices.values().forEach(device -> { - device.getErrors().stream().forEach(error -> errorSummary - .computeIfAbsent(error.getKey(), cat -> new TreeMap<>()) - .put(device.getDeviceId(), error.getValue().toString())); - if (device.getErrors().isEmpty()) { - errorSummary.computeIfAbsent("Clean", cat -> new TreeMap<>()) - .put(device.getDeviceId(), "True"); - } - }); - if (!blockErrors.isEmpty()) { - errorSummary.put("Block", blockErrors.stream().collect(Collectors.toMap( - Map.Entry::getKey, entry -> entry.getValue().toString()))); - } - System.err.println("\nSummary:"); - errorSummary.forEach((key, value) -> System.err.println(" Device " + key + ": " + value.size())); - System.err.println("Out of " + localDevices.size() + " total."); - OBJECT_MAPPER.writeValue(summaryFile, errorSummary); - } - - private void setSiteConfigPath(String siteConfigPath) { - Preconditions.checkNotNull(schemaName, "schemaName not set yet"); - siteConfig = new File(siteConfigPath); - summaryFile = new File(siteConfig, "registration_summary.json"); - summaryFile.delete(); - File cloudIotConfig = new File(siteConfig, ConfigUtil.CLOUD_IOT_CONFIG_JSON); - System.err.println("Reading Cloud IoT config from " + cloudIotConfig.getAbsolutePath()); - cloudIotManager = new CloudIotManager(projectId, cloudIotConfig, schemaName); - pubSubPusher = new PubSubPusher(projectId, cloudIotConfig); - System.err.println(String.format("Working with project %s registry %s/%s", - 
cloudIotManager.getProjectId(), cloudIotManager.getCloudRegion(), cloudIotManager.getRegistryId())); - } - - private void processDevices(String deviceRegex) { - try { - Pattern devicePattern = Pattern.compile(deviceRegex); - localDevices = loadLocalDevices(devicePattern); - List cloudDevices = fetchDeviceList(devicePattern); - Set extraDevices = cloudDevices.stream().map(Device::getId).collect(toSet()); - for (String localName : localDevices.keySet()) { - LocalDevice localDevice = localDevices.get(localName); - if (!localDevice.hasValidMetadata()) { - System.err.println("Skipping (invalid) " + localName); - continue; - } - extraDevices.remove(localName); - try { - updateCloudIoT(localDevice); - localDevice.writeConfigFile(); - Device device = Preconditions.checkNotNull(fetchDevice(localName), - "missing device " + localName); - BigInteger numId = Preconditions.checkNotNull(device.getNumId(), - "missing deviceNumId for " + localName); - localDevice.setDeviceNumId(numId.toString()); - sendMetadataMessage(localDevice); - } catch (Exception e) { - System.err.println("Deferring exception: " + e.toString()); - localDevice.getErrors().put("Registering", e); - } - } - bindGatewayDevices(localDevices); - blockErrors = blockExtraDevices(extraDevices); - System.err.println(String.format("Processed %d devices", localDevices.size())); - } catch (Exception e) { - throw new RuntimeException("While processing devices", e); - } - } - - private ExceptionMap blockExtraDevices(Set extraDevices) { - ExceptionMap exceptionMap = new ExceptionMap("Block devices errors"); - for (String extraName : extraDevices) { - try { - System.err.println("Blocking extra device " + extraName); - cloudIotManager.blockDevice(extraName, true); - } catch (Exception e) { - exceptionMap.put(extraName, e); - } - } - return exceptionMap; - } - - private Device fetchDevice(String localName) { - try { - return cloudIotManager.fetchDevice(localName); - } catch (Exception e) { - throw new RuntimeException("Fetching 
device " + localName, e); - } - } - - private void sendMetadataMessage(LocalDevice localDevice) { - System.err.println("Sending metadata message for " + localDevice.getDeviceId()); - Map attributes = new HashMap<>(); - attributes.put("deviceId", localDevice.getDeviceId()); - attributes.put("deviceNumId", localDevice.getDeviceNumId()); - attributes.put("deviceRegistryId", cloudIotManager.getRegistryId()); - attributes.put("projectId", cloudIotManager.getProjectId()); - attributes.put("subFolder", LocalDevice.METADATA_SUBFOLDER); - pubSubPusher.sendMessage(attributes, localDevice.getSettings().metadata); - } - - private void updateCloudIoT(LocalDevice localDevice) { - String localName = localDevice.getDeviceId(); - fetchDevice(localName); - CloudDeviceSettings localDeviceSettings = localDevice.getSettings(); - if (cloudIotManager.registerDevice(localName, localDeviceSettings)) { - System.err.println("Created new device entry " + localName); - } else { - System.err.println("Updated device entry " + localName); - } - } - - private void bindGatewayDevices(Map localDevices) { - localDevices.values().stream().filter(localDevice -> localDevice.getSettings().proxyDevices != null).forEach( - localDevice -> localDevice.getSettings().proxyDevices.forEach(proxyDeviceId -> { - try { - System.err.println("Binding " + proxyDeviceId + " to gateway " + localDevice.getDeviceId()); - cloudIotManager.bindDevice(proxyDeviceId, localDevice.getDeviceId()); - } catch (Exception e) { - throw new RuntimeException("While binding device " + proxyDeviceId, e); - } - }) - ); - } - - private void shutdown() { - pubSubPusher.shutdown(); - } - - private List fetchDeviceList(Pattern devicePattern) { - System.err.println("Fetching remote registry " + cloudIotManager.getRegistryId()); - return cloudIotManager.fetchDeviceList(devicePattern); - } - - private Map loadLocalDevices(Pattern devicePattern) { - File devicesDir = new File(siteConfig, DEVICES_DIR); - String[] devices = devicesDir.list(); - 
Preconditions.checkNotNull(devices, "No devices found in " + devicesDir.getAbsolutePath()); - Map localDevices = loadDevices(devicesDir, devices, devicePattern); - validateKeys(localDevices); - validateFiles(localDevices); - writeNormalized(localDevices); - return localDevices; - } - - private void validateFiles(Map localDevices) { - for (LocalDevice device : localDevices.values()) { - try { - device.validatedDeviceDir(); - } catch (Exception e) { - device.getErrors().put("Files", e); - } - } - } - - private void writeNormalized(Map localDevices) { - for (String deviceName : localDevices.keySet()) { - try { - localDevices.get(deviceName).writeNormalized(); - } catch (Exception e) { - throw new RuntimeException("While writing normalized " + deviceName, e); - } - } - } - - private void validateKeys(Map localDevices) { - Map privateKeys = new HashMap<>(); - localDevices.values().stream().filter(LocalDevice::isDirectConnect).forEach( - localDevice -> { - String deviceName = localDevice.getDeviceId(); - CloudDeviceSettings settings = localDevice.getSettings(); - if (privateKeys.containsKey(settings.credential)) { - String previous = privateKeys.get(settings.credential); - RuntimeException exception = new RuntimeException( - String.format("Duplicate credentials found for %s & %s", previous, deviceName)); - localDevice.getErrors().put("Key", exception); - } else { - privateKeys.put(settings.credential, deviceName); - } - }); - } - - private Map loadDevices(File devicesDir, String[] devices, Pattern devicePattern) { - HashMap localDevices = new HashMap<>(); - for (String deviceName : devices) { - Matcher deviceMatch = devicePattern.matcher(deviceName); - if (deviceMatch.find() && LocalDevice.deviceExists(devicesDir, deviceName)) { - System.err.println("Loading local device " + deviceName); - LocalDevice localDevice = new LocalDevice(devicesDir, deviceName, schemas); - localDevices.put(deviceName, localDevice); - try { - 
localDevice.validateEnvelope(cloudIotManager.getRegistryId(), cloudIotManager.getSiteName()); - } catch (Exception e) { - localDevice.getErrors().put("Envelope", e); - } - } - } - return localDevices; - } - - private void setProjectId(String projectId) { - this.projectId = projectId; - } - - private void setSchemaBase(String schemaBasePath) { - schemaBase = new File(schemaBasePath); - schemaName = schemaBase.getName(); - loadSchema(METADATA_JSON); - loadSchema(ENVELOPE_JSON); - } - - private void loadSchema(String key) { - File schemaFile = new File(schemaBase, key); - try (InputStream schemaStream = new FileInputStream(schemaFile)) { - JSONObject rawSchema = new JSONObject(new JSONTokener(schemaStream)); - schemas.put(key, SchemaLoader.load(rawSchema, new Loader())); - } catch (Exception e) { - throw new RuntimeException("While loading schema " + schemaFile.getAbsolutePath(), e); - } - } - - private class Loader implements SchemaClient { - - public static final String FILE_PREFIX = "file:"; - - @Override - public InputStream get(String schema) { - try { - Preconditions.checkArgument(schema.startsWith(FILE_PREFIX)); - return new FileInputStream(new File(schemaBase, schema.substring(FILE_PREFIX.length()))); - } catch (Exception e) { - throw new RuntimeException("While loading sub-schema " + schema, e); - } - } - } -} diff --git a/validator/src/main/java/com/google/daq/mqtt/registrar/UdmiSchema.java b/validator/src/main/java/com/google/daq/mqtt/registrar/UdmiSchema.java deleted file mode 100644 index 9c2ed2701d..0000000000 --- a/validator/src/main/java/com/google/daq/mqtt/registrar/UdmiSchema.java +++ /dev/null @@ -1,102 +0,0 @@ -package com.google.daq.mqtt.registrar; - -import java.util.*; - -public class UdmiSchema { - static class Envelope { - public String deviceId; - public String deviceNumId; - public String deviceRegistryId; - public String projectId; - public final String subFolder = LocalDevice.METADATA_SUBFOLDER; - } - - static class Metadata { - public 
PointsetMetadata pointset; - public SystemMetadata system; - public GatewayMetadata gateway; - public LocalnetMetadata localnet; - public CloudMetadata cloud; - public Integer version; - public Date timestamp; - public String hash; - } - - static class CloudMetadata { - public String auth_type; - } - - static class PointsetMetadata { - public Map points; - } - - static class SystemMetadata { - public LocationMetadata location; - public PhysicalTagMetadata physical_tag; - } - - static class GatewayMetadata { - public String gateway_id; - public List proxy_ids; - public String subsystem; - } - - static class PointMetadata { - public String units; - public String ref; - } - - static class LocationMetadata { - public String site; - public String section; - public Object position; - } - - static class PhysicalTagMetadata { - public AssetMetadata asset; - } - - static class AssetMetadata { - public String guid; - public String name; - public String site; - } - - static class Config { - public Integer version = 1; - public Date timestamp; - public GatewayConfig gateway; - public LocalnetConfig localnet; - public PointsetConfig pointset; - } - - static class GatewayConfig { - public List proxy_ids = new ArrayList<>(); - } - - static class LocalnetConfig { - public Map subsystems = new TreeMap<>(); - } - - static class PointsetConfig { - public Map points = new TreeMap<>(); - } - - static class PointConfig { - public String ref; - - static PointConfig fromRef(String ref) { - PointConfig pointConfig = new PointConfig(); - pointConfig.ref = ref; - return pointConfig; - } - } - - static class LocalnetMetadata { - public Map subsystem; - } - - static class LocalnetSubsystem { - public String local_id; - } -} diff --git a/validator/src/main/java/com/google/daq/mqtt/util/CloudDeviceSettings.java b/validator/src/main/java/com/google/daq/mqtt/util/CloudDeviceSettings.java deleted file mode 100644 index 5e8eec817e..0000000000 --- 
a/validator/src/main/java/com/google/daq/mqtt/util/CloudDeviceSettings.java +++ /dev/null @@ -1,12 +0,0 @@ -package com.google.daq.mqtt.util; - -import com.google.api.services.cloudiot.v1.model.DeviceCredential; - -import java.util.List; - -public class CloudDeviceSettings { - public DeviceCredential credential; - public String metadata; - public List proxyDevices; - public String config; -} diff --git a/validator/src/main/java/com/google/daq/mqtt/util/CloudIotConfig.java b/validator/src/main/java/com/google/daq/mqtt/util/CloudIotConfig.java deleted file mode 100644 index 2a5fa86681..0000000000 --- a/validator/src/main/java/com/google/daq/mqtt/util/CloudIotConfig.java +++ /dev/null @@ -1,8 +0,0 @@ -package com.google.daq.mqtt.util; - -public class CloudIotConfig { - public String registry_id; - public String cloud_region; - public String site_name; - public String registrar_topic; -} diff --git a/validator/src/main/java/com/google/daq/mqtt/util/CloudIotManager.java b/validator/src/main/java/com/google/daq/mqtt/util/CloudIotManager.java deleted file mode 100644 index f69c3bb0ae..0000000000 --- a/validator/src/main/java/com/google/daq/mqtt/util/CloudIotManager.java +++ /dev/null @@ -1,264 +0,0 @@ -package com.google.daq.mqtt.util; - -import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport; -import com.google.api.client.googleapis.json.GoogleJsonResponseException; -import com.google.api.client.http.HttpRequestInitializer; -import com.google.api.client.json.JsonFactory; -import com.google.api.client.json.jackson2.JacksonFactory; -import com.google.api.services.cloudiot.v1.CloudIot; -import com.google.api.services.cloudiot.v1.CloudIotScopes; -import com.google.api.services.cloudiot.v1.model.BindDeviceToGatewayRequest; -import com.google.api.services.cloudiot.v1.model.Device; -import com.google.api.services.cloudiot.v1.model.DeviceCredential; -import com.google.api.services.cloudiot.v1.model.GatewayConfig; -import 
com.google.api.services.cloudiot.v1.model.ModifyCloudToDeviceConfigRequest; -import com.google.api.services.cloudiot.v1.model.PublicKeyCredential; -import com.google.auth.http.HttpCredentialsAdapter; -import com.google.auth.oauth2.GoogleCredentials; -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableList; - -import java.io.File; -import java.io.IOException; -import java.util.ArrayList; -import java.util.Base64; -import java.util.HashMap; -import java.util.List; -import java.util.Map; -import java.util.regex.Pattern; - -import static com.google.daq.mqtt.util.ConfigUtil.readCloudIotConfig; -import static java.util.stream.Collectors.toList; - -/** - * Encapsulation of all Cloud IoT interaction functions. - */ -public class CloudIotManager { - - private static final String DEVICE_UPDATE_MASK = "blocked,credentials,metadata"; - private static final String REGISTERED_KEY = "registered"; - private static final String SCHEMA_KEY = "schema_name"; - private static final int LIST_PAGE_SIZE = 1000; - - private final CloudIotConfig cloudIotConfig; - - private final String registryId; - private final String projectId; - private final String cloudRegion; - - private CloudIot cloudIotService; - private String projectPath; - private CloudIot.Projects.Locations.Registries cloudIotRegistries; - private Map deviceMap = new HashMap<>(); - private String schemaName; - - public CloudIotManager(String projectId, File iotConfigFile, String schemaName) { - this.projectId = projectId; - this.schemaName = schemaName; - cloudIotConfig = validate(readCloudIotConfig(iotConfigFile)); - registryId = cloudIotConfig.registry_id; - cloudRegion = cloudIotConfig.cloud_region; - initializeCloudIoT(); - } - - private static CloudIotConfig validate(CloudIotConfig cloudIotConfig) { - Preconditions.checkNotNull(cloudIotConfig.registry_id, "registry_id not defined"); - Preconditions.checkNotNull(cloudIotConfig.cloud_region, "cloud_region not defined"); - 
Preconditions.checkNotNull(cloudIotConfig.site_name, "site_name not defined"); - return cloudIotConfig; - } - - private String getRegistryPath(String registryId) { - return projectPath + "/registries/" + registryId; - } - - private String getDevicePath(String registryId, String deviceId) { - return getRegistryPath(registryId) + "/devices/" + deviceId; - } - - private void initializeCloudIoT() { - projectPath = "projects/" + projectId + "/locations/" + cloudRegion; - try { - System.err.println("Initializing with default credentials..."); - GoogleCredentials credential = - GoogleCredentials.getApplicationDefault().createScoped(CloudIotScopes.all()); - JsonFactory jsonFactory = JacksonFactory.getDefaultInstance(); - HttpRequestInitializer init = new HttpCredentialsAdapter(credential); - cloudIotService = - new CloudIot.Builder(GoogleNetHttpTransport.newTrustedTransport(), jsonFactory, init) - .setApplicationName("com.google.iot.bos") - .build(); - cloudIotRegistries = cloudIotService.projects().locations().registries(); - System.err.println("Created service for project " + projectPath); - } catch (Exception e) { - throw new RuntimeException("While initializing Cloud IoT project " + projectPath, e); - } - } - - public boolean registerDevice(String deviceId, CloudDeviceSettings settings) { - try { - Preconditions.checkNotNull(cloudIotService, "CloudIoT service not initialized"); - Preconditions.checkNotNull(deviceMap, "deviceMap not initialized"); - Device device = deviceMap.get(deviceId); - boolean isNewDevice = device == null; - if (isNewDevice) { - createDevice(deviceId, settings); - } else { - updateDevice(deviceId, settings, device); - } - writeDeviceConfig(deviceId, settings.config); - return isNewDevice; - } catch (Exception e) { - throw new RuntimeException("While registering device " + deviceId, e); - } - } - - private void writeDeviceConfig(String deviceId, String config) { - try { - 
cloudIotRegistries.devices().modifyCloudToDeviceConfig(getDevicePath(registryId, deviceId), - new ModifyCloudToDeviceConfigRequest().setBinaryData( - Base64.getEncoder().encodeToString(config.getBytes())) - ).execute(); - } catch (Exception e) { - throw new RuntimeException("While modifying device config", e); - } - } - - public void blockDevice(String deviceId, boolean blocked) { - try { - Device device = new Device(); - device.setBlocked(blocked); - String path = getDevicePath(registryId, deviceId); - cloudIotRegistries.devices().patch(path, device).setUpdateMask("blocked").execute(); - } catch (Exception e) { - throw new RuntimeException(String.format("While (un)blocking device %s/%s=%s", registryId, deviceId, blocked), e); - } - } - - private Device makeDevice(String deviceId, CloudDeviceSettings settings, - Device oldDevice) { - Map metadataMap = oldDevice == null ? null : oldDevice.getMetadata(); - if (metadataMap == null) { - metadataMap = new HashMap<>(); - } - metadataMap.put(REGISTERED_KEY, settings.metadata); - metadataMap.put(SCHEMA_KEY, schemaName); - return new Device() - .setId(deviceId) - .setGatewayConfig(getGatewayConfig(settings)) - .setCredentials(getCredentials(settings)) - .setMetadata(metadataMap); - } - - private ImmutableList getCredentials(CloudDeviceSettings settings) { - if (settings.credential != null) { - return ImmutableList.of(settings.credential); - } else { - return ImmutableList.of(); - } - } - - private GatewayConfig getGatewayConfig(CloudDeviceSettings settings) { - boolean isGateway = settings.proxyDevices != null; - GatewayConfig gwConfig = new GatewayConfig(); - gwConfig.setGatewayType(isGateway ? 
"GATEWAY" : "NON_GATEWAY"); - gwConfig.setGatewayAuthMethod("ASSOCIATION_ONLY"); - return gwConfig; - } - - private void createDevice(String deviceId, CloudDeviceSettings settings) throws IOException { - try { - cloudIotRegistries.devices().create(getRegistryPath(registryId), - makeDevice(deviceId, settings, null)).execute(); - } catch (GoogleJsonResponseException e) { - throw new RuntimeException("Remote error creating device " + deviceId, e); - } - } - - private void updateDevice(String deviceId, CloudDeviceSettings settings, - Device oldDevice) { - try { - Device device = makeDevice(deviceId, settings, oldDevice) - .setId(null) - .setNumId(null); - cloudIotRegistries - .devices() - .patch(getDevicePath(registryId, deviceId), device).setUpdateMask(DEVICE_UPDATE_MASK) - .execute(); - } catch (Exception e) { - throw new RuntimeException("Remote error patching device " + deviceId, e); - } - } - - public static DeviceCredential makeCredentials(String keyFormat, String keyData) { - PublicKeyCredential publicKeyCredential = new PublicKeyCredential(); - publicKeyCredential.setFormat(keyFormat); - publicKeyCredential.setKey(keyData); - - DeviceCredential deviceCredential = new DeviceCredential(); - deviceCredential.setPublicKey(publicKeyCredential); - return deviceCredential; - } - - public List fetchDeviceList(Pattern devicePattern) { - Preconditions.checkNotNull(cloudIotService, "CloudIoT service not initialized"); - try { - List devices = cloudIotRegistries - .devices() - .list(getRegistryPath(registryId)) - .setPageSize(LIST_PAGE_SIZE) - .execute() - .getDevices(); - if (devices == null) { - return new ArrayList<>(); - } - if (devices.size() == LIST_PAGE_SIZE) { - throw new RuntimeException("Returned exact page size, likely not fetched all devices"); - } - return devices.stream().filter(device -> devicePattern.matcher(device.getId()).find()).collect(toList()); - } catch (Exception e) { - throw new RuntimeException("While listing devices for registry " + registryId, 
e); - } - } - - public Device fetchDevice(String deviceId) { - return deviceMap.computeIfAbsent(deviceId, this::fetchDeviceFromCloud); - } - - private Device fetchDeviceFromCloud(String deviceId) { - try { - return cloudIotRegistries.devices().get(getDevicePath(registryId, deviceId)).execute(); - } catch (Exception e) { - if (e instanceof GoogleJsonResponseException - && ((GoogleJsonResponseException) e).getDetails().getCode() == 404) { - return null; - } - throw new RuntimeException("While fetching " + deviceId, e); - } - } - - public String getRegistryId() { - return registryId; - } - - public String getProjectId() { - return projectId; - } - - public String getSiteName() { - return cloudIotConfig.site_name; - } - - public Object getCloudRegion() { - return cloudRegion; - } - - public void bindDevice(String proxyDeviceId, String gatewayDeviceId) throws IOException { - cloudIotRegistries.bindDeviceToGateway(getRegistryPath(registryId), - getBindRequest(proxyDeviceId, gatewayDeviceId)).execute(); - } - - private BindDeviceToGatewayRequest getBindRequest(String deviceId, String gatewayId) { - return new BindDeviceToGatewayRequest().setDeviceId(deviceId).setGatewayId(gatewayId); - } -} diff --git a/validator/src/main/java/com/google/daq/mqtt/util/ConfigUtil.java b/validator/src/main/java/com/google/daq/mqtt/util/ConfigUtil.java deleted file mode 100644 index 16230859d4..0000000000 --- a/validator/src/main/java/com/google/daq/mqtt/util/ConfigUtil.java +++ /dev/null @@ -1,32 +0,0 @@ -package com.google.daq.mqtt.util; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.api.client.googleapis.auth.oauth2.GoogleCredential; -import com.google.api.services.cloudiot.v1.CloudIotScopes; - -import java.io.File; -import java.io.FileInputStream; - -public class ConfigUtil { - public static final String CLOUD_IOT_CONFIG_JSON = "cloud_iot_config.json"; - - private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper(); - - public static CloudIotConfig 
readCloudIotConfig(File configFile) { - try { - return OBJECT_MAPPER.readValue(configFile, CloudIotConfig.class); - } catch (Exception e) { - throw new RuntimeException("While reading config file "+ configFile.getAbsolutePath(), e); - } - } - - static GoogleCredential authorizeServiceAccount(File credFile) { - try (FileInputStream credStream = new FileInputStream(credFile)) { - return GoogleCredential - .fromStream(credStream) - .createScoped(CloudIotScopes.all()); - } catch (Exception e) { - throw new RuntimeException("While reading cred file " + credFile.getAbsolutePath(), e); - } - } -} diff --git a/validator/src/main/java/com/google/daq/mqtt/util/ExceptionMap.java b/validator/src/main/java/com/google/daq/mqtt/util/ExceptionMap.java deleted file mode 100644 index ce8cdcfd32..0000000000 --- a/validator/src/main/java/com/google/daq/mqtt/util/ExceptionMap.java +++ /dev/null @@ -1,109 +0,0 @@ -package com.google.daq.mqtt.util; - -import java.io.IOException; -import java.io.PrintStream; -import java.util.Map; -import java.util.TreeMap; -import java.util.function.BiConsumer; -import java.util.stream.Stream; - -import org.everit.json.schema.ValidationException; - -public class ExceptionMap extends RuntimeException { - - private static final byte[] NEWLINE_BYTES = "\n".getBytes(); - private static final byte[] SEPARATOR_BYTES = ": ".getBytes(); - - final Map exceptions = new TreeMap<>(); - - public ExceptionMap(String description) { - super(description); - } - - private void forEach(BiConsumer consumer) { - exceptions.forEach(consumer); - } - - public boolean isEmpty() { - return exceptions.isEmpty(); - } - - public void throwIfNotEmpty() { - if (!exceptions.isEmpty() || getCause() != null) { - throw this; - } - } - - public void put(String key, Exception exception) { - if (exceptions.put(key, exception) != null) { - throw new IllegalArgumentException("Exception key already defined: " + key); - } - } - - public static ErrorTree format(Exception e, String indent) { - 
return format(e, "", indent); - } - - private static ErrorTree format(Throwable e, final String prefix, final String indent) { - final ErrorTree errorTree = new ErrorTree(); - errorTree.prefix = prefix; - errorTree.message = e.getMessage(); - final String newPrefix = prefix + indent; - if (e instanceof ExceptionMap) { - if (e.getCause() != null) { - errorTree.child = format(e.getCause(), newPrefix, indent); - } - ((ExceptionMap) e).forEach( - (key, sub) -> errorTree.children.put(key, format(sub, newPrefix, indent))); - } else if (e instanceof ValidationException) { - ((ValidationException) e).getCausingExceptions().forEach( - sub -> errorTree.children.put(sub.getMessage(), format(sub, newPrefix, indent))); - } else if (e.getCause() != null) { - errorTree.child = format(e.getCause(), newPrefix, indent); - } - if (errorTree.children.isEmpty()) { - errorTree.children = null; - } - if (errorTree.child == null && errorTree.children == null && errorTree.message == null) { - errorTree.message = e.toString(); - } - return errorTree; - } - - public Stream> stream() { - return exceptions.entrySet().stream(); - } - - public int size() { - return exceptions.size(); - } - - public static class ErrorTree { - public String prefix; - public String message; - public ErrorTree child; - public Map children = new TreeMap<>(); - - public void write(PrintStream err) { - if (message == null && children == null && child == null) { - throw new RuntimeException("Empty ErrorTree object"); - } - try { - if (message != null) { - err.write(prefix.getBytes()); - err.write(message.getBytes()); - err.write(NEWLINE_BYTES); - } - } catch (IOException e) { - throw new RuntimeException(e); - } - if (child != null) { - child.write(err); - } - if (children != null) { - children.forEach((key, value) -> value.write(err)); - } - } - } - -} diff --git a/validator/src/main/java/com/google/daq/mqtt/util/FirestoreDataSink.java b/validator/src/main/java/com/google/daq/mqtt/util/FirestoreDataSink.java deleted 
file mode 100644 index 9a09a0cc9d..0000000000 --- a/validator/src/main/java/com/google/daq/mqtt/util/FirestoreDataSink.java +++ /dev/null @@ -1,101 +0,0 @@ -package com.google.daq.mqtt.util; - -import com.google.auth.Credentials; -import com.google.auth.oauth2.GoogleCredentials; -import com.google.cloud.ServiceOptions; -import com.google.cloud.firestore.DocumentReference; -import com.google.cloud.firestore.Firestore; -import com.google.cloud.firestore.FirestoreOptions; -import com.google.common.base.Preconditions; -import com.google.daq.mqtt.util.ExceptionMap.ErrorTree; -import java.io.File; -import java.io.FileInputStream; -import java.io.IOException; -import java.time.Instant; -import java.time.ZoneOffset; -import java.time.format.DateTimeFormatter; -import java.util.Map; -import java.util.concurrent.atomic.AtomicReference; - -public class FirestoreDataSink { - - private static final String - CREDENTIAL_ERROR_FORMAT = "Credential file %s defined by %s not found."; - private static final String - VIEW_URL_FORMAT = "https://console.cloud.google.com/firestore/data/registries/?project=%s"; - - private static final DateTimeFormatter dateTimeFormatter = - DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSX").withZone(ZoneOffset.UTC); - - private final Firestore db; - private final String projectId = ServiceOptions.getDefaultProjectId(); - - private final AtomicReference oldError = new AtomicReference<>(); - - public FirestoreDataSink() { - try { - Credentials projectCredentials = getProjectCredentials(); - FirestoreOptions firestoreOptions = - FirestoreOptions.getDefaultInstance().toBuilder() - .setCredentials(projectCredentials) - .setProjectId(projectId) - .setTimestampsInSnapshotsEnabled(true) - .build(); - - db = firestoreOptions.getService(); - } catch (Exception e) { - throw new RuntimeException("While creating Firestore connection to " + projectId, e); - } - } - - private Credentials getProjectCredentials() throws IOException { - File credentialFile = new 
File(System.getenv(ServiceOptions.CREDENTIAL_ENV_NAME)); - if (!credentialFile.exists()) { - throw new RuntimeException(String.format(CREDENTIAL_ERROR_FORMAT, - credentialFile.getAbsolutePath(), ServiceOptions.CREDENTIAL_ENV_NAME)); - } - try (FileInputStream serviceAccount = new FileInputStream(credentialFile)) { - return GoogleCredentials.fromStream(serviceAccount); - } - } - - public void validationResult(String deviceId, String schemaId, Map attributes, - Object message, - ErrorTree errorTree) { - if (oldError.get() != null) { - throw oldError.getAndSet(null); - } - - try { - String registryId = attributes.get("deviceRegistryId"); - Preconditions.checkNotNull(deviceId, "deviceId attribute not defined"); - Preconditions.checkNotNull(schemaId, "schemaId not properly defined"); - Preconditions.checkNotNull(registryId, "deviceRegistryId attribute not defined"); - String instantNow = dateTimeFormatter.format(Instant.now()); - DocumentReference registryDoc = db.collection("registries").document(registryId); - registryDoc.update("validated", instantNow); - DocumentReference deviceDoc = registryDoc.collection("devices").document(deviceId); - deviceDoc.update("validated", instantNow); - DocumentReference resultDoc = deviceDoc.collection("validations").document(schemaId); - PojoBundle dataBundle = new PojoBundle(); - dataBundle.validated = instantNow; - dataBundle.errorTree = errorTree; - dataBundle.attributes = attributes; - dataBundle.message = message; - resultDoc.set(dataBundle); - } catch (Exception e) { - throw new RuntimeException("While writing result for " + deviceId, e); - } - } - - static class PojoBundle { - public String validated; - public ErrorTree errorTree; - public Object message; - public Map attributes; - } - - public String getViewUrl() { - return String.format(VIEW_URL_FORMAT, projectId); - } -} diff --git a/validator/src/main/java/com/google/daq/mqtt/util/PubSubClient.java b/validator/src/main/java/com/google/daq/mqtt/util/PubSubClient.java deleted 
file mode 100644 index c7c048dd1f..0000000000 --- a/validator/src/main/java/com/google/daq/mqtt/util/PubSubClient.java +++ /dev/null @@ -1,166 +0,0 @@ -package com.google.daq.mqtt.util; - -import com.fasterxml.jackson.annotation.JsonInclude.Include; -import com.fasterxml.jackson.core.JsonProcessingException; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.SerializationFeature; -import com.google.api.client.util.Base64; -import com.google.cloud.ServiceOptions; -import com.google.cloud.pubsub.v1.AckReplyConsumer; -import com.google.cloud.pubsub.v1.MessageReceiver; -import com.google.cloud.pubsub.v1.Subscriber; -import com.google.cloud.pubsub.v1.SubscriptionAdminClient; -import com.google.cloud.pubsub.v1.SubscriptionAdminClient.ListSubscriptionsPagedResponse; -import com.google.protobuf.Timestamp; -import com.google.pubsub.v1.*; -import io.grpc.LoadBalancerRegistry; -import io.grpc.internal.PickFirstLoadBalancerProvider; - -import java.util.HashMap; -import java.util.Map; -import java.util.TreeMap; -import java.util.concurrent.BlockingQueue; -import java.util.concurrent.LinkedBlockingDeque; -import java.util.concurrent.atomic.AtomicBoolean; -import java.util.function.BiConsumer; - -public class PubSubClient { - - private static final String CONNECT_ERROR_FORMAT = "While connecting to %s/%s"; - - private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper() - .enable(SerializationFeature.INDENT_OUTPUT) - .setSerializationInclusion(Include.NON_NULL); - private static final String SUBSCRIPTION_NAME_FORMAT = "daq-validator-%s"; - private static final String - REFRESH_ERROR_FORMAT = "While refreshing subscription to topic %s subscription %s"; - - private static final String PROJECT_ID = ServiceOptions.getDefaultProjectId(); - private static final long SUBSCRIPTION_RACE_DELAY_MS = 10000; - private static final String WAS_BASE_64 = "wasBase64"; - - private final AtomicBoolean active = new AtomicBoolean(); - private final 
BlockingQueue messages = new LinkedBlockingDeque<>(); - private final long startTimeSec = System.currentTimeMillis() / 1000; - - private Subscriber subscriber; - - { - // Why this needs to be done there is no rhyme or reason. - LoadBalancerRegistry.getDefaultRegistry().register(new PickFirstLoadBalancerProvider()); - } - - public PubSubClient(String instName, String topicId) { - try { - ProjectTopicName projectTopicName = ProjectTopicName.of(PROJECT_ID, topicId); - String name = String.format(SUBSCRIPTION_NAME_FORMAT, instName); - ProjectSubscriptionName subscriptionName = ProjectSubscriptionName.of(PROJECT_ID, name); - System.out.println("Resetting and connecting to pubsub subscription " + subscriptionName); - resetSubscription(projectTopicName, subscriptionName); - subscriber = Subscriber.newBuilder(subscriptionName, new MessageProcessor()).build(); - subscriber.startAsync().awaitRunning(); - active.set(true); - } catch (Exception e) { - throw new RuntimeException(String.format(CONNECT_ERROR_FORMAT, PROJECT_ID, topicId), e); - } - } - - private SeekRequest getCurrentTimeSeekRequest(String subscription) { - Timestamp timestamp = Timestamp.newBuilder().setSeconds(System.currentTimeMillis()/1000).build(); - return SeekRequest.newBuilder().setSubscription(subscription).setTime(timestamp).build(); - } - - public boolean isActive() { - return active.get(); - } - - @SuppressWarnings("unchecked") - public void processMessage(BiConsumer, Map> handler) { - try { - PubsubMessage message = messages.take(); - long seconds = message.getPublishTime().getSeconds(); - if (seconds < startTimeSec) { - System.out.println(String.format("Flushing outdated message from %d seconds ago", - startTimeSec - seconds)); - return; - } - Map attributes = message.getAttributesMap(); - byte[] rawData = message.getData().toByteArray(); - final String data; - boolean base64 = rawData[0] != '{'; - if (base64) { - data = new String(Base64.decodeBase64(rawData)); - } else { - data = new 
String(rawData); - } - Map asMap; - try { - asMap = OBJECT_MAPPER.readValue(data, TreeMap.class); - } catch (JsonProcessingException e) { - asMap = new ErrorContainer(e, data); - } - - attributes = new HashMap<>(attributes); - attributes.put(WAS_BASE_64, ""+ base64); - - handler.accept(asMap, attributes); - } catch (Exception e) { - throw new RuntimeException("Processing pubsub message for " + getSubscriptionId(), e); - } - } - - static class ErrorContainer extends TreeMap { - ErrorContainer(Exception e, String message) { - put("exception", e.toString()); - put("message", message); - } - } - - private void stop() { - if (subscriber != null) { - active.set(false); - subscriber.stopAsync(); - } - } - - public String getSubscriptionId() { - return subscriber.getSubscriptionNameString(); - } - - private class MessageProcessor implements MessageReceiver { - @Override - public void receiveMessage(PubsubMessage message, AckReplyConsumer consumer) { - messages.offer(message); - consumer.ack(); - } - } - - private void resetSubscription(ProjectTopicName topicName, ProjectSubscriptionName subscriptionName) { - try (SubscriptionAdminClient subscriptionAdminClient = SubscriptionAdminClient.create()) { - if (subscriptionExists(subscriptionAdminClient, topicName, subscriptionName)) { - System.out.println("Resetting existing subscription " + subscriptionName); - subscriptionAdminClient.seek(getCurrentTimeSeekRequest(subscriptionName.toString())); - Thread.sleep(SUBSCRIPTION_RACE_DELAY_MS); - } else { - System.out.println("Creating new subscription " + subscriptionName); - subscriptionAdminClient.createSubscription( - subscriptionName, topicName, PushConfig.getDefaultInstance(), 0); - } - } catch (Exception e) { - throw new RuntimeException( - String.format(REFRESH_ERROR_FORMAT, topicName, subscriptionName), e); - } - } - - private boolean subscriptionExists(SubscriptionAdminClient subscriptionAdminClient, - ProjectTopicName topicName, ProjectSubscriptionName subscriptionName) { - 
ListSubscriptionsPagedResponse listSubscriptionsPagedResponse = subscriptionAdminClient - .listSubscriptions(ProjectName.of(PROJECT_ID)); - for (Subscription subscription : listSubscriptionsPagedResponse.iterateAll()) { - if (subscription.getName().equals(subscriptionName.toString())) { - return true; - } - } - return false; - } -} diff --git a/validator/src/main/java/com/google/daq/mqtt/util/PubSubPusher.java b/validator/src/main/java/com/google/daq/mqtt/util/PubSubPusher.java deleted file mode 100644 index 3c9eca44bc..0000000000 --- a/validator/src/main/java/com/google/daq/mqtt/util/PubSubPusher.java +++ /dev/null @@ -1,66 +0,0 @@ -package com.google.daq.mqtt.util; - -import com.google.api.core.ApiFuture; -import com.google.cloud.pubsub.v1.Publisher; -import com.google.common.base.Preconditions; -import com.google.protobuf.ByteString; -import com.google.pubsub.v1.ProjectTopicName; -import com.google.pubsub.v1.PubsubMessage; -import io.grpc.LoadBalancerRegistry; -import io.grpc.internal.PickFirstLoadBalancerProvider; - -import java.io.File; -import java.nio.charset.Charset; -import java.util.Map; - -import static com.google.daq.mqtt.util.ConfigUtil.readCloudIotConfig; - -public class PubSubPusher { - - private final Publisher publisher; - private final String registrar_topic; - - { - // Why this needs to be done there is no rhyme or reason. 
- LoadBalancerRegistry.getDefaultRegistry().register(new PickFirstLoadBalancerProvider()); - } - - public PubSubPusher(String projectId, File iotConfigFile) { - try { - CloudIotConfig cloudIotConfig = validate(readCloudIotConfig(iotConfigFile)); - registrar_topic = cloudIotConfig.registrar_topic; - ProjectTopicName topicName = ProjectTopicName.of(projectId, registrar_topic); - publisher = Publisher.newBuilder(topicName).build(); - } catch (Exception e) { - throw new RuntimeException("While creating PubSubPublisher", e); - } - } - - public String sendMessage(Map attributes, String body) { - try { - PubsubMessage message = PubsubMessage.newBuilder() - .setData(ByteString.copyFrom(body, Charset.defaultCharset())) - .putAllAttributes(attributes) - .build(); - ApiFuture publish = publisher.publish(message); - return publish.get(); - } catch (Exception e) { - throw new RuntimeException("While sending to topic " + registrar_topic, e); - } - } - - public void shutdown() { - try { - publisher.publishAllOutstanding(); - publisher.shutdown(); - System.err.println("Done with PubSubPusher"); - } catch (Exception e) { - throw new RuntimeException("While shutting down publisher" + registrar_topic, e); - } - } - - private CloudIotConfig validate(CloudIotConfig readCloudIotConfig) { - Preconditions.checkNotNull(readCloudIotConfig.registrar_topic, "registrar_topic not defined"); - return readCloudIotConfig; - } -} diff --git a/validator/src/main/java/com/google/daq/mqtt/util/RetryHttpInitializerWrapper.java b/validator/src/main/java/com/google/daq/mqtt/util/RetryHttpInitializerWrapper.java deleted file mode 100644 index dfc84aa9e6..0000000000 --- a/validator/src/main/java/com/google/daq/mqtt/util/RetryHttpInitializerWrapper.java +++ /dev/null @@ -1,87 +0,0 @@ -package com.google.daq.mqtt.util; - -import com.google.api.client.auth.oauth2.Credential; -import com.google.api.client.http.HttpBackOffIOExceptionHandler; -import 
com.google.api.client.http.HttpBackOffUnsuccessfulResponseHandler; -import com.google.api.client.http.HttpRequest; -import com.google.api.client.http.HttpRequestInitializer; -import com.google.api.client.http.HttpResponse; -import com.google.api.client.http.HttpUnsuccessfulResponseHandler; -import com.google.api.client.util.ExponentialBackOff; -import com.google.api.client.util.Sleeper; -import com.google.common.base.Preconditions; -import java.io.IOException; -import java.util.logging.Logger; - -/** - * RetryHttpInitializerWrapper will automatically retry upon RPC failures, preserving the - * auto-refresh behavior of the Google Credentials. - */ -public class RetryHttpInitializerWrapper implements HttpRequestInitializer { - - /** A private logger. */ - private static final Logger LOG = Logger.getLogger(RetryHttpInitializerWrapper.class.getName()); - - /** One minutes in milliseconds. */ - private static final int ONE_MINUTE_MILLIS = 60 * 1000; - - /** - * Intercepts the request for filling in the "Authorization" header field, as well as recovering - * from certain unsuccessful error codes wherein the Credential must refresh its token for a - * retry. - */ - private final Credential wrappedCredential; - - /** A sleeper; you can replace it with a mock in your test. */ - private final Sleeper sleeper; - - /** - * A constructor. - * - * @param wrappedCredential Credential which will be wrapped and used for providing auth header. - */ - public RetryHttpInitializerWrapper(final Credential wrappedCredential) { - this(wrappedCredential, Sleeper.DEFAULT); - } - - /** - * A protected constructor only for testing. - * - * @param wrappedCredential Credential which will be wrapped and used for providing auth header. - * @param sleeper Sleeper for easy testing. 
- */ - RetryHttpInitializerWrapper(final Credential wrappedCredential, final Sleeper sleeper) { - this.wrappedCredential = Preconditions.checkNotNull(wrappedCredential); - this.sleeper = sleeper; - } - - /** Initializes the given request. */ - @Override - public final void initialize(final HttpRequest request) { - request.setReadTimeout(2 * ONE_MINUTE_MILLIS); // 2 minutes read timeout - final HttpUnsuccessfulResponseHandler backoffHandler = - new HttpBackOffUnsuccessfulResponseHandler(new ExponentialBackOff()).setSleeper(sleeper); - request.setInterceptor(wrappedCredential); - request.setUnsuccessfulResponseHandler( - new HttpUnsuccessfulResponseHandler() { - @Override - public boolean handleResponse( - final HttpRequest request, final HttpResponse response, final boolean supportsRetry) - throws IOException { - if (wrappedCredential.handleResponse(request, response, supportsRetry)) { - // If credential decides it can handle it, the return code or message indicated - // something specific to authentication, and no backoff is desired. - return true; - } else if (backoffHandler.handleResponse(request, response, supportsRetry)) { - // Otherwise, we defer to the judgment of our internal backoff handler. 
- LOG.info("Retrying " + request.getUrl().toString()); - return true; - } else { - return false; - } - } - }); - request.setIOExceptionHandler( - new HttpBackOffIOExceptionHandler(new ExponentialBackOff()).setSleeper(sleeper)); - } -} \ No newline at end of file diff --git a/validator/src/main/java/com/google/daq/mqtt/validator/ReportingDevice.java b/validator/src/main/java/com/google/daq/mqtt/validator/ReportingDevice.java deleted file mode 100644 index 65480463d0..0000000000 --- a/validator/src/main/java/com/google/daq/mqtt/validator/ReportingDevice.java +++ /dev/null @@ -1,101 +0,0 @@ -package com.google.daq.mqtt.validator; - -import com.google.common.base.Joiner; - -import java.util.*; - -public class ReportingDevice { - - private final String deviceId; - private final MetadataDiff metadataDiff = new MetadataDiff(); - private Metadata metadata; - private List errors = new ArrayList<>(); - - public ReportingDevice(String deviceId) { - this.deviceId = deviceId; - } - - public void setMetadata(Metadata metadata) { - this.metadata = metadata; - } - - public String getDeviceId() { - return deviceId; - } - - public boolean hasBeenValidated() { - return metadataDiff.extraPoints != null; - } - - public boolean hasError() { - return metadataDiff.errors != null && metadataDiff.errors.isEmpty(); - } - - public boolean hasMetadataDiff() { - return (metadataDiff.extraPoints != null && !metadataDiff.extraPoints.isEmpty()) - || (metadataDiff.missingPoints != null && !metadataDiff.missingPoints.isEmpty()); - } - - public String metadataMessage() { - if (metadataDiff.extraPoints != null && !metadataDiff.extraPoints.isEmpty()) { - return "Extra points: " + Joiner.on(",").join(metadataDiff.extraPoints); - } - if (metadataDiff.missingPoints != null && !metadataDiff.missingPoints.isEmpty()) { - return "Missing points: " + Joiner.on(",").join(metadataDiff.missingPoints); - } - return null; - } - - public MetadataDiff getMetadataDiff() { - return metadataDiff; - } - - public void 
validateMetadata(PointsetMessage message) { - Set expectedPoints = new TreeSet<>(metadata.pointset.points.keySet()); - Set deliveredPoints = new TreeSet<>(message.points.keySet()); - metadataDiff.extraPoints = new TreeSet<>(deliveredPoints); - metadataDiff.extraPoints.removeAll(expectedPoints); - metadataDiff.missingPoints = new TreeSet<>(expectedPoints); - metadataDiff.missingPoints.removeAll(deliveredPoints); - if (hasMetadataDiff()) { - throw new RuntimeException("Metadata validation failed: " + metadataMessage()); - } - } - - public void addError(Exception error) { - errors.add(error); - if (metadataDiff.errors == null) { - metadataDiff.errors = new ArrayList<>(); - } - metadataDiff.errors.add(error.toString()); - } - - public static class MetadataDiff { - public List errors; - public Set extraPoints; - public Set missingPoints; - } - - public static class PointsetMessage { - public Integer version; - public String timestamp; - public Map points; - } - - public static class Metadata { - public Integer version; - public String timestamp; - public String hash; - public Object system; - public PointSet pointset; - } - - public static class PointSet { - public Map points; - } - - public static class PointDescriptor { - public String units; - public Object present_value; - } -} diff --git a/validator/src/main/java/com/google/daq/mqtt/validator/Validator.java b/validator/src/main/java/com/google/daq/mqtt/validator/Validator.java deleted file mode 100644 index 5fc94ed99c..0000000000 --- a/validator/src/main/java/com/google/daq/mqtt/validator/Validator.java +++ /dev/null @@ -1,463 +0,0 @@ -package com.google.daq.mqtt.validator; - -import com.fasterxml.jackson.annotation.JsonInclude.Include; -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.SerializationFeature; -import com.fasterxml.jackson.databind.util.ISO8601DateFormat; -import com.google.cloud.ServiceOptions; -import com.google.common.base.Preconditions; -import 
com.google.common.base.Strings; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableSet; -import com.google.daq.mqtt.util.*; -import com.google.daq.mqtt.util.ExceptionMap.ErrorTree; -import org.everit.json.schema.Schema; -import org.everit.json.schema.ValidationException; -import org.everit.json.schema.loader.SchemaClient; -import org.everit.json.schema.loader.SchemaLoader; -import org.json.JSONObject; -import org.json.JSONTokener; - -import java.io.*; -import java.net.URL; -import java.text.SimpleDateFormat; -import java.util.*; -import java.util.regex.Pattern; -import java.util.stream.Collectors; - -public class Validator { - - private static final SimpleDateFormat DATE_FORMAT = new SimpleDateFormat("dd-MM-yyyy hh:mm"); - - private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper() - .enable(SerializationFeature.INDENT_OUTPUT) - .disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS) - .setDateFormat(new ISO8601DateFormat()) - .setSerializationInclusion(Include.NON_NULL); - - private static final String ERROR_FORMAT_INDENT = " "; - private static final String JSON_SUFFIX = ".json"; - private static final String SCHEMA_VALIDATION_FORMAT = "Validating %d schemas"; - private static final String TARGET_VALIDATION_FORMAT = "Validating %d files against %s"; - private static final String PUBSUB_PREFIX = "pubsub:"; - private static final File OUT_BASE_FILE = new File("validations"); - private static final String DEVICE_FILE_FORMAT = "devices/%s"; - private static final String ATTRIBUTE_FILE_FORMAT = "%s.attr"; - private static final String MESSAGE_FILE_FORMAT = "%s.json"; - private static final String ERROR_FILE_FORMAT = "%s.out"; - private static final Pattern DEVICE_ID_PATTERN = - Pattern.compile("^([a-z][_a-z0-9-]*[a-z0-9]|[A-Z][_A-Z0-9-]*[A-Z0-9])$"); - private static final String DEVICE_MATCH_FORMAT = "DeviceId %s must match pattern %s"; - private static final String SCHEMA_SKIP_FORMAT = "Unknown schema subFolder 
'%s' for %s"; - private static final String ENVELOPE_SCHEMA_ID = "envelope"; - private static final String METADATA_JSON = "metadata.json"; - private static final String DEVICES_SUBDIR = "devices"; - private static final String METADATA_REPORT_JSON = "metadata_report.json"; - private static final String DEVICE_REGISTRY_ID_KEY = "deviceRegistryId"; - private static final String UNKNOWN_SCHEMA_DEFAULT = "unknown"; - private static final String POINTSET_TYPE = "pointset"; - private FirestoreDataSink dataSink; - private File schemaRoot; - private String schemaSpec; - private final Map expectedDevices = new TreeMap<>(); - private final Set extraDevices = new TreeSet<>(); - private final Set processedDevices = new TreeSet<>(); - private final Set base64Devices = new TreeSet<>(); - private CloudIotConfig cloudIotConfig; - public static final File METADATA_REPORT_FILE = new File(OUT_BASE_FILE, METADATA_REPORT_JSON); - - public static void main(String[] args) { - Validator validator = new Validator(); - try { - System.out.println(ServiceOptions.CREDENTIAL_ENV_NAME + "=" + - System.getenv(ServiceOptions.CREDENTIAL_ENV_NAME)); - if (args.length < 3 || args.length > 4) { - throw new IllegalArgumentException("Args: schema target inst_name [site]"); - } - validator.setSchemaSpec(args[0]); - String targetSpec = args[1]; - String instName = args[2]; - if (args.length >= 4) { - validator.setSiteDir(args[3]); - } - if (targetSpec.startsWith(PUBSUB_PREFIX)) { - String topicName = targetSpec.substring(PUBSUB_PREFIX.length()); - validator.validatePubSub(instName, topicName); - } else { - validator.validateFilesOutput(targetSpec); - } - } catch (ExceptionMap | ValidationException processingException) { - System.exit(2); - } catch (Exception e) { - e.printStackTrace(); - System.err.flush(); - System.exit(-1); - } - System.exit(0); - } - - private void setSiteDir(String siteDir) { - File cloudConfig = new File(siteDir, "cloud_iot_config.json"); - try { - cloudIotConfig = 
ConfigUtil.readCloudIotConfig(cloudConfig); - } catch (Exception e) { - throw new RuntimeException("While reading config file " + cloudConfig.getAbsolutePath(), e); - } - - File devicesDir = new File(siteDir, DEVICES_SUBDIR); - try { - for (String device : Objects.requireNonNull(devicesDir.list())) { - try { - File deviceDir = new File(devicesDir, device); - File metadataFile = new File(deviceDir, METADATA_JSON); - ReportingDevice reportingDevice = new ReportingDevice(device); - reportingDevice.setMetadata( - OBJECT_MAPPER.readValue(metadataFile, ReportingDevice.Metadata.class)); - expectedDevices.put(device, reportingDevice); - } catch (Exception e) { - throw new RuntimeException("While loading device " + device, e); - } - } - System.out.println("Loaded " + expectedDevices.size() + " expected devices"); - } catch (Exception e) { - throw new RuntimeException( - "While loading devices directory " + devicesDir.getAbsolutePath(), e); - } - } - - private void setSchemaSpec(String schemaPath) { - File schemaFile = new File(schemaPath).getAbsoluteFile(); - if (schemaFile.isFile()) { - schemaRoot = schemaFile.getParentFile(); - schemaSpec = schemaFile.getName(); - } else if (schemaFile.isDirectory()) { - schemaRoot = schemaFile; - schemaSpec = null; - } else { - throw new RuntimeException("Schema directory/file not found: " + schemaFile); - } - } - - private void validatePubSub(String instName, String topicName) { - Map schemaMap = new TreeMap<>(); - for (File schemaFile : makeFileList(schemaRoot)) { - Schema schema = getSchema(schemaFile); - String fullName = schemaFile.getName(); - String schemaName = schemaFile.getName() - .substring(0, fullName.length() - JSON_SUFFIX.length()); - schemaMap.put(schemaName, schema); - } - if (!schemaMap.containsKey(ENVELOPE_SCHEMA_ID)) { - throw new RuntimeException("Missing schema for attribute validation: " + ENVELOPE_SCHEMA_ID); - } - dataSink = new FirestoreDataSink(); - System.out.println("Results will be uploaded to " + 
dataSink.getViewUrl()); - OUT_BASE_FILE.mkdirs(); - System.out.println("Also found in such directories as " + OUT_BASE_FILE.getAbsolutePath()); - System.out.println("Generating report file in " + METADATA_REPORT_FILE.getAbsolutePath()); - System.out.println("Connecting to pubsub topic " + topicName); - PubSubClient client = new PubSubClient(instName, topicName); - System.out.println("Entering pubsub message loop on " + client.getSubscriptionId()); - while(client.isActive()) { - try { - client.processMessage( - (message, attributes) -> validateMessage(schemaMap, message, attributes)); - } catch (Exception e) { - e.printStackTrace(); - } - } - System.out.println("Message loop complete"); - } - - private Set convertIgnoreSet(String ignoreSpec) { - if (ignoreSpec == null) { - return ImmutableSet.of(); - } - return Arrays.stream(ignoreSpec.split(",")).collect(Collectors.toSet()); - } - - private void validateMessage(Map schemaMap, Map message, - Map attributes) { - if (validateUpdate(schemaMap, message, attributes)) { - writeDeviceMetadataReport(); - } - } - - private boolean validateUpdate(Map schemaMap, Map message, - Map attributes) { - - String registryId = attributes.get(DEVICE_REGISTRY_ID_KEY); - if (cloudIotConfig != null && !cloudIotConfig.registry_id.equals(registryId)) { - // Silently drop messages for different registries. 
- return false; - } - - try { - String deviceId = attributes.get("deviceId"); - Preconditions.checkNotNull(deviceId, "Missing deviceId in message"); - - String schemaId = attributes.get("subFolder"); - - if (Strings.isNullOrEmpty(schemaId)) { - schemaId = UNKNOWN_SCHEMA_DEFAULT; - } - - if (!expectedDevices.isEmpty()) { - if (!processedDevices.add(deviceId)) { - return false; - } - System.out.println(String.format("Processing device #%d/%d: %s", - processedDevices.size(), expectedDevices.size(), deviceId)); - } - - if (attributes.get("wasBase64").equals("true")) { - base64Devices.add(deviceId); - } - - File deviceDir = new File(OUT_BASE_FILE, String.format(DEVICE_FILE_FORMAT, deviceId)); - deviceDir.mkdirs(); - - File attributesFile = new File(deviceDir, String.format(ATTRIBUTE_FILE_FORMAT, schemaId)); - OBJECT_MAPPER.writeValue(attributesFile, attributes); - - File messageFile = new File(deviceDir, String.format(MESSAGE_FILE_FORMAT, schemaId)); - OBJECT_MAPPER.writeValue(messageFile, message); - - File errorFile = new File(deviceDir, String.format(ERROR_FILE_FORMAT, schemaId)); - - final ReportingDevice reportingDevice = getReportingDevice(deviceId); - - try { - if (!schemaMap.containsKey(schemaId)) { - throw new IllegalArgumentException(String.format(SCHEMA_SKIP_FORMAT, schemaId, deviceId)); - } - } catch (Exception e) { - System.out.println(e.getMessage()); - OBJECT_MAPPER.writeValue(errorFile, e.getMessage()); - reportingDevice.addError(e); - } - - try { - validateMessage(schemaMap.get(ENVELOPE_SCHEMA_ID), attributes); - validateDeviceId(deviceId); - } catch (ExceptionMap | ValidationException e) { - processViolation(message, attributes, deviceId, ENVELOPE_SCHEMA_ID, attributesFile, errorFile, e); - reportingDevice.addError(e); - } - - if (schemaMap.containsKey(schemaId)) { - try { - validateMessage(schemaMap.get(schemaId), message); - dataSink.validationResult(deviceId, schemaId, attributes, message, null); - } catch (ExceptionMap | ValidationException e) { - 
        // --- continuation of the enclosing message-validation method (its start is above this chunk) ---
        processViolation(message, attributes, deviceId, schemaId, messageFile, errorFile, e);
        reportingDevice.addError(e);
      }
    }

    // Whether this message changed the set of known/validated devices; returned to the caller.
    boolean updated = false;

    if (expectedDevices.isEmpty()) {
      // No devices configured, so don't check metadata.
      updated = false;
    } else if (expectedDevices.containsKey(deviceId)) {
      try {
        // Only pointset messages carry the data needed for metadata validation.
        if (POINTSET_TYPE.equals(schemaId)) {
          ReportingDevice.PointsetMessage pointsetMessage =
              OBJECT_MAPPER.convertValue(message, ReportingDevice.PointsetMessage.class);
          // Count as "updated" only on the device's first validation.
          updated = !reportingDevice.hasBeenValidated();
          reportingDevice.validateMetadata(pointsetMessage);
        }
      } catch (Exception e) {
        // Metadata failure is recorded per-device rather than aborting the whole run.
        e.printStackTrace();
        OBJECT_MAPPER.writeValue(errorFile, e.getMessage());
        reportingDevice.addError(e);
      }
    } else if (extraDevices.add(deviceId)) {
      // Unexpected device; its first sighting counts as an update.
      updated = true;
    }

    if (!reportingDevice.hasError()) {
      System.out.println(String.format("Success validating %s/%s", deviceId, schemaId));
    }

    return updated;
  } catch (Exception e) {
    // Catch-all: a single bad message must never break the processing loop.
    e.printStackTrace();
    return false;
  }
}

/**
 * Returns the tracked {@link ReportingDevice} for the given id, or a fresh
 * (untracked) instance when the device is not in the expected-devices map.
 */
private ReportingDevice getReportingDevice(String deviceId) {
  if (expectedDevices.containsKey(deviceId)) {
    return expectedDevices.get(deviceId);
  } else {
    return new ReportingDevice(deviceId);
  }
}

/**
 * Writes a summary {@link MetadataReport} of the validation run to
 * {@code METADATA_REPORT_FILE}, bucketing each expected device into
 * error / successful / missing.
 */
private void writeDeviceMetadataReport() {
  try {
    MetadataReport metadataReport = new MetadataReport();
    metadataReport.updated = new Date();
    metadataReport.missingDevices = new TreeSet<>();
    metadataReport.extraDevices = extraDevices;
    metadataReport.successfulDevices = new TreeSet<>();
    metadataReport.base64Devices = base64Devices;
    metadataReport.expectedDevices = expectedDevices.keySet();
    metadataReport.errorDevices = new TreeMap<>();
    for (ReportingDevice deviceInfo : expectedDevices.values()) {
      String deviceId = deviceInfo.getDeviceId();
      if (deviceInfo.hasMetadataDiff() || deviceInfo.hasError()) {
        metadataReport.errorDevices.put(deviceId, deviceInfo.getMetadataDiff());
      } else if (deviceInfo.hasBeenValidated()) {
        metadataReport.successfulDevices.add(deviceId);
      } else {
        // Expected but never seen on the wire.
        metadataReport.missingDevices.add(deviceId);
      }
    }
    OBJECT_MAPPER.writeValue(METADATA_REPORT_FILE, metadataReport);
  } catch (Exception e) {
    throw new RuntimeException("While generating metadata report file " + METADATA_REPORT_FILE.getAbsolutePath(), e);
  }
}

/**
 * JSON-serialized summary of a validation run; public fields are mapped
 * directly by the object mapper.
 */
public static class MetadataReport {
  public Date updated;
  // NOTE(review): raw Set/Map types — elements appear to be String device ids
  // (and metadata-diff values for errorDevices), but the element types are not
  // visible in this chunk; confirm before tightening the generics.
  public Set expectedDevices;
  public Set missingDevices;
  public Set extraDevices;
  public Set successfulDevices;
  public Set base64Devices;
  public Map errorDevices;
}

/**
 * Records a schema-validation failure: logs it, forwards the formatted error
 * tree to the data sink, and writes the tree to {@code errorFile}.
 */
private void processViolation(Map message, Map attributes,
    String deviceId, String schemaId, File inputFile, File errorFile, RuntimeException e)
    throws FileNotFoundException {
  System.out.println("Error validating " + inputFile + ": " + e.getMessage());
  ErrorTree errorTree = ExceptionMap.format(e, ERROR_FORMAT_INDENT);
  dataSink.validationResult(deviceId, schemaId, attributes, message, errorTree);
  try (PrintStream errorOut = new PrintStream(errorFile)) {
    errorTree.write(errorOut);
  }
}

/** Throws an {@link ExceptionMap} if the device id does not match DEVICE_ID_PATTERN. */
private void validateDeviceId(String deviceId) {
  if (!DEVICE_ID_PATTERN.matcher(deviceId).matches()) {
    throw new ExceptionMap(String.format(DEVICE_MATCH_FORMAT, deviceId, DEVICE_ID_PATTERN.pattern()));
  }
}

/**
 * Validates every target file against every schema file, collecting failures
 * into nested {@link ExceptionMap}s that are thrown at the end if non-empty.
 */
private void validateFiles(String schemaSpec, String targetSpec) {
  List schemaFiles = makeFileList(schemaSpec);
  if (schemaFiles.size() == 0) {
    throw new RuntimeException("Cowardly refusing to validate against zero schemas");
  }
  List targetFiles = makeFileList(targetSpec);
  if (targetFiles.size() == 0) {
    throw new RuntimeException("Cowardly refusing to validate against zero targets");
  }
  ExceptionMap schemaExceptions = new ExceptionMap(
      String.format(SCHEMA_VALIDATION_FORMAT, schemaFiles.size()));
  for (File schemaFile : schemaFiles) {
    try {
      Schema schema = getSchema(schemaFile);
      // NOTE: this expression continues past the end of this chunk.
      ExceptionMap validateExceptions = new ExceptionMap(
String.format(TARGET_VALIDATION_FORMAT, targetFiles.size(), schemaFile.getName())); - for (File targetFile : targetFiles) { - try { - System.out.println("Validating " + targetFile.getName() + " against " + schemaFile.getName()); - validateFile(targetFile, schema); - } catch (Exception e) { - validateExceptions.put(targetFile.getName(), e); - } - } - validateExceptions.throwIfNotEmpty(); - } catch (Exception e) { - schemaExceptions.put(schemaFile.getName(), e); - } - } - schemaExceptions.throwIfNotEmpty(); - } - - private void validateFilesOutput(String targetSpec) { - try { - validateFiles(schemaSpec, targetSpec); - } catch (ExceptionMap | ValidationException processingException) { - ErrorTree errorTree = ExceptionMap.format(processingException, ERROR_FORMAT_INDENT); - errorTree.write(System.err); - throw processingException; - } - } - - private Schema getSchema(File schemaFile) { - try (InputStream schemaStream = new FileInputStream(schemaFile)) { - JSONObject rawSchema = new JSONObject(new JSONTokener(schemaStream)); - SchemaLoader loader = SchemaLoader.builder().schemaJson(rawSchema).httpClient(new RelativeClient()).build(); - return loader.load().build(); - } catch (Exception e) { - throw new RuntimeException("While loading schema " + schemaFile.getAbsolutePath(), e); - } - } - - class RelativeClient implements SchemaClient { - - public static final String FILE_URL_PREFIX = "file:"; - - @Override - public InputStream get(String url) { - try { - if (!url.startsWith(FILE_URL_PREFIX)) { - throw new IllegalStateException("Expected path to start with " + FILE_URL_PREFIX); - } - String new_url = FILE_URL_PREFIX + new File(schemaRoot, url.substring(FILE_URL_PREFIX.length())); - return (InputStream) (new URL(new_url)).getContent(); - } catch (Exception e) { - throw new RuntimeException("While loading URL " + url, e); - } - } - } - - private List makeFileList(String spec) { - return makeFileList(new File(spec)); - } - - private List makeFileList(File target) { - if 
(target.isFile()) { - return ImmutableList.of(target); - } - boolean isDir = target.isDirectory(); - String prefix = isDir ? "" : target.getName(); - File parent = isDir ? target : target.getAbsoluteFile().getParentFile(); - if (!parent.isDirectory()) { - throw new RuntimeException("Parent directory not found " + parent.getAbsolutePath()); - } - - FilenameFilter filter = (dir, file) -> file.startsWith(prefix) && file.endsWith(JSON_SUFFIX); - String[] fileNames = parent.list(filter); - - return Arrays.stream(fileNames).map(name -> new File(parent, name)) - .collect(Collectors.toList()); - } - - private void validateMessage(Schema schema, Object message) { - final String stringMessage; - try { - stringMessage = OBJECT_MAPPER.writeValueAsString(message); - } catch (Exception e) { - throw new RuntimeException("While converting to string", e); - } - schema.validate(new JSONObject(new JSONTokener(stringMessage))); - } - - private void validateFile(File targetFile, Schema schema) { - try (InputStream targetStream = new FileInputStream(targetFile)) { - schema.validate(new JSONObject(new JSONTokener(targetStream))); - } catch (Exception e) { - throw new RuntimeException("Against input " + targetFile, e); - } - } - - -}