/etc/systemd/system/docker.service.d/docker-override.conf
+[Service]
+Environment="GOOGLE_APPLICATION_CREDENTIALS=`realpath $gcp_cred`"
+EOF
+ sudo systemctl daemon-reload
+ sudo systemctl restart docker
+fi
+
+rm -rf $USI_DIR
+mkdir -p $USI_DIR
+args="-d -v $USI_DIR:/ovs --privileged --network=host -e DEBUG=$DEBUG --name daq-usi daqf/usi"
+docker run $log_driver $args || docker run $args
+
+echo DAQ autoclean docker cp daq-usi:/root/logs.txt inst/cmdusi.log
+echo DAQ autoclean docker kill daq-usi
diff --git a/config/faucet/faucet_alt-switch.yaml b/config/faucet/faucet_alt-switch.yaml
new file mode 100644
index 0000000000..059233aa4b
--- /dev/null
+++ b/config/faucet/faucet_alt-switch.yaml
@@ -0,0 +1,22 @@
+dps:
+ alt-switch:
+ dp_id: 2
+ interfaces:
+ 1:
+ native_vlan: 1002
+ 2:
+ native_vlan: 1001
+ 3:
+ native_vlan: 1003
+ 4:
+ native_vlan: 1004
+ 5:
+ native_vlan: 1005
+ 100:
+ tagged_vlans: [1001, 1002, 1003, 1004, 1005]
+vlans:
+ 1001:
+ 1002:
+ 1003:
+ 1004:
+ 1005:
diff --git a/config/modules/all.conf b/config/modules/all.conf
index 4d897ae7c5..37b581b695 100644
--- a/config/modules/all.conf
+++ b/config/modules/all.conf
@@ -3,8 +3,8 @@ include config/modules/host.conf
# All contributed modules.
include subset/switches/build.conf
-include subset/connection/build.conf
include subset/bacnet/build.conf
include subset/security/build.conf
include subset/cloud/build.conf
-include subset/manual/build.conf
\ No newline at end of file
+include subset/manual/build.conf
+include subset/network/build.conf
diff --git a/config/modules/host.conf b/config/modules/host.conf
index e6ec5e37ef..cd9e9421b4 100644
--- a/config/modules/host.conf
+++ b/config/modules/host.conf
@@ -12,6 +12,10 @@ add mudgee
# Additional base modules
include subset/pentests/build.conf
+include usi/build.conf
+
+# Extended dhcp tests
+add ipaddr
# Example of how to remove something.
remove unused
diff --git a/config/modules/topo.conf b/config/modules/topo.conf
index 4d47ec6cf9..ba02ce2374 100644
--- a/config/modules/topo.conf
+++ b/config/modules/topo.conf
@@ -3,3 +3,4 @@ build docker/modules
# Use ping with runtime configuration for topo testing.
add ping
+include usi/build.conf
diff --git a/config/system/all.conf b/config/system/all.conf
index 0fe77da8fb..612cdc89cb 100644
--- a/config/system/all.conf
+++ b/config/system/all.conf
@@ -3,7 +3,7 @@
# Load defaults.
source config/system/default.yaml
-# Description description for dashboard.
+# Description for dashboard.
site_description="Multi-Device All-Tests Configuration"
# Upstream dataplane port from the external (secondary) switch.
diff --git a/config/system/alt.yaml b/config/system/alt.yaml
new file mode 100644
index 0000000000..ab1f6945f2
--- /dev/null
+++ b/config/system/alt.yaml
@@ -0,0 +1,23 @@
+# Example configuration file for using an OVS switch not managed by DAQ.
+
+# Load defaults.
+include: config/system/default.yaml
+
+# Description for dashboard.
+site_description: "Alternate (not managed by DAQ) OVS switch configuration"
+
+# Network switch configuration.
+switch_setup:
+ data_intf: alt-intf
+ alt_port: 6669
+ uplink_port: 100
+ ext_br: alt-switch
+
+# Faux device connection for testing.
+interfaces:
+ faux:
+ opts:
+ port: 2
+
+# Use VLAN trigger for device activation.
+run_trigger_type: VLAN
diff --git a/config/system/default.yaml b/config/system/default.yaml
index 644993b96e..3b3746f6f4 100644
--- a/config/system/default.yaml
+++ b/config/system/default.yaml
@@ -37,3 +37,13 @@ long_dhcp_response_sec: 105
# finish hook: executed at the end of every test
finish_hook: bin/dump_network
+
+# topology hook: executed when device topology changes
+topology_hook: bin/dump_network
+
+run_trigger_type: PORT
+
+# USI URL for DAQ to connect to
+usi_setup:
+ url: localhost:5000
+ rpc_timeout_sec: 10
diff --git a/config/system/ext.conf b/config/system/ext.conf
index 8625109734..28dc34b707 100644
--- a/config/system/ext.conf
+++ b/config/system/ext.conf
@@ -3,7 +3,7 @@
# Load defaults.
source config/system/default.yaml
-# Description description for dashboard.
+# Description for dashboard.
site_description="External (not integrated with DAQ) OVS switch configuration"
# Network switch configuration.
diff --git a/config/system/ext.yaml b/config/system/ext.yaml
index 7ad626341e..4fef079c1e 100644
--- a/config/system/ext.yaml
+++ b/config/system/ext.yaml
@@ -3,7 +3,7 @@
# Load defaults.
include: config/system/default.yaml
-# Description description for dashboard.
+# Description for dashboard.
site_description: "External (not integrated with DAQ) OVS switch configuration"
# Network switch configuration.
diff --git a/config/system/muddy.conf b/config/system/muddy.conf
index 6510e2b113..3d3a17b30c 100644
--- a/config/system/muddy.conf
+++ b/config/system/muddy.conf
@@ -3,7 +3,7 @@
# Load defaults.
source config/system/default.yaml
-# Description description for dashboard.
+# Description for dashboard.
site_description="Multi-Device Configuration"
# Upstream dataplane port from the external (secondary) switch.
diff --git a/config/system/multi.conf b/config/system/multi.conf
index 185bbc40df..367a94e86b 100644
--- a/config/system/multi.conf
+++ b/config/system/multi.conf
@@ -3,7 +3,7 @@
# Load defaults.
source config/system/default.yaml
-# Description description for dashboard.
+# Description for dashboard.
site_description="Multi-Device Configuration"
# Upstream dataplane port from the external (secondary) switch.
diff --git a/daq/base_module.py b/daq/base_module.py
new file mode 100644
index 0000000000..f8062e4dd8
--- /dev/null
+++ b/daq/base_module.py
@@ -0,0 +1,38 @@
+"""Host module base class"""
+
+from __future__ import absolute_import
+
+import datetime
+import logger
+
+
+LOGGER = logger.get_logger('module')
+
+
+class HostModule:
+ """Base class for host test modules"""
+
+ def __init__(self, host, tmpdir, test_name, module_config):
+ self.host = host
+ self.tmpdir = tmpdir
+ self.test_name = test_name
+ self.device = host.device
+ self.test_config = module_config.get('modules').get(test_name)
+ self.runner = host.runner
+ self.host_name = '%s%02d' % (test_name, host.device.set_id)
+ # Host name can't be more than 10 characters because it is also used to create a
+ # network interface with -eth0 on the end and there's a hard linux limit on length.
+ assert len(self.host_name) <= 10, 'Hostname %s too long' % self.host_name
+ self.callback = None
+ self._finish_hook = None
+ self.start_time = None
+
+ def start(self, port, params, callback, finish_hook):
+ """Start a test module"""
+ LOGGER.debug('Starting test module %s', self)
+ self.callback = callback
+ self._finish_hook = finish_hook
+ self.start_time = datetime.datetime.now()
+
+ def __repr__(self):
+ return "Target device %s test %s" % (self.device, self.test_name)
diff --git a/daq/dhcp_monitor.py b/daq/dhcp_monitor.py
index 6a5beceae0..4c312cb89b 100644
--- a/daq/dhcp_monitor.py
+++ b/daq/dhcp_monitor.py
@@ -57,10 +57,12 @@ def _dhcp_line(self):
if match:
if match.group(2):
self.target_ip = match.group(2)
- if match.group(4) == "ACK":
- self._dhcp_success()
if match.group(6):
self.target_mac = match.group(6)
+ if match.group(4) == "ACK":
+ if not self.target_ip or not self.target_mac:
+ LOGGER.warning('dhcp ACK incomplete: %s', dhcp_line)
+ self._dhcp_success()
def cleanup(self):
"""Cleanup any ongoing dhcp activity"""
diff --git a/daq/docker_test.py b/daq/docker_test.py
index fdabebd0f7..9c678ee10d 100644
--- a/daq/docker_test.py
+++ b/daq/docker_test.py
@@ -3,6 +3,10 @@
import datetime
import os
import subprocess
+import string
+import random
+
+from base_module import HostModule
import logger
from clib import docker_host
@@ -11,40 +15,27 @@
LOGGER = logger.get_logger('docker')
-class DockerTest:
+class DockerTest(HostModule):
"""Class for running docker tests"""
IMAGE_NAME_FORMAT = 'daqf/test_%s'
TAGGED_IMAGE_FORMAT = IMAGE_NAME_FORMAT + ':latest'
CONTAINER_PREFIX = 'daq'
- # pylint: disable=too-many-arguments
- def __init__(self, runner, target_port, tmpdir, test_name, env_vars=None):
- self.target_port = target_port
- self.tmpdir = tmpdir
- self.test_name = test_name
- self.runner = runner
- self.host_name = '%s%02d' % (test_name, self.target_port)
+ def __init__(self, host, tmpdir, test_name, module_config):
+ super().__init__(host, tmpdir, test_name, module_config)
self.docker_log = None
self.docker_host = None
- self.callback = None
- self.start_time = None
self.pipe = None
- self.env_vars = env_vars or []
- self._finish_hook = None
def start(self, port, params, callback, finish_hook):
"""Start the docker test"""
- LOGGER.debug('Target port %d starting docker test %s', self.target_port, self.test_name)
-
- self.start_time = datetime.datetime.now()
- self.callback = callback
- self._finish_hook = finish_hook
+ super().start(port, params, callback, finish_hook)
def opt_param(key):
return params.get(key) or '' # Substitute empty string for None
- env_vars = self.env_vars + [
+ env_vars = [
"TARGET_NAME=" + self.host_name,
"TARGET_IP=" + params['target_ip'],
"TARGET_MAC=" + params['target_mac'],
@@ -61,7 +52,7 @@ def opt_param(key):
self._map_if_exists(vol_maps, params, 'type')
image = self.IMAGE_NAME_FORMAT % self.test_name
- LOGGER.debug("Target port %d running docker test %s", self.target_port, image)
+ LOGGER.debug("%s running docker test %s", self, image)
cls = docker_host.make_docker_host(image, prefix=self.CONTAINER_PREFIX)
# Work around an instability in the faucet/clib/docker library, b/152520627.
setattr(cls, 'pullImage', self._check_image)
@@ -74,20 +65,20 @@ def opt_param(key):
raise wrappers.DaqException(e)
try:
- LOGGER.debug("Target port %d activating docker test %s", self.target_port, image)
+ LOGGER.debug("%s activating docker test %s", self, image)
pipe = host.activate(log_name=None)
# Docker tests don't use DHCP, so manually set up DNS.
host.cmd('echo nameserver $GATEWAY_IP > /etc/resolv.conf')
self.docker_log = host.open_log()
if self._should_raise_test_exception('initialize'):
- LOGGER.error('Target port %d inducing initialization failure', self.target_port)
+ LOGGER.error('%s inducing initialization failure', self)
raise Exception('induced initialization failure')
self.runner.monitor_stream(self.host_name, pipe.stdout, copy_to=self.docker_log,
hangup=self._docker_complete,
error=self._docker_error)
self.pipe = pipe
if self._should_raise_test_exception('callback'):
- LOGGER.error('Target port %d will induce callback failure', self.target_port)
+ LOGGER.error('%s will induce callback failure', self)
# Closing this now will cause error when attempting to write output.
self.docker_log.close()
except Exception as e:
@@ -98,7 +89,7 @@ def opt_param(key):
self.runner.monitor_forget(self.pipe.stdout)
self.pipe = None
raise e
- LOGGER.info("Target port %d test %s running", self.target_port, self.test_name)
+ LOGGER.info("%s running", self)
def _check_image(self):
lines = subprocess.check_output(["docker", "images", "--format",
@@ -109,7 +100,7 @@ def _check_image(self):
def terminate(self):
"""Forcibly terminate this container"""
- LOGGER.info("Target port %d test %s terminating", self.target_port, self.test_name)
+ LOGGER.info("%s terminating", self)
return self._docker_finalize()
def _map_if_exists(self, vol_maps, params, kind):
@@ -117,18 +108,17 @@ def _map_if_exists(self, vol_maps, params, kind):
if base and os.path.exists(base):
abs_base = os.path.abspath(base)
vol_maps += ['%s:/config/%s' % (abs_base, kind)]
- LOGGER.info('Target port %d mapping %s to /config/%s', self.target_port, abs_base, kind)
+ LOGGER.info('%s mapping %s to /config/%s', self, abs_base, kind)
def _docker_error(self, exception):
- LOGGER.error('Target port %d docker error: %s', self.target_port, str(exception))
+ LOGGER.error('%s docker error: %s', self, str(exception))
if self._docker_finalize() is None:
- LOGGER.warning('Target port %d docker already terminated.', self.target_port)
+ LOGGER.warning('%s docker already terminated.', self)
else:
self.callback(exception=exception)
def _docker_finalize(self):
- assert self.docker_host, 'docker host %s already finalized' % self.target_port
- LOGGER.info('Target port %d docker finalize', self.target_port)
+ assert self.docker_host, 'docker host %s already finalized' % self
if self._finish_hook:
self._finish_hook()
self.runner.remove_host(self.docker_host)
@@ -136,16 +126,17 @@ def _docker_finalize(self):
self.runner.monitor_forget(self.pipe.stdout)
self.pipe = None
return_code = self.docker_host.terminate()
+ LOGGER.info('%s docker finalize %d', self, return_code)
self.docker_host = None
self.docker_log.close()
self.docker_log = None
if self._should_raise_test_exception('finalize'):
- LOGGER.error('Target port %d inducing finalize failure', self.target_port)
+ LOGGER.error('%s inducing finalize failure', self)
raise Exception('induced finalize failure')
return return_code
def _should_raise_test_exception(self, trigger_value):
- key = '%s_%02d' % (self.test_name, self.target_port)
+ key = "%s_%s" % (self.test_name, self.device.mac.replace(':', ''))
return self.runner.config.get('fail_module', {}).get(key) == trigger_value
def _docker_complete(self):
@@ -159,12 +150,18 @@ def _docker_complete(self):
exception = e
LOGGER.exception(e)
delay = (datetime.datetime.now() - self.start_time).total_seconds()
- LOGGER.debug("Target port %d docker complete, return=%d (%s)",
- self.target_port, return_code, exception)
+ LOGGER.debug("%s docker complete, return=%d (%s)",
+ self, return_code, exception)
if return_code:
- LOGGER.info("Target port %d test %s failed %ss: %s %s",
- self.target_port, self.test_name, delay, return_code, exception)
+ LOGGER.info("%s failed %ss: %s %s",
+ self, delay, return_code, exception)
else:
- LOGGER.info("Target port %d test %s passed %ss",
- self.target_port, self.test_name, delay)
+ LOGGER.info("%s passed %ss",
+ self, delay)
self.callback(return_code=return_code, exception=exception)
+
+ def _get_random_string(self, length):
+ return ''.join(random.choice(string.ascii_letters) for _ in range(length))
+
+ def ip_listener(self, target_ip):
+ """Do nothing b/c docker tests don't care about ip notifications"""
diff --git a/daq/faucet_event_client.py b/daq/faucet_event_client.py
index a97c8c9bb5..b2254b1cf9 100644
--- a/daq/faucet_event_client.py
+++ b/daq/faucet_event_client.py
@@ -184,11 +184,12 @@ def as_port_state(self, event):
def as_port_learn(self, event):
"""Convert to port learning info, if applicable"""
if not event or 'L2_LEARN' not in event:
- return (None, None, None)
+ return [None] * 4
dpid = event['dp_id']
port_no = int(event['L2_LEARN']['port_no'])
eth_src = event['L2_LEARN']['eth_src']
- return (dpid, port_no, eth_src)
+ vid = event['L2_LEARN']['vid']
+ return (dpid, port_no, eth_src, vid)
def close(self):
"""Close the faucet event socket"""
diff --git a/daq/gateway.py b/daq/gateway.py
index c7c54182a5..60b6abe193 100644
--- a/daq/gateway.py
+++ b/daq/gateway.py
@@ -12,6 +12,7 @@
LOGGER = logger.get_logger('gateway')
+
class Gateway():
"""Gateway collection class for managing testing services"""
@@ -37,8 +38,8 @@ def __init__(self, runner, name, port_set, network):
self.dummy = None
self.tmpdir = None
self.targets = {}
- self.test_ports = {}
- self.ready = {}
+ self.test_ports = set()
+ self.ready = set()
self.activated = False
self.result_linger = False
self._scan_monitor = None
@@ -125,6 +126,18 @@ def request_new_ip(self, mac):
"""Requests a new ip for the device"""
self.execute_script('new_ip', mac)
+ def change_dhcp_response_time(self, mac, time):
+ """Change dhcp response time for device mac"""
+ self.execute_script('change_dhcp_response_time', mac, time)
+
+ def stop_dhcp_response(self, mac):
+ """Stops DHCP response for the device"""
+ self.change_dhcp_response_time(mac, -1)
+
+ def change_dhcp_range(self, start, end, prefix_length):
+ """Change dhcp range for devices"""
+ self.execute_script('change_dhcp_range', start, end, prefix_length)
+
def allocate_test_port(self):
"""Get the test port to use for this gateway setup"""
test_port = self._switch_port(self.TEST_OFFSET_START)
@@ -132,7 +145,7 @@ def allocate_test_port(self):
test_port = test_port + 1
limit_port = self._switch_port(self.NUM_SET_PORTS)
assert test_port < limit_port, 'no test ports available'
- self.test_ports[test_port] = True
+ self.test_ports.add(test_port)
return test_port
def _startup_scan(self, host):
@@ -160,7 +173,7 @@ def _scan_error(self, e):
def release_test_port(self, test_port):
"""Release the given port from the gateway"""
assert test_port in self.test_ports, 'test port not allocated'
- del self.test_ports[test_port]
+ self.test_ports.remove(test_port)
def _switch_port(self, offset):
return self.port_set * self.SET_SPACING + offset
@@ -169,9 +182,8 @@ def _is_target_expected(self, target):
if not target:
return False
target_mac = target['mac']
- for target_port in self.targets:
- if self.targets[target_port]['mac'] == target_mac:
- return True
+ if target_mac in self.targets:
+ return True
LOGGER.warning('No target match found for %s in %s', target_mac, self.name)
return False
@@ -179,7 +191,7 @@ def _dhcp_callback(self, state, target, exception=None):
if exception:
LOGGER.error('Gateway DHCP exception %s', exception)
if self._is_target_expected(target) or exception:
- self.runner.ip_notify(state, target, self.port_set, exception=exception)
+ self.runner.ip_notify(state, target, self, exception=exception)
def _setup_tmpdir(self, base_name):
tmpdir = os.path.join('inst', base_name)
@@ -188,26 +200,26 @@ def _setup_tmpdir(self, base_name):
os.makedirs(tmpdir)
return tmpdir
- def attach_target(self, target_port, target):
+ def attach_target(self, device):
"""Attach the given target to this gateway; return number of attached targets."""
- assert target_port not in self.targets, 'target already attached to gw'
- LOGGER.info('Attaching target %d to gateway group %s', target_port, self.name)
- self.targets[target_port] = target
+ assert device.mac not in self.targets, 'target %s already attached to gw' % device
+ LOGGER.info('Attaching target %s to gateway group %s', device, self.name)
+ self.targets[device.mac] = device
return len(self.targets)
- def detach_target(self, target_port):
+ def detach_target(self, device):
"""Detach the given target from this gateway; return number of remaining targets."""
- assert target_port in self.targets, 'target not attached to gw'
- LOGGER.info('Detach target %d from gateway group %s: %s',
- target_port, self.name, list(self.targets.keys()))
- del self.targets[target_port]
+ assert device.mac in self.targets, 'target %s not attached to gw' % device
+ LOGGER.info('Detach target %s from gateway group %s: %s',
+ device, self.name, list(self.targets.keys()))
+ del self.targets[device.mac]
return len(self.targets)
- def target_ready(self, target_mac):
+ def target_ready(self, device):
"""Mark a target ready, and return set of ready targets"""
- if not target_mac in self.ready:
- LOGGER.info('Ready target %s from gateway group %s', target_mac, self.name)
- self.ready[target_mac] = True
+ if device not in self.ready:
+ LOGGER.info('Ready target %s from gateway group %s', device, self.name)
+ self.ready.add(device)
return self.ready
def get_targets(self):
@@ -240,3 +252,6 @@ def terminate(self):
def _ping_test(self, src, dst, src_addr=None):
return self.runner.ping_test(src, dst, src_addr=src_addr)
+
+ def __repr__(self):
+ return 'Gateway group %s set %d' % (self.name, self.port_set)
diff --git a/daq/gcp.py b/daq/gcp.py
index 17051870a1..0dea14d14f 100644
--- a/daq/gcp.py
+++ b/daq/gcp.py
@@ -24,6 +24,7 @@
# pylint: disable=no-member
DESCENDING = firestore.Query.DESCENDING
+
def get_timestamp():
""""Get a JSON-compatible formatted timestamp"""
return to_timestamp(datetime.datetime.now(datetime.timezone.utc))
@@ -49,7 +50,7 @@ def __init__(self, config, callback_handler):
self._callback_handler = callback_handler
cred_file = self.config.get('gcp_cred')
if not cred_file:
- LOGGER.info('No gcp_cred filr specified in config, disabling gcp use.')
+ LOGGER.info('No gcp_cred file specified in config, disabling gcp use.')
self._pubber = None
self._storage = None
self._firestore = None
@@ -263,19 +264,25 @@ def _get_json_report(self, runid):
blob = self._bucket.blob(report_blob)
return json.loads(str(blob.download_as_string(), 'utf-8'))
- def get_reports_from_date_range(self, device: str, start=None, end=None, count=None):
- """Combine test results from reports within a date range"""
+ # pylint: disable=too-many-arguments
+ def get_reports(self, device: str, start=None, end=None, count=None, daq_run_id=None):
+ """Get filtered list of reports"""
if not self._firestore:
LOGGER.error('Firestore not initialized.')
return
- LOGGER.info('Looking for reports...')
+ LOGGER.info('Looking for reports from GCP...')
limit_count = count if count else DEFAULT_LIMIT
origin = self._firestore.collection(u'origin').document(self._client_name).get()
query = origin.reference.collection('runid').where('deviceId', '==', device)
if start:
+ LOGGER.info('Limiting to start time %s', to_timestamp(start))
query = query.where('updated', '>=', to_timestamp(start))
if end:
+ LOGGER.info('Limiting to end time %s', to_timestamp(end))
query = query.where('updated', '<=', to_timestamp(end))
+ if daq_run_id:
+ LOGGER.info('Limiting to DAQ run id %s', daq_run_id)
+ query = query.where('daq_run_id', '==', daq_run_id)
runids = query.order_by(u'updated', direction=DESCENDING).limit(limit_count).stream()
for runid in runids:
json_report = self._get_json_report(runid)
diff --git a/daq/host.py b/daq/host.py
index dc13f9b659..c600cb3835 100644
--- a/daq/host.py
+++ b/daq/host.py
@@ -5,17 +5,20 @@
import shutil
import time
from datetime import timedelta, datetime
+import grpc
from clib import tcpdump_helper
+
from report import ResultType, ReportGenerator
+from proto import usi_pb2 as usi
+from proto import usi_pb2_grpc as usi_service
import configurator
import docker_test
import gcp
+import ipaddr_test
import logger
-LOGGER = logger.get_logger('host')
-
class _STATE:
"""Host state enum for testing cycle"""
@@ -31,6 +34,7 @@ class _STATE:
TERM = 'Host terminated'
+
class MODE:
"""Test module modes for state reporting."""
INIT = 'init'
@@ -46,21 +50,29 @@ class MODE:
LONG = 'long'
MERR = 'merr'
+
def pre_states():
"""Return pre-test states for basic operation"""
- return ['startup', 'sanity', 'ipaddr', 'base', 'monitor']
+ return ['startup', 'sanity', 'acquire', 'base', 'monitor']
def post_states():
"""Return post-test states for recording finalization"""
return ['finish', 'info', 'timer']
+def get_test_config(config, test):
+ """Get a single test module's config"""
+ return config["modules"].get(test)
+
+
class ConnectedHost:
"""Class managing a device-under-test"""
_STARTUP_MIN_TIME_SEC = 5
+ _RPC_TIMEOUT_SEC = 10
_INST_DIR = "inst/"
_DEVICE_PATH = "device/%s"
+ _NETWORK_DIR = "inst/network"
_MODULE_CONFIG = "module_config.json"
_CONTROL_PATH = "control/port-%s"
_CORE_TESTS = ['pass', 'fail', 'ping', 'hold']
@@ -68,19 +80,22 @@ class ConnectedHost:
_CONFIG_DIR = "config/"
_TIMEOUT_EXCEPTION = TimeoutError('Timeout expired')
- def __init__(self, runner, gateway, target, config):
+ # pylint: disable=too-many-statements
+ def __init__(self, runner, device, config):
self.configurator = configurator.Configurator()
self.runner = runner
self._gcp = runner.gcp
- self.gateway = gateway
+ self.gateway = device.gateway
self.config = config
self.switch_setup = self.config.get('switch_setup', {})
- self.target_port = target['port']
- self.target_mac = target['mac']
- self.fake_target = target['fake']
+ self.device = device
+ self.target_mac = device.mac
+ self.target_port = device.port.port_no
+ self.fake_target = self.gateway.fake_target
self.devdir = self._init_devdir()
self.run_id = self.make_runid()
self.scan_base = os.path.abspath(os.path.join(self.devdir, 'scans'))
+ self.logger = logger.get_logger('host')
self._port_base = self._get_port_base()
self._device_base = self._get_device_base()
self.state = None
@@ -95,7 +110,8 @@ def __init__(self, runner, gateway, target, config):
self._monitor_scan_sec = int(config.get('monitor_scan_sec', 0))
_default_timeout_sec = int(config.get('default_timeout_sec', 0))
self._default_timeout_sec = _default_timeout_sec if _default_timeout_sec else None
- self._finish_hook_script = config.get('finish_hook')
+ self._usi_config = config.get('usi_setup', {})
+ self._topology_hook_script = config.get('topology_hook')
self._mirror_intf_name = None
self._monitor_ref = None
self._monitor_start = None
@@ -106,7 +122,8 @@ def __init__(self, runner, gateway, target, config):
assert self._loaded_config, 'config was not loaded'
self._write_module_config(self._loaded_config, self._device_aux_path())
self.remaining_tests = self._get_enabled_tests()
- LOGGER.info('Host %s running with enabled tests %s', self.target_port, self.remaining_tests)
+ self.logger.info('Host %s running with enabled tests %s', self.target_mac,
+ self.remaining_tests)
self._report = ReportGenerator(config, self._INST_DIR, self.target_mac,
self._loaded_config)
self.record_result('startup', state=MODE.PREP)
@@ -115,6 +132,7 @@ def __init__(self, runner, gateway, target, config):
self._startup_file = None
self.timeout_handler = self._aux_module_timeout_handler
self._all_ips = []
+ self._ip_listener = None
@staticmethod
def make_runid():
@@ -122,20 +140,20 @@ def make_runid():
return '%06x' % int(time.time())
def _init_devdir(self):
- devdir = os.path.join(self._INST_DIR, 'run-port-%02d' % self.target_port)
+ devdir = os.path.join(self._INST_DIR, 'run-%s' % self.target_mac.replace(':', ''))
shutil.rmtree(devdir, ignore_errors=True)
os.makedirs(devdir)
return devdir
def _get_port_base(self):
test_config = self.config.get('test_config')
- if not test_config:
- return None
- conf_base = os.path.abspath(os.path.join(test_config, 'port-%02d' % self.target_port))
- if not os.path.isdir(conf_base):
- LOGGER.warning('Test config directory not found: %s', conf_base)
- return None
- return conf_base
+ if test_config and self.target_port:
+ conf_base = os.path.abspath(os.path.join(test_config, 'port-%02d' % self.target_port))
+ if not os.path.isdir(conf_base):
+ self.logger.warning('Test config directory not found: %s', conf_base)
+ return None
+ return conf_base
+ return None
def _make_config_bundle(self, config=None):
return {
@@ -148,22 +166,25 @@ def _make_control_bundle(self):
'paused': self.state == _STATE.READY
}
+ def _get_test_config(self, test):
+ return get_test_config(self._loaded_config, test)
+
def _test_enabled(self, test):
fallback_config = {'enabled': test in self._CORE_TESTS}
- test_config = self._loaded_config['modules'].get(test, fallback_config)
+ test_config = self._get_test_config(test) or fallback_config
return test_config.get('enabled', True)
def _get_test_timeout(self, test):
- test_module = self._loaded_config['modules'].get(test)
if test == 'hold':
return None
+ test_module = self._get_test_config(test)
if not test_module:
return self._default_timeout_sec
return test_module.get('timeout_sec', self._default_timeout_sec)
def get_port_flap_timeout(self, test):
"""Get port toggle timeout configuration that's specific to each test module"""
- test_module = self._loaded_config['modules'].get(test)
+ test_module = self._get_test_config(test)
if not test_module:
return None
return test_module.get('port_flap_timeout_sec')
@@ -193,18 +214,20 @@ def _get_unique_upload_path(self, file_name):
partial = os.path.join('tests', self.test_name, base) if self.test_name else base
return os.path.join('run_id', self.run_id, partial)
- def _load_config(self, config, path):
+ def _load_config(self, name, config, path):
+ if name:
+ self.logger.info('Loading %s module config from %s', name, path)
return self.configurator.load_and_merge(config, path, self._MODULE_CONFIG, optional=True)
def _write_module_config(self, config, path):
self.configurator.write_config(config, path, self._MODULE_CONFIG)
def _type_path(self):
- dev_config = self._load_config({}, self._device_base)
+ dev_config = self._load_config(None, {}, self._device_base)
device_type = dev_config.get('device_type')
if not device_type:
return None
- LOGGER.info('Configuring device %s as type %s', self.target_mac, device_type)
+ self.logger.info('Configuring device %s as type %s', self.device, device_type)
site_path = self.config.get('site_path')
type_path = os.path.abspath(os.path.join(site_path, 'device_types', device_type))
return type_path
@@ -215,20 +238,20 @@ def _type_aux_path(self):
return None
aux_path = os.path.join(type_path, self._AUX_DIR)
if not os.path.exists(aux_path):
- LOGGER.info('Skipping missing type dir %s', aux_path)
+ self.logger.info('Skipping missing type dir %s', aux_path)
return None
return aux_path
def _create_device_dir(self, path):
- LOGGER.warning('Creating new device dir: %s', path)
+ self.logger.warning('Creating new device dir: %s', path)
os.makedirs(path)
template_dir = self.config.get('device_template')
if not template_dir:
- LOGGER.warning('Skipping defaults since no device_template found')
+ self.logger.warning('Skipping defaults since no device_template found')
return
- LOGGER.info('Copying template files from %s to %s', template_dir, path)
+ self.logger.info('Copying template files from %s to %s', template_dir, path)
for file in os.listdir(template_dir):
- LOGGER.info('Copying %s...', file)
+ self.logger.info('Copying %s...', file)
shutil.copy(os.path.join(template_dir, file), path)
def _upload_file(self, path):
@@ -237,14 +260,16 @@ def _upload_file(self, path):
def initialize(self):
"""Fully initialize a new host set"""
- LOGGER.info('Target port %d initializing...', self.target_port)
+ self.logger.info('Target device %s initializing...', self)
# There is a race condition here with ovs assigning ports, so wait a bit.
time.sleep(2)
shutil.rmtree(self.devdir, ignore_errors=True)
os.makedirs(self.scan_base)
self._initialize_config()
network = self.runner.network
- self._mirror_intf_name = network.create_mirror_interface(self.target_port)
+ if self.target_port:
+ self._mirror_intf_name = network.create_mirror_interface(self.target_port)
+ self._topology_hook()
if self.config['test_list']:
self._start_run()
else:
@@ -268,9 +293,27 @@ def _state_transition(self, target, expected=None):
message = 'state was %s expected %s' % (self.state, expected)
assert self.state == expected, message
assert self.state != _STATE.TERM, 'host already terminated'
- LOGGER.debug('Target port %d state: %s -> %s', self.target_port, self.state, target)
+ self.logger.debug('Target device %s state: %s -> %s', self, self.state, target)
self.state = target
+ def _build_switch_info(self) -> usi.SwitchInfo:
+ switch_config = self._get_switch_config()
+ model_str = switch_config['model']
+ if model_str == 'FAUX_SWITCH' or not self.target_port:
+ return None
+ if model_str:
+ switch_model = usi.SwitchModel.Value(model_str)
+ else:
+ switch_model = usi.SwitchModel.OVS_SWITCH
+ params = {
+ "ip_addr": switch_config["ip"],
+ "device_port": self.target_port,
+ "model": switch_model,
+ "username": switch_config["username"],
+ "password": switch_config["password"]
+ }
+ return usi.SwitchInfo(**params)
+
def is_running(self):
"""Return True if this host is running active test."""
return self.state != _STATE.ERROR and self.state != _STATE.DONE
@@ -285,29 +328,50 @@ def notify_activate(self):
self._record_result('startup', state=MODE.HOLD)
return self.state == _STATE.WAITING
+ def connect_port(self, connect):
+ """Connects/Disconnects port for this host"""
+ switch_info = self._build_switch_info()
+ if not switch_info:
+ self.logger.info('No switch model found, skipping port connect')
+ return False
+ try:
+ with grpc.insecure_channel(self._usi_config.get('url')) as channel:
+ timeout = self._usi_config.get('rpc_timeout_sec', self._RPC_TIMEOUT_SEC)
+ stub = usi_service.USIServiceStub(channel)
+ if connect:
+ res = stub.connect(switch_info, timeout=timeout)
+ else:
+ res = stub.disconnect(switch_info, timeout=timeout)
+ self.logger.info('Target port %s %s successful? %s', self.target_port, "connect"
+ if connect else "disconnect", res.success)
+ except Exception as e:
+ self.logger.error(e)
+ raise e
+ return True
+
def _prepare(self):
- LOGGER.info('Target port %d waiting for ip as %s', self.target_port, self.target_mac)
+ self.logger.info('Target device %s waiting for ip', self)
self._state_transition(_STATE.WAITING, _STATE.INIT)
self.record_result('sanity', state=MODE.DONE)
- self.record_result('ipaddr', state=MODE.EXEC)
+ self.record_result('acquire', state=MODE.EXEC)
static_ip = self._get_static_ip()
if static_ip:
- LOGGER.info('Target port %d using static ip', self.target_port)
+ self.logger.info('Target device %s using static ip', self)
time.sleep(self._STARTUP_MIN_TIME_SEC)
self.runner.ip_notify(MODE.NOPE, {
'mac': self.target_mac,
'ip': static_ip,
'delta': -1
- }, self.gateway.port_set)
+ }, self.gateway)
else:
dhcp_mode = self._get_dhcp_mode()
# enables dhcp response for this device
wait_time = self.runner.config.get("long_dhcp_response_sec") \
if dhcp_mode == 'long_response' else 0
- LOGGER.info('Target port %d using %s DHCP mode, wait %s',
- self.target_port, dhcp_mode, wait_time)
- self.gateway.execute_script('change_dhcp_response_time', self.target_mac, wait_time)
- _ = [listener(self) for listener in self._dhcp_listeners]
+ self.logger.info('Target device %s using %s DHCP mode, wait %s',
+ self, dhcp_mode, wait_time)
+ self.gateway.change_dhcp_response_time(self.target_mac, wait_time)
+ _ = [listener(self.device) for listener in self._dhcp_listeners]
def _aux_module_timeout_handler(self):
# clean up tcp monitor that could be open
@@ -315,7 +379,7 @@ def _aux_module_timeout_handler(self):
def _main_module_timeout_handler(self):
self.test_host.terminate()
- self._docker_callback(exception=self._TIMEOUT_EXCEPTION)
+ self._module_callback(exception=self._TIMEOUT_EXCEPTION)
def heartbeat(self):
"""Checks module run time for each event loop"""
@@ -326,7 +390,8 @@ def heartbeat(self):
nowtime = gcp.parse_timestamp(gcp.get_timestamp())
if nowtime >= timeout:
if self.timeout_handler:
- LOGGER.error('Monitoring timeout for %s after %ds', self.test_name, timeout_sec)
+ self.logger.error('Monitoring timeout for %s after %ds', self.test_name,
+ timeout_sec)
# ensure it's called once
handler, self.timeout_handler = self.timeout_handler, None
handler()
@@ -340,19 +405,20 @@ def _finalize_report(self):
report_paths = self._report.finalize()
if self._trigger_path:
report_paths.update({'trigger_path': self._trigger_path})
- LOGGER.info('Finalized with reports %s', list(report_paths.keys()))
+ self.logger.info('Finalized with reports %s', list(report_paths.keys()))
report_blobs = {name: self._upload_file(path) for name, path in report_paths.items()}
self.record_result('terminate', state=MODE.TERM, **report_blobs)
self._report = None
def terminate(self, reason, trigger=True):
"""Terminate this host"""
- LOGGER.info('Target port %d terminate, running %s, trigger %s: %s', self.target_port,
- self._host_name(), trigger, reason)
+ self.logger.info('Target device %s terminate, running %s, trigger %s: %s', self,
+ self._host_name(), trigger, reason)
self._state_transition(_STATE.TERM)
self._release_config()
self._monitor_cleanup()
- self.runner.network.delete_mirror_interface(self.target_port)
+ if self.target_port:
+ self.runner.network.delete_mirror_interface(self.target_port)
self._finalize_report()
if self.test_host:
try:
@@ -360,12 +426,12 @@ def terminate(self, reason, trigger=True):
self.test_host = None
self.timeout_handler = None
except Exception as e:
- LOGGER.error('Target port %d terminating test: %s', self.target_port, e)
- LOGGER.exception(e)
+ self.logger.error('Target device %s terminating test: %s', self, self.test_name)
+ self.logger.exception(e)
if trigger:
- self.runner.target_set_complete(self.target_port,
- 'Target port %d termination: %s' % (
- self.target_port, self.test_host))
+ self.runner.target_set_complete(self.device,
+ 'Target device %s termination: %s' % (
+ self, self.test_host))
def idle_handler(self):
"""Trigger events from idle state"""
@@ -380,8 +446,11 @@ def ip_notify(self, target_ip, state=MODE.DONE, delta_sec=-1):
with open(self._trigger_path, 'a') as output_stream:
output_stream.write('%s %s %d\n' % (target_ip, state, delta_sec))
self._all_ips.append({"ip": target_ip, "timestamp": time.time()})
- if self._get_dhcp_mode() == "ip_change" and len(self._all_ips) == 1:
- self.gateway.request_new_ip(self.target_mac)
+ # Update ip directly if it's already triggered.
+ if self.target_ip:
+ self.target_ip = target_ip
+ if self.test_host:
+ self.test_host.ip_listener(target_ip)
def trigger_ready(self):
"""Check if this host is ready to be triggered"""
@@ -397,41 +466,42 @@ def trigger_ready(self):
def trigger(self, state=MODE.DONE, target_ip=None, exception=None, delta_sec=-1):
"""Handle device trigger"""
if not self.target_ip and not self.trigger_ready():
- LOGGER.warn('Target port %d ignoring premature trigger', self.target_port)
+ self.logger.warning('Target device %s ignoring premature trigger', self)
return False
if self.target_ip:
- LOGGER.debug('Target port %d already triggered', self.target_port)
+ self.logger.debug('Target device %s already triggered', self)
assert self.target_ip == target_ip, "target_ip mismatch"
return True
self.target_ip = target_ip
self._record_result('info', state='%s/%s' % (self.target_mac, target_ip))
- self.record_result('ipaddr', ip=target_ip, state=state, exception=exception)
+ self.record_result('acquire', ip=target_ip, state=state, exception=exception)
if exception:
self._state_transition(_STATE.ERROR)
- self.runner.target_set_error(self.target_port, exception)
+ self.runner.target_set_error(self.device, exception)
else:
- LOGGER.info('Target port %d triggered as %s', self.target_port, target_ip)
+ self.logger.info('Target device %s triggered as %s', self, target_ip)
self._state_transition(_STATE.BASE, _STATE.WAITING)
return True
def _ping_test(self, src, dst, src_addr=None):
if not src or not dst:
- LOGGER.error('Invalid ping test params, src=%s, dst=%s', src, dst)
+ self.logger.error('Invalid ping test params, src=%s, dst=%s', src, dst)
return False
return self.runner.ping_test(src, dst, src_addr=src_addr)
def _startup_scan(self):
self._startup_file = os.path.join(self.scan_base, 'startup.pcap')
self._startup_time = datetime.now()
- LOGGER.info('Target port %d startup pcap capture', self.target_port)
+ self.logger.info('Target device %s startup pcap capture', self)
self._monitor_scan(self._startup_file)
def _monitor_scan(self, output_file, timeout=None):
assert not self._monitor_ref, 'tcp_monitor already active'
network = self.runner.network
tcp_filter = ''
- LOGGER.info('Target port %d pcap intf %s for %ss output in %s',
- self.target_port, self._mirror_intf_name, timeout, output_file)
+ self.logger.info('Target device %s pcap intf %s for %s seconds output in %s',
+ self, self._mirror_intf_name, timeout if timeout else 'infinite',
+ output_file)
helper = tcpdump_helper.TcpdumpHelper(network.pri, tcp_filter, packets=None,
intf_name=self._mirror_intf_name,
timeout=timeout, pcap_out=output_file,
@@ -447,10 +517,10 @@ def _base_start(self):
success = self._base_tests()
self._monitor_cleanup()
if not success:
- LOGGER.warning('Target port %d base tests failed', self.target_port)
+ self.logger.warning('Target device %s base tests failed', self)
self._state_transition(_STATE.ERROR)
return
- LOGGER.info('Target port %d done with base.', self.target_port)
+ self.logger.info('Target device %s done with base.', self)
self._background_scan()
except Exception as e:
self._monitor_cleanup()
@@ -458,7 +528,7 @@ def _base_start(self):
def _monitor_cleanup(self, forget=True):
if self._monitor_ref:
- LOGGER.info('Target port %d network pcap complete', self.target_port)
+ self.logger.info('Target device %s network pcap complete', self)
active = self._monitor_ref.stream() and not self._monitor_ref.stream().closed
assert active == forget, 'forget and active mismatch'
self._upload_file(self._startup_file)
@@ -468,22 +538,22 @@ def _monitor_cleanup(self, forget=True):
self._monitor_ref = None
def _monitor_error(self, exception, forget=False):
- LOGGER.error('Target port %d monitor error: %s', self.target_port, exception)
+ self.logger.error('Target device %s monitor error: %s', self, exception)
self._monitor_cleanup(forget=forget)
self.record_result(self.test_name, exception=exception)
self._state_transition(_STATE.ERROR)
- self.runner.target_set_error(self.target_port, exception)
+ self.runner.target_set_error(self.device, exception)
def _background_scan(self):
self._state_transition(_STATE.MONITOR, _STATE.BASE)
if not self._monitor_scan_sec:
- LOGGER.info('Target port %d skipping background pcap', self.target_port)
+ self.logger.info('Target device %s skipping background pcap', self)
self._monitor_continue()
return
self.record_result('monitor', time=self._monitor_scan_sec, state=MODE.EXEC)
monitor_file = os.path.join(self.scan_base, 'monitor.pcap')
- LOGGER.info('Target port %d background pcap for %ds',
- self.target_port, self._monitor_scan_sec)
+ self.logger.info('Target device %s background pcap for %ds',
+ self, self._monitor_scan_sec)
self._monitor_scan(monitor_file, timeout=self._monitor_scan_sec)
def _monitor_timeout(self, timeout):
@@ -494,19 +564,20 @@ def _monitor_timeout(self, timeout):
self._monitor_complete()
def _monitor_complete(self):
- LOGGER.info('Target port %d pcap complete', self.target_port)
+ self.logger.info('Target device %s pcap complete', self)
self._monitor_cleanup(forget=False)
self.record_result('monitor', state=MODE.DONE)
self._monitor_continue()
def _monitor_continue(self):
self._state_transition(_STATE.NEXT, _STATE.MONITOR)
+ self.test_name = None
self._run_next_test()
def _base_tests(self):
self.record_result('base', state=MODE.EXEC)
if not self._ping_test(self.gateway.host, self.target_ip):
- LOGGER.debug('Target port %d warmup ping failed', self.target_port)
+ self.logger.debug('Target device %s warmup ping failed', self)
try:
success1 = self._ping_test(self.gateway.host, self.target_ip), 'simple ping failed'
success2 = self._ping_test(self.gateway.host, self.target_ip,
@@ -521,22 +592,21 @@ def _base_tests(self):
return True
def _run_next_test(self):
+ assert not self.test_name, 'test_name defined: %s' % self.test_name
try:
if self.remaining_tests:
- LOGGER.debug('Target port %d executing tests %s',
- self.target_port, self.remaining_tests)
- self.timeout_handler = self._main_module_timeout_handler
- self._docker_test(self.remaining_tests.pop(0))
+ self.logger.debug('Target device %s executing tests %s',
+ self, self.remaining_tests)
+ self._run_test(self.remaining_tests.pop(0))
else:
- LOGGER.info('Target port %d no more tests remaining', self.target_port)
+ self.logger.info('Target device %s no more tests remaining', self)
self.timeout_handler = self._aux_module_timeout_handler
self._state_transition(_STATE.DONE, _STATE.NEXT)
- self.test_name = None
self.record_result('finish', state=MODE.FINE)
except Exception as e:
- LOGGER.error('Target port %d start error: %s', self.target_port, e)
+ self.logger.error('Target device %s start error: %s', self, e)
self._state_transition(_STATE.ERROR)
- self.runner.target_set_error(self.target_port, e)
+ self.runner.target_set_error(self.device, e)
def _inst_config_path(self):
return os.path.abspath(os.path.join(self._INST_DIR, self._CONFIG_DIR))
@@ -547,36 +617,61 @@ def _device_aux_path(self):
os.makedirs(path)
return path
- def _docker_test(self, test_name):
- self.test_name = test_name
- self.test_start = gcp.get_timestamp()
- self.test_host = docker_test.DockerTest(self.runner, self.target_port,
- self.devdir, test_name)
- LOGGER.debug('test_host start %s/%s', test_name, self._host_name())
+ def _new_test(self, test_name):
+ clazz = ipaddr_test.IpAddrTest if test_name == 'ipaddr' else docker_test.DockerTest
+ return clazz(self, self.devdir, test_name, self._loaded_config)
+
+ def _run_test(self, test_name):
+ self.timeout_handler = self._main_module_timeout_handler
+ self.test_host = self._new_test(test_name)
+
+ self.logger.info('Target device %s start %s', self, self._host_name())
try:
- self.test_port = self.runner.allocate_test_port(self.target_port)
+ self.test_port = self.gateway.allocate_test_port()
except Exception as e:
self.test_host = None
raise e
try:
- self._start_test_host()
+ self._start_test(test_name)
+ params = self._get_module_params()
+ self.test_host.start(self.test_port, params, self._module_callback, self._finish_hook)
except Exception as e:
self.test_host = None
- self.runner.release_test_port(self.target_port, self.test_port)
+ self.gateway.release_test_port(self.test_port)
self.test_port = None
self._monitor_cleanup()
raise e
- def _start_test_host(self):
- params = self._get_module_params()
+ def _start_test(self, test_name):
+ self.test_name = test_name
+ self.test_start = gcp.get_timestamp()
self._write_module_config(self._loaded_config, self._host_tmp_path())
self._record_result(self.test_name, config=self._loaded_config, state=MODE.CONF)
self.record_result(self.test_name, state=MODE.EXEC)
self._monitor_scan(os.path.join(self.scan_base, 'test_%s.pcap' % self.test_name))
self._state_transition(_STATE.TESTING, _STATE.NEXT)
- self.test_host.start(self.test_port, params, self._docker_callback, self._finish_hook)
+
+ def _end_test(self, state=MODE.DONE, return_code=None, exception=None):
+ self._monitor_cleanup()
+ self._state_transition(_STATE.NEXT, _STATE.TESTING)
+ report_path = os.path.join(self._host_tmp_path(), 'report.txt')
+ activation_log_path = os.path.join(self._host_dir_path(), 'activate.log')
+ module_config_path = os.path.join(self._host_tmp_path(), self._MODULE_CONFIG)
+ remote_paths = {}
+ for result_type, path in ((ResultType.REPORT_PATH, report_path),
+ (ResultType.ACTIVATION_LOG_PATH, activation_log_path),
+ (ResultType.MODULE_CONFIG_PATH, module_config_path)):
+ if os.path.isfile(path):
+ self._report.accumulate(self.test_name, {result_type: path})
+ remote_paths[result_type.value] = self._upload_file(path)
+ self.record_result(self.test_name, state=state, code=return_code, exception=exception,
+ **remote_paths)
+ self.test_name = None
+ self.test_host = None
+ self.timeout_handler = None
+ self._run_next_test()
def _get_module_params(self):
switch_setup = self.switch_setup if 'mods_addr' in self.switch_setup else None
@@ -585,7 +680,7 @@ def _get_module_params(self):
'local_ip': ext_loip,
'target_ip': self.target_ip,
'target_mac': self.target_mac,
- 'target_port': str(self.target_port),
+ 'target_port': str(self.target_port) if self.target_port else None,
'gateway_ip': self.gateway.host.IP(),
'gateway_mac': self.gateway.host.MAC(),
'inst_base': self._inst_config_path(),
@@ -616,46 +711,39 @@ def _host_tmp_path(self):
return os.path.join(self._host_dir_path(), 'tmp')
def _finish_hook(self):
- if self._finish_hook_script:
+ script = self.config.get('finish_hook')
+ if script:
finish_dir = os.path.join(self.devdir, 'finish', self._host_name())
shutil.rmtree(finish_dir, ignore_errors=True)
os.makedirs(finish_dir)
- LOGGER.info('Executing finish_hook: %s %s', self._finish_hook_script, finish_dir)
- os.system('%s %s 2>&1 > %s/finish.out' %
- (self._finish_hook_script, finish_dir, finish_dir))
-
- def _docker_callback(self, return_code=None, exception=None):
+ self.logger.info('Executing finish_hook: %s %s', script, finish_dir)
+ os.system('%s %s 2>&1 > %s/finish.out' % (script, finish_dir, finish_dir))
+
+ def _topology_hook(self):
+ if self._topology_hook_script:
+ update_dir = self._NETWORK_DIR
+ self.logger.info('Executing topology_hook: %s %s',
+ self._topology_hook_script, update_dir)
+ os.system('%s %s 2>&1 > %s/update.out' %
+ (self._topology_hook_script, update_dir, update_dir))
+
+ def _module_callback(self, return_code=None, exception=None):
host_name = self._host_name()
- LOGGER.info('Host callback %s/%s was %s with %s',
- self.test_name, host_name, return_code, exception)
- self._monitor_cleanup()
+ self.logger.info('Host callback %s/%s was %s with %s',
+ self.test_name, host_name, return_code, exception)
failed = return_code or exception
state = MODE.MERR if failed else MODE.DONE
- report_path = os.path.join(self._host_tmp_path(), 'report.txt')
- activation_log_path = os.path.join(self._host_dir_path(), 'activate.log')
- module_config_path = os.path.join(self._host_tmp_path(), self._MODULE_CONFIG)
- remote_paths = {}
- for result_type, path in ((ResultType.REPORT_PATH, report_path),
- (ResultType.ACTIVATION_LOG_PATH, activation_log_path),
- (ResultType.MODULE_CONFIG_PATH, module_config_path)):
- if os.path.isfile(path):
- self._report.accumulate(self.test_name, {result_type: path})
- remote_paths[result_type.value] = self._upload_file(path)
- self.record_result(self.test_name, state=state, code=return_code, exception=exception,
- **remote_paths)
- self.runner.release_test_port(self.target_port, self.test_port)
- self._state_transition(_STATE.NEXT, _STATE.TESTING)
- assert self.test_host, '_docker_callback with no test_host defined'
- self.test_host = None
- self.timeout_handler = None
- self._run_next_test()
+ self.gateway.release_test_port(self.test_port)
+ assert self.test_host, '_module_callback with no test_host defined'
+ self._end_test(state=state, return_code=return_code, exception=exception)
def _merge_run_info(self, config):
config['run_info'] = {
'run_id': self.run_id,
'mac_addr': self.target_mac,
'started': gcp.get_timestamp(),
- 'switch': self._get_switch_config()
+ 'switch': self._get_switch_config(),
+ 'usi': self._usi_config
}
config['run_info'].update(self.runner.get_run_info())
@@ -663,17 +751,17 @@ def _load_module_config(self, run_info=True):
config = self.runner.get_base_config()
if run_info:
self._merge_run_info(config)
- self._load_config(config, self._type_path())
- self._load_config(config, self._device_base)
- self._load_config(config, self._port_base)
+ self._load_config('type', config, self._type_path())
+ self._load_config('device', config, self._device_base)
+ self._load_config('port', config, self._port_base)
return config
def record_result(self, name, **kwargs):
"""Record a named result for this test"""
current = gcp.get_timestamp()
if name != self.test_name:
- LOGGER.debug('Target port %d report %s start %s',
- self.target_port, name, current)
+ self.logger.debug('Target device %s report %s start %s',
+ self, name, current)
self.test_name = name
self.test_start = current
if name:
@@ -688,6 +776,7 @@ def _record_result(self, name, run_info=True, current=None, **kwargs):
result = {
'name': name,
'runid': (self.run_id if run_info else None),
+ 'daq_run_id': self.runner.daq_run_id,
'device_id': self.target_mac,
'started': self.test_start,
'timestamp': current if current else gcp.get_timestamp(),
@@ -709,12 +798,12 @@ def _exception_message(self, exception):
return str(exception)
def _control_updated(self, control_config):
- LOGGER.info('Updated control config: %s %s', self.target_mac, control_config)
+ self.logger.info('Updated control config: %s %s', self, control_config)
paused = control_config.get('paused')
if not paused and self.is_ready():
self._start_run()
elif paused and not self.is_ready():
- LOGGER.warning('Inconsistent control state for update of %s', self.target_mac)
+ self.logger.warning('Inconsistent control state for update of %s', self)
def reload_config(self):
"""Trigger a config reload due to an external config change."""
@@ -723,24 +812,29 @@ def reload_config(self):
if device_ready:
self._loaded_config = new_config
config_bundle = self._make_config_bundle(new_config)
- LOGGER.info('Device config reloaded: %s %s', device_ready, self.target_mac)
+ self.logger.info('Device config reloaded: %s %s', device_ready, self)
self._record_result(None, run_info=device_ready, config=config_bundle)
return new_config
def _dev_config_updated(self, dev_config):
- LOGGER.info('Device config update: %s %s', self.target_mac, dev_config)
+ self.logger.info('Device config update: %s %s', self, dev_config)
self._write_module_config(dev_config, self._device_base)
self.reload_config()
def _initialize_config(self):
- dev_config = self._load_config({}, self._device_base)
+ dev_config = self._load_config('base', {}, self._device_base)
self._gcp.register_config(self._DEVICE_PATH % self.target_mac,
dev_config, self._dev_config_updated)
- self._gcp.register_config(self._CONTROL_PATH % self.target_port,
- self._make_control_bundle(),
- self._control_updated, immediate=True)
+ if self.target_port:
+ self._gcp.register_config(self._CONTROL_PATH % self.target_port,
+ self._make_control_bundle(),
+ self._control_updated, immediate=True)
self._record_result(None, config=self._make_config_bundle())
def _release_config(self):
self._gcp.release_config(self._DEVICE_PATH % self.target_mac)
- self._gcp.release_config(self._CONTROL_PATH % self.target_port)
+ if self.target_port:
+ self._gcp.release_config(self._CONTROL_PATH % self.target_port)
+
+ def __repr__(self):
+ return str(self.device) + (" on port %d" % self.target_port if self.target_port else "")
diff --git a/daq/ipaddr_test.py b/daq/ipaddr_test.py
new file mode 100644
index 0000000000..f701404d90
--- /dev/null
+++ b/daq/ipaddr_test.py
@@ -0,0 +1,88 @@
+"""Test module encapsulating ip-address tests (including DHCP)"""
+
+from __future__ import absolute_import
+import time
+import os
+import copy
+import logger
+
+
+from base_module import HostModule
+
+LOGGER = logger.get_logger('ipaddr')
+
+
+class IpAddrTest(HostModule):
+ """Module for inline ipaddr tests"""
+
+ def __init__(self, host, tmpdir, test_name, module_config):
+ super().__init__(host, tmpdir, test_name, module_config)
+ self.test_dhcp_ranges = copy.copy(self.test_config.get('dhcp_ranges', []))
+ self.log_path = os.path.join(self.tmpdir, 'nodes', self.host_name, 'activate.log')
+ self.log_file = None
+ self._ip_callback = None
+ self.tests = [
+ ('dhcp port_toggle test', self._dhcp_port_toggle_test),
+ ('dhcp multi subnet test', self._multi_subnet_test),
+ ('ip change test', self._ip_change_test),
+ ('finalize', self._finalize)
+ ]
+
+ def start(self, port, params, callback, finish_hook):
+ """Start the ip-addr tests"""
+ super().start(port, params, callback, finish_hook)
+ LOGGER.debug('Target device %s starting ipaddr test %s', self.device, self.test_name)
+ self.log_file = open(self.log_path, 'w')
+ self._next_test()
+
+ def _next_test(self):
+ try:
+ name, func = self.tests.pop(0)
+ self.log('Running ' + name)
+ func()
+ except Exception as e:
+ self.log(str(e))
+ self._finalize(exception=e)
+
+ def log(self, message):
+ """Log an activation message"""
+ LOGGER.info(message)
+ self.log_file.write(message + '\n')
+
+ def _dhcp_port_toggle_test(self):
+ if not self.host.connect_port(False):
+ self.log('disconnect port not enabled')
+ return
+ time.sleep(self.host.config.get("port_debounce_sec", 0) + 1)
+ self.host.connect_port(True)
+ self._ip_callback = self._next_test
+
+ def _multi_subnet_test(self):
+ if not self.test_dhcp_ranges:
+ self._next_test()
+ return
+ dhcp_range = self.test_dhcp_ranges.pop(0)
+ self.log('Testing dhcp range: ' + str(dhcp_range))
+ args = (dhcp_range["start"], dhcp_range["end"], dhcp_range["prefix_length"])
+ self.host.gateway.change_dhcp_range(*args)
+ self._ip_callback = self._multi_subnet_test if self.test_dhcp_ranges else self._next_test
+
+ def _ip_change_test(self):
+ self.host.gateway.request_new_ip(self.host.target_mac)
+ self._ip_callback = self._next_test
+
+ def _finalize(self, exception=None):
+ self.terminate()
+ self.callback(exception=exception)
+
+ def terminate(self):
+ """Terminate this set of tests"""
+ self.log('Module terminating')
+ self.log_file.close()
+ self.log_file = None
+
+ def ip_listener(self, target_ip):
+ """Respond to a ip notification event"""
+ self.log('ip notification %s' % target_ip)
+ callback, self._ip_callback = self._ip_callback, None
+ if callback: callback()
diff --git a/daq/network.py b/daq/network.py
index b622205926..255a6bd259 100644
--- a/daq/network.py
+++ b/daq/network.py
@@ -136,7 +136,7 @@ def _attach_sec_device_links(self):
def is_system_port(self, dpid, port):
"""Check if the dpid/port combo is the system trunk port"""
- return dpid == self.topology.PRI_DPID and port == self.topology.PRI_STACK_PORT
+ return dpid == self.topology.PRI_DPID and port == self.topology.PRI_TRUNK_PORT
def is_device_port(self, dpid, port):
"""Check if the dpid/port combo is for a valid device"""
diff --git a/daq/report.py b/daq/report.py
index 311c8fe89e..d3ee8e5046 100644
--- a/daq/report.py
+++ b/daq/report.py
@@ -16,7 +16,6 @@
import gcp
import logger
-
LOGGER = logger.get_logger('report')
class ResultType(Enum):
@@ -306,6 +305,7 @@ def _get_test_info(self, test_name):
return self._module_config.get('tests', {}).get(test_name, {})
def _write_repitems(self):
+ from host import get_test_config # Deferring import
for (test_name, result_dict) in self._repitems.items():
# To not write a module header if there is nothing to report
def writeln(line, test_name=test_name):
@@ -318,7 +318,8 @@ def writeln(line, test_name=test_name):
writeln(self._TEST_SUBHEADER % "Report")
self._append_file(result_dict[ResultType.REPORT_PATH])
if ResultType.MODULE_CONFIG in result_dict:
- config = result_dict[ResultType.MODULE_CONFIG].get("modules", {}).get(test_name)
+ module_configs = result_dict[ResultType.MODULE_CONFIG]
+ config = get_test_config(module_configs, test_name)
if config and len(config) > 0:
writeln(self._TEST_SUBHEADER % "Module Config")
table = MdTable(["Attribute", "Value"])
diff --git a/daq/runner.py b/daq/runner.py
index 631aa43526..26b5ab3c54 100644
--- a/daq/runner.py
+++ b/daq/runner.py
@@ -25,18 +25,102 @@
class PortInfo:
"""Simple container for device port info"""
active = False
- flapping_start = 0
- mac = None
- host = None
- gateway = None
-
+ flapping_start = None
+ port_no = None
+
+
+class IpInfo:
+ """Simple container for device ip info"""
+ ip_addr = None
+ state = None
+ delta_sec = None
+
+
+class Device:
+ """Simple container for device info"""
+ def __init__(self):
+ self.mac = None
+ self.host = None
+ self.gateway = None
+ self.group = None
+ self.port = None
+ self.dhcp_ready = False
+ self.ip_info = IpInfo()
+ self.set_id = None
+
+ def __repr__(self):
+ return self.mac.replace(":", "")
+
+
+class Devices:
+ """Container for all devices"""
+ def __init__(self):
+ self._devices = {}
+ self._set_ids = set()
+
+ def new_device(self, mac, port_info=None):
+ """Adding a new device"""
+ assert mac not in self._devices, "Device with mac: %s is already added." % mac
+ device = Device()
+ device.mac = mac
+ self._devices[mac] = device
+ device.port = port_info if port_info else PortInfo()
+ port_no = device.port.port_no
+ set_id = port_no if port_no else self._allocate_set_id()
+ assert set_id not in self._set_ids, "Duplicate device set id %d" % set_id
+ self._set_ids.add(set_id)
+ device.set_id = set_id
+ return device
+
+ def _allocate_set_id(self):
+ set_id = 1
+ while set_id in self._set_ids:
+ set_id += 1
+ return set_id
+
+ def remove(self, device):
+ """Removing a device"""
+ assert self.contains(device), "Device %s not found." % device
+ del self._devices[device.mac]
+ self._set_ids.remove(device.set_id)
+
+ def get(self, device_mac):
+ """Get a device using its mac address"""
+ return self._devices.get(device_mac)
+
+ def get_by_port_info(self, port):
+ """Get a device using its port info object"""
+ for device in self._devices.values():
+ if device.port == port:
+ return device
+ return None
+
+ def get_by_gateway(self, gateway):
+ """Get devices under specified gateway"""
+ return [device for device in self._devices.values() if device.gateway == gateway]
+
+ def get_by_group(self, group_name):
+ """Get devices under a group name"""
+ return [device for device in self._devices.values() if device.group == group_name]
+
+ def get_all_devices(self):
+ """Get all devices"""
+ return list(self._devices.values())
+
+ def get_triggered_devices(self):
+ """Get devices with hosts"""
+ return [device for device in self._devices.values() if device.host]
+
+ def contains(self, device):
+ """Returns true if the device is expected"""
+ return self._devices.get(device.mac) == device
class DAQRunner:
"""Main runner class controlling DAQ. Primarily mediates between
faucet events, connected hosts (to test), and gcp for logging. This
class owns the main event loop and shards out work to subclasses."""
- MAX_GATEWAYS = 10
+ MAX_GATEWAYS = 9
_DEFAULT_RETENTION_DAYS = 30
_MODULE_CONFIG = 'module_config.json'
_RUNNER_CONFIG_PATH = 'runner/setup'
@@ -45,13 +129,11 @@ class owns the main event loop and shards out work to subclasses."""
def __init__(self, config):
self.configurator = configurator.Configurator()
+ self.gateway_sets = set(range(1, self.MAX_GATEWAYS+1))
self.config = config
- self._port_info = {}
self._result_sets = {}
- self._mac_port_map = {}
- self._device_groups = {}
- self._gateway_sets = {}
- self._target_mac_ip = {}
+ self._devices = Devices()
+ self._ports = {}
self._callback_queue = []
self._callback_lock = threading.Lock()
self.gcp = gcp.GcpManager(self.config, self._queue_callback)
@@ -65,8 +147,8 @@ def __init__(self, config):
self._linger_exit = 0
self.faucet_events = None
self.single_shot = config.get('single_shot', False)
- self.event_trigger = config.get('event_trigger', False)
self.fail_mode = config.get('fail_mode', False)
+ self.run_trigger_type = config.get('run_trigger_type', 'PORT')
self.run_tests = True
self.stream_monitor = None
self.exception = None
@@ -75,30 +157,22 @@ def __init__(self, config):
self._default_port_flap_timeout = int(config.get('port_flap_timeout_sec', 0))
self.result_log = self._open_result_log()
self._system_active = False
- self._dhcp_ready = set()
- self._ip_info = {}
logging_client = self.gcp.get_logging_client()
- self._daq_run_id = uuid.uuid4()
+ self.daq_run_id = self._init_daq_run_id()
if logging_client:
logger.set_stackdriver_client(logging_client,
- labels={"daq_run_id": str(self._daq_run_id)})
+ labels={"daq_run_id": self.daq_run_id})
test_list = self._get_test_list(config.get('host_tests', self._DEFAULT_TESTS_FILE), [])
if self.config.get('keep_hold'):
LOGGER.info('Appending test_hold to master test list')
test_list.append('hold')
config['test_list'] = test_list
- LOGGER.info('DAQ RUN id: %s' % self._daq_run_id)
+ LOGGER.info('DAQ RUN id: %s' % self.daq_run_id)
LOGGER.info('Configured with tests %s' % ', '.join(config['test_list']))
LOGGER.info('DAQ version %s' % self._daq_version)
LOGGER.info('LSB release %s' % self._lsb_release)
LOGGER.info('system uname %s' % self._sys_uname)
- def _flush_faucet_events(self):
- LOGGER.info('Flushing faucet event queue...')
- if self.faucet_events:
- while self.faucet_events.next_event():
- pass
-
def _open_result_log(self):
return open(self._RESULT_LOG_FILE, 'w')
@@ -106,6 +180,12 @@ def _get_states(self):
states = connected_host.pre_states() + self.config['test_list']
return states + connected_host.post_states()
+ def _init_daq_run_id(self):
+ daq_run_id = str(uuid.uuid4())
+ with open('inst/daq_run_id.txt', 'w') as output_stream:
+ output_stream.write(daq_run_id + '\n')
+ return daq_run_id
+
def _send_heartbeat(self):
message = {
'name': 'status',
@@ -123,7 +203,7 @@ def get_run_info(self):
'version': self._daq_version,
'lsb': self._lsb_release,
'uname': self._sys_uname,
- 'daq_run_id': str(self._daq_run_id)
+ 'daq_run_id': self.daq_run_id
}
data_retention_days = self.config.get('run_data_retention_days',
self._DEFAULT_RETENTION_DAYS)
@@ -182,9 +262,12 @@ def _handle_faucet_events(self):
LOGGER.debug('port_state: %s %s', dpid, port)
self._handle_port_state(dpid, port, active)
return
- (dpid, port, target_mac) = self.faucet_events.as_port_learn(event)
- if dpid and port:
- self._handle_port_learn(dpid, port, target_mac)
+ (dpid, port, target_mac, vid) = self.faucet_events.as_port_learn(event)
+ if dpid and port and vid:
+ if self.run_trigger_type == "PORT":
+ self._handle_port_learn(dpid, port, vid, target_mac)
+ elif self.run_trigger_type == "VLAN" and self.network.is_system_port(dpid, port):
+ self._handle_device_learn(vid, target_mac)
return
(dpid, restart_type) = self.faucet_events.as_config_change(event)
if dpid is not None:
@@ -203,44 +286,57 @@ def _handle_port_state(self, dpid, port, active):
LOGGER.debug('Unknown port %s on dpid %s is active %s', port, dpid, active)
return
- if port not in self._port_info:
- self._port_info[port] = PortInfo()
+ if port not in self._ports:
+ self._ports[port] = PortInfo()
+ self._ports[port].port_no = port
- if active != self._port_info[port].active:
+ if active != self._ports[port].active:
LOGGER.info('Port %s dpid %s is now %s', port, dpid, "active" if active else "inactive")
if active:
self._activate_port(port)
else:
- port_info = self._port_info[port]
- if port_info.host and not port_info.flapping_start:
+ device = self._devices.get_by_port_info(self._ports[port])
+ port_info = self._ports[port]
+ if device and device.host and not port_info.flapping_start:
port_info.flapping_start = time.time()
if port_info.active:
- if port_info.mac and not port_info.flapping_start:
- self._direct_port_traffic(port_info.mac, port, None)
+ if device and not port_info.flapping_start:
+ self._direct_port_traffic(device.mac, port, None)
self._deactivate_port(port)
self._send_heartbeat()
def _activate_port(self, port):
- port_info = self._port_info[port]
+ port_info = self._ports[port]
port_info.flapping_start = 0
port_info.active = True
def _deactivate_port(self, port):
- port_info = self._port_info[port]
+ port_info = self._ports[port]
port_info.active = False
def _direct_port_traffic(self, mac, port, target):
self.network.direct_port_traffic(mac, port, target)
- def _handle_port_learn(self, dpid, port, target_mac):
+ def _handle_port_learn(self, dpid, port, vid, target_mac):
if self.network.is_device_port(dpid, port):
LOGGER.info('Port %s dpid %s learned %s', port, dpid, target_mac)
- self._mac_port_map[target_mac] = port
- self._port_info[port].mac = target_mac
- self._target_set_trigger(port)
+ if port not in self._ports:
+ self._ports[port] = PortInfo()
+ self._ports[port].port_no = port
+ if not self._devices.get(target_mac):
+ self._devices.new_device(target_mac, port_info=self._ports[port])
+ self._target_set_trigger(self._devices.get(target_mac))
else:
LOGGER.debug('Port %s dpid %s learned %s (ignored)', port, dpid, target_mac)
+ def _handle_device_learn(self, vid, target_mac):
+ LOGGER.info('%s learned on vid %s', target_mac, vid)
+ if not self._devices.get(target_mac):
+ device = self._devices.new_device(target_mac)
+ else:
+ device = self._devices.get(target_mac)
+ self._target_set_trigger(device)
+
def _queue_callback(self, callback):
with self._callback_lock:
LOGGER.debug('Register callback')
@@ -259,21 +355,19 @@ def _handle_system_idle(self):
# Some synthetic faucet events don't come in on the socket, so process them here.
self._handle_faucet_events()
all_idle = True
- for target_port, target_host in self._get_port_hosts():
+ for device in self._devices.get_triggered_devices():
try:
- if target_host.is_running():
+ if device.host.is_running():
all_idle = False
- target_host.idle_handler()
+ device.host.idle_handler()
else:
- self.target_set_complete(target_port, 'target set not active')
+ self.target_set_complete(device, 'target set not active')
except Exception as e:
- self.target_set_error(target_host.target_port, e)
- if not self.event_trigger:
- for target_port, port_info in self._port_info.items():
- if port_info.active and port_info.mac:
- self._target_set_trigger(target_port)
- all_idle = False
- if not self._get_running_ports() and not self.run_tests:
+ self.target_set_error(device, e)
+ for device in self._devices.get_all_devices():
+ self._target_set_trigger(device)
+ all_idle = False
+ if not self._devices.get_triggered_devices() and not self.run_tests:
if self.faucet_events and not self._linger_exit:
self.shutdown()
if self._linger_exit == 1:
@@ -281,20 +375,19 @@ def _handle_system_idle(self):
LOGGER.warning('Result linger on exit.')
all_idle = False
if all_idle:
- LOGGER.debug('No active device ports, waiting for trigger event...')
+ LOGGER.debug('No active device, waiting for trigger event...')
def _reap_stale_ports(self):
- for port, port_info in copy.copy(self._port_info).items():
- if not port_info.flapping_start or not port_info.host:
+ for device in self._devices.get_triggered_devices():
+ if not device.port.flapping_start:
continue
- host = port_info.host
- timeout_sec = host.get_port_flap_timeout(host.test_name)
+ timeout_sec = device.host.get_port_flap_timeout(device.host.test_name)
if timeout_sec is None:
timeout_sec = self._default_port_flap_timeout
- if (port_info.flapping_start + timeout_sec) <= time.time():
+ if (device.port.flapping_start + timeout_sec) <= time.time():
exception = DaqException('port not active for %ds' % timeout_sec)
- self.target_set_error(port, exception)
- port_info.flapping_start = 0
+ self.target_set_error(device, exception)
+ device.port.flapping_start = 0
def shutdown(self):
"""Shutdown this runner by closing all active components"""
@@ -308,16 +401,16 @@ def shutdown(self):
def _loop_hook(self):
self._handle_queued_events()
- states = {p: h.state for p, h in self._get_port_hosts()}
+ states = {device.mac: device.host.state for device in self._devices.get_triggered_devices()}
LOGGER.debug('Active target sets/state: %s', states)
def _terminate(self):
- for target_port in self._get_running_ports():
- self.target_set_error(target_port, DaqException('terminated'))
+ for device in self._devices.get_triggered_devices():
+ self.target_set_error(device, DaqException('terminated'))
def _module_heartbeat(self):
# Should probably be converted to a separate thread to timeout any blocking fn calls
- _ = [host.heartbeat() for _, host in self._get_port_hosts()]
+ _ = [device.host.heartbeat() for device in self._devices.get_triggered_devices()]
def main_loop(self):
"""Run main loop to execute tests"""
@@ -329,8 +422,6 @@ def main_loop(self):
self.stream_monitor = monitor
self.monitor_stream('faucet', self.faucet_events.sock, self._handle_faucet_events,
priority=10)
- if self.event_trigger:
- self._flush_faucet_events()
LOGGER.info('Entering main event loop.')
LOGGER.info('See docs/troubleshooting.md if this blocks for more than a few minutes.')
while self.stream_monitor.event_loop():
@@ -351,64 +442,62 @@ def main_loop(self):
self._terminate()
- def _target_set_trigger(self, target_port):
- target_active = target_port in self._port_info and self._port_info[target_port].active
- assert target_active, 'Target port %d not active' % target_port
-
- target_mac = self._port_info[target_port].mac
- assert target_mac, 'Target port %d triggered but not learned' % target_port
+ def _target_set_trigger(self, device):
+ assert self._devices.contains(device), 'Target device %s is not expected' % device.mac
+ port_trigger = device.port.port_no is not None
+ if port_trigger:
+ assert device.port.active, 'Target port %d is not active' % device.port.port_no
if not self._system_active:
- LOGGER.warning('Target port %d ignored, system not active', target_port)
+ LOGGER.warning('Target device %s ignored, system is not active', device.mac)
return False
- if self._port_info[target_port].host:
- LOGGER.debug('Target port %d already triggered', target_port)
+ if device.host:
+ LOGGER.debug('Target device %s already triggered', device.mac)
return False
if not self.run_tests:
- LOGGER.debug('Target port %d trigger suppressed', target_port)
+ LOGGER.debug('Target device %s trigger suppressed', device.mac)
return False
try:
- group_name = self.network.device_group_for(target_mac)
- gateway = self._activate_device_group(group_name, target_port)
+ group_name = self.network.device_group_for(device.mac)
+ device.group = group_name
+ gateway = self._activate_device_group(device)
if gateway.activated:
- LOGGER.debug('Target port %d trigger ignored b/c activated gateway', target_port)
+ LOGGER.debug('Target device %s trigger ignored b/c activated gateway', device.mac)
return False
except Exception as e:
- LOGGER.error('Target port %d target trigger error %s', target_port, str(e))
+ LOGGER.error('Target device %s target trigger error %s', device.mac, str(e))
if self.fail_mode:
LOGGER.warning('Suppressing further tests due to failure.')
self.run_tests = False
return False
- target = {
- 'port': target_port,
- 'group': group_name,
- 'fake': gateway.fake_target,
- 'port_set': gateway.port_set,
- 'mac': target_mac
- }
-
# Stops all DHCP response initially
# Selectively enables dhcp response at ipaddr stage based on dhcp mode
- gateway.execute_script('change_dhcp_response_time', target_mac, -1)
- gateway.attach_target(target_port, target)
-
+ gateway.stop_dhcp_response(device.mac)
+ gateway.attach_target(device)
+ device.gateway = gateway
try:
self.run_count += 1
- new_host = connected_host.ConnectedHost(self, gateway, target, self.config)
- self._port_info[target_port].host = new_host
- self._port_info[target_port].gateway = gateway
- LOGGER.info('Target port %d registered %s', target_port, target_mac)
+ new_host = connected_host.ConnectedHost(self, device, self.config)
+ device.host = new_host
new_host.register_dhcp_ready_listener(self._dhcp_ready_listener)
new_host.initialize()
- self._direct_port_traffic(target_mac, target_port, target)
+ if port_trigger:
+ target = {
+ 'port': device.port.port_no,
+ 'group': group_name,
+ 'fake': gateway.fake_target,
+ 'port_set': gateway.port_set,
+ 'mac': device.mac
+ }
+ self._direct_port_traffic(device.mac, device.port.port_no, target)
return True
except Exception as e:
- self.target_set_error(target_port, e)
+ self.target_set_error(device, e)
def _get_test_list(self, test_file, test_list):
no_test = self.config.get('no_test', False)
@@ -438,127 +527,101 @@ def _get_test_list(self, test_file, test_list):
line = file.readline()
return test_list
- def allocate_test_port(self, target_port):
- """Get the test port for the given target_port"""
- gateway = self._port_info[target_port].gateway
- return gateway.allocate_test_port()
-
- def release_test_port(self, target_port, test_port):
- """Release the given test port"""
- gateway = self._port_info[target_port].gateway
- return gateway.release_test_port(test_port)
-
- def _activate_device_group(self, group_name, target_port):
- if group_name in self._device_groups:
- existing = self._device_groups[group_name]
- LOGGER.debug('Gateway for existing device group %s is %s', group_name, existing.name)
+ def _activate_device_group(self, device):
+ group_name = device.group
+ group_devices = self._devices.get_by_group(group_name)
+ existing_gateways = {device.gateway for device in group_devices if device.gateway}
+ if existing_gateways:
+ existing = existing_gateways.pop()
+ LOGGER.info('Gateway for existing device group %s is %s', group_name, existing)
return existing
- set_num = self._find_gateway_set(target_port)
+
+ set_num = self._find_gateway_set(device)
LOGGER.info('Gateway for device group %s not found, initializing base %d...',
- group_name, set_num)
+ device.group, set_num)
gateway = gateway_manager.Gateway(self, group_name, set_num, self.network)
- self._gateway_sets[set_num] = group_name
- self._device_groups[group_name] = gateway
try:
gateway.initialize()
except Exception:
LOGGER.error('Cleaning up from failed gateway initialization')
- LOGGER.debug('Clearing target %s gateway group %s for %s',
- target_port, set_num, group_name)
- del self._gateway_sets[set_num]
- del self._device_groups[group_name]
+ LOGGER.debug('Clearing %s gateway group %s for %s',
+ device, set_num, group_name)
+ self.gateway_sets.add(set_num)
raise
return gateway
- def ip_notify(self, state, target, gateway_set, exception=None):
+ def ip_notify(self, state, target, gateway, exception=None):
"""Handle a DHCP / Static IP notification"""
if exception:
assert not target, 'unexpected exception with target'
- LOGGER.error('IP exception for gw%02d: %s', gateway_set, exception)
+ LOGGER.error('IP exception for %s: %s', gateway, exception)
LOGGER.exception(exception)
- self._terminate_gateway_set(gateway_set)
+ self._terminate_gateway_set(gateway)
return
target_mac, target_ip, delta_sec = target['mac'], target['ip'], target['delta']
- LOGGER.info('IP notify %s is %s on gw%02d (%s/%d)', target_mac,
- target_ip, gateway_set, state, delta_sec)
+ LOGGER.info('IP notify %s is %s on %s (%s/%d)', target_mac,
+ target_ip, gateway, state, delta_sec)
if not target_mac:
LOGGER.warning('IP target mac missing')
return
- self._target_mac_ip[target_mac] = target_ip
- host = self._get_host_from_mac(target_mac)
- if host:
- self._ip_info[host] = (state, target, gateway_set)
- host.ip_notify(target_ip, state, delta_sec)
- self._check_and_activate_gateway(host)
-
- def _get_host_from_mac(self, mac):
- if mac not in self._mac_port_map:
- return None
- return self._port_info[self._mac_port_map[mac]].host
-
- def _get_port_hosts(self):
- return list({p: i.host for p, i in self._port_info.items() if i.host}.items())
-
- def _get_running_ports(self):
- return [p for p, i in self._port_info.items() if i.host]
+ device = self._devices.get(target_mac)
+ assert device, 'IP notify for unknown mac %s' % target_mac
+ device.ip_info.ip_addr = target_ip
+ device.ip_info.state, device.ip_info.delta_sec = state, delta_sec
+ if device.host:
+ device.host.ip_notify(target_ip, state, delta_sec)
+ self._check_and_activate_gateway(device)
def _get_active_ports(self):
- return [p for p, i in self._port_info.items() if i.active]
+ return [p.port_no for p in self._ports.values() if p.active]
- def _check_and_activate_gateway(self, host):
+ def _check_and_activate_gateway(self, device):
# Host ready to be activated and DHCP happened / Static IP
- if host not in self._ip_info or host not in self._dhcp_ready:
+ ip_info = device.ip_info
+ if not ip_info.ip_addr or not device.dhcp_ready:
return
- state, target, gateway_set = self._ip_info[host]
- target_mac, target_ip, delta_sec = target['mac'], target['ip'], target['delta']
- (gateway, ready_devices) = self._should_activate_target(target_mac, target_ip, gateway_set)
+ (gateway, ready_devices) = self._should_activate_target(device)
if not ready_devices:
return
-
if ready_devices is True:
- self._get_host_from_mac(target_mac).trigger(state, target_ip=target_ip,
- delta_sec=delta_sec)
+ device.host.trigger(ip_info.state, target_ip=ip_info.ip_addr,
+ delta_sec=ip_info.delta_sec)
else:
- self._activate_gateway(state, gateway, ready_devices, delta_sec)
+ self._activate_gateway(ip_info.state, gateway, ready_devices, ip_info.delta_sec)
- def _dhcp_ready_listener(self, host):
- self._dhcp_ready.add(host)
- self._check_and_activate_gateway(host)
+ def _dhcp_ready_listener(self, device):
+ device.dhcp_ready = True
+ self._check_and_activate_gateway(device)
def _activate_gateway(self, state, gateway, ready_devices, delta_sec):
gateway.activate()
if len(ready_devices) > 1:
state = 'group'
delta_sec = -1
- for ready_mac in ready_devices:
- LOGGER.info('IP activating target %s', ready_mac)
- ready_host = self._get_host_from_mac(ready_mac)
- ready_ip = self._target_mac_ip[ready_mac]
- triggered = ready_host.trigger(state, target_ip=ready_ip, delta_sec=delta_sec)
- assert triggered, 'host %s not triggered' % ready_mac
-
- def _should_activate_target(self, target_mac, target_ip, gateway_set):
- target_host = self._get_host_from_mac(target_mac)
- if not target_host:
- LOGGER.warning('DHCP targets missing %s', target_mac)
+ for device in ready_devices:
+ LOGGER.info('IP activating target %s', device)
+ target_ip, delta_sec = device.ip_info.ip_addr, device.ip_info.delta_sec
+ triggered = device.host.trigger(state, target_ip=target_ip, delta_sec=delta_sec)
+ assert triggered, 'Device %s not triggered' % device
+
+ def _should_activate_target(self, device):
+ if not device.host:
+ LOGGER.warning('DHCP targets missing %s', device)
return False, False
-
- group_name = self._gateway_sets[gateway_set]
- gateway = self._device_groups[group_name]
-
+ gateway, group_name = device.gateway, device.group
if gateway.activated:
LOGGER.info('DHCP activation group %s already activated', group_name)
return gateway, True
- if not target_host.notify_activate():
- LOGGER.info('DHCP device %s ignoring spurious notify', target_mac)
+ if not device.host.notify_activate():
+ LOGGER.info('DHCP device %s ignoring spurious notify', device)
return gateway, False
- ready_devices = gateway.target_ready(target_mac)
+ ready_devices = gateway.target_ready(device)
group_size = self.network.device_group_size(group_name)
remaining = group_size - len(ready_devices)
@@ -566,30 +629,27 @@ def _should_activate_target(self, target_mac, target_ip, gateway_set):
LOGGER.info('DHCP waiting for %d additional members of group %s', remaining, group_name)
return gateway, False
- hosts = map(self._get_host_from_mac, ready_devices)
- ready_trigger = all(map(lambda host: host.trigger_ready(), hosts))
+ ready_trigger = all(map(lambda device: device.host.trigger_ready(), ready_devices))
if not ready_trigger:
LOGGER.info('DHCP device group %s not ready to trigger', group_name)
return gateway, False
return gateway, ready_devices
- def _terminate_gateway_set(self, gateway_set):
- assert gateway_set in self._gateway_sets, 'Gateway set %s not found'
- group_name = self._gateway_sets[gateway_set]
- gateway = self._device_groups[group_name]
- ports = [target['port'] for target in gateway.get_targets()]
- LOGGER.info('Terminating gateway group %s set %s, ports %s', group_name, gateway_set, ports)
- for target_port in ports:
- self.target_set_error(target_port, DaqException('terminated'))
-
- def _find_gateway_set(self, target_port):
- if target_port not in self._gateway_sets:
- return target_port
- for entry in range(1, self.MAX_GATEWAYS):
- if entry not in self._gateway_sets:
- return entry
- raise Exception('Could not allocate open gateway set')
+ def _terminate_gateway_set(self, gateway):
+ gateway_devices = self._devices.get_by_gateway(gateway)
+ assert gateway_devices, '%s not found' % gateway
+ LOGGER.info('Terminating %s', gateway)
+ for device in gateway_devices:
+ self.target_set_error(device, DaqException('terminated'))
+
+ def _find_gateway_set(self, device):
+ if not self.gateway_sets:
+ raise Exception('Could not allocate open gateway set')
+ if device.port.port_no in self.gateway_sets:
+ self.gateway_sets.remove(device.port.port_no)
+ return device.port.port_no
+ return self.gateway_sets.pop()
@staticmethod
def ping_test(src, dst, src_addr=None):
@@ -608,36 +668,34 @@ def ping_test(src, dst, src_addr=None):
LOGGER.info('Test ping failure: %s', e)
return False
- def target_set_error(self, target_port, exception):
- """Handle an error in the target port set"""
- running = bool(target_port in self._port_info and self._port_info[target_port].host)
- LOGGER.error('Target port %d running %s exception: %s', target_port, running, exception)
+ def target_set_error(self, device, exception):
+ """Handle an error in the target set"""
+ running = bool(device.host)
+ LOGGER.error('Target device %s running %s exception: %s', device, running, exception)
LOGGER.exception(exception)
if running:
- target_host = self._port_info[target_port].host
- target_host.record_result(target_host.test_name, exception=exception)
- self.target_set_complete(target_port, str(exception))
+ device.host.record_result(device.host.test_name, exception=exception)
+ self.target_set_complete(device, str(exception))
else:
stack = ''.join(
traceback.format_exception(etype=type(exception), value=exception,
tb=exception.__traceback__))
- self._target_set_finalize(target_port,
+ self._target_set_finalize(device,
{'exception': {'exception': str(exception),
'traceback': stack}},
str(exception))
- self._detach_gateway(target_port)
+ self._detach_gateway(device)
- def target_set_complete(self, target_port, reason):
+ def target_set_complete(self, device, reason):
"""Handle completion of a target_set"""
- target_host = self._port_info[target_port].host
- self._target_set_finalize(target_port, target_host.results, reason)
- self._target_set_cancel(target_port)
+ self._target_set_finalize(device, device.host.results, reason)
+ self._target_set_cancel(device)
- def _target_set_finalize(self, target_port, result_set, reason):
- results = self._combine_result_set(target_port, result_set)
- LOGGER.info('Target port %d finalize: %s (%s)', target_port, results, reason)
+ def _target_set_finalize(self, device, result_set, reason):
+ results = self._combine_result_set(device, result_set)
+ LOGGER.info('Target device %s finalize: %s (%s)', device, results, reason)
if self.result_log:
- self.result_log.write('%02d: %s\n' % (target_port, results))
+ self.result_log.write('%s: %s\n' % (device, results))
self.result_log.flush()
suppress_tests = self.fail_mode or self.result_linger
@@ -646,49 +704,48 @@ def _target_set_finalize(self, target_port, result_set, reason):
self.run_tests = False
if self.result_linger:
self._linger_exit = 1
- self._result_sets[target_port] = result_set
+ self._result_sets[device] = result_set
- def _target_set_cancel(self, target_port):
- target_host = self._port_info[target_port].host
+ def _target_set_cancel(self, device):
+ target_host = device.host
if target_host:
- self._port_info[target_port].host = None
- target_mac = self._port_info[target_port].mac
- del self._mac_port_map[target_mac]
- target_gateway = self._port_info[target_port].gateway
- LOGGER.info('Target port %d cancel %s (#%d/%s).',
- target_port, target_mac, self.run_count, self.run_limit)
- results = self._combine_result_set(target_port, self._result_sets.get(target_port))
+ device.host = None
+ target_gateway = device.gateway
+ target_port = device.port.port_no
+ LOGGER.info('Target device %s cancel (#%d/%s).', device.mac, self.run_count,
+ self.run_limit)
+
+ results = self._combine_result_set(device, self._result_sets.get(device))
this_result_linger = results and self.result_linger
target_gateway_linger = target_gateway and target_gateway.result_linger
if target_gateway_linger or this_result_linger:
- LOGGER.warning('Target port %d result_linger: %s', target_port, results)
- self._activate_port(target_port)
+ LOGGER.warning('Target device %s result_linger: %s', device.mac, results)
+ if target_port:
+ self._activate_port(target_port)
target_gateway.result_linger = True
else:
- self._direct_port_traffic(target_mac, target_port, None)
+ if target_port:
+ self._direct_port_traffic(device.mac, target_port, None)
target_host.terminate('_target_set_cancel', trigger=False)
if target_gateway:
- self._detach_gateway(target_port)
+ self._detach_gateway(device)
if self.run_limit and self.run_count >= self.run_limit and self.run_tests:
LOGGER.warning('Suppressing future tests because run limit reached.')
self.run_tests = False
if self.single_shot and self.run_tests:
LOGGER.warning('Suppressing future tests because test done in single shot.')
self.run_tests = False
- LOGGER.info('Remaining target sets: %s', self._get_running_ports())
+ self._devices.remove(device)
+ LOGGER.info('Remaining target sets: %s', self._devices.get_triggered_devices())
- def _detach_gateway(self, target_port):
- target_gateway = self._port_info[target_port].gateway
+ def _detach_gateway(self, device):
+ target_gateway = device.gateway
if not target_gateway:
return
- self._port_info[target_port].gateway = None
- target_mac = self._port_info[target_port].mac
- if not target_gateway.detach_target(target_port):
- LOGGER.info('Retiring target gateway %s, %s, %s, %s',
- target_port, target_mac, target_gateway.name, target_gateway.port_set)
- group_name = self.network.device_group_for(target_mac)
- del self._device_groups[group_name]
- del self._gateway_sets[target_gateway.port_set]
+ device.gateway = None
+ if not target_gateway.detach_target(device):
+ LOGGER.info('Retiring %s. Last device: %s', target_gateway, device)
+ self.gateway_sets.add(target_gateway.port_set)
target_gateway.terminate()
def monitor_stream(self, *args, **kwargs):
@@ -720,7 +777,7 @@ def _combine_result_set(self, set_key, result_sets):
exp_msg = result.get('exception')
status = exp_msg if exp_msg else code if name != 'fail' else not code
if status != 0:
- results.append('%02d:%s:%s' % (set_key, name, status))
+ results.append('%s:%s:%s' % (set_key, name, status))
return results
def finalize(self):
@@ -742,12 +799,15 @@ def _base_config_changed(self, new_config):
self._MODULE_CONFIG)
self._base_config = self._load_base_config(register=False)
self._publish_runner_config(self._base_config)
- _ = [host.reload_config() for _, host in self._get_port_hosts()]
+ _ = [device.host.reload_config() for device in self._devices.get_triggered_devices()]
def _load_base_config(self, register=True):
- base = self.configurator.load_and_merge({}, self.config.get('base_conf'))
- site_config = self.configurator.load_config(self.config.get('site_path'),
- self._MODULE_CONFIG, optional=True)
+ base_conf = self.config.get('base_conf')
+ LOGGER.info('Loading base module config from %s', base_conf)
+ base = self.configurator.load_and_merge({}, base_conf)
+ site_path = self.config.get('site_path')
+ LOGGER.info('Loading site module config from %s', site_path)
+ site_config = self.configurator.load_config(site_path, self._MODULE_CONFIG, optional=True)
if register:
self.gcp.register_config(self._RUNNER_CONFIG_PATH, site_config,
self._base_config_changed)
diff --git a/daq/topology.py b/daq/topology.py
index 2438b49012..2cda43ebe4 100644
--- a/daq/topology.py
+++ b/daq/topology.py
@@ -31,13 +31,14 @@ class FaucetTopology:
INCOMING_ACL_FORMAT = "dp_%s_incoming_acl"
PORTSET_ACL_FORMAT = "dp_%s_portset_%d_acl"
LOCAL_ACL_FORMAT = "dp_%s_local_acl"
- _DEFAULT_STACK_PORT_NAME = "stack_sec"
+ _DEFAULT_SEC_TRUNK_NAME = "trunk_sec"
_MIRROR_IFACE_FORMAT = "mirror-%d"
_MIRROR_PORT_BASE = 1000
_SWITCH_LOCAL_PORT = _MIRROR_PORT_BASE
_VLAN_BASE = 1000
PRI_DPID = 1
- PRI_STACK_PORT = 1
+ PRI_TRUNK_PORT = 1
+ PRI_TRUNK_NAME = 'trunk_pri'
_NO_VLAN = "0x0000/0x1000"
def __init__(self, config):
@@ -92,7 +93,7 @@ def get_sec_dpid(self):
return self.sec_dpid
def get_sec_port(self):
- """Return the secondary stacking port"""
+ """Return the secondary trunk port"""
return self.sec_port
def get_device_intfs(self):
@@ -173,20 +174,23 @@ def _update_port_vlan(self, port_no, port_set):
def _port_set_vlan(self, port_set=None):
return self._VLAN_BASE + (port_set if port_set else 0)
- def _make_pri_stack_interface(self):
+ def _make_pri_trunk_interface(self):
interface = {}
interface['acl_in'] = self.INCOMING_ACL_FORMAT % self.pri_name
- interface['stack'] = {'dp': self.sec_name, 'port': self.sec_port}
- interface['name'] = 'stack_pri'
+ interface['tagged_vlans'] = self._vlan_tags()
+ interface['name'] = self.PRI_TRUNK_NAME
return interface
- def _make_sec_stack_interface(self):
+ def _make_sec_trunk_interface(self):
interface = {}
interface['acl_in'] = self.INCOMING_ACL_FORMAT % self.sec_name
- interface['stack'] = {'dp': self.pri_name, 'port': self.PRI_STACK_PORT}
- interface['name'] = self.get_ext_intf() or self._DEFAULT_STACK_PORT_NAME
+ interface['tagged_vlans'] = self._vlan_tags()
+ interface['name'] = self.get_ext_intf() or self._DEFAULT_SEC_TRUNK_NAME
return interface
+ def _vlan_tags(self):
+ return list(range(self._VLAN_BASE, self._VLAN_BASE + self.sec_port))
+
def _make_default_acl_rules(self):
rules = []
if not self._append_acl_template(rules, 'raw'):
@@ -201,7 +205,7 @@ def _make_sec_port_interface(self, port_no):
def _make_pri_interfaces(self):
interfaces = {}
- interfaces[self.PRI_STACK_PORT] = self._make_pri_stack_interface()
+ interfaces[self.PRI_TRUNK_PORT] = self._make_pri_trunk_interface()
for port_set in range(1, self.sec_port):
for port in self._get_gw_ports(port_set):
interfaces[port] = self._make_gw_interface(port_set)
@@ -212,7 +216,7 @@ def _make_pri_interfaces(self):
def _make_sec_interfaces(self):
interfaces = {}
- interfaces[self.sec_port] = self._make_sec_stack_interface()
+ interfaces[self.sec_port] = self._make_sec_trunk_interface()
for port in range(1, self.sec_port):
interfaces[port] = self._make_sec_port_interface(port)
return interfaces
@@ -227,23 +231,24 @@ def _make_acl_include(self):
def _make_pri_topology(self):
pri_dp = {}
pri_dp['dp_id'] = self.PRI_DPID
- pri_dp['name'] = self.pri_name
- pri_dp['stack'] = {'priority':1}
pri_dp['interfaces'] = self._make_pri_interfaces()
return pri_dp
def _make_sec_topology(self):
sec_dp = {}
sec_dp['dp_id'] = self.sec_dpid
- sec_dp['name'] = self.sec_name
sec_dp['interfaces'] = self._make_sec_interfaces()
return sec_dp
+ def _has_sec_switch(self):
+ return self.sec_dpid and self.sec_port
+
def _make_base_network_topology(self):
assert self.pri, 'pri dataplane not configured'
dps = {}
dps['pri'] = self._make_pri_topology()
- dps['sec'] = self._make_sec_topology()
+ if self._has_sec_switch():
+ dps['sec'] = self._make_sec_topology()
topology = {}
topology['dps'] = dps
topology['vlans'] = self._make_vlan_description(10)
diff --git a/daq/utils.py b/daq/utils.py
index 1bc3a75c1d..6bb093d484 100644
--- a/daq/utils.py
+++ b/daq/utils.py
@@ -1,8 +1,7 @@
"""Utility functions for DAQ"""
-import yaml
-
from google.protobuf import json_format
+import yaml
def yaml_proto(file_name, proto_func):
diff --git a/docker/include/bacnet/bacnetFaux/.idea/google-java-format.xml b/docker/include/bacnet/bacnetFaux/.idea/google-java-format.xml
new file mode 100644
index 0000000000..8b57f4527a
--- /dev/null
+++ b/docker/include/bacnet/bacnetFaux/.idea/google-java-format.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/validator/.idea/gradle.xml b/docker/include/bacnet/bacnetFaux/.idea/gradle.xml
similarity index 74%
rename from validator/.idea/gradle.xml
rename to docker/include/bacnet/bacnetFaux/.idea/gradle.xml
index 854749173b..d50e06cdb1 100644
--- a/validator/.idea/gradle.xml
+++ b/docker/include/bacnet/bacnetFaux/.idea/gradle.xml
@@ -1,9 +1,9 @@
-
+
@@ -11,10 +11,8 @@
-
-
-
\ No newline at end of file
+
diff --git a/docker/include/bacnet/bacnetFaux/.idea/libraries/bacnet4j.xml b/docker/include/bacnet/bacnetFaux/.idea/libraries/bacnet4j.xml
new file mode 100644
index 0000000000..fc50401e9e
--- /dev/null
+++ b/docker/include/bacnet/bacnetFaux/.idea/libraries/bacnet4j.xml
@@ -0,0 +1,9 @@
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pubber/.idea/misc.xml b/docker/include/bacnet/bacnetFaux/.idea/misc.xml
similarity index 59%
rename from pubber/.idea/misc.xml
rename to docker/include/bacnet/bacnetFaux/.idea/misc.xml
index 012255a52d..bc8d0a3a63 100644
--- a/pubber/.idea/misc.xml
+++ b/docker/include/bacnet/bacnetFaux/.idea/misc.xml
@@ -1,6 +1,7 @@
-
+
+
\ No newline at end of file
diff --git a/docker/include/bacnet/bacnetFaux/build.gradle b/docker/include/bacnet/bacnetFaux/build.gradle
new file mode 100644
index 0000000000..85657774fd
--- /dev/null
+++ b/docker/include/bacnet/bacnetFaux/build.gradle
@@ -0,0 +1,41 @@
+plugins {
+ id 'java'
+}
+
+group 'bacnetFaux'
+version '1.0-SNAPSHOT'
+
+sourceCompatibility = 1.8
+
+repositories {
+ mavenCentral()
+}
+
+dependencies {
+ testCompile group: 'junit', name: 'junit', version: '4.13'
+ implementation fileTree(dir: 'libs', include: ['*.jar'])
+ implementation 'com.googlecode.json-simple:json-simple:1.1.1'
+}
+
+jar {
+ manifest {
+ attributes 'Main-Class': 'Main'
+ }
+}
+
+task fatJar(type: Jar) {
+ manifest.from jar.manifest
+ classifier = 'all'
+ from {
+ configurations.runtimeClasspath.collect { it.isDirectory() ? it : zipTree(it) }
+ } {
+ exclude "META-INF/*.SF"
+ exclude "META-INF/*.DSA"
+ exclude "META-INF/*.RSA"
+ }
+ with jar
+}
+
+artifacts {
+ archives fatJar
+}
diff --git a/subset/security/security_passwords/gradle/wrapper/gradle-wrapper.jar b/docker/include/bacnet/bacnetFaux/gradle/wrapper/gradle-wrapper.jar
old mode 100755
new mode 100644
similarity index 100%
rename from subset/security/security_passwords/gradle/wrapper/gradle-wrapper.jar
rename to docker/include/bacnet/bacnetFaux/gradle/wrapper/gradle-wrapper.jar
diff --git a/validator/gradle/wrapper/gradle-wrapper.properties b/docker/include/bacnet/bacnetFaux/gradle/wrapper/gradle-wrapper.properties
similarity index 92%
rename from validator/gradle/wrapper/gradle-wrapper.properties
rename to docker/include/bacnet/bacnetFaux/gradle/wrapper/gradle-wrapper.properties
index 9e6fcc10e9..12d38de6a4 100644
--- a/validator/gradle/wrapper/gradle-wrapper.properties
+++ b/docker/include/bacnet/bacnetFaux/gradle/wrapper/gradle-wrapper.properties
@@ -1,5 +1,5 @@
-distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-bin.zip
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
-zipStorePath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-6.6.1-bin.zip
zipStoreBase=GRADLE_USER_HOME
+zipStorePath=wrapper/dists
diff --git a/subset/connection/mac_oui/gradlew b/docker/include/bacnet/bacnetFaux/gradlew
similarity index 100%
rename from subset/connection/mac_oui/gradlew
rename to docker/include/bacnet/bacnetFaux/gradlew
diff --git a/subset/security/security_passwords/gradlew.bat b/docker/include/bacnet/bacnetFaux/gradlew.bat
old mode 100755
new mode 100644
similarity index 100%
rename from subset/security/security_passwords/gradlew.bat
rename to docker/include/bacnet/bacnetFaux/gradlew.bat
diff --git a/docker/include/bacnet/bacnetFaux/settings.gradle b/docker/include/bacnet/bacnetFaux/settings.gradle
new file mode 100644
index 0000000000..600831e41d
--- /dev/null
+++ b/docker/include/bacnet/bacnetFaux/settings.gradle
@@ -0,0 +1,2 @@
+rootProject.name = 'bacnet'
+
diff --git a/subset/bacnet/bacnetTests/src/main/java/FauxDeviceEngine/Analog.java b/docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/Analog.java
similarity index 100%
rename from subset/bacnet/bacnetTests/src/main/java/FauxDeviceEngine/Analog.java
rename to docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/Analog.java
diff --git a/subset/bacnet/bacnetTests/src/main/java/FauxDeviceEngine/Binary.java b/docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/Binary.java
similarity index 100%
rename from subset/bacnet/bacnetTests/src/main/java/FauxDeviceEngine/Binary.java
rename to docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/Binary.java
diff --git a/subset/bacnet/bacnetTests/src/main/java/FauxDeviceEngine/EntryPoint.java b/docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/EntryPoint.java
similarity index 100%
rename from subset/bacnet/bacnetTests/src/main/java/FauxDeviceEngine/EntryPoint.java
rename to docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/EntryPoint.java
diff --git a/subset/bacnet/bacnetTests/src/main/java/FauxDeviceEngine/JSON.java b/docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/JSON.java
similarity index 100%
rename from subset/bacnet/bacnetTests/src/main/java/FauxDeviceEngine/JSON.java
rename to docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/JSON.java
diff --git a/subset/bacnet/bacnetTests/src/main/java/FauxDeviceEngine/helper/Device.java b/docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/helper/Device.java
similarity index 100%
rename from subset/bacnet/bacnetTests/src/main/java/FauxDeviceEngine/helper/Device.java
rename to docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/helper/Device.java
diff --git a/docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/helper/FileManager.java b/docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/helper/FileManager.java
new file mode 100644
index 0000000000..35ef1bc27c
--- /dev/null
+++ b/docker/include/bacnet/bacnetFaux/src/main/java/FauxDeviceEngine/helper/FileManager.java
@@ -0,0 +1,74 @@
+package helper;
+
+import java.io.File;
+
+public class FileManager {
+
+ private String filePath = "";
+ private String csvName = "pics";
+ private String csvExtension = ".csv";
+ private boolean debug = false;
+
+ /**
+ * Checks if pics.csv exists.
+ * @return if pics.csv exists
+ */
+ public boolean checkDevicePicCSV() {
+ String csvFolder = getCSVPath();
+ try {
+ File[] listFiles = new File(csvFolder).listFiles();
+ for (int i = 0; i < listFiles.length; i++) {
+ if (listFiles[i].isFile()) {
+ String fileName = listFiles[i].getName();
+ if (fileName.contains(csvName)
+ && fileName.endsWith(csvExtension)) {
+ System.out.println("pics.csv file found in " + csvFolder);
+ setFilePath(fileName);
+ return true;
+ }
+ }
+ }
+ String errorMessage = "pics.csv not found.\n";
+ System.err.println(errorMessage);
+ } catch (Exception e) {
+ System.out.println("Error in reading " + csvName + csvExtension + " in " + csvFolder);
+ }
+ return false;
+ }
+
+ private void setFilePath(String fileName) {
+ String absolutePath = getCSVPath();
+ this.filePath = absolutePath + "/" + fileName;
+ }
+
+ public String getFilePath() {
+ return this.filePath;
+ }
+
+ /**
+ * Returns absolute path to the working directory.
+ */
+ public String getAbsolutePath() {
+ String absolutePath = "";
+ String systemPath = System.getProperty("user.dir");
+ System.out.println("system_path: " + systemPath);
+ String[] pathArr = systemPath.split("/");
+ for (int count = 0; count < pathArr.length; count++) {
+ if (pathArr[count].equals("bacnetTests")) {
+ break;
+ }
+ absolutePath += pathArr[count] + "/";
+ }
+ return absolutePath;
+ }
+
+ /**
+ * Returns directory pics.csv is located within test container
+ */
+ public String getCSVPath() {
+ if (debug) {
+ return "src/main/resources";
+ }
+ return "/config/type";
+ }
+}
diff --git a/docker/include/bacnet/bacnetFaux/src/main/resources/Faux-Device-Fail.json b/docker/include/bacnet/bacnetFaux/src/main/resources/Faux-Device-Fail.json
new file mode 100644
index 0000000000..29324d53d3
--- /dev/null
+++ b/docker/include/bacnet/bacnetFaux/src/main/resources/Faux-Device-Fail.json
@@ -0,0 +1,184 @@
+[
+ {
+ "DeviceID": "1234",
+ "AnalogInput01": {
+ "Object_Name": "device_run_command",
+ "Device_Type": "0to1Volts",
+ "Deadband": "21f",
+ "Resolution": "0.3f",
+ "Event_Enable": "true true false",
+ "Event_State": "0",
+ "Object_Type": "0",
+ "Time_Delay_Normal": "0",
+ "Low_Limit": "0",
+ "Limit_Enable": "false false",
+ "Cov_Increment": "1.0f",
+ "Status_Flags": "false false false false",
+ "Update_Interval": "1001",
+ "Acked_Transitions": "true true true",
+ "High_Limit": "0",
+ "Notify_Type": "0",
+ "Event_Detection_Enable": "false",
+ "Min_Pres_Value": "103.78f",
+ "Max_Pres_Value": "145.89f",
+ "Reliability": "4",
+ "Event_Message_Texts": "true",
+ "Notification_Class": "3",
+ "Description": "Faux-Device created to run inside DAQ",
+ "Event_Algorithm_Inhibit": "false",
+ "Units": "64",
+ "Profile_Name": "FD-01",
+ "Out_Of_Service": "false"
+ }
+ },
+ {
+ "AnalogOutput01": {
+ "Event_State": "0",
+ "Time_Delay_Normal": "0",
+ "Reliability": "4",
+ "Resolution": "0.3f",
+ "Event_Algorithm_Inhibit": "false",
+ "Out_Of_Service": "false",
+ "Status_Flags": "false false false false",
+ "Object_Type": "1",
+ "Description": "Faux-Device created to run inside DAQ",
+ "Low_Limit": "0",
+ "Deadband": "22f",
+ "Cov_Increment": "1.0f",
+ "Limit_Enable": "false false",
+ "Object_Name": "fan_run_speed_percentage_command",
+ "Notification_Class": "3",
+ "Units": "62",
+ "Notify_Type": "0",
+ "Device_Type": "0to1Volts",
+ "Event_Enable": "true true false",
+ "Event_Detection_Enable": "false",
+ "Event_Message_Texts": "true",
+ "High_Limit": "0",
+ "Acked_Transitions": "true true true",
+ "Priority_Array": "true",
+ "Relinquish_Default": "0.1"
+ }
+ },
+ {
+ "AnalogValue01": {
+ "Object_Name":"chilled_water_valve_percentage_setpoint",
+ "Present_Value":"5.1f",
+ "Deadband": "21.0f",
+ "Out_Of_Service": "false",
+ "Event_Message_Texts": "true",
+ "Event_State": "1",
+ "Time_Delay_Normal": "1",
+ "Low_Limit": "0",
+ "Object_Type": "2",
+ "Cov_Increment": "1.2f",
+ "Limit_Enable": "false false",
+ "Status_Flags": "false true false true",
+ "Acked_Transitions": "true false true",
+ "High_Limit": "0",
+ "Notify_Type": "0",
+ "Event_Detection_Enable": "false",
+ "Reliability": "4",
+ "Notification_Class": "2",
+ "Description": "Faux-Device created to run inside DAQ",
+ "Units": "62",
+ "Event_Algorithm_Inhibit": "false"
+ }
+ },
+ {
+ "BinaryInput01": {
+ "Object_Name":"chiller_water_valve_percentage_command",
+ "Present_Value":"5",
+ "Out_Of_Service": "false",
+ "Active_Text": "TRUE",
+ "Time_Of_State_Count_Reset": "13/05/2019",
+ "Event_Enable": "true true false",
+ "Change_Of_State_Count": "0",
+ "Event_State": "0",
+ "Object_Type": "3",
+ "Time_Delay_Normal": "0",
+ "Inactive_Text": "FALSE",
+ "Alarm_Value": "0",
+ "Acked_Transitions": "true false true",
+ "Status_Flags": "false false false false",
+ "Change_Of_State_Time": "13/05/2019",
+ "Notify_Type": "0",
+ "Time_Of_Active_Time_Reset": "13/05/2019",
+ "Event_Detection_Enable": "false",
+ "Reliability": "4",
+ "Event_Message_Texts": "true",
+ "Elapsed_Active_Time": "0",
+ "Notification_Class": "3",
+ "Description": "Faux-Device created to run inside DAQ",
+ "Event_Algorithm_Inhibit": "false",
+ "Polarity": "0",
+ "Device_Type": "0to1Volts"
+ }
+ },
+ {
+ "BinaryOutput01": {
+ "Object_Name":"fun_run_command",
+ "Present_Value":"5",
+ "Out_Of_Service": "false",
+ "Active_Text": "TRUE",
+ "Time_Of_State_Count_Reset": "13/05/2019",
+ "Event_Enable": "true true false",
+ "Change_Of_State_Count": "0",
+ "Event_State": "0",
+ "Object_Type": "4",
+ "Time_Delay_Normal": "0",
+ "Inactive_Text": "FALSE",
+ "Alarm_Value": "0",
+ "Acked_Transitions": "true false true",
+ "Status_Flags": "false false false false",
+ "Change_Of_State_Time": "13/05/2019",
+ "Notify_Type": "0",
+ "Time_Of_Active_Time_Reset": "13/05/2019",
+ "Event_Detection_Enable": "false",
+ "Reliability": "4",
+ "Event_Message_Texts": "true",
+ "Elapsed_Active_Time": "0",
+ "Notification_Class": "3",
+ "Description": "Faux-Device created to run inside DAQ",
+ "Event_Algorithm_Inhibit": "false",
+ "Minimum_On_Time": "0",
+ "Minimum_Off_Time": "0",
+ "Relinquish_Default": "0.0",
+ "Feedback_Value": "0",
+ "Polarity": "0",
+ "Device_Type": "0to1Volts"
+ }
+ },
+ {
+ "BinaryValue01": {
+ "Object_Name":"device1_run_command",
+ "Present_Value":"5",
+ "Out_Of_Service": "false",
+ "Active_Text": "TRUE",
+ "Time_Of_State_Count_Reset": "13/05/2019",
+ "Event_Enable": "true true false",
+ "Change_Of_State_Count": "0",
+ "Event_State": "0",
+ "Object_Type": "4",
+ "Time_Delay_Normal": "0",
+ "Inactive_Text": "FALSE",
+ "Alarm_Value": "0",
+ "Acked_Transitions": "true false true",
+ "Status_Flags": "false false false false",
+ "Change_Of_State_Time": "13/05/2019",
+ "Notify_Type": "0",
+ "Time_Of_Active_Time_Reset": "13/05/2019",
+ "Event_Detection_Enable": "false",
+ "Reliability": "4",
+ "Event_Message_Texts": "true",
+ "Elapsed_Active_Time": "0",
+ "Notification_Class": "3",
+ "Description": "Faux-Device created to run inside DAQ",
+ "Event_Algorithm_Inhibit": "false",
+ "Minimum_On_Time": "0",
+ "Minimum_Off_Time": "0",
+ "Relinquish_Default": "0.0",
+ "Feedback_Value": "0"
+ }
+ }
+]
diff --git a/docker/include/bacnet/bacnetFaux/src/main/resources/Faux-Device-Pass.json b/docker/include/bacnet/bacnetFaux/src/main/resources/Faux-Device-Pass.json
new file mode 100644
index 0000000000..678817d3ff
--- /dev/null
+++ b/docker/include/bacnet/bacnetFaux/src/main/resources/Faux-Device-Pass.json
@@ -0,0 +1,185 @@
+[
+ {
+ "DeviceID": "1234",
+ "AnalogInput01": {
+ "Present_Value": "5.1",
+ "Object_Name": "device_run_command",
+ "Device_Type": "0to1Volts",
+ "Deadband": "21f",
+ "Resolution": "0.3f",
+ "Event_Enable": "true true false",
+ "Event_State": "0",
+ "Object_Type": "0",
+ "Time_Delay_Normal": "0",
+ "Low_Limit": "0",
+ "Limit_Enable": "false false",
+ "Cov_Increment": "1.0f",
+ "Status_Flags": "false false false false",
+ "Update_Interval": "1001",
+ "Acked_Transitions": "true true true",
+ "High_Limit": "0",
+ "Notify_Type": "0",
+ "Event_Detection_Enable": "false",
+ "Min_Pres_Value": "103.78f",
+ "Max_Pres_Value": "145.89f",
+ "Reliability": "4",
+ "Event_Message_Texts": "true",
+ "Notification_Class": "3",
+ "Description": "Faux-Device created to run inside DAQ",
+ "Event_Algorithm_Inhibit": "false",
+ "Units": "64",
+ "Profile_Name": "FD-01",
+ "Out_Of_Service": "false"
+ }
+ },
+ {
+ "AnalogOutput01": {
+ "Event_State": "0",
+ "Time_Delay_Normal": "0",
+ "Reliability": "4",
+ "Resolution": "0.3f",
+ "Event_Algorithm_Inhibit": "false",
+ "Out_Of_Service": "false",
+ "Status_Flags": "false false false false",
+ "Object_Type": "1",
+ "Description": "Faux-Device created to run inside DAQ",
+ "Low_Limit": "0",
+ "Deadband": "22f",
+ "Cov_Increment": "1.0f",
+ "Limit_Enable": "false false",
+ "Object_Name": "fan_run_speed_percentage_command",
+ "Notification_Class": "3",
+ "Units": "62",
+ "Notify_Type": "0",
+ "Device_Type": "0to1Volts",
+ "Event_Enable": "true true false",
+ "Event_Detection_Enable": "false",
+ "Event_Message_Texts": "true",
+ "High_Limit": "0",
+ "Acked_Transitions": "true true true",
+ "Priority_Array": "true",
+ "Relinquish_Default": "0.1"
+ }
+ },
+ {
+ "AnalogValue01": {
+ "Object_Name":"chilled_water_valve_percentage_setpoint",
+ "Present_Value":"5.1f",
+ "Deadband": "21.0f",
+ "Out_Of_Service": "false",
+ "Event_Message_Texts": "true",
+ "Event_State": "1",
+ "Time_Delay_Normal": "1",
+ "Low_Limit": "0",
+ "Object_Type": "2",
+ "Cov_Increment": "1.2f",
+ "Limit_Enable": "false false",
+ "Status_Flags": "false true false true",
+ "Acked_Transitions": "true false true",
+ "High_Limit": "0",
+ "Notify_Type": "0",
+ "Event_Detection_Enable": "false",
+ "Reliability": "4",
+ "Notification_Class": "2",
+ "Description": "Faux-Device created to run inside DAQ",
+ "Units": "62",
+ "Event_Algorithm_Inhibit": "false"
+ }
+ },
+ {
+ "BinaryInput01": {
+ "Object_Name":"chiller_water_valve_percentage_command",
+ "Present_Value":"5",
+ "Out_Of_Service": "false",
+ "Active_Text": "TRUE",
+ "Time_Of_State_Count_Reset": "13/05/2019",
+ "Event_Enable": "true true false",
+ "Change_Of_State_Count": "0",
+ "Event_State": "0",
+ "Object_Type": "3",
+ "Time_Delay_Normal": "0",
+ "Inactive_Text": "FALSE",
+ "Alarm_Value": "0",
+ "Acked_Transitions": "true false true",
+ "Status_Flags": "false false false false",
+ "Change_Of_State_Time": "13/05/2019",
+ "Notify_Type": "0",
+ "Time_Of_Active_Time_Reset": "13/05/2019",
+ "Event_Detection_Enable": "false",
+ "Reliability": "4",
+ "Event_Message_Texts": "true",
+ "Elapsed_Active_Time": "0",
+ "Notification_Class": "3",
+ "Description": "Faux-Device created to run inside DAQ",
+ "Event_Algorithm_Inhibit": "false",
+ "Polarity": "0",
+ "Device_Type": "0to1Volts"
+ }
+ },
+ {
+ "BinaryOutput01": {
+ "Object_Name":"fun_run_command",
+ "Present_Value":"5",
+ "Out_Of_Service": "false",
+ "Active_Text": "TRUE",
+ "Time_Of_State_Count_Reset": "13/05/2019",
+ "Event_Enable": "true true false",
+ "Change_Of_State_Count": "0",
+ "Event_State": "0",
+ "Object_Type": "4",
+ "Time_Delay_Normal": "0",
+ "Inactive_Text": "FALSE",
+ "Alarm_Value": "0",
+ "Acked_Transitions": "true false true",
+ "Status_Flags": "false false false false",
+ "Change_Of_State_Time": "13/05/2019",
+ "Notify_Type": "0",
+ "Time_Of_Active_Time_Reset": "13/05/2019",
+ "Event_Detection_Enable": "false",
+ "Reliability": "4",
+ "Event_Message_Texts": "true",
+ "Elapsed_Active_Time": "0",
+ "Notification_Class": "3",
+ "Description": "Faux-Device created to run inside DAQ",
+ "Event_Algorithm_Inhibit": "false",
+ "Minimum_On_Time": "0",
+ "Minimum_Off_Time": "0",
+ "Relinquish_Default": "0.0",
+ "Feedback_Value": "0",
+ "Polarity": "0",
+ "Device_Type": "0to1Volts"
+ }
+ },
+ {
+ "BinaryValue01": {
+ "Object_Name":"device1_run_command",
+ "Present_Value":"5",
+ "Out_Of_Service": "false",
+ "Active_Text": "TRUE",
+ "Time_Of_State_Count_Reset": "13/05/2019",
+ "Event_Enable": "true true false",
+ "Change_Of_State_Count": "0",
+ "Event_State": "0",
+ "Object_Type": "4",
+ "Time_Delay_Normal": "0",
+ "Inactive_Text": "FALSE",
+ "Alarm_Value": "0",
+ "Acked_Transitions": "true false true",
+ "Status_Flags": "false false false false",
+ "Change_Of_State_Time": "13/05/2019",
+ "Notify_Type": "0",
+ "Time_Of_Active_Time_Reset": "13/05/2019",
+ "Event_Detection_Enable": "false",
+ "Reliability": "4",
+ "Event_Message_Texts": "true",
+ "Elapsed_Active_Time": "0",
+ "Notification_Class": "3",
+ "Description": "Faux-Device created to run inside DAQ",
+ "Event_Algorithm_Inhibit": "false",
+ "Minimum_On_Time": "0",
+ "Minimum_Off_Time": "0",
+ "Relinquish_Default": "0.0",
+ "Feedback_Value": "0"
+ }
+ }
+]
diff --git a/docker/include/bacnet/bacnetFaux/src/main/resources/pics.csv b/docker/include/bacnet/bacnetFaux/src/main/resources/pics.csv
new file mode 100644
index 0000000000..aab3985dec
--- /dev/null
+++ b/docker/include/bacnet/bacnetFaux/src/main/resources/pics.csv
@@ -0,0 +1,486 @@
+Bacnet_Object_Type,Bacnet_Object_Property,Property_Datatype,Conformance_Code,Supported,
+Bacnet_Analogue_Input,Object_Identifier,BACnetObjectIdentifier,R,TRUE,
+Bacnet_Analogue_Input,Object_Name,CharacterString,W,TRUE,
+Bacnet_Analogue_Input,Object_Type,BACnetObjectType,R,TRUE,
+Bacnet_Analogue_Input,Present_Value,REAL,R,TRUE,
+ ,Description,CharacterString,O,TRUE,
+Bacnet_Analogue_Input,Device_Type,,O,TRUE,
+Bacnet_Analogue_Input,Status_Flags,BACnetStatusFlags,R,TRUE,
+Bacnet_Analogue_Input,Event_State,BACnetEventState,R,TRUE,
+Bacnet_Analogue_Input,Reliability,BACnetReliability,O,TRUE,
+Bacnet_Analogue_Input,Out_Of_Service,BOOLEAN,W,TRUE,
+Bacnet_Analogue_Input,Update_Interval,,O,TRUE,
+Bacnet_Analogue_Input,Units,,R,TRUE,
+Bacnet_Analogue_Input,Min_Pres_Value,REAL,O,TRUE,
+Bacnet_Analogue_Input,Max_Pres_Value,REAL,O,TRUE,
+Bacnet_Analogue_Input,Resolution,,O,TRUE,
+Bacnet_Analogue_Input,COV_Increment,REAL,O,TRUE,
+Bacnet_Analogue_Input,COV_Period,,O,TRUE,
+Bacnet_Analogue_Input,COV_Min_Send_Time,,O,TRUE,
+Bacnet_Analogue_Input,Time_Delay,,O,TRUE,
+Bacnet_Analogue_Input,Notification_Class,Unsigned,O,TRUE,
+Bacnet_Analogue_Input,High_Limit,,O,TRUE,
+Bacnet_Analogue_Input,Low_Limit,,O,TRUE,
+Bacnet_Analogue_Input,Deadband,,O,TRUE,
+Bacnet_Analogue_Input,Limit_Enable,,O,TRUE,
+Bacnet_Analogue_Input,Event_Enable,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Analogue_Input,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Analogue_Input,Notify_Type,BACnetNotifyType,O,TRUE,
+Bacnet_Analogue_Input,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
+Bacnet_Analogue_Input,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Analogue_Input,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Analogue_Input,Event_Detection_Enable,BOOLEAN,O,TRUE,
+Bacnet_Analogue_Input,Time_Delay_Normal,,O,TRUE,
+Bacnet_Analogue_Input,Event_Algorithm_Inhibit,,O,TRUE,
+Bacnet_Analogue_Input,Event_Algorithm_Inhibit_Ref,,O,TRUE,
+Bacnet_Analogue_Input,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE,
+Bacnet_Analogue_Output,Object_Identifier,BACnetObjectIdentifier,R,TRUE,
+Bacnet_Analogue_Output,Object_Name,CharacterString,W,TRUE,
+Bacnet_Analogue_Output,Object_Type,BACnetObjectType,R,TRUE,
+Bacnet_Analogue_Output,Present_Value,REAL,W,TRUE,
+Bacnet_Analogue_Output,Description,CharacterString,O,TRUE,
+Bacnet_Analogue_Output,Device_Type,,O,TRUE,
+Bacnet_Analogue_Output,Status_Flags,BACnetStatusFlags,R,TRUE,
+Bacnet_Analogue_Output,Event_State,BACnetEventState,R,TRUE,
+Bacnet_Analogue_Output,Reliability,BACnetReliability,O,TRUE,
+Bacnet_Analogue_Output,Out_Of_Service,BOOLEAN,W,TRUE,
+Bacnet_Analogue_Output,Units,,R,TRUE,
+Bacnet_Analogue_Output,Min_Present_Value,,O,TRUE,
+Bacnet_Analogue_Output,Max_Present_Value,,O,TRUE,
+Bacnet_Analogue_Output,Resolution,,O,TRUE,
+Bacnet_Analogue_Output,Priority_Array,,R,TRUE,
+Bacnet_Analogue_Output,Relinquish_Default,,W,TRUE,
+Bacnet_Analogue_Output,COV_Increment,REAL,O,TRUE,
+Bacnet_Analogue_Output,COV_Period,,O,TRUE,
+Bacnet_Analogue_Output,COV_Min_Send_Time,,O,TRUE,
+Bacnet_Analogue_Output,Time_Delay,,O,TRUE,
+Bacnet_Analogue_Output,Notification_Class,Unsigned,O,TRUE,
+Bacnet_Analogue_Output,High_Limit,,O,TRUE,
+Bacnet_Analogue_Output,Low_Limit,,O,TRUE,
+Bacnet_Analogue_Output,Deadband,,O,TRUE,
+Bacnet_Analogue_Output,Limit_Enable,,O,TRUE,
+Bacnet_Analogue_Output,Event_Enable,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Analogue_Output,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Analogue_Output,Notify_Type,BACnetNotifyType,O,TRUE,
+Bacnet_Analogue_Output,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
+Bacnet_Analogue_Output,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Analogue_Output,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Analogue_Output,Event_Detection_Enable,BOOLEAN,O,TRUE,
+Bacnet_Analogue_Output,Time_Delay_Normal,,O,TRUE,
+Bacnet_Analogue_Output,Event_Algorithm_Inhibit,,O,TRUE,
+Bacnet_Analogue_Output,Event_Algorithm_Inhibit_Ref,,O,TRUE,
+Bacnet_Analogue_Output,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE,
+Bacnet_Analogue_Value,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Bacnet_Analogue_Value,Object_Name,CharacterString,O,TRUE,
+Bacnet_Analogue_Value,Object_Type,BACnetObjectType,O,TRUE,
+Bacnet_Analogue_Value,Present_Value,REAL,O,TRUE,
+Bacnet_Analogue_Value,Description,CharacterString,O,TRUE,
+Bacnet_Analogue_Value,Status_Flags,BACnetStatusFlags,O,TRUE,
+Bacnet_Analogue_Value,Event_State,BACnetEventState,O,TRUE,
+Bacnet_Analogue_Value,Reliability,BACnetReliability,O,TRUE,
+Bacnet_Analogue_Value,Out_Of_Service,BOOLEAN,O,TRUE,
+Bacnet_Analogue_Value,Units,,O,TRUE,
+Bacnet_Analogue_Value,Priority_Array,,O,TRUE,
+Bacnet_Analogue_Value,Relinquish_Default,,O,TRUE,
+Bacnet_Analogue_Value,Min_Present_Value,,O,TRUE,
+Bacnet_Analogue_Value,Max_Present_Value,,O,TRUE,
+Bacnet_Analogue_Value,COV_Increment,REAL,O,TRUE,
+Bacnet_Analogue_Value,COV_Period,,O,TRUE,
+Bacnet_Analogue_Value,COV_Min_Send_Time,,O,TRUE,
+Bacnet_Analogue_Value,Time_Delay,,O,TRUE,
+Bacnet_Analogue_Value,Notification_Class,Unsigned,O,TRUE,
+Bacnet_Analogue_Value,High_Limit,,O,TRUE,
+Bacnet_Analogue_Value,Low_Limit,,O,TRUE,
+Bacnet_Analogue_Value,Deadband,,O,TRUE,
+Bacnet_Analogue_Value,Limit_Enable,,O,TRUE,
+Bacnet_Analogue_Value,Event_Enable,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Analogue_Value,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Analogue_Value,Notify_Type,BACnetNotifyType,O,TRUE,
+Bacnet_Analogue_Value,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
+Bacnet_Analogue_Value,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Analogue_Value,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Analogue_Value,Event_Detection_Enable,BOOLEAN,O,TRUE,
+Bacnet_Analogue_Value,Time_Delay_Normal,,O,TRUE,
+Bacnet_Analogue_Value,Event_Algorithm_Inhibit,,O,TRUE,
+Bacnet_Analogue_Value,Event_Algorithm_Inhibit_Ref,,O,TRUE,
+Bacnet_Analogue_Value,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE,
+Bacnet_Binary_Input,Object_Identifier,BACnetObjectIdentifier,R,TRUE,
+Bacnet_Binary_Input,Object_Name,CharacterString,W,TRUE,
+Bacnet_Binary_Input,Object_Type,BACnetObjectType,R,TRUE,
+Bacnet_Binary_Input,Present_Value,REAL,R,TRUE,
+Bacnet_Binary_Input,Description,CharacterString,O,TRUE,
+Bacnet_Binary_Input,Device_Type,,O,TRUE,
+Bacnet_Binary_Input,Status_Flags,BACnetStatusFlags,R,TRUE,
+Bacnet_Binary_Input,Event_State,BACnetEventState,R,TRUE,
+Bacnet_Binary_Input,Reliability,BACnetReliability,O,TRUE,
+Bacnet_Binary_Input,Out_Of_Service,BOOLEAN,W,TRUE,
+Bacnet_Binary_Input,Polarity,,R,TRUE,
+Bacnet_Binary_Input,Inactive_Text,,O,TRUE,
+Bacnet_Binary_Input,Active_Text,,O,TRUE,
+Bacnet_Binary_Input,Change_Of_State_Time,,O,TRUE,
+Bacnet_Binary_Input,Change_Of_State_Count,,O,TRUE,
+Bacnet_Binary_Input,Time_Of_State_Count_Reset,,O,TRUE,
+Bacnet_Binary_Input,Elapsed_Active_Time,,O,TRUE,
+Bacnet_Binary_Input,Time_Of_Active_Time_Reset,,O,TRUE,
+Bacnet_Binary_Input,COV_Period,,O,TRUE,
+Bacnet_Binary_Input,COV_Min_Send_Time,,O,TRUE,
+Bacnet_Binary_Input,Time_Delay,,O,TRUE,
+Bacnet_Binary_Input,Notification_Class,Unsigned,O,TRUE,
+Bacnet_Binary_Input,Alarm_Value,,O,TRUE,
+Bacnet_Binary_Input,Event_Enable,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Binary_Input,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Binary_Input,Notify_Type,BACnetNotifyType,O,TRUE,
+Bacnet_Binary_Input,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
+Bacnet_Binary_Input,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Binary_Input,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Binary_Input,Event_Detection_Enable,BOOLEAN,O,TRUE,
+Bacnet_Binary_Input,Time_Delay_Normal,,O,TRUE,
+Bacnet_Binary_Input,Event_Algorithm_Inhibit,,O,TRUE,
+Bacnet_Binary_Input,Event_Algorithm_Inhibit_Ref,,O,TRUE,
+Bacnet_Binary_Input,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE,
+Bacnet_Binary_Output,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Bacnet_Binary_Output,Object_Name,CharacterString,O,TRUE,
+Bacnet_Binary_Output,Object_Type,BACnetObjectType,O,TRUE,
+Bacnet_Binary_Output,Present_Value,REAL,O,TRUE,
+Bacnet_Binary_Output,Description,CharacterString,O,TRUE,
+Bacnet_Binary_Output,Device_Type,,O,TRUE,
+Bacnet_Binary_Output,Status_Flags,BACnetStatusFlags,O,TRUE,
+Bacnet_Binary_Output,Event_State,BACnetEventState,O,TRUE,
+Bacnet_Binary_Output,Reliability,BACnetReliability,O,TRUE,
+Bacnet_Binary_Output,Out_Of_Service,BOOLEAN,O,TRUE,
+Bacnet_Binary_Output,Polarity,,O,TRUE,
+Bacnet_Binary_Output,Inactive_Text,,O,TRUE,
+Bacnet_Binary_Output,Active_Text,,O,TRUE,
+Bacnet_Binary_Output,Change_Of_State_Time,,O,TRUE,
+Bacnet_Binary_Output,Change_Of_State_Count,,O,TRUE,
+Bacnet_Binary_Output,Time_Of_State_Count_Reset,,O,TRUE,
+Bacnet_Binary_Output,Elapsed_Active_Time,,O,TRUE,
+Bacnet_Binary_Output,Time_Of_Active_Time_Reset,,O,TRUE,
+Bacnet_Binary_Output,Minimum_Off_Time,,O,TRUE,
+Bacnet_Binary_Output,Minimum_On_Time,,O,TRUE,
+Bacnet_Binary_Output,Priority_Array,,O,TRUE,
+Bacnet_Binary_Output,Relinquish_Default,,O,TRUE,
+Bacnet_Binary_Output,COV_Period,,O,TRUE,
+Bacnet_Binary_Output,COV_Min_Send_Time,,O,TRUE,
+Bacnet_Binary_Output,Time_Delay,,O,TRUE,
+Bacnet_Binary_Output,Notification_Class,Unsigned,O,TRUE,
+Bacnet_Binary_Output,Feedback_Value,,O,TRUE,
+Bacnet_Binary_Output,Event_Enable,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Binary_Output,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Binary_Output,Notify_Type,BACnetNotifyType,O,TRUE,
+Bacnet_Binary_Output,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
+Bacnet_Binary_Output,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Binary_Output,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Binary_Output,Event_Detection_Enable,BOOLEAN,O,TRUE,
+Bacnet_Binary_Output,Time_Delay_Normal,,O,TRUE,
+Bacnet_Binary_Output,Event_Algorithm_Inhibit,,O,TRUE,
+Bacnet_Binary_Output,Event_Algorithm_Inhibit_Ref,,O,TRUE,
+Bacnet_Binary_Output,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
+Bacnet_Binary_Value,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Bacnet_Binary_Value,Object_Name,CharacterString,O,TRUE,
+Bacnet_Binary_Value,Object_Type,BACnetObjectType,O,TRUE,
+Bacnet_Binary_Value,Present_Value,REAL,O,TRUE,
+Bacnet_Binary_Value,Description,CharacterString,O,TRUE,
+Bacnet_Binary_Value,Status_Flags,BACnetStatusFlags,O,TRUE,
+Bacnet_Binary_Value,Event_State,BACnetEventState,O,TRUE,
+Bacnet_Binary_Value,Reliability,BACnetReliability,O,TRUE,
+Bacnet_Binary_Value,Out_Of_Service,BOOLEAN,O,TRUE,
+Bacnet_Binary_Value,Inactive_Text,,O,TRUE,
+Bacnet_Binary_Value,Active_Text,,O,TRUE,
+Bacnet_Binary_Value,Change_Of_State_Time,,O,TRUE,
+Bacnet_Binary_Value,Change_Of_State_Count,,O,TRUE,
+Bacnet_Binary_Value,Time_Of_State_Count_Reset,,O,TRUE,
+Bacnet_Binary_Value,Elapsed_Active_Time,,O,TRUE,
+Bacnet_Binary_Value,Time_Of_Active_Time_Reset,,O,TRUE,
+Bacnet_Binary_Value,Minimum_Off_Time,,O,TRUE,
+Bacnet_Binary_Value,Minimum_On_Time,,O,TRUE,
+Bacnet_Binary_Value,Priority_Array,,O,TRUE,
+Bacnet_Binary_Value,Relinquish_Default,,O,TRUE,
+Bacnet_Binary_Value,COV_Period,,O,TRUE,
+Bacnet_Binary_Value,COV_Min_Send_Time,,O,TRUE,
+Bacnet_Binary_Value,Time_Delay,,O,TRUE,
+Bacnet_Binary_Value,Notification_Class,Unsigned,O,TRUE,
+Bacnet_Binary_Value,Alarm_Value,,O,TRUE,
+Bacnet_Binary_Value,Event_Enable,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Binary_Value,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Binary_Value,Notify_Type,BACnetNotifyType,O,TRUE,
+Bacnet_Binary_Value,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
+Bacnet_Binary_Value,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Binary_Value,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Binary_Value,Event_Detection_Enable,BOOLEAN,O,TRUE,
+Bacnet_Binary_Value,Time_Delay_Normal,,O,TRUE,
+Bacnet_Binary_Value,Event_Algorithm_Inhibit,,O,TRUE,
+Bacnet_Binary_Value,Event_Algorithm_Inhibit_Ref,,O,TRUE,
+Bacnet_Binary_Value,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
+Bacnet_Calendar,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Bacnet_Calendar,Object_Name,CharacterString,O,TRUE,
+Bacnet_Calendar,Object_Type,BACnetObjectType,O,TRUE,
+Bacnet_Calendar,Present_Value,REAL,O,TRUE,
+Bacnet_Calendar,Description,CharacterString,O,TRUE,
+Bacnet_Calendar,Date_List,,O,TRUE,
+Bacnet_Calendar,Time_To_Next_State,,O,TRUE,
+Bacnet_Calendar,Next_State,,O,TRUE,
+Bacnet_Calendar,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
+Device,Object_Identifier,BACnetObjectIdentifier,W,TRUE,
+Device,Object_Name,CharacterString,W,TRUE,
+Device,Object_Type,BACnetObjectType,R,TRUE,
+Device,System_Status,,R,TRUE,
+Device,Vendor_Name,,R,TRUE,
+Device,Vendor_Identifier,,R,TRUE,
+Device,Model_Name,,R,TRUE,
+Device,Firmware_Revision,,R,TRUE,
+Device,Application_Software_Version,,R,TRUE,
+Device,Location,,O,TRUE,
+Device,Description,CharacterString,O,TRUE,
+Device,Protocol_Version,,R,TRUE,
+Device,Protocol_Revision,,R,TRUE,
+Device,Protocol_Services_Supported,,R,TRUE,
+Device,Protocol_Object_Types_Supported,,R,TRUE,
+Device,Object_List,,R,TRUE,
+Device,Max_APDU_Length_Accepted,,R,TRUE,
+Device,Segmentation_Supported,,R,TRUE,
+Device,Max_Segments_Accepted,,O,TRUE,
+Device,Local_Date,,O,TRUE,
+Device,Local_Time,,O,TRUE,
+Device,UTC_Offset,,O,TRUE,
+Device,Daylight_Savings_Status,,O,TRUE,
+Device,Apdu_Segment_Timeout,,O,TRUE,
+Device,APDU_Timeout,,W,TRUE,
+Device,Number_Of_APDU_Retries,,W,TRUE,
+Device,Time_Synchronization_Recipients,,O,TRUE,
+Device,Device_Address_Binding,,R,TRUE,
+Device,Database_Revision,,R,TRUE,
+Device,Configuration_Files,,O,TRUE,
+Device,Last_Restore_Time,,O,TRUE,
+Device,Backup_Failure_Timeout,,O,TRUE,
+Device,Backup_Preparation_Time,,O,TRUE,
+Device,Restore_Preparation_Time,,O,TRUE,
+Device,Restore_Completion_Time,,O,TRUE,
+Device,Backup_And_Restore_State,,O,TRUE,
+Device,Active_COV_Subscriptions,,O,TRUE,
+Device,Last_Restart_Reason,,O,TRUE,
+Device,Time_Of_Device_Restart,,O,TRUE,
+Device,Restart_Notification_Recipients,,O,TRUE,
+Device,Utc_Time_Synchronization_Recipients,,O,TRUE,
+Device,Max_Master,,O,TRUE,
+Device,Max_Info_Frames,,O,TRUE,
+Device,Time_Synchronization_Interval,,O,TRUE,
+Device,Align_Intervals,,O,TRUE,
+Device,Interval_Offset,,O,TRUE,
+Device,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE,
+Event_Enrollment,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Event_Enrollment,Object_Name,CharacterString,O,TRUE,
+Event_Enrollment,Object_Type,BACnetObjectType,O,TRUE,
+Event_Enrollment,Description,CharacterString,O,TRUE,
+Event_Enrollment,Event_Type,,O,TRUE,
+Event_Enrollment,Notify_Type,BACnetNotifyType,O,TRUE,
+Event_Enrollment,Event_Parameters,,O,TRUE,
+Event_Enrollment,Object_Property_Reference,,O,TRUE,
+Event_Enrollment,Event_State,BACnetEventState,O,TRUE,
+Event_Enrollment,Event_Enable,BACnetEventTransitionBits,O,TRUE,
+Event_Enrollment,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
+Event_Enrollment,Notification_Class,Unsigned,O,TRUE,
+Event_Enrollment,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
+Event_Enrollment,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
+Event_Enrollment,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
+Event_Enrollment,Event_Detection_Enable,BOOLEAN,O,TRUE,
+Event_Enrollment,Time_Delay_Normal,,O,TRUE,
+Event_Enrollment,Status_Flags,BACnetStatusFlags,O,TRUE,
+Event_Enrollment,Reliability,BACnetReliability,O,TRUE,
+Event_Enrollment,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
+Bacnet_File,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Bacnet_File,Object_Name,CharacterString,O,TRUE,
+Bacnet_File,Object_Type,BACnetObjectType,O,TRUE,
+Bacnet_File,Description,CharacterString,O,TRUE,
+Bacnet_File,File_Type,,O,TRUE,
+Bacnet_File,File_Size,,O,TRUE,
+Bacnet_File,Modification_Date,,O,TRUE,
+Bacnet_File,Archive,,O,TRUE,
+Bacnet_File,Read_Only,,O,TRUE,
+Bacnet_File,File_Access_Method,,O,TRUE,
+Bacnet_File,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
+Bacnet_Loop,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Bacnet_Loop,Object_Name,CharacterString,O,TRUE,
+Bacnet_Loop,Object_Type,BACnetObjectType,O,TRUE,
+Bacnet_Loop,Present_Value,REAL,O,TRUE,
+Bacnet_Loop,Description,CharacterString,O,TRUE,
+Bacnet_Loop,Status_Flags,BACnetStatusFlags,O,TRUE,
+Bacnet_Loop,Event_State,BACnetEventState,O,TRUE,
+Bacnet_Loop,Reliability,BACnetReliability,O,TRUE,
+Bacnet_Loop,Out_Of_Service,BOOLEAN,O,TRUE,
+Bacnet_Loop,Update_Interval,,O,TRUE,
+Bacnet_Loop,Output_Units,,O,TRUE,
+Bacnet_Loop,Manipulated_Variable_Reference,,O,TRUE,
+Bacnet_Loop,Controlled_Variable_Reference,,O,TRUE,
+Bacnet_Loop,Controlled_Variable_Value,,O,TRUE,
+Bacnet_Loop,Controlled_Variable_Units,,O,TRUE,
+Bacnet_Loop,Setpoint_Reference,,O,TRUE,
+Bacnet_Loop,Setpoint,,O,TRUE,
+Bacnet_Loop,Action,,O,TRUE,
+Bacnet_Loop,Proportional_Constant,,O,TRUE,
+Bacnet_Loop,Proportional_Constant_Units,,O,TRUE,
+Bacnet_Loop,Integral_Constant,,O,TRUE,
+Bacnet_Loop,Integral_Constant_Units,,O,TRUE,
+Bacnet_Loop,Derivative_Constant,,O,TRUE,
+Bacnet_Loop,Derivative_Constant_Units,,O,TRUE,
+Bacnet_Loop,Bias,,O,TRUE,
+Bacnet_Loop,Maximum_Output,,O,TRUE,
+Bacnet_Loop,Minimum_Output,,O,TRUE,
+Bacnet_Loop,Priority_For_Writing,Unsigned(1..16),O,TRUE,
+Bacnet_Loop,LoopDeadband,,O,TRUE,
+Bacnet_Loop,Saturation_Time,,O,TRUE,
+Bacnet_Loop,COV_Increment,REAL,O,TRUE,
+Bacnet_Loop,COV_Period,,O,TRUE,
+Bacnet_Loop,COV_Min_Send_Time,,O,TRUE,
+Bacnet_Loop,Ramp_Time,,O,TRUE,
+Bacnet_Loop,Saturation_Time_Low_Limit_Enable,,O,TRUE,
+Bacnet_Loop,Saturation_Time_High_Limit_Enable,,O,TRUE,
+Bacnet_Loop,Time_Delay,,O,TRUE,
+Bacnet_Loop,Notification_Class,Unsigned,O,TRUE,
+Bacnet_Loop,Error_Limit,,O,TRUE,
+Bacnet_Loop,Deadband,,O,TRUE,
+Bacnet_Loop,Event_Enable,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Loop,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Loop,Notify_Type,BACnetNotifyType,O,TRUE,
+Bacnet_Loop,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
+Bacnet_Loop,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Loop,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Loop,Event_Detection_Enable,BOOLEAN,O,TRUE,
+Bacnet_Loop,Time_Delay_Normal,,O,TRUE,
+Bacnet_Loop,Event_Algorithm_Inhibit,,O,TRUE,
+Bacnet_Loop,Event_Algorithm_Inhibit_Ref,,O,TRUE,
+Bacnet_Loop,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
+Bacnet_Multi-state_Input,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Bacnet_Multi-state_Input,Object_Name,CharacterString,O,TRUE,
+Bacnet_Multi-state_Input,Object_Type,BACnetObjectType,O,TRUE,
+Bacnet_Multi-state_Input,Present_Value,REAL,O,TRUE,
+Bacnet_Multi-state_Input,Description,CharacterString,O,TRUE,
+Bacnet_Multi-state_Input,Device_Type,,O,TRUE,
+Bacnet_Multi-state_Input,Status_Flags,BACnetStatusFlags,O,TRUE,
+Bacnet_Multi-state_Input,Event_State,BACnetEventState,O,TRUE,
+Bacnet_Multi-state_Input,Reliability,BACnetReliability,O,TRUE,
+Bacnet_Multi-state_Input,Out_Of_Service,BOOLEAN,O,TRUE,
+Bacnet_Multi-state_Input,Number_of_States,,O,TRUE,
+Bacnet_Multi-state_Input,State_Text,,O,TRUE,
+Bacnet_Multi-state_Input,COV_Period,,O,TRUE,
+Bacnet_Multi-state_Input,COV_Min_Send_Time,,O,TRUE,
+Bacnet_Multi-state_Input,Time_Delay,,O,TRUE,
+Bacnet_Multi-state_Input,Notification_Class,Unsigned,O,TRUE,
+Bacnet_Multi-state_Input,Alarm_Values,,O,TRUE,
+Bacnet_Multi-state_Input,Fault_Values,,O,TRUE,
+Bacnet_Multi-state_Input,Event_Enable,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Multi-state_Input,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Multi-state_Input,Notify_Type,BACnetNotifyType,O,TRUE,
+Bacnet_Multi-state_Input,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
+Bacnet_Multi-state_Input,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Multi-state_Input,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Multi-state_Input,Event_Detection_Enable,BOOLEAN,O,TRUE,
+Bacnet_Multi-state_Input,Time_Delay_Normal,,O,TRUE,
+Bacnet_Multi-state_Input,Event_Algorithm_Inhibit,,O,TRUE,
+Bacnet_Multi-state_Input,Event_Algorithm_Inhibit_Ref,,O,TRUE,
+Bacnet_Multi-state_Input,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
+Bacnet_Multi-state_Value,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Bacnet_Multi-state_Value,Object_Name,CharacterString,O,TRUE,
+Bacnet_Multi-state_Value,Object_Type,BACnetObjectType,O,TRUE,
+Bacnet_Multi-state_Value,Present_Value,REAL,O,TRUE,
+Bacnet_Multi-state_Value,Description,CharacterString,O,TRUE,
+Bacnet_Multi-state_Value,Status_Flags,BACnetStatusFlags,O,TRUE,
+Bacnet_Multi-state_Value,Event_State,BACnetEventState,O,TRUE,
+Bacnet_Multi-state_Value,Reliability,BACnetReliability,O,TRUE,
+Bacnet_Multi-state_Value,Out_Of_Service,BOOLEAN,O,TRUE,
+Bacnet_Multi-state_Value,Number_of_States,,O,TRUE,
+Bacnet_Multi-state_Value,State_Text,,O,TRUE,
+Bacnet_Multi-state_Value,Priority_Array,,O,TRUE,
+Bacnet_Multi-state_Value,Relinquish_Default,,O,TRUE,
+Bacnet_Multi-state_Value,COV_Period,,O,TRUE,
+Bacnet_Multi-state_Value,COV_Min_Send_Time,,O,TRUE,
+Bacnet_Multi-state_Value,Time_Delay,,O,TRUE,
+Bacnet_Multi-state_Value,Notification_Class,Unsigned,O,TRUE,
+Bacnet_Multi-state_Value,Alarm_Values,,O,TRUE,
+Bacnet_Multi-state_Value,Fault_Values,,O,TRUE,
+Bacnet_Multi-state_Value,Event_Enable,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Multi-state_Value,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Multi-state_Value,Notify_Type,BACnetNotifyType,O,TRUE,
+Bacnet_Multi-state_Value,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
+Bacnet_Multi-state_Value,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Multi-state_Value,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Multi-state_Value,Event_Detection_Enable,BOOLEAN,O,TRUE,
+Bacnet_Multi-state_Value,Time_Delay_Normal,,O,TRUE,
+Bacnet_Multi-state_Value,Event_Algorithm_Inhibit,,O,TRUE,
+Bacnet_Multi-state_Value,Event_Algorithm_Inhibit_Ref,,O,TRUE,
+Bacnet_Multi-state_Value,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
+Bacnet_Program,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Bacnet_Program,Object_Name,CharacterString,O,TRUE,
+Bacnet_Program,Object_Type,BACnetObjectType,O,TRUE,
+Bacnet_Program,Description,CharacterString,O,TRUE,
+Bacnet_Program,Program_State,,O,TRUE,
+Bacnet_Program,Program_Change,,O,TRUE,
+Bacnet_Program,Description_Of_Halt,,O,TRUE,
+Bacnet_Program,Reason_For_Halt,,O,TRUE,
+Bacnet_Program,Status_Flags,BACnetStatusFlags,O,TRUE,
+Bacnet_Program,Reliability,BACnetReliability,O,TRUE,
+Bacnet_Program,Out_Of_Service,BOOLEAN,O,TRUE,
+Bacnet_Program,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
+Bacnet_Notification,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Bacnet_Notification,Object_Name,CharacterString,O,TRUE,
+Bacnet_Notification,Object_Type,BACnetObjectType,O,TRUE,
+Bacnet_Notification,Description,CharacterString,O,TRUE,
+Bacnet_Notification,Notification_Class,Unsigned,O,TRUE,
+Bacnet_Notification,Priority,,O,TRUE,
+Bacnet_Notification,Ack_Required,,O,TRUE,
+Bacnet_Notification,Recipient_List,,O,TRUE,
+Bacnet_Notification,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
+Bacnet_Schedule,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Bacnet_Schedule,Object_Name,CharacterString,O,TRUE,
+Bacnet_Schedule,Object_Type,BACnetObjectType,O,TRUE,
+Bacnet_Schedule,Description,CharacterString,O,TRUE,
+Bacnet_Schedule,Present_Value,REAL,O,TRUE,
+Bacnet_Schedule,Effective_Period,,O,TRUE,
+Bacnet_Schedule,Weekly_Schedule,,O,TRUE,
+Bacnet_Schedule,Exception_Schedule,,O,TRUE,
+Bacnet_Schedule,Schedule_Default,,O,TRUE,
+Bacnet_Schedule,List_Of_Object_Property_References,,O,TRUE,
+Bacnet_Schedule,Priority_For_Writing,Unsigned(1..16),O,TRUE,
+Bacnet_Schedule,Status_Flags,BACnetStatusFlags,O,TRUE,
+Bacnet_Schedule,Reliability,BACnetReliability,O,TRUE,
+Bacnet_Schedule,Out_Of_Service,BOOLEAN,O,TRUE,
+Bacnet_Schedule,Time_To_Next_State,,O,TRUE,
+Bacnet_Schedule,Next_State,,O,TRUE,
+Bacnet_Schedule,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
+Bacnet_Trend_Log,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Bacnet_Trend_Log,Object_Name,CharacterString,O,TRUE,
+Bacnet_Trend_Log,Object_Type,BACnetObjectType,O,TRUE,
+Bacnet_Trend_Log,Description,CharacterString,O,TRUE,
+Bacnet_Trend_Log,Enable,,O,TRUE,
+Bacnet_Trend_Log,Start_Time,,O,TRUE,
+Bacnet_Trend_Log,Stop_Time,,O,TRUE,
+Bacnet_Trend_Log,Log_Device_Object_Property,,O,TRUE,
+Bacnet_Trend_Log,Log_Interval,,O,TRUE,
+Bacnet_Trend_Log,Cov_Resubscription_Interval,,O,TRUE,
+Bacnet_Trend_Log,Client_Cov_Increment,,O,TRUE,
+Bacnet_Trend_Log,Stop_When_Full,,O,TRUE,
+Bacnet_Trend_Log,Buffer_Size,,O,TRUE,
+Bacnet_Trend_Log,Log_Buffer,,O,TRUE,
+Bacnet_Trend_Log,Record_Count,,O,TRUE,
+Bacnet_Trend_Log,Total_Record_Count,,O,TRUE,
+Bacnet_Trend_Log,Logging_Type,,O,TRUE,
+Bacnet_Trend_Log,Align_Intervals,,O,TRUE,
+Bacnet_Trend_Log,Interval_Offset,,O,TRUE,
+Bacnet_Trend_Log,Trigger,,O,TRUE,
+Bacnet_Trend_Log,Status_Flags,BACnetStatusFlags,O,TRUE,
+Bacnet_Trend_Log,Reliability,BACnetReliability,O,TRUE,
+Bacnet_Trend_Log,Notification_Threshold,,O,TRUE,
+Bacnet_Trend_Log,Records_Since_Notification,,O,TRUE,
+Bacnet_Trend_Log,Last_Notify_Record,,O,TRUE,
+Bacnet_Trend_Log,Event_State,BACnetEventState,O,TRUE,
+Bacnet_Trend_Log,Notification_Class,Unsigned,O,TRUE,
+Bacnet_Trend_Log,Event_Enable,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Trend_Log,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Trend_Log,Notify_Type,BACnetNotifyType,O,TRUE,
+Bacnet_Trend_Log,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
+Bacnet_Trend_Log,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Trend_Log,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Trend_Log,Event_Detection_Enable,BOOLEAN,O,TRUE,
+Bacnet_Trend_Log,Event_Algorithm_Inhibit,,O,TRUE,
+Bacnet_Trend_Log,Event_Algorithm_Inhibit_Ref,,O,TRUE,
+Bacnet_Trend_Log,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE,
\ No newline at end of file
diff --git a/docker/include/bin/start_faux b/docker/include/bin/start_faux
index 94c9711ab3..016a60bb7a 100755
--- a/docker/include/bin/start_faux
+++ b/docker/include/bin/start_faux
@@ -68,6 +68,13 @@ elif [ -n "${options[passwordfail]}" ]; then
service ssh start
fi
+# security.nmap.http faux device setup
+if [ -n "${options[ohttp]}" ]; then
+ cp /root/nginx/nginxpass.conf /etc/nginx/nginx.conf
+ sed -i 's/listen 80;/listen 12345; listen 54321;/' /etc/nginx/nginx.conf
+ service nginx start
+fi
+
# To capture all the data in/out of the faux device for debugging, uncomment
# the following lines. The pcap file will end up in inst/faux/{hostname}.pcap
# on the DAQ controller.
@@ -87,7 +94,11 @@ if [ -n "${options[xdhcp]}" ]; then
fi
ip addr show $intf_name
- (while true; do ping -c 1 10.20.0.1; sleep 5; done) &
+ if [ -n "${options[opendns]}" ]; then
+ dhcp_dns=8.8.8.8
+ echo nameserver ${dhcp_dns#DNS=} > /etc/resolv.conf
+ route add default gw 10.20.255.254 $intf_name # fixed IP in start_networking
+ fi
else
echo Running dhclient...
dhclient -v
@@ -126,19 +137,36 @@ if [ -n "${options[telnet]}" ]; then
(while true; do echo Telnet `hostname`; nc -nvlt -p 23 -e `which hostname`; done) &
fi
+if [ -n "${options[ssh]}" ]; then
+ echo Starting SSH server
+ /usr/local/sbin/sshd
+elif [ -n "${options[sshv1]}" ]; then
+ echo Starting SSHv1 server
+ echo 'Protocol 1' >> /usr/local/etc/sshd_config
+ /usr/local/sbin/sshd
+fi
+
if [ -n "${options[bacnet]}" ]; then
echo Starting bacnet loop device.
- java -cp bacnetTests/build/libs/bacnet-1.0-SNAPSHOT-all.jar \
+ java -cp bacnetFaux/build/libs/bacnet-1.0-SNAPSHOT-all.jar \
FauxDeviceEngine.EntryPoint $local_ip $broadcast_ip "Faux-Device-Pass.json" &
elif [ -n "${options[bacnetfail]}" ]; then
echo Starting bacnet loop device.
- java -cp bacnetTests/build/libs/bacnet-1.0-SNAPSHOT-all.jar \
+ java -cp bacnetFaux/build/libs/bacnet-1.0-SNAPSHOT-all.jar \
FauxDeviceEngine.EntryPoint $local_ip $broadcast_ip "Faux-Device-Fail.json" &
fi
-if [ -n "${options[ntp_client]}" ]; then
- echo Starting ntp client.
- java -jar NTPClient/build/libs/NTPClient-1.0-SNAPSHOT.jar "time.google.com" "123" "3" &
+# NTPv4 query to the NTP server learnt from DHCP.
+# NTPv3 query to the IP of time.google.com (since resolv.conf is modified by other tests)
+if [ -n "${options[ntpv4]}" ]; then
+ dhcp_ntp=$(fgrep NTPSERVERS= /run/ntpdate.dhcp)
+ ntp_server=`echo $dhcp_ntp | cut -d "'" -f 2`
+ echo Transmitting NTP query to $ntp_server using NTPv4
+ java -jar NTPClient/build/libs/NTPClient-1.0-SNAPSHOT.jar $ntp_server 123 4 2 > ntp.log &
+elif [ -n "${options[ntpv3]}" ]; then
+ STATIC_NTP_SERVER=216.239.35.8
+ echo Transmitting NTP query to $STATIC_NTP_SERVER using NTPv3
+ java -jar NTPClient/build/libs/NTPClient-1.0-SNAPSHOT.jar $STATIC_NTP_SERVER 123 3 2 > ntp.log &
fi
# ntp_pass queries the NTP server learnt from DHCP. ntp_fail sends to time.google.com
@@ -155,16 +183,16 @@ if [ -n "${options[ntp_pass]}" -o -n "${options[ntp_fail]}" ]; then
fi
echo Transmitting NTP query to $ntp_server
ntpdate -q -p 1 $ntp_server
- sleep 10
+ sleep 5
done) &
fi
if [ -n "${options[broadcast_client]}" ]; then
- echo Starting broatcast client.
- cip_port=41794
+ echo Starting broadcast client.
+ port=41794
cycle_seconds=20
duration_seconds=360
- python TransportClient/client.py $broadcast_ip $cip_port broadcast $duration_seconds $cycle_seconds &
+ python TransportClient/client.py $broadcast_ip $port broadcast $duration_seconds $cycle_seconds &
fi
if [ -n "${options[discover]}" ]; then
@@ -172,11 +200,6 @@ if [ -n "${options[discover]}" ]; then
bin/bacnet_discover loop &
fi
-if [ -n "${options[curl]}" ]; then
- echo Starting curl loop.
- (while true; do curl -o - http://google.com; sleep 1; done) &
-fi
-
if [ -n "${options[brute]}" ]; then
echo Starting brute server.
(python pentests/brute_server.py bad 10000; echo Brute done.) &
@@ -201,13 +224,24 @@ fi
if [ -n "${options[pubber]}" ]; then
echo Running cloud pubber tool...
(while date; do
- pubber/bin/run
- # Do https query in case pubber is not configured, for testing port 443
- curl -o /dev/null https://google.com
- sleep 30
+ pubber/bin/run local/pubber.json
+ # Do https query in case pubber is not configured, for testing port 443
+ curl -o /dev/null https://google.com
+ sleep 30
done) &
fi
+# Periodically sends ARP packets
+if [ -z "${options[xarp]}" ]; then
+ echo Starting arp send loop.
+ (while true; do arpsend -D -e 10.20.254.254 $intf_name; sleep 2; done) &
+fi
+
+if [ -n "${options[curl]}" ]; then
+ echo Starting curl loop.
+ (while true; do curl -o - http://google.com; sleep 1; done) &
+fi
+
conf_file=/config/start/start_faux.sh
if [ -f $conf_file ]; then
echo Loading $conf_file...
diff --git a/docker/include/bin/test_ping b/docker/include/bin/test_ping
index 99bc957d83..b66393a079 100755
--- a/docker/include/bin/test_ping
+++ b/docker/include/bin/test_ping
@@ -88,8 +88,8 @@ echo Done with basic connectivity tests | tee -a $MONO_LOG
echo Checking startup NTP
ntp_target=${TARGET_IP%.*}.2
-ntp_request=`tcpdump -env -c 1 -r /scans/startup.pcap dst port 123 | wc -l`
-ntp_proper=`tcpdump -env -c 1 -r /scans/startup.pcap dst port 123 and dst host $ntp_target | wc -l`
+ntp_request=`tcpdump -env -c 1 -r /scans/monitor.pcap dst port 123 | wc -l`
+ntp_proper=`tcpdump -env -c 1 -r /scans/monitor.pcap dst port 123 and dst host $ntp_target | wc -l`
if [ "$ntp_request" == 0 ]; then
ntp_result=skip
ntp_summary="No NTP traffic detected"
diff --git a/validator/.idea/codeStyles/codeStyleConfig.xml b/docker/include/network/NTPClient/.idea/codeStyles/codeStyleConfig.xml
similarity index 100%
rename from validator/.idea/codeStyles/codeStyleConfig.xml
rename to docker/include/network/NTPClient/.idea/codeStyles/codeStyleConfig.xml
diff --git a/subset/network/NTPClient/.idea/gradle.xml b/docker/include/network/NTPClient/.idea/gradle.xml
similarity index 100%
rename from subset/network/NTPClient/.idea/gradle.xml
rename to docker/include/network/NTPClient/.idea/gradle.xml
diff --git a/subset/network/NTPClient/.idea/jarRepositories.xml b/docker/include/network/NTPClient/.idea/jarRepositories.xml
similarity index 100%
rename from subset/network/NTPClient/.idea/jarRepositories.xml
rename to docker/include/network/NTPClient/.idea/jarRepositories.xml
diff --git a/subset/network/NTPClient/.idea/misc.xml b/docker/include/network/NTPClient/.idea/misc.xml
similarity index 100%
rename from subset/network/NTPClient/.idea/misc.xml
rename to docker/include/network/NTPClient/.idea/misc.xml
diff --git a/subset/network/NTPClient/.idea/vcs.xml b/docker/include/network/NTPClient/.idea/vcs.xml
similarity index 100%
rename from subset/network/NTPClient/.idea/vcs.xml
rename to docker/include/network/NTPClient/.idea/vcs.xml
diff --git a/subset/network/NTPClient/build.gradle b/docker/include/network/NTPClient/build.gradle
similarity index 100%
rename from subset/network/NTPClient/build.gradle
rename to docker/include/network/NTPClient/build.gradle
diff --git a/subset/network/NTPClient/gradle/wrapper/gradle-wrapper.jar b/docker/include/network/NTPClient/gradle/wrapper/gradle-wrapper.jar
similarity index 100%
rename from subset/network/NTPClient/gradle/wrapper/gradle-wrapper.jar
rename to docker/include/network/NTPClient/gradle/wrapper/gradle-wrapper.jar
diff --git a/subset/connection/mac_oui/gradle/wrapper/gradle-wrapper.properties b/docker/include/network/NTPClient/gradle/wrapper/gradle-wrapper.properties
similarity index 92%
rename from subset/connection/mac_oui/gradle/wrapper/gradle-wrapper.properties
rename to docker/include/network/NTPClient/gradle/wrapper/gradle-wrapper.properties
index 38c1d48d19..12d38de6a4 100644
--- a/subset/connection/mac_oui/gradle/wrapper/gradle-wrapper.properties
+++ b/docker/include/network/NTPClient/gradle/wrapper/gradle-wrapper.properties
@@ -1,5 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-6.6.1-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-bin.zip
diff --git a/subset/network/NTPClient/gradlew b/docker/include/network/NTPClient/gradlew
similarity index 100%
rename from subset/network/NTPClient/gradlew
rename to docker/include/network/NTPClient/gradlew
diff --git a/subset/network/NTPClient/gradlew.bat b/docker/include/network/NTPClient/gradlew.bat
similarity index 100%
rename from subset/network/NTPClient/gradlew.bat
rename to docker/include/network/NTPClient/gradlew.bat
diff --git a/subset/network/NTPClient/settings.gradle b/docker/include/network/NTPClient/settings.gradle
similarity index 100%
rename from subset/network/NTPClient/settings.gradle
rename to docker/include/network/NTPClient/settings.gradle
diff --git a/subset/network/NTPClient/src/main/java/META-INF/MANIFEST.MF b/docker/include/network/NTPClient/src/main/java/META-INF/MANIFEST.MF
similarity index 100%
rename from subset/network/NTPClient/src/main/java/META-INF/MANIFEST.MF
rename to docker/include/network/NTPClient/src/main/java/META-INF/MANIFEST.MF
diff --git a/docker/include/network/NTPClient/src/main/java/Main.java b/docker/include/network/NTPClient/src/main/java/Main.java
new file mode 100644
index 0000000000..0bfe635d31
--- /dev/null
+++ b/docker/include/network/NTPClient/src/main/java/Main.java
@@ -0,0 +1,92 @@
+import java.io.IOException;
+import java.net.DatagramPacket;
+import java.net.DatagramSocket;
+import java.net.InetAddress;
+import java.text.DecimalFormat;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+
+public class Main {
+
+ static final double SECONDS_FROM_01_01_1900_TO_01_01_1970 = 2208988800.0;
+ static String serverName = "time.google.com";
+ static byte version = 3;
+ static int port = 123;
+ static int timerPeriod = 10;
+ static byte leapIndicator = 3;
+
+ /**
+ * Constructs and sends NTP packets to target NTP server.
+ */
+
+ public static void main(String[] args) {
+ if (args.length < 4) {
+ throw new IllegalArgumentException("Usage: server_name port version timerPeriod");
+ }
+ serverName = args[0];
+ port = Integer.parseInt(args[1]);
+ version = Byte.parseByte(args[2]);
+ timerPeriod = Integer.parseInt(args[3]);
+
+ Runnable senderRunnable = new Runnable() {
+ @Override
+ public void run() {
+ try {
+ sendRequest();
+ } catch (IOException e) {
+ System.out.println(e.getMessage());
+ }
+ }
+ };
+ ScheduledExecutorService executor = Executors.newScheduledThreadPool(1);
+ executor.scheduleAtFixedRate(senderRunnable, 0, timerPeriod, TimeUnit.SECONDS);
+ }
+
+ private static void sendRequest() throws IOException {
+ DatagramSocket socket = new DatagramSocket();
+ InetAddress address = InetAddress.getByName(serverName);
+ byte[] buf = new NtpMessage(SECONDS_FROM_01_01_1900_TO_01_01_1970, leapIndicator, version).toByteArray();
+ DatagramPacket packet = new DatagramPacket(buf, buf.length, address, port);
+
+ // Set the transmit timestamp *just* before sending the packet
+ NtpMessage.encodeTimestamp(packet.getData(), 40,
+ (System.currentTimeMillis() / 1000.0) + SECONDS_FROM_01_01_1900_TO_01_01_1970);
+ sendPacket(socket, packet, buf);
+ }
+
+ private static void sendPacket(DatagramSocket socket, DatagramPacket packet, byte[] buf) throws IOException {
+ socket.send(packet);
+
+ // Get response
+ System.out.println("NTP request sent, waiting for response...\n");
+ packet = new DatagramPacket(buf, buf.length);
+ socket.receive(packet);
+
+ // Display response
+ System.out.println("NTP server: " + serverName);
+
+ // Process response
+ NtpMessage msg = new NtpMessage(packet.getData());
+
+ // Immediately record the incoming timestamp
+ double destinationTimestamp =
+ (System.currentTimeMillis() / 1000.0) + SECONDS_FROM_01_01_1900_TO_01_01_1970;
+ System.out.println(msg.toString());
+ System.out.println("Dest. timestamp: "
+ + NtpMessage.timestampToString(destinationTimestamp));
+
+ double roundTripDelay = (destinationTimestamp - msg.originateTimestamp)
+ - (msg.transmitTimestamp - msg.receiveTimestamp);
+ System.out.println("Round-trip delay: "
+ + new DecimalFormat("0.00").format(roundTripDelay * 1000) + " ms");
+ double localClockOffset =
+ ((msg.receiveTimestamp - msg.originateTimestamp)
+ + (msg.transmitTimestamp - destinationTimestamp)) / 2;
+ System.out.println("Local clock offset: "
+ + new DecimalFormat("0.00").format(localClockOffset * 1000) + " ms");
+ if (localClockOffset * 1000 < 128) {
+ leapIndicator = 0;
+ }
+ }
+}
diff --git a/docker/include/network/NTPClient/src/main/java/NtpMessage.java b/docker/include/network/NTPClient/src/main/java/NtpMessage.java
new file mode 100644
index 0000000000..a441b14959
--- /dev/null
+++ b/docker/include/network/NTPClient/src/main/java/NtpMessage.java
@@ -0,0 +1,203 @@
+import java.text.DecimalFormat;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+public class NtpMessage {
+ public byte leapIndicator = 3;
+ public byte version = 0;
+ public byte mode = 0;
+ public short stratum = 0;
+ public byte pollInterval = 0;
+ public byte precision = 0;
+ public double rootDelay = 0;
+ public double rootDispersion = 0;
+ public byte[] referenceIdentifier = {0, 0, 0, 0};
+ public double referenceTimestamp = 0;
+ public double originateTimestamp = 0;
+ public double receiveTimestamp = 0;
+ public double transmitTimestamp = 0;
+
+ /**
+ * Constructs a new NtpMessage from an array of bytes.
+ */
+ public NtpMessage(byte[] array) {
+ leapIndicator = (byte)((array[0] >> 6) & 0x3);
+ version = (byte)((array[0] >> 3) & 0x7);
+ mode = (byte)(array[0] & 0x7);
+ stratum = unsignedByteToShort(array[1]);
+ pollInterval = array[2];
+ precision = array[3];
+
+ rootDelay = (array[4] * 256.0)
+ + unsignedByteToShort(array[5])
+ + (unsignedByteToShort(array[6]) / 256.0)
+ + (unsignedByteToShort(array[7]) / 65536.0);
+
+ rootDispersion = (unsignedByteToShort(array[8]) * 256.0)
+ + unsignedByteToShort(array[9])
+ + (unsignedByteToShort(array[10]) / 256.0)
+ + (unsignedByteToShort(array[11]) / 65536.0);
+
+ referenceIdentifier[0] = array[12];
+ referenceIdentifier[1] = array[13];
+ referenceIdentifier[2] = array[14];
+ referenceIdentifier[3] = array[15];
+
+ referenceTimestamp = decodeTimestamp(array, 16);
+ originateTimestamp = decodeTimestamp(array, 24);
+ receiveTimestamp = decodeTimestamp(array, 32);
+ transmitTimestamp = decodeTimestamp(array, 40);
+ }
+
+ /**
+ * Constructs a new NtpMessage in client -> server mode, and sets the
+ * transmit timestamp to the current time.
+ */
+ public NtpMessage(double secondsDiff, byte leapIndicator, byte version) {
+ this.mode = 3;
+ this.leapIndicator = leapIndicator;
+ this.version = version;
+ this.transmitTimestamp = (System.currentTimeMillis() / 1000.0) + secondsDiff;
+ }
+
+ /**
+ * This method constructs the data bytes of a raw NTP packet.
+ */
+ public byte[] toByteArray() {
+ byte[] p = new byte[48];
+
+ p[0] = (byte)(leapIndicator << 6 | version << 3 | mode);
+ p[1] = (byte) stratum;
+ p[2] = (byte) pollInterval;
+ p[3] = (byte) precision;
+
+ // root delay is a signed 16.16-bit FP, in Java an int is 32-bits
+ int l = (int)(rootDelay * 65536.0);
+ p[4] = (byte)((l >> 24) & 0xFF);
+ p[5] = (byte)((l >> 16) & 0xFF);
+ p[6] = (byte)((l >> 8) & 0xFF);
+ p[7] = (byte)(l & 0xFF);
+
+ // root dispersion is an unsigned 16.16-bit FP, in Java there are no
+ // unsigned primitive types, so we use a long which is 64-bits
+ long ul = (long)(rootDispersion * 65536.0);
+ p[8] = (byte)((ul >> 24) & 0xFF);
+ p[9] = (byte)((ul >> 16) & 0xFF);
+ p[10] = (byte)((ul >> 8) & 0xFF);
+ p[11] = (byte)(ul & 0xFF);
+
+ p[12] = referenceIdentifier[0];
+ p[13] = referenceIdentifier[1];
+ p[14] = referenceIdentifier[2];
+ p[15] = referenceIdentifier[3];
+
+ encodeTimestamp(p, 16, referenceTimestamp);
+ encodeTimestamp(p, 24, originateTimestamp);
+ encodeTimestamp(p, 32, receiveTimestamp);
+ encodeTimestamp(p, 40, transmitTimestamp);
+
+ return p;
+ }
+
+ /**
+ * Returns a string representation of a NtpMessage.
+ */
+ public String toString() {
+ String precisionStr =
+ new DecimalFormat("0.#E0").format(Math.pow(2, precision));
+
+ return "Leap indicator: " + leapIndicator + "\n"
+ + "Version: " + version + "\n"
+ + "Mode: " + mode + "\n"
+ + "Stratum: " + stratum + "\n"
+ + "Poll: " + pollInterval + "\n"
+ + "Precision: " + precision + " (" + precisionStr + " seconds)\n"
+ + "Root delay: " + new DecimalFormat("0.00").format(rootDelay * 1000) + " ms\n"
+ + "Root dispersion: " + new DecimalFormat("0.00").format(rootDispersion * 1000) + " ms\n"
+ + "Reference identifier: " + referenceIdentifierToString(referenceIdentifier, stratum, version) + "\n"
+ + "Reference timestamp: " + timestampToString(referenceTimestamp) + "\n"
+ + "Originate timestamp: " + timestampToString(originateTimestamp) + "\n"
+ + "Receive timestamp: " + timestampToString(receiveTimestamp) + "\n"
+ + "Transmit timestamp: " + timestampToString(transmitTimestamp);
+ }
+
+ /**
+ * Converts an unsigned byte to a short. By default, Java assumes that
+ * a byte is signed.
+ */
+ public static short unsignedByteToShort(byte b) {
+ if ((b & 0x80) == 0x80) {
+ return (short)(128 + (b & 0x7f));
+ } else {
+ return (short) b;
+ }
+ }
+
+ /**
+ * Will read 8 bytes of a message beginning at pointer
+ * and return it as a double, according to the NTP 64-bit timestamp
+ * format.
+ */
+ public static double decodeTimestamp(byte[] array, int pointer) {
+ double r = 0.0;
+
+ for (int i = 0; i < 8; i++) {
+ r += unsignedByteToShort(array[pointer + i]) * Math.pow(2, (3 - i) * 8);
+ }
+
+ return r;
+ }
+
+ /**
+ * Encodes a timestamp in the specified position in the message.
+ */
+ public static void encodeTimestamp(byte[] array, int pointer, double timestamp) {
+ // Converts a double into a 64-bit fixed point
+ for (int i = 0; i < 8; i++) {
+ // 2^24, 2^16, 2^8, .. 2^-32
+ double base = Math.pow(2, (3 - i) * 8);
+ // Capture byte value
+ array[pointer + i] = (byte)(timestamp / base);
+ // Subtract captured value from remaining total
+ timestamp = timestamp - (double)(unsignedByteToShort(array[pointer + i]) * base);
+ }
+ array[7] = (byte)(Math.random() * 255.0);
+ }
+
+ /**
+ * Returns a timestamp (number of seconds since 00:00 1-Jan-1900) as a
+ * formatted date/time string.
+ */
+ public static String timestampToString(double timestamp) {
+ if (timestamp == 0) {
+ return "0";
+ }
+ double utc = timestamp - (2208988800.0);
+ long ms = (long)(utc * 1000.0);
+ String date = new SimpleDateFormat("dd-MMM-yyyy HH:mm:ss").format(new Date(ms));
+ double fraction = timestamp - ((long) timestamp);
+ String fractionString = new DecimalFormat(".000000").format(fraction);
+ return date + fractionString;
+ }
+
+ /**
+ * Returns a string representation of a reference identifier according
+ * to the rules set out in RFC 2030.
+ */
+ public static String referenceIdentifierToString(byte[] ref, short stratum, byte version) {
+ if (stratum == 0 || stratum == 1) {
+ return new String(ref);
+ } else if (version == 3) {
+ return unsignedByteToShort(ref[0]) + "."
+ + unsignedByteToShort(ref[1]) + "."
+ + unsignedByteToShort(ref[2]) + "."
+ + unsignedByteToShort(ref[3]);
+ } else if (version == 4) {
+ return "" + ((unsignedByteToShort(ref[0]) / 256.0)
+ + (unsignedByteToShort(ref[1]) / 65536.0)
+ + (unsignedByteToShort(ref[2]) / 16777216.0)
+ + (unsignedByteToShort(ref[3]) / 4294967296.0));
+ }
+ return "";
+ }
+}
diff --git a/subset/network/TransportClient/client.py b/docker/include/network/TransportClient/client.py
similarity index 59%
rename from subset/network/TransportClient/client.py
rename to docker/include/network/TransportClient/client.py
index e72b83ec2f..15a01d951c 100644
--- a/subset/network/TransportClient/client.py
+++ b/docker/include/network/TransportClient/client.py
@@ -1,3 +1,10 @@
+"""
+ Used within the faux device to start a client which will send out broadcast packets.
+
+ Usage:
+ python TransportClient/client.py $broadcast_ip $port broadcast $duration_seconds $cycle_seconds
+"""
+
import socket, sys, time
arguments = sys.argv
@@ -7,19 +14,25 @@
transport_type = str(arguments[3])
duration_seconds = int(arguments[4])
cycle_seconds = int(arguments[5])
-message = "Fried lizards taste like chicken"
+message = "Fried lizards taste like chicken!"
-def broadcast_setup_socket():
+
+def get_broadcast_socket():
client = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
client.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
return client
+
def send_message(message, transport_type):
+ client = None
+
if transport_type == 'broadcast':
- client = broadcast_setup_socket()
- sent = client.sendto(message, (udp_ip_address, udp_port))
+ client = get_broadcast_socket()
+
+ client.sendto(message, (udp_ip_address, udp_port))
+
-while(duration_seconds > 0):
+while duration_seconds > 0:
print('{t} to {a}'.format(t=transport_type, a=udp_ip_address))
send_message(message, transport_type)
time.sleep(cycle_seconds)
diff --git a/docker/include/networking_scripts/autorestart_dnsmasq b/docker/include/network/scripts/autorestart_dnsmasq
similarity index 100%
rename from docker/include/networking_scripts/autorestart_dnsmasq
rename to docker/include/network/scripts/autorestart_dnsmasq
diff --git a/docker/include/network/scripts/change_dhcp_range b/docker/include/network/scripts/change_dhcp_range
new file mode 100755
index 0000000000..0c45cd6bae
--- /dev/null
+++ b/docker/include/network/scripts/change_dhcp_range
@@ -0,0 +1,24 @@
+#!/bin/bash -e
+#
+# Dynamically change DHCP lease range, requires killing and restarting
+# dnsmasq as per documentation (SIGHUP does not reload configuration file).
+LOCAL_IF=${LOCAL_IF:-$HOSTNAME-eth0}
+
+range_start=$1
+range_end=$2
+prefix_len=$3
+if [ -z "$range_start" ] || [ -z "$range_end" ] || [ -z "$prefix_len" ]; then
+ echo "Usage: change_dhcp_range range_start range_end prefix_len"
+ exit 1
+fi
+while [ $(cat /etc/dnsmasq.conf | egrep "^dhcp-range=" | wc -l) == 0 ]; do
+ sleep 1
+done
+ip addr add $range_start/$prefix_len dev $LOCAL_IF || true
+original=$(cat /etc/dnsmasq.conf | egrep "^dhcp-range=" | head -1)
+lease=$(echo $original | cut -d',' -f 3)
+if [ -n "$lease" ]; then
+ lease=",$lease"
+fi
+new="dhcp-range=$range_start,$range_end$lease"
+flock /etc/dnsmasq.conf sed -i s/$original/$new/ /etc/dnsmasq.conf
\ No newline at end of file
diff --git a/docker/include/networking_scripts/change_dhcp_response_time b/docker/include/network/scripts/change_dhcp_response_time
similarity index 100%
rename from docker/include/networking_scripts/change_dhcp_response_time
rename to docker/include/network/scripts/change_dhcp_response_time
diff --git a/docker/include/networking_scripts/change_lease_time b/docker/include/network/scripts/change_lease_time
similarity index 72%
rename from docker/include/networking_scripts/change_lease_time
rename to docker/include/network/scripts/change_lease_time
index 306e985604..0cb8986c8a 100755
--- a/docker/include/networking_scripts/change_lease_time
+++ b/docker/include/network/scripts/change_lease_time
@@ -7,10 +7,10 @@ if [ -z $lease ]; then
echo "Lease time not defined."
exit 1
fi
-while [ $(cat /etc/dnsmasq.conf | grep dhcp-range=10.20 | wc -l) == 0 ]; do
+while [ $(cat /etc/dnsmasq.conf | grep "^dhcp-range=" | wc -l) == 0 ]; do
sleep 1
done
-original=$(cat /etc/dnsmasq.conf | grep dhcp-range=10.20 | head -1)
+original=$(cat /etc/dnsmasq.conf | grep "^dhcp-range=" | head -1)
new="$(echo $original | cut -d',' -f 1,2),$lease"
flock /etc/dnsmasq.conf sed -i s/$original/$new/ /etc/dnsmasq.conf
diff --git a/docker/include/networking_scripts/new_ip b/docker/include/network/scripts/new_ip
similarity index 100%
rename from docker/include/networking_scripts/new_ip
rename to docker/include/network/scripts/new_ip
diff --git a/docker/include/networking_scripts/start_networking b/docker/include/network/scripts/start_networking
similarity index 94%
rename from docker/include/networking_scripts/start_networking
rename to docker/include/network/scripts/start_networking
index a55e24b052..331585c2fe 100755
--- a/docker/include/networking_scripts/start_networking
+++ b/docker/include/network/scripts/start_networking
@@ -40,6 +40,7 @@ if ! ip addr show dev $LOCAL_IF | fgrep -q 'inet '; then
ip addr add 10.20.$subnet.1/16 dev $LOCAL_IF
fi
+ip addr add 10.20.255.254/16 dev $LOCAL_IF #For static ip devices' default gateway IP
echo dhcp-host=*,ignore >> /etc/dnsmasq.conf
# Start the NTP server
diff --git a/subset/pentests/brute_server.py b/docker/include/pentests/brute_server.py
similarity index 100%
rename from subset/pentests/brute_server.py
rename to docker/include/pentests/brute_server.py
diff --git a/subset/security/nginx-site/html/index.html b/docker/include/security/nginx-site/html/index.html
similarity index 100%
rename from subset/security/nginx-site/html/index.html
rename to docker/include/security/nginx-site/html/index.html
diff --git a/subset/security/nginxfail.conf b/docker/include/security/nginxfail.conf
similarity index 100%
rename from subset/security/nginxfail.conf
rename to docker/include/security/nginxfail.conf
diff --git a/subset/security/nginxpass.conf b/docker/include/security/nginxpass.conf
similarity index 100%
rename from subset/security/nginxpass.conf
rename to docker/include/security/nginxpass.conf
diff --git a/docker/include/security/sshfaux/ssh_build.sh b/docker/include/security/sshfaux/ssh_build.sh
new file mode 100644
index 0000000000..f870555f1d
--- /dev/null
+++ b/docker/include/security/sshfaux/ssh_build.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+#
+# Build older versions OpenSSL 1.0.2 and OpenSSH 7.2
+# Used for testing in faux devices only
+#
+# To run SSHD use /usr/local/sbin/sshd
+# SSH components, e.g. ssh-keygen are found in /usr/local/bin
+# SSH configuration and keys found in /usr/local/etc
+
+# Build OpenSSL 1.0.2
+wget https://www.openssl.org/source/openssl-1.0.2g.tar.gz
+tar -xzf openssl-1.0.2g.tar.gz
+cd openssl-1.0.2g
+./config --prefix=/usr/local/openssl --openssldir=/usr/local/openssl
+make -s
+make -s install
+cd ..
+
+# Prepare privilege separation for SSHD
+source ssh_privsep.sh
+
+# Build OpenSSH 7.2
+wget https://mirrors.mit.edu/pub/OpenBSD/OpenSSH/portable/openssh-7.2p1.tar.gz
+tar -xzf openssh-7.2p1.tar.gz
+cd openssh-7.2p1
+./configure --with-ssl-dir=/usr/local/openssl --with-ssh1
+make -s
+make -s install
diff --git a/docker/include/security/sshfaux/ssh_privsep.sh b/docker/include/security/sshfaux/ssh_privsep.sh
new file mode 100644
index 0000000000..668d825f9e
--- /dev/null
+++ b/docker/include/security/sshfaux/ssh_privsep.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+#
+# Prepare environment for running SSHD with privilege separation
+# https://github.com/openssh/openssh-portable/blob/master/README.privsep
+
+mkdir /etc/ssh
+mkdir /var/empty
+chown root:sys /var/empty
+chmod 755 /var/empty
+groupadd sshd
+useradd -g sshd -c 'sshd privsep' -d /var/empty -s /bin/false sshd
diff --git a/subset/security/tlsfaux/absolute_filepath.py b/docker/include/security/tlsfaux/absolute_filepath.py
similarity index 100%
rename from subset/security/tlsfaux/absolute_filepath.py
rename to docker/include/security/tlsfaux/absolute_filepath.py
diff --git a/subset/security/tlsfaux/certs/server.crt b/docker/include/security/tlsfaux/certs/server.crt
similarity index 100%
rename from subset/security/tlsfaux/certs/server.crt
rename to docker/include/security/tlsfaux/certs/server.crt
diff --git a/subset/security/tlsfaux/certs/server.csr b/docker/include/security/tlsfaux/certs/server.csr
similarity index 100%
rename from subset/security/tlsfaux/certs/server.csr
rename to docker/include/security/tlsfaux/certs/server.csr
diff --git a/subset/security/tlsfaux/certs/server.key b/docker/include/security/tlsfaux/certs/server.key
similarity index 100%
rename from subset/security/tlsfaux/certs/server.key
rename to docker/include/security/tlsfaux/certs/server.key
diff --git a/subset/security/tlsfaux/expcerts/server.crt b/docker/include/security/tlsfaux/expcerts/server.crt
similarity index 100%
rename from subset/security/tlsfaux/expcerts/server.crt
rename to docker/include/security/tlsfaux/expcerts/server.crt
diff --git a/subset/security/tlsfaux/expcerts/server.csr b/docker/include/security/tlsfaux/expcerts/server.csr
similarity index 100%
rename from subset/security/tlsfaux/expcerts/server.csr
rename to docker/include/security/tlsfaux/expcerts/server.csr
diff --git a/subset/security/tlsfaux/expcerts/server.key b/docker/include/security/tlsfaux/expcerts/server.key
similarity index 100%
rename from subset/security/tlsfaux/expcerts/server.key
rename to docker/include/security/tlsfaux/expcerts/server.key
diff --git a/subset/security/tlsfaux/generate_certs.py b/docker/include/security/tlsfaux/generate_certs.py
similarity index 100%
rename from subset/security/tlsfaux/generate_certs.py
rename to docker/include/security/tlsfaux/generate_certs.py
diff --git a/subset/security/tlsfaux/server.py b/docker/include/security/tlsfaux/server.py
similarity index 100%
rename from subset/security/tlsfaux/server.py
rename to docker/include/security/tlsfaux/server.py
diff --git a/docker/modules/Dockerfile.faux1 b/docker/modules/Dockerfile.faux1
index 7c8e9c0be0..14e18e428e 100644
--- a/docker/modules/Dockerfile.faux1
+++ b/docker/modules/Dockerfile.faux1
@@ -15,23 +15,33 @@ ENV BACHASH=94a794a756ee0d37c6a2e53e08747ee021415aa8
RUN bin/retry_cmd git clone https://github.com/grafnu/bacnet4j.git --single-branch \
&& cd bacnet4j && git reset --hard $BACHASH && ../bin/retry_cmd ./gradlew shadow
-COPY pubber/ pubber/
+COPY udmi/pubber/ pubber/
RUN pubber/bin/build
+# Separate stage to build older versions of SSH and SSL
+FROM daqf/aardvark:latest as ssh_build
+
+RUN $AG update && $AG install wget make build-essential gcc libz-dev ca-certificates
+
+# Build SSH, OpenSSL from source and configure
+
+COPY docker/include/security/sshfaux/*.sh ./
+RUN sh ssh_build.sh
+
FROM daqf/aardvark:latest
# Run this separately so it can be shared with other builds.
RUN $AG update && $AG install openjdk-8-jre
RUN $AG update && $AG install openjdk-8-jdk git
RUN $AG update && $AG install isc-dhcp-client ethtool network-manager netcat curl\
- python ifupdown openssl ssh nano apache2-utils ntpdate
+ python ifupdown openssl nano apache2-utils ntpdate vzctl
# Additional OS dependencies
RUN $AG update && $AG install -y telnetd && $AG install xinetd nginx
-COPY subset/network/NTPClient NTPClient
+COPY docker/include/network/NTPClient NTPClient
RUN cd NTPClient && ./gradlew build
-COPY subset/network/TransportClient TransportClient
+COPY docker/include/network/TransportClient TransportClient
# Prefetch resolvconf to dynamically install at runtime in start_faux.
RUN $AG update && cd /tmp && ln -s ~/bin bin && $AG download resolvconf && mv resolvconf_*.deb ~
@@ -42,27 +52,29 @@ COPY --from=java_build /root/bacnet4j/*.jar bacnet4j/
COPY docker/include/bin/bacnet_discover bin/
COPY --from=java_build /root/pubber/build/libs/*.jar pubber/build/libs/
-COPY pubber/bin/run pubber/bin/
+COPY udmi/pubber/bin/run pubber/bin/
-COPY subset/pentests/brute_server.py pentests/
-COPY subset/security/tlsfaux tlsfaux/
+COPY docker/include/pentests/brute_server.py pentests/
+COPY docker/include/security/tlsfaux tlsfaux/
-COPY subset/bacnet/bacnetTests/ bacnetTests
-COPY subset/bacnet/bacnetTests/src/main/resources/Faux*.json tmp/
-COPY --from=java_build /root/bacnet4j/bacnet4j-1.0-SNAPSHOT-all.jar bacnetTests/libs/
-RUN cd bacnetTests && ./gradlew build
-
-# SSH dependency
-COPY subset/security/ssh_additions.config ssh_additions.config
-RUN cat ssh_additions.config >> /etc/ssh/sshd_config
+COPY docker/include/bacnet/bacnetFaux/ bacnetFaux
+COPY docker/include/bacnet/bacnetFaux/src/main/resources/Faux*.json tmp/
+COPY --from=java_build /root/bacnet4j/bacnet4j-1.0-SNAPSHOT-all.jar bacnetFaux/libs/
+RUN cd bacnetFaux && ./gradlew build
# HTTP/HTTPS dependency
-COPY subset/security/nginxpass.conf /root/nginx/
-COPY subset/security/nginxfail.conf /root/nginx/
-COPY subset/security/nginx-site /var/www/nginx-site
-
-# SSH login fix. Otherwise user is kicked off after login
-RUN sed 's@session\s*required\s*pam_loginuid.so@session optional pam_loginuid.so@g' -i /etc/pam.d/sshd
+COPY docker/include/security/nginxpass.conf /root/nginx/
+COPY docker/include/security/nginxfail.conf /root/nginx/
+COPY docker/include/security/nginx-site /var/www/nginx-site
+
+COPY --from=ssh_build /usr/local/openssl/* /usr/local/openssl/
+COPY --from=ssh_build /usr/local/sbin/* /usr/local/sbin/
+COPY --from=ssh_build /usr/local/bin/* /usr/local/bin/
+COPY --from=ssh_build /usr/local/etc/* /usr/local/etc/
+
+COPY docker/include/security/sshfaux/ssh_privsep.sh ssh_privsep.sh
+RUN sh ssh_privsep.sh
+RUN /usr/local/bin/ssh-keygen -A
# Weird workaround for problem running tcdump in a privlidged container.
RUN mv /usr/sbin/tcpdump /usr/bin/tcpdump
diff --git a/docker/modules/Dockerfile.faux2 b/docker/modules/Dockerfile.faux2
index 11ea5020bf..e179f0b9a7 100644
--- a/docker/modules/Dockerfile.faux2
+++ b/docker/modules/Dockerfile.faux2
@@ -26,9 +26,9 @@ COPY docker/include/bin/start_faux docker/include/bin/failing bin/
# Weird workaround for problem running tcdump in a privlidged container.
RUN mv /usr/sbin/tcpdump /usr/bin/tcpdump
-COPY subset/security/tlsfaux tlsfaux/
-COPY subset/security/nginxpass.conf /root/nginx/
-COPY subset/security/nginxfail.conf /root/nginx/
-COPY subset/security/nginx-site /var/www/nginx-site
+COPY docker/include/security/tlsfaux tlsfaux/
+COPY docker/include/security/nginxpass.conf /root/nginx/
+COPY docker/include/security/nginxfail.conf /root/nginx/
+COPY docker/include/security/nginx-site /var/www/nginx-site
ENTRYPOINT ["bin/start_faux"]
diff --git a/docker/modules/Dockerfile.networking b/docker/modules/Dockerfile.networking
index 2001cf1f03..86ae77238b 100644
--- a/docker/modules/Dockerfile.networking
+++ b/docker/modules/Dockerfile.networking
@@ -5,9 +5,9 @@
FROM daqf/aardvark:latest
-RUN $AG update && $AG install dnsmasq ethtool iptables netcat ntp python
+RUN $AG update && $AG install dnsmasq ethtool iptables netcat ntp python curl
-COPY docker/include/networking_scripts/* ./
+COPY docker/include/network/scripts/* ./
RUN mkdir -p /etc
COPY docker/include/etc/ntp.conf /etc
diff --git a/docs/add_test.md b/docs/add_test.md
index f279359570..79592f0d24 100644
--- a/docs/add_test.md
+++ b/docs/add_test.md
@@ -34,7 +34,7 @@ A setup for the `pass` test, as an example, would be configured as follows
* `echo host_tests=local/local_tests.conf >> local/system.conf` -- Set tests configuration.
This, of course, only works for local development when using the `local_tests.conf` config. To
-formalize a test and include it in the overal system build it should be included in
+formalize a test and include it in the overall system build it should be included in
`config/modules/all.conf`.
## Component Build
@@ -112,7 +112,7 @@ However, with great flexibility comes great responsibility.
Tests should:
- Test _one_ thing well
-- Include an integration test for Travis CI
+- Include an integration test for [Github actions](https://github.com/faucetsdn/daq/actions)
- Adhere to the Google style guide of your chosen language: https://google.github.io/styleguide/
- Have the smallest amount of code possible for the greatest utility for the framework. Keep docker images lean!
- Not add things like the following to the repository:
@@ -124,9 +124,9 @@ Tests should:
- Include the test name and a description of the test in the report output
- Include an informative line in the summary table
-Integration tests don't need to be tedious and, if you're developing one test and seeing a consistent failure on Travis CI, isolate your problem and run _just that part_ of the integration test both locally and on Travis CI.
+Integration tests don't need to be tedious and, if you're developing one test and seeing a consistent failure on Github actions, isolate your problem and run _just that part_ of the integration test both locally and on Github actions.
-The pass/fail state of an integration test corresponds to the result of a `diff` between expected and actual device report output. You can follow the steps in the _Integration Testing Workflow_ section below to mimic the exact process that Travis CI follows. Or, if your local machine builds Docker images slowly, simply modify the test_*.out by hand, amending it to what your report should look like. Then, see if Travis CI agrees.
+The pass/fail state of an integration test corresponds to the result of a `diff` between expected and actual device report output. You can follow the steps in the _Integration Testing Workflow_ section below to mimic the exact process that Github actions follows. Or, if your local machine builds Docker images slowly, simply modify the test_*.out by hand, amending it to what your report should look like. Then, see if Github actions agrees.
Similarly, if you're writing one test and running it within DAQ locally, run _only the test you're developing_. Try not to bloat your precious development hours by waiting for tests to run that you don't care about. Building unnecessary tests is a very efficient time sink.
@@ -145,7 +145,7 @@ All of the commands in these steps are run as the root user by typing "sudo -i"
6. Run `testing/test_x.sh`
7. Copy `out/test_x.out` to `testing/test_x.out`
8. Run `testing/test_x.sh` to check the integration tests now execute successfully in the local machine.
-9. Commit to GitHub to sync local codebase with remote codebase and to trigger the final Travis CI tests
-10. Test should now pass the Travis CI integration tests.
+9. Commit to GitHub to sync local codebase with remote codebase and to trigger the final [Github actions](https://github.com/faucetsdn/daq/actions) tests
+10. Test should now pass the [Github actions](https://github.com/faucetsdn/daq/actions) integration tests.
TODO: write note about hold_tests
diff --git a/docs/build.md b/docs/build.md
index 40e40aa20e..6ff2287518 100644
--- a/docs/build.md
+++ b/docs/build.md
@@ -22,23 +22,11 @@ different dependencies. See `cmd/build help` for more details on different image
## Tests, Tests, and More Tests
In a whirlwind of flagrant appropriateness, the baseline for DAQ development is... testing. Specifically,
-there is a suite of continuous integration tests that run on [Travis CI](https://travis-ci.com/faucetsdn/daq)
+there is a suite of continuous integration tests that run using [Github actions](https://github.com/faucetsdn/daq/actions)
that puts the system through a barrage of tests to make sure all is good. Any PR submission will
-require that these tests pass. It's recommended (but not required) that you set up Travis CI on
-your personal development branch to test your commits in the full Travis CI environment before relying
-on the automatic PR test mechanism.
-
-The `.travis.yml` file contains the information for the tests themselves, primarily listed under the `matrix:`
-subsection that shows all the various tested configurations. Note that this assumes a fully installed environment
-(as setup with `bin/setup_daq`). From there, individual tests can be run locally by
-appending `bin/test_daq` to a `sudo` line of shell environment settings, e.g. as taken from one matrix entry:
-
-~/daq$ sudo DAQ_TEST=base bin/test_daq
-…
-or directly with:
-~/daq$ sudo testing/test_base.sh
-…
-
+require that these tests pass.
+
+For more information, please see [developing docs](https://github.com/faucetsdn/daq/blob/master/docs/developing.md).
## Incremental Builds
diff --git a/docs/changelog.md b/docs/changelog.md
index a67a7cfd32..bedd5c2366 100644
--- a/docs/changelog.md
+++ b/docs/changelog.md
@@ -1,4 +1,55 @@
# Changelog
+* 1.9.5
+ * Increase nmap module timeout (#611)
+ * Improvements for test development debugging (#609)
+ * Add module config system logging (#607)
+* 1.9.4
+ * Feature/convert switchtests (#601)
+ * Do not infinite spawn ntp (#598)
+ * security.nmap.http test (#563)
+ * Update registrar tool for latest UDMI version (#596)
+ * Feature/vlan trigger (#588)
+ * fix gcp combine report test (#587)
+ * Adding default dns for static ip faux devices (#576)
+ * Add perodic tests (#575)
+ * security.admin.password changes (#461)
+* 1.9.0
+ * Test infrastructure cleanup (#572)
+ * Remove faux dependencies from subset directory (#567)
+ * Github actions (#558)
+ * misc updates to docs (#568)
+ * Incorporate manual test comments (#499)
+ * NTP Update (#525)
+ * Automatic build script (#557)
+* 1.8.2
+ * GRPC timeouts + usi first command wait fix. (#555)
+ * Numerous renovate bot updates.
+* 1.8.0
+ * add security.ssh.version test (#523)
+ * Refactor UDMI to external repo (#544)
+ * Additional DHCP test part 3: IP change test (#543)
+ * Additional DHCP test part 2: Multisubnet test (#539)
+ * Additional DHCP test part 1 (#532)
+ * Support for alternate sec switch (not managed by DAQ) (#531)
+ * Add troubleshooting script (#529)
+ * Using usi in daq (#520)
+ * Use trunk rather than stack between switches (#526)
+ * NTPv4 support (#487)
+ * Feature/usi OVS switch (#521)
+* 1.7.0
+ * Add DAQ version to origin summary (#522)
+ * Add check for git version tag in Travis (#519)
+ * Minor UDMI updates for pubber keygen
+ * Update Minimum Send Test (#498)
+ * Universal Switch Interface (USI) (#496)
+* 1.6.1
+ * fix image pull in cmd/build (#503)
+* 1.6.0
+ * cloud test setup documentation (#495)
+ * Baseline for NTP tests (#494)
+ * Baseline for DNS test (#492)
+ * Add manual test summary to test report (#481)
+ * UDMI logentry schema update (#391)
* 1.5.1
* Fix for local-port-as-string issue (#477)
* 1.5.0
diff --git a/docs/cloud_tests.md b/docs/cloud_tests.md
new file mode 100644
index 0000000000..ff982de80f
--- /dev/null
+++ b/docs/cloud_tests.md
@@ -0,0 +1,97 @@
+# Cloud Connection Testing
+
+A number of additional setup steps are required for enabling testing against "smart devices"
+that communicate with the cloud. The tests themselves are part of the `subset/cloud/test_udmi`
+module included in the standard DAQ distro. The same basic device-to-cloud validation test
+pipeline can be done manually and automatically (through DAQ); it's instructive to fully
+understand the manual test pipeline before engaging with the automated setup.
+
+## Manual Test Pipeline
+
+The overall device-to-cloud pipeline looks something like the following:
+
+* Device sends data to the cloud. There's two kinds of devices:
+ * A faux _reference design_ device called [pubber](https://github.com/faucetsdn/udmi/blob/master/docs/pubber.md),
+ which is a completely contained software device.
+ * An actual physical device. The setup and configuration of that device will be manufacturer
+ dependent and so is out of scope for this (DAQ) documentation.
+* A configured GCP IoT Core project, registry, and device entry. The
+[GCP docs for IoT Core](https://cloud.google.com/iot/docs/how-tos/devices) describe the basics. The
+key part is the _authentication key_ (hahaha) that needs to be setup between the local device and
+cloud device entry.
+* The IoT Core registry is configured with a _PubSub topic_ (not to be confused with an _MQTT topic_),
+that provides the bridge between incoming data and consumers of that data. See the GCP documentation
+on PubSub for more details.
+* (optional) Some devices might need a gateway that communicates with IoT Core
+ on their behalf. In this case the Gateway should be added to the IoT Core as
+ well and the devices bound to it.
+* (optional) The `gcloud` command line can be used to validate that data is being sent from the
+device to the cloud. Something like
+`gcloud pubsub subscriptions pull --auto-ack projects/{project}/subscriptions/{sub_id}`.
+(Complete documentation for how to use `gcloud` commands is out of scope of this documentation.)
+* The [validator tool](https://github.com/faucetsdn/udmi/blob/master/docs/validator.md) is what
+programmatically validates a device data stream, and is what is ultimately used by `test_udmi`
+to validate device-cloud communication.
+
+## Base Local Test Setup
+
+* The `udmi` module needs to be enabled in build. When running `cmd/build` there should be a line
+like `subset/cloud/Dockerfile.test_udmi` in the startup logs.
+This is enabled through the `host_tests` config parameter,
+which can be set to `config/modules/all.conf` if necessary. On startup, there should be a log
+message that includes `udmi`:
+```
+Jun 22 08:32:52 runner INFO Configured with tests pass, fail, ping, bacnet, mudgee, nmap, discover, switch, macoui, bacext, tls, password, udmi, manual
+```
+* A testing gcp service account `gcp_cred` needs to be setup as described in
+[service account setup instructions](service.md).
+* The system's default `module_config` needs to enable the `udmi` test, e.g. as per
+`resources/setups/baseline/module_config.json`. This can be validated by (runtime) checking
+`inst/run-port-01/nodes/udmi01/tmp/module_config.json` to see if it has something like the following:
+```
+ "udmi": {
+ "enabled": true
+ }
+```
+* `site_path` config needs to point to a site definition directory, or defaults to `local/site`.
+This contains all the site-specific information about devices needed for testing.
+* `{site_path}/mac_addrs/{mac_addr}/module_config.json` needs to have a `device_id` defined, e.g.
+as in `resources/test_site/mac_addrs/3c5ab41e8f0b/module_config.json`.
+* The GCP IoT Core setup needs to have a proper registry and device configured. This can either
+be done manually or using the [registrar
+tool](https://github.com/faucetsdn/udmi/blob/master/docs/registrar.md) tool.
+
+## Integration Testing
+
+If developing cloud-tests, then the CI build system also needs to have a service account configured
+pointing at a suitable GCP project. To run cloud-based tests, setup the [Github Secrets](https://docs.github.com/en/actions/configuring-and-managing-workflows/creating-and-storing-encrypted-secrets) `GCP_BASE64_CRED`
+env variable with a `base64` encoded service account key for your project. It's recommended to use a dedicated key with a nice name like `daq-ci`, but not required. Note that on linux `-w 0` option is required for proper parsing/formatting, as there can't be any newlines in the copied string.
+
+
+$ base64 -w 0 local/gcp_service_account.json
+ewoICJ1eXBlIjogInNlcnZpY2VfYWNjb3VudCIsCiAgInByb2plY3RfaWQiOiAiYm9zLWRhcS10ZXN0aW5nIiwKICAicHJpd
+…
+iOiAiaHR0cHM6Ly93LWRhcS10ZXN0aW5nLmlhbS5nc2VydmljZWFjY291bnQuY29tIgp9Cg==
+
+
+### Github Actions CI Testing
+
+* Run the [registrar tool](https://github.com/faucetsdn/udmi/blob/master/docs/registrar.md) to properly configure the cloud project.
+* `gcp_topic` config to `local/system.conf` as described in this doc.
+* Configure test subsystem with proper cloud endpoint in `{test_site}/cloud_iot_config.json`.
+* Configure the DUT with the proper cloud device credentials (device specific). For _faux_ devices, this means copying
+the associated `rsa_private.pkcs8` file to something like `inst/faux/daq-faux-2/local/` (exact path depends on which faux).
+* Test with `udmi/bin/registrar`, `udmi/pubber/bin/run`, and `udmi/bin/validator` manually, before integrated testing through DAQ.
+
+### Is my Github Actions set up correctly?
+
+If Github Actions is set up correctly, you should see logs similar to the one below:
+```
+Running test script testing/test_aux.sh
+Writing test results to inst/test_aux.out and inst/test_aux.gcp
+Decoding GCP_BASE64_CRED to inst/config/gcp_service_account.json
+base64 wc: 1 1 3097
+GCP service account is "daq-ci@daq-testing.iam.gserviceaccount.com"
+```
+
+If the `3097` character count is wildly off, then likely something went wrong with the newlines.
diff --git a/docs/developing.md b/docs/developing.md
index 77848125ef..e4c025a9e5 100644
--- a/docs/developing.md
+++ b/docs/developing.md
@@ -28,16 +28,9 @@ faster than continually working with physical components (unless actively debugg
problems that only manifest themselves physically). If there is a problem in the 'real'
world, then the first step is typically to try and reproduce it virtually.
-## Travis CI
+## Github Actions CI
-Travis is used as the primary CI testing point for DAQ. The
-[facuetsdn/daq dashboard](https://travis-ci.com/faucetsdn/daq/branches) shows the
-status of the current master branch. It is generally recommended to set up
-Travis on your personal repos to test any branches you push/commit. PRs will
-automatically be tested under the destination repo.
-
-Travis runs a suite of tests defined in the `.travis.yml` file. Each `DAQ_TEST`
-entry triggers a separate run through the `bin/test_daq` script. E.g. `DAQ_TEST=many`
+The `.github/workflows` folder contains information for the tests themselves. There are 2 workflows currently in place -- one for main DAQ integration tests and unit tests, and the other for USI related tests. Each workflow file is further broken down into jobs. In the case of tests.yml, there are the `integration_tests` and `unit_tests` jobs. The `matrix:` subsection lists all the various tested configurations for the `integration_tests` job. Each matrix entry triggers a separate run through the `bin/test_daq` script. E.g. `DAQ_TEST=many`
ultimately runs `testing/test_many.sh`. The test output results are compared against
the golden `.out` file (e.g. `testing/test_many.out`) and the tests pass if there
is no difference. (Look in `bin/test_daq` to see exactly what it's doing.)
@@ -45,25 +38,26 @@ is no difference. (Look in `bin/test_daq` to see exactly what it's doing.)
If there are unexplained differences in the `.out` file, then the test output log
itself should be checked to see what actually went wrong, since there's likely
not enough information in the `.out` files to diagnose effectively. The complete
-log output is avaliable from a Travis run (or locally when you run locally), and
-the triggering line from the `.out` difference should be there as well (search for it!).
+log output is available from a [Github actions](https://github.com/faucetsdn/daq/actions) run (or locally when you run locally), and the triggering line from the `.out` difference should be there as well (search for it!).
+
+Note all integration tests assume a fully installed environment (as setup with `bin/setup_daq`).
## Local Integration Tests
-Tests can be run locally with something like `sudo testing/test_aux.sh`, and the output
-will be generated into, e.g., `out/test_aux.out`, that can be compared against the
-corresponding golden `.out` file, e.g., `testing/test_aux.out`. Running tests locally is
-not always 100% exactly the same as running things in a real (against physical devices
+Individual integration tests can be run locally by
+appending `bin/test_daq` to a `sudo` line of shell environment settings, e.g. as taken from one matrix entry:
+
+~/daq$ sudo DAQ_TEST=base bin/test_daq
+…
+or directly with:
+~/daq$ sudo testing/test_base.sh
+…
+
+
+Running tests locally is not always 100% exactly the same as running things in a real (against physical devices
on a physical switch) or CI environment, but in most cases it provides a workable method.
-It is recommended to start from a clear DAQ configuration by running `rm -rf local`
-from the main DAQ folder before running the local integration tests.
-
-When developing a new test, the output should appear in the corresponding `.out` file,
-which should be updated appropriatley. The easiest way to migrate in new changes is to
-just copy the `out/` file to `testing/`, but care must be taken that only expected
-changes are included with a new PR. Ultimately the Travis CI tests must pass, not the
-local tests, to guard against any local filesystem changes.
+When developing a new test, the output should appear in the corresponding `.out` file, which should be updated appropriately. The easiest way to migrate in new changes is to just copy the `out/` file to `testing/`, but care must be taken that only expected changes are included with a new PR. Ultimately the [Github actions](https://github.com/faucetsdn/daq/actions) tests must pass, not the local tests, to guard against any local filesystem changes.
## Aux Golden Device Report
@@ -85,7 +79,9 @@ as the new golden file (i.e., copy it from `out/report_9a02571e8f01_???.md` to
## Lint Checks
-Lint checks are performed as part of the `testing/test_aux.sh` script. They are extra
-tricky because they are typically very sensitive to the exact version of every package
-installed, so they're somewhat unreliable except when run through a pristine environment
-on Travis.
+To make sure changes to DAQ adheres to the existing code checkstyle, a pre commit hook can be setup to run [bin/check_style](https://github.com/faucetsdn/daq/blob/master/bin/check_style) before a commit. To enable this, simply run the following line under your daq root directory.
+
+~/daq$ echo "bin/check_style" > .git/hooks/pre-commit && chmod +x .git/hooks/pre-commit
+
+
+Lint checks are performed as part of the unit_test job on [Github actions](https://github.com/faucetsdn/daq/actions) as well as on [stickler-ci](https://stickler-ci.com/repositories/51649-faucetsdn-daq) for every PR.
diff --git a/docs/device_report.md b/docs/device_report.md
index 2ab3ab3320..83c4ff8fa4 100644
--- a/docs/device_report.md
+++ b/docs/device_report.md
@@ -48,15 +48,15 @@ Overall device result FAIL
|Category|Result|
|---|---|
-|Security|PASS|
+|Security|1/2|
|Other|1/2|
|Connectivity|n/a|
-|Expectation|pass|fail|skip|gone|
-|---|---|---|---|---|
-|Required|1|0|0|0|
-|Recommended|2|0|0|0|
-|Other|1|2|22|2|
+|Expectation|pass|fail|skip|info|gone|
+|---|---|---|---|---|---|
+|Required|1|0|0|0|0|
+|Recommended|1|0|0|0|1|
+|Other|6|2|21|1|2|
|Result|Test|Category|Expectation|Notes|
|---|---|---|---|---|
@@ -66,22 +66,27 @@ Overall device result FAIL
|skip|cloud.udmi.pointset|Other|Other|No device id|
|skip|cloud.udmi.state|Other|Other|No device id|
|skip|cloud.udmi.system|Other|Other|No device id|
+|info|communication.type.broadcast|Other|Other|Broadcast packets received. Unicast packets received.|
+|skip|connection.dns.hostname_connect|Other|Other|Device did not send any DNS requests|
|fail|connection.mac_oui|Other|Other|Manufacturer prefix not found!|
+|pass|connection.min_send|Other|Other|ARP packets received. Data packets were sent at a frequency of less than 5 minutes|
+|pass|connection.network.ntp_support|Other|Other|Using NTPv4.|
+|pass|connection.network.ntp_update|Other|Other|Device clock synchronized.|
|skip|connection.port_duplex|Other|Other|No local IP has been set, check system config|
|skip|connection.port_link|Other|Other|No local IP has been set, check system config|
|skip|connection.port_speed|Other|Other|No local IP has been set, check system config|
-|pass|manual.test.travis|Security|Recommended|Manual test - for testing|
-|skip|poe.negotiation|Other|Other|No local IP has been set, check system config|
-|skip|poe.power|Other|Other|No local IP has been set, check system config|
-|skip|poe.support|Other|Other|No local IP has been set, check system config|
+|pass|manual.test.name|Security|Recommended|Manual test - for testing|
+|skip|poe.switch.power|Other|Other|No local IP has been set, check system config|
|fail|protocol.bacnet.pic|Other|Other|PICS file defined however a BACnet device was not found.|
|skip|protocol.bacnet.version|Other|Other|Bacnet device not found.|
|skip|security.firmware|Other|Other|Could not retrieve a firmware version with nmap. Check bacnet port.|
-|skip|security.passwords.http|Other|Other|Port 80 is not open on target device.|
-|skip|security.passwords.https|Other|Other|Port 443 is not open on target device.|
-|skip|security.passwords.ssh|Other|Other|Port 22 is not open on target device.|
-|skip|security.passwords.telnet|Other|Other|Port 23 is not open on target device.|
-|pass|security.ports.nmap|Security|Recommended|Only allowed ports found open.|
+|pass|security.nmap.http|Other|Other|No running http servers have been found.|
+|pass|security.nmap.ports|Other|Other|Only allowed ports found open.|
+|skip|security.passwords.http|Other|Other|Port 80 not open on target device.|
+|skip|security.passwords.https|Other|Other|Port 443 not open on target device.|
+|skip|security.passwords.ssh|Other|Other|Port 22 not open on target device.|
+|skip|security.passwords.telnet|Other|Other|Port 23 not open on target device.|
+|gone|security.ports.nmap|Security|Recommended||
|skip|security.tls.v1|Other|Other|IOException unable to connect to server|
|skip|security.tls.v1.x509|Other|Other|IOException unable to connect to server|
|skip|security.tls.v1_2|Other|Other|IOException unable to connect to server|
@@ -92,15 +97,6 @@ Overall device result FAIL
|gone|unknown.fake.monkey|Other|Other||
-## Module ipaddr
-
-
-#### Module Config
-
-|Attribute|Value|
-|---|---|
-|timeout_sec|300|
-
## Module pass
@@ -159,18 +155,32 @@ RESULT pass base.target.ping target reached
```
--------------------
-security.ports.nmap
+security.nmap.ports
--------------------
Automatic TCP/UDP port scan using nmap
--------------------
# Nmap 7.60 scan initiated XXX as: nmap -v -n -T5 -sT -sU --host-timeout=4m --open -pU:47808,T:23,443,80, -oG /tmp/nmap.log X.X.X.X
# Ports scanned: TCP(3;23,80,443) UDP(1;47808) SCTP(0;) PROTOCOLS(0;)
Host: X.X.X.X () Status: Up
-Host: X.X.X.X () Ports: 47808/closed/udp//bacnet/// Ignored State: closed (3)
+Host: X.X.X.X () Ports: 47808/closed/udp//bacnet///
# Nmap done at XXX -- 1 IP address (1 host up) scanned in XXX
No invalid ports found.
--------------------
-RESULT pass security.ports.nmap Only allowed ports found open.
+RESULT pass security.nmap.ports Only allowed ports found open.
+
+--------------------
+security.nmap.http
+--------------------
+Check that the device does not have open ports exposing an unencrypted web interface using HTTP
+--------------------
+# Nmap 7.60 scan initiated XXX as: nmap -v -n -T5 -A --script http-methods --host-timeout=4m --open -p- -oG /tmp/http.log X.X.X.X
+# Ports scanned: TCP(65535;1-65535) UDP(0;) SCTP(0;) PROTOCOLS(0;)
+Host: X.X.X.X () Status: Up
+Host: X.X.X.X () Ports: 10000/open/tcp//snet-sensor-mgmt?///
+# Nmap done at XXX -- 1 IP address (1 host up) scanned in XXX
+No running http servers have been found.
+--------------------
+RESULT pass security.nmap.http No running http servers have been found.
```
@@ -178,6 +188,7 @@ RESULT pass security.ports.nmap Only allowed ports found open.
|Attribute|Value|
|---|---|
+|timeout_sec|600|
|enabled|True|
## Module discover
@@ -239,31 +250,13 @@ LOCAL_IP not configured, assuming no network switch.
RESULT skip connection.port_duplex No local IP has been set, check system config
--------------------
-poe.power
+poe.switch.power
--------------------
Verify that the device draws less than the maximum power allocated by the port. This is 15.4W for 802.3af and 30W for 802.3at
--------------------
LOCAL_IP not configured, assuming no network switch.
--------------------
-RESULT skip poe.power No local IP has been set, check system config
-
---------------------
-poe.negotiation
---------------------
-Verify the device autonegotiates power requirements
---------------------
-LOCAL_IP not configured, assuming no network switch.
---------------------
-RESULT skip poe.negotiation No local IP has been set, check system config
-
---------------------
-poe.support
---------------------
-Verify if the device supports PoE
---------------------
-LOCAL_IP not configured, assuming no network switch.
---------------------
-RESULT skip poe.support No local IP has been set, check system config
+RESULT skip poe.switch.power No local IP has been set, check system config
```
@@ -274,30 +267,6 @@ RESULT skip poe.support No local IP has been set, check system config
|enabled|True|
|poe|{'enabled': True}|
-## Module macoui
-
-
-#### Report
-
-```
---------------------
-connection.mac_oui
---------------------
-Check Physical device address OUI against IEEE registration and verify it is registered with the correct manufacturer
---------------------
-Using the host hardware address 9a:02:57:1e:8f:01
-Mac OUI Test
---------------------
-RESULT fail connection.mac_oui Manufacturer prefix not found!
-
-```
-
-#### Module Config
-
-|Attribute|Value|
-|---|---|
-|enabled|True|
-
## Module bacext
@@ -408,92 +377,80 @@ RESULT skip security.tls.v1_3.x509 IOException unable to connect to server
```
--------------------
-security.passwords.http
+security.admin.password.http
--------------------
-Verify all default passwords are updated and new Google provided passwords are set.
+Verify all device manufacturer default passwords are changed for protocol: http, and new passwords are set.
--------------------
-[STARTING WITH IP:X.X.X.X, MAC:9a:02:57:1e:8f:01, PROTOCOL: http]
-Starting NMAP check...
Starting Nmap 7.60 ( https://nmap.org ) at XXX
Nmap scan report for daq-faux-1 (X.X.X.X)
Host is up (XXX).
-PORT STATE SERVICE
-10000/tcp open snet-sensor-mgmt
+PORT STATE SERVICE
+80/tcp closed http
MAC Address: 9A:02:57:1E:8F:01 (Unknown)
Nmap done: 1 IP address (1 host up) scanned in XXX
-nmap X.X.X.X
-Done.
+Could not connect to specified port on host.
--------------------
-RESULT skip security.passwords.http Port 80 is not open on target device.
+RESULT skip security.passwords.http Port 80 not open on target device.
--------------------
-security.passwords.https
+security.admin.password.https
--------------------
-Verify all default passwords are updated and new Google provided passwords are set.
+Verify all device manufacturer default passwords are changed for protocol: https, and new passwords are set.
--------------------
-[STARTING WITH IP:X.X.X.X, MAC:9a:02:57:1e:8f:01, PROTOCOL: https]
-Starting NMAP check...
Starting Nmap 7.60 ( https://nmap.org ) at XXX
Nmap scan report for daq-faux-1 (X.X.X.X)
Host is up (XXX).
-PORT STATE SERVICE
-10000/tcp open snet-sensor-mgmt
+PORT STATE SERVICE
+443/tcp closed https
MAC Address: 9A:02:57:1E:8F:01 (Unknown)
Nmap done: 1 IP address (1 host up) scanned in XXX
-nmap X.X.X.X
-Done.
+Could not connect to specified port on host.
--------------------
-RESULT skip security.passwords.https Port 443 is not open on target device.
+RESULT skip security.passwords.https Port 443 not open on target device.
--------------------
-security.passwords.telnet
+security.admin.password.ssh
--------------------
-Verify all default passwords are updated and new Google provided passwords are set.
+Verify all device manufacturer default passwords are changed for protocol: ssh, and new passwords are set.
--------------------
-[STARTING WITH IP:X.X.X.X, MAC:9a:02:57:1e:8f:01, PROTOCOL: telnet]
-Starting NMAP check...
Starting Nmap 7.60 ( https://nmap.org ) at XXX
Nmap scan report for daq-faux-1 (X.X.X.X)
Host is up (XXX).
-PORT STATE SERVICE
-10000/tcp open snet-sensor-mgmt
+PORT STATE SERVICE
+22/tcp closed ssh
MAC Address: 9A:02:57:1E:8F:01 (Unknown)
Nmap done: 1 IP address (1 host up) scanned in XXX
-nmap X.X.X.X
-Done.
+Could not connect to specified port on host.
--------------------
-RESULT skip security.passwords.telnet Port 23 is not open on target device.
+RESULT skip security.passwords.ssh Port 22 not open on target device.
--------------------
-security.passwords.ssh
+security.admin.password.telnet
--------------------
-Verify all default passwords are updated and new Google provided passwords are set.
+Verify all device manufacturer default passwords are changed for protocol: telnet, and new passwords are set.
--------------------
-[STARTING WITH IP:X.X.X.X, MAC:9a:02:57:1e:8f:01, PROTOCOL: ssh]
-Starting NMAP check...
Starting Nmap 7.60 ( https://nmap.org ) at XXX
Nmap scan report for daq-faux-1 (X.X.X.X)
Host is up (XXX).
-PORT STATE SERVICE
-10000/tcp open snet-sensor-mgmt
+PORT STATE SERVICE
+23/tcp closed telnet
MAC Address: 9A:02:57:1E:8F:01 (Unknown)
Nmap done: 1 IP address (1 host up) scanned in XXX
-nmap X.X.X.X
-Done.
+Could not connect to specified port on host.
--------------------
-RESULT skip security.passwords.ssh Port 22 is not open on target device.
+RESULT skip security.passwords.telnet Port 23 not open on target device.
```
@@ -501,6 +458,7 @@ RESULT skip security.passwords.ssh Port 22 is not open on target device.
|Attribute|Value|
|---|---|
+|dictionary_dir|resources/faux|
|enabled|True|
## Module udmi
@@ -551,13 +509,13 @@ RESULT skip cloud.udmi.system No device id
```
--------------------
-manual.test.travis
+manual.test.name
--------------------
--------------------
No additional information provided
--------------------
-RESULT pass manual.test.travis Manual test - for testing
+RESULT pass manual.test.name Manual test - for testing
```
@@ -567,5 +525,72 @@ RESULT pass manual.test.travis Manual test - for testing
|---|---|
|enabled|True|
+## Module network
+
+
+#### Report
+
+```
+--------------------
+connection.min_send
+--------------------
+Device sends data at a frequency of less than 5 minutes.
+--------------------
+
+
+
+
+
+
+
+
+
+
+
+RESULT pass connection.min_send ARP packets received. Data packets were sent at a frequency of less than 5 minutes
+--------------------
+communication.type.broadcast
+--------------------
+Device sends unicast or broadcast packets.
+--------------------
+
+
+RESULT info communication.type.broadcast Broadcast packets received. Unicast packets received.
+--------------------
+connection.network.ntp_support
+--------------------
+Device supports NTP version 4.
+--------------------
+RESULT pass connection.network.ntp_support Using NTPv4.
+--------------------
+connection.network.ntp_update
+--------------------
+Device synchronizes its time to the NTP server.
+--------------------
+RESULT pass connection.network.ntp_update Device clock synchronized.
+--------------------
+connection.mac_oui
+--------------------
+Check Physical device address OUI against IEEE registration and verify it is registered with the correct manufacturer
+--------------------
+Using the host hardware address 9a:02:57:1e:8f:01
+Mac OUI Test
+--------------------
+RESULT fail connection.mac_oui Manufacturer prefix not found!
+
+--------------------
+connection.dns.hostname_connect
+--------------------
+Check device uses the DNS server from DHCP and resolves hostnames
+--------------------
+RESULT skip connection.dns.hostname_connect Device did not send any DNS requests
+```
+
+#### Module Config
+
+|Attribute|Value|
+|---|---|
+|enabled|True|
+
## Report complete
diff --git a/docs/integration_testing.md b/docs/integration_testing.md
deleted file mode 100644
index 1c6435a9b0..0000000000
--- a/docs/integration_testing.md
+++ /dev/null
@@ -1,83 +0,0 @@
-# Integration Testing
-
-DAQ currently uses Travis CI for integration testing: https://travis-ci.org/
-
-## Configuration
-
-The `test_udmi` test module uses the Registrar and Validator to check that a device is
-properly communicating through Cloud IoT, automated through DAQ.
-
-### GCP Credential
-
-To run cloud-based tests, setup the Travis `GCP_BASE64_CRED` env variable with a `base64` encoded
-service account key for your project. It's recommended to use a dedicated key with a nice name
-like `daq-travis`, but not required. Encode the key value as per below, and cut/paste the
-resulting string into a
-[Travis environment variable](https://docs.travis-ci.com/user/environment-variables/#defining-variables-in-repository-settings)
-for a `GCP_BASE64_CRED` varaible. Note the `-w 0` option is required for proper parsing/formatting,
-as there can't be any newlines in the copied string.
-
-
-$ base64 -w 0 local/gcp_service_account.json
-ewoICJ1eXBlIjogInNlcnZpY2VfYWNjb3VudCIsCiAgInByb2plY3RfaWQiOiAiYm9zLWRhcS10ZXN0aW5nIiwKICAicHJpd
-…
-iOiAiaHR0cHM6Ly93LWRhcS10ZXN0aW5nLmlhbS5nc2VydmljZWFjY291bnQuY29tIgp9Cg==
-
-
-## Travis CI Testing
-
-* Run the [registrar tool](registrar.md) to properly configure the cloud project.
-* `gcp_topic` config to `local/system.conf` as described in this doc.
-* Configure test subsystem with proper cloud endpoint in `{test_site}/cloud_iot_config.json`.
-* Configure the DUT with the proper cloud device credentials (device specific). For _faux_ devices, this means copying
-the assocatied `rsa_private.pkcs8` file to someting like `inst/faux/daq-faux-2/local/` (exact path depends on which faux).
-* Test with `bin/registrar`, `pubber/bin/run`, and `bin/validate` manually, before integrated testing through DAQ.
-
-### Is my Travis set up correctly?
-
-If Travis is set up correctly, you should see messages at the beginning of the log file:
-```
-Setting environment variables from repository settings
-$ export DOCKER_USERNAME=[secure]
-$ export DOCKER_PASSWORD=[secure]
-$ export GCP_BASE64_CRED=[secure]
-```
-
-Further down there would be more details about the cred itself:
-```
-Running test script testing/test_aux.sh
-Writing test results to inst/test_aux.out and inst/test_aux.gcp
-Decoding GCP_BASE64_CRED to inst/config/gcp_service_account.json
-base64 wc: 1 1 3097
-GCP service account is "daq-travis@daq-testing.iam.gserviceaccount.com"
-```
-
-If the `3097` character count is wildly off, then likely something went wrong with the newlines.
-
-### Travis Build For "External" Pull Requests
-
-Travis will not use encrypted environment variables when testing against pull requests
-from foreign github repositories, even if you've forked from another repository that you
-have full control of via Github. Travis authorization != Github authorization, even if
-you sign into Travis using Github! This is as it should be b/c security. see the following
-for more info:
-
-- https://docs.travis-ci.com/user/environment-variables/#defining-variables-in-repository-settings
-- https://docs.travis-ci.com/user/pull-requests/#pull-requests-and-security-restrictions
-
-If your test is failing from a PR, you'll see something like in a similar log location:
-
-```
-Encrypted environment variables have been removed for security reasons.
-See https://docs.travis-ci.com/user/pull-requests/#pull-requests-and-security-restrictions
-Setting environment variables from .travis.yml
-$ export DOCKER_STARTUP_TIMEOUT_MS=60000
-$ export DAQ_TEST=aux
-```
-
-### Other Travis Caveats
-
-Take note the URL in your browser's address bar when running Travis. You might be on either
-travis-ci.com or travis-ci.org . Any particular setup
-may end up across both sites for undertermined reasons. Please consult with your browser's
-exact URL for more clarity.
diff --git a/docs/module_test.md b/docs/module_test.md
index d337e45749..ce600c8885 100644
--- a/docs/module_test.md
+++ b/docs/module_test.md
@@ -80,7 +80,7 @@ RESULT fail security.x509
## Continous Testing
Continuous testing of module-specific builds is handled through the `testing/test_modules.sh`
-script (as invoked by Travis). Execution results are compared against the
+script (as invoked by [Github actions](https://github.com/faucetsdn/daq/actions)). Execution results are compared against the
`testing/test_modules.out` file. To add a new test, add a few lines to the top of the test script
and expected results to the output file. Every test module is required to be continously tested
somewhere, either as part of `test_modules.sh` or elsewhere.
diff --git a/docs/orchestration.md b/docs/orchestration.md
index 4804ec356f..703c5235a0 100644
--- a/docs/orchestration.md
+++ b/docs/orchestration.md
@@ -11,7 +11,7 @@ to change.
## Data Rouces
-The overal orchestration capability relies on several simple data sources:
+The overall orchestration capability relies on several simple data sources:
1. [Overall network topology](topologies.md), which indicates how the network hardware is configured.
2. [Device MUD files](../mud_files), which provide an
[IETF Standard MUD descriptor](https://datatracker.ietf.org/doc/draft-ietf-opsawg-mud/) that describes
diff --git a/docs/pubber.md b/docs/pubber.md
deleted file mode 100644
index 588396d1ac..0000000000
--- a/docs/pubber.md
+++ /dev/null
@@ -1,81 +0,0 @@
-# Pubber Reference Client
-
-The _Pubber_ reference client is a sample implementation of a client-side 'device' that implements
-the [UDMI Schema](../schemas/udmi/README.md). It's not intended to be any sort of production-worthy
-code or library, rather just a proof-of-concept of what needs to happen.
-
-## Build Pubber
-
-
-~/daq$ pubber/bin/build
-Running in /home/peringknife/daq/pubber
-
-> Task :compileJava
-…
-
-BUILD SUCCESSFUL in 2s
-2 actionable tasks: 1 executed, 1 up-to-date
-
-
-## Key Generation
-
-
-~/daq$ pubber/bin/keygen
-Generating a 2048 bit RSA private key
-............+++
-......................................+++
-writing new private key to 'local/rsa_private.pem'
------
-~/daq$ ls -l local/rsa_*
--rw-r--r-- 1 user primarygroup 1094 Nov 19 18:56 local/rsa_cert.pem
--rw------- 1 user primarygroup 1704 Nov 19 18:56 local/rsa_private.pem
--rw-r--r-- 1 user primarygroup 1216 Nov 19 18:56 local/rsa_private.pkcs8
-
-
-After generating the key pair, you'll have to upload/associate the `pubber_cert.pem` public certificate
-with the device entry in the cloud console as an _RS256_cert_. (This can be done when the device is
-created, or anytime after.)
-
-## Configuration
-
-The `local/pubber.json` file configures the key cloud parameters needed for operation
-(the actual values in the file shold match your GCP setup):
-
-~/daq$ cat local/pubber.json
-{
- "projectId": "gcp-account",
- "cloudRegion": "us-central1",
- "registryId": "sensor_hub",
- "deviceId": "AHU-1"
-}
-
-
-## Operation
-
-
-~/daq$ pubber/bin/run
-[main] INFO daq.pubber.Pubber - Reading configuration from /home/user/daq/local/pubber.json
-[main] INFO daq.pubber.Pubber - Starting instance for registry sensor_hub
-[main] INFO daq.pubber.MqttPublisher - Creating new publisher-client for GAT-001
-[main] INFO daq.pubber.MqttPublisher - Attempting connection to sensor_hub:GAT-001
-[MQTT Call: projects/gcp-account/locations/us-central1/registries/sensor_hub/devices/GAT-001] INFO daq.pubber.Pubber - Received new config daq.udmi.Message$Config@209307c7
-[MQTT Call: projects/gcp-account/locations/us-central1/registries/sensor_hub/devices/GAT-001] INFO daq.pubber.Pubber - Starting executor with send message delay 2000
-[main] INFO daq.pubber.Pubber - synchronized start config result true
-[MQTT Call: projects/gcp-account/locations/us-central1/registries/sensor_hub/devices/GAT-001] INFO daq.pubber.Pubber - Sending state message for device GAT-001
-…
-[pool-1-thread-1] INFO daq.pubber.Pubber - Sending test message for sensor_hub/GAT-001
-[pool-1-thread-1] INFO daq.pubber.Pubber - Sending test message for sensor_hub/GAT-001
-
-
-
-## Cloud Setup
-
-To use Pubber, there needs to be a cloud-side device entry configured in a GCP project configured to
-use [Cloud IoT](https://cloud.google.com/iot/docs/). The
-[Creating or Editing a Device](https://cloud.google.com/iot/docs/how-tos/devices#creating_or_editing_a_device)
-section of the documentation describe how to create a simple device and key-pair (see next section for
-a helper script). You can/should substitute the relevant values in the configuration below for your
-specific setup. The relevant bits of configuration are the information in the local/pubber.json
-file (see above), and the generated public key (also see above).
-
-Alternatively, you can use the [registrar tool](registrar.md) to automate device registration.
diff --git a/docs/pubsub.md b/docs/pubsub.md
deleted file mode 100644
index 8333453577..0000000000
--- a/docs/pubsub.md
+++ /dev/null
@@ -1,127 +0,0 @@
-# PubSub Setup Documentation
-
-This document describes the [GCP PubSub in Cloud IoT](https://cloud.google.com/iot-core/) mechanism for
-processing device messages. There are three major message types employed by the system:
-* Config : Messages sent from cloud-to-device that _configure_ the device (idempotent).
-* State : Messags sent from device-to-cloud reporting _state_ form the device (idempotent).
-* Events : Messages sent from device-to-cloud for streaming _events_ (non-idempotent).
-
-The exact semantic meaning of theses is determined by the underlying schema used. E.g., the
-[UDMI Schema](../schemas/udmi/README.md) specifies one set of conventions for managing IoT devices.
-
-## Validator Configuration
-
-Streaming validation validates a stream of messages pulled from a GCP PubSub topic. There are three values
-in the `local/system.conf` file required to make it work:
-* `gcp_cred`: The service account credentials, as per the general [DAQ Firebase setup](firebase.md).
-* `gcp_topic`: The _PubSub_ (not MQTT) topic name.
-* `schema_path`: Indicates which schema to validate against.
-
-You will need to add full Project Editor permissions for the service account.
-E.g., to validate messages against the UDMI schema on the `projects/gcp-account/topics/target` topic,
-there should be something like:
-
-
-~/daq$ fgrep gcp_ local/system.conf
-gcp_cred=local/gcp-account-de56aa4b1e47.json
-gcp_topic=target
-schema_path=schemas/udmi
-
-
-## Message/Schema Mapping
-
-When using the
-[GCP Cloud IoT Core MQTT Bridge](https://cloud.google.com/iot/docs/how-tos/mqtt-bridge#publishing_telemetry_events)
-there are multiple ways the subschema used during validation is chosen.
-* An `events` message is validated against the sub-schema indicated by the MQTT topic `subFolder`. E.g., the MQTT
-topic `/devices/{device-id}/events/pointset` will be validated against `.../pointset.json`.
-* [Device state messages](https://cloud.google.com/iot/docs/how-tos/config/getting-state#reporting_device_state)
-are validated against the `.../state.json` schema.
-* All messages have their attributes validated against the `.../attributes.json` schema. These attributes are
-automatically defined by the MQTT Client ID and Topic, so are not explicitly included in any message payload.
-* The `config` messages are artifically injected into the `target` PubSub topic by the configuration script
-(below) so they can be easily checked by the validation engine.
-
-The simple `state_shunt` function in `daq/functions/state_shunt` will automatically send state update messages
-to the `target` PubSub topic. Install this function to enable validation of state updates. (Also make sure to
-configure the Cloud IoT project to send state message to the state topic!)
-
-## Pubber Reference Client
-
-The [Pubber Reference Client](pubber.md) is a complete reference client that can be used to test out streaming
-validation in absence of a real known-working device. The basic setup and documentation listed on the Pubber
-page are assumed to be "running in the background" for the other examples in this section.
-
-## Streaming Validation
-
-Running the `bin/validate` script will will parse the configuration file and automatically start
-verifying PubSub messages against the indicated schema. Using the `pubber` client, the output
-should look something like:
-
-~/daq$ bin/validate
-Loading config from local/system.conf
-
-BUILD SUCCESSFUL in 3s
-2 actionable tasks: 2 executed
-Using credentials from /home/user/daq/local/gcp-account-de56aa4b1e47.json
-Executing validator /home/user/daq/schemas/udmi pubsub:target...
-Running schema . in /home/user/daq/schemas/udmi
-Ignoring subfolders []
-Results will be uploaded to https://console.cloud.google.com/firestore/data/registries/?project=gcp-account
-Also found in such directories as /home/user/daq/schemas/udmi/out
-Connecting to pubsub topic target
-Entering pubsub message loop on projects/gcp-account/subscriptions/daq-validator
-Success validating out/state_GAT-001.json
-Success validating out/state_GAT-001.json
-Success validating out/state_GAT-001.json
-Success validating out/pointset_GAT-001.json
-Success validating out/state_GAT-001.json
-Success validating out/pointset_GAT-001.json
-Success validating out/pointset_GAT-001.json
-…
-
-
-If there are no _state_ validation messages (but there are _pointset_ ones), then the `state_shunt`
-function described above is not installed properly.
-
-## Injecting Configuration
-
-The `validator/bin/config` script can be used to inject a configuration message to a device:
-
-~/daq$ validator/bin/config GAT-001 schemas/udmi/config.tests/gateway.json
-Configuring gcp-account:us-central1:sensor_hub:GAT-001 from schemas/udmi/config.tests/gateway.json
-messageIds:
-- '301010492284043'
-Updated configuration for device [GAT-001].
-
-
-If using the `pubber` client, there should be a corresponding flury of activity:
-
-…
-[pool-1-thread-1] INFO daq.pubber.Pubber - Sending test message for sensor_hub/GAT-001
-[pool-1-thread-1] INFO daq.pubber.Pubber - Sending test message for sensor_hub/GAT-001
-[MQTT Call: projects/gcp-account/locations/us-central1/registries/sensor_hub/devices/GAT-001] INFO daq.pubber.Pubber - Received new config daq.udmi.Message$Config@3666b3a5
-[MQTT Call: projects/gcp-account/locations/us-central1/registries/sensor_hub/devices/GAT-001] INFO daq.pubber.Pubber - Starting executor with send message delay 2000
-[MQTT Call: projects/gcp-account/locations/us-central1/registries/sensor_hub/devices/GAT-001] INFO daq.pubber.Pubber - Sending state message for device GAT-001
-[MQTT Call: projects/gcp-account/locations/us-central1/registries/sensor_hub/devices/GAT-001] INFO daq.pubber.Pubber - Sending state message for device GAT-001
-[pool-1-thread-1] INFO daq.pubber.Pubber - Sending test message for sensor_hub/GAT-001
-[pool-1-thread-1] INFO daq.pubber.Pubber - Sending test message for sensor_hub/GAT-001
-…
-
-
-And an associated bit of activity in the validation output:
-
-…
-Success validating out/pointset_GAT-001.json
-Success validating out/pointset_GAT-001.json
-Success validating out/config_GAT-001.json
-Success validating out/pointset_GAT-001.json
-Success validating out/state_GAT-001.json
-Success validating out/state_GAT-001.json
-Success validating out/state_GAT-001.json
-Success validating out/pointset_GAT-001.json
-Success validating out/state_GAT-001.json
-Success validating out/pointset_GAT-001.json
-Success validating out/pointset_GAT-001.json
-…
-
diff --git a/docs/registrar.md b/docs/registrar.md
deleted file mode 100644
index f63dfc43c4..0000000000
--- a/docs/registrar.md
+++ /dev/null
@@ -1,182 +0,0 @@
-# Registrar Overview
-
-The `registrar` is a utility program that registers and updates devies in Cloud IoT.
-Running `bin/registrar` will pull the necessary configuraiton values from `local/system.conf`,
-build the executable, and register/update devices.
-
-## Configuration
-
-The `local/system.conf` file should have the following parameters (in `x=y` syntax):
-* `gcp_cred`: Defines the target project and [service account](service.md) to use for configuration.
-* `site_path`: [Site-specific configuration](site_path.md) for the devices that need to be registered.
-* `schema_path`: Path to metadata schema (see the [DAQ PubSub documentation](pubsub.md) for more details/examples).
-
-The target `gcp_cred` service account will need the _Cloud IoT Provisioner_ and _Pub/Sub Publisher_ roles.
-There also needs to be an existing `registrar` topic (or as configured in `cloud_iot_config.json`, below).
-
-## Theory Of Operation
-
-* The target set of _expected_ devices is determined from directory entries in
-_{site_path}_/devices/.
-* Existing devices that are not listed in the site config are blocked (as per
-Cloud IoT device setting).
-* If a device directory does not have an appropriate key, one will be automaticaly generated.
-* Devices not found in the target registry are automatically created.
-* Existing device registy entries are unblocked and updated with the appropriate keys.
-
-## Device Settings
-
-When registering or updating a device, the Registrar manipulates a few key pieces of device
-information:
-* Auth keys: Public authentiation keys for the device.
-* Metadata: Various information about a device (e.g. site-code, location in the building).
-
-This information is sourced from a few key files:
-
-* `{site_dir}/cloud_iot_config.json`:
-Cloud project configuration parameters (`registry_id`, `cloud_region`, etc...).
-* `{site_dir}/devices/{device_id}/metadata.json`:
-Device metadata (e.g. location, key type).
-* `{site_dir}/devices/{device_id}/rsa_private.pem`:
-Generated private key for device (used on-device).
-
-## Sample Output
-
-The produced `registration_summary.json` document provides an overview of the analyzed files,
-clearly any errors that should be addressed for full spec compliance. Additionaly, an
-`errors.json`
-
-
-user@machine:~/daq$ cat local/site/cloud_iot_config.json
-{
- "cloud_region": "us-central1",
- "site_name": "SG-MBC2-B80",
- "registry_id": "iotRegistry",
- "registrar_topic": "registrar"
-}
-user@machine:~/daq$ bin/registrar daq-testing
-Activating venv
-Flattening config from local/system.yaml into inst/config/system.conf
-Note: Some input files use or override a deprecated API.
-Note: Recompile with -Xlint:deprecation for details.
-Running tools version 1.5.1-16-g9ed5861
-Using cloud project bos-daq-testing
-Using site config dir local/site
-Using schema root dir schemas/udmi
-Using device filter
-Reading Cloud IoT config from /home/user/daq/local/site/cloud_iot_config.json
-Initializing with default credentials...
-Jun 12, 2020 1:24:37 PM com.google.auth.oauth2.DefaultCredentialsProvider warnAboutProblematicCredentials
-WARNING: Your application has authenticated using end user credentials from Google Cloud SDK. We recommend that most server applications use service accounts instead. If your application continues to use end user credentials from Cloud SDK, you might receive a "quota exceeded" or "API not enabled" error. For more information about service accounts, see https://cloud.google.com/docs/authentication/.
-Created service for project bos-daq-testing
-Working with project bos-daq-testing registry iotRegistry
-Loading local device AHU-1-1
-Loading local device AHU-1-2
-Fetching remote registry iotRegistry
-Updated device entry AHU-1-1
-Sending metadata message for AHU-1-1
-WARNING: An illegal reflective access operation has occurred
-WARNING: Illegal reflective access by com.google.protobuf.UnsafeUtil (file:/home/user/daq/validator/build/libs/validator-1.0-SNAPSHOT-all.jar) to field java.nio.Buffer.address
-WARNING: Please consider reporting this to the maintainers of com.google.protobuf.UnsafeUtil
-WARNING: Use --illegal-access=warn to enable warnings of further illegal reflective access operations
-WARNING: All illegal access operations will be denied in a future release
-Updated device entry AHU-1-2
-Sending metadata message for AHU-1-2
-Processed 2 devices
-Updating local/site/devices/AHU-1-1/errors.json
-Updating local/site/devices/AHU-1-2/errors.json
-
-Summary:
- Device Envelope: 2
- Device Key: 1
- Device Validating: 2
-Out of 2 total.
-Done with PubSubPusher
-Registrar complete, exit 0
-user@machine:~/daq$ cat local/site/registration_summary.json
-{
- "Envelope" : {
- "AHU-1-1" : "java.lang.IllegalStateException: Validating envelope AHU-1-1",
- "AHU-1-2" : "java.lang.IllegalStateException: Validating envelope AHU-1-2"
- },
- "Key" : {
- "AHU-1-2" : "java.lang.RuntimeException: Duplicate credentials found for AHU-1-1 & AHU-1-2"
- },
- "Validating" : {
- "AHU-1-1" : "org.everit.json.schema.ValidationException: #: 43 schema violations found",
- "AHU-1-2" : "org.everit.json.schema.ValidationException: #: 43 schema violations found"
- }
-}
-user@machine:~/daq$ head local/site/devices/AHU-1-1/errors.json
-Exceptions for AHU-1-1
- Validating envelope AHU-1-1
- #/deviceId: string [AHU-1-1] does not match pattern ^[A-Z]{2,6}-[1-9][0-9]{0,2}$
- #: 43 schema violations found
- #/pointset/points: 40 schema violations found
- #/pointset/points/chilled_return_water_temperature_sensor/units: °C is not a valid enum value
- #/pointset/points/chilled_supply_water_temperature_sensor/units: °C is not a valid enum value
- #/pointset/points/chilled_water_valve_percentage_command/units: % is not a valid enum value
-
-
-## Sequence Diagram
-
-Expected workflow to configure a registry using Registrar:
-
-* `Device`: Target IoT Device
-* `Local`: Local clone of site configuration repo
-* `Registrar`: This utility program
-* `Registry`: Target Cloud IoT Core registry
-* `Repo`: Remote site configuration repo
-
-All operations are manaul except those involving the `Registrar` tool.
-
-
-+---------+ +-------+ +-----------+ +-----------+ +-------+
-| Device | | Local | | Registrar | | Registry | | Repo |
-+---------+ +-------+ +-----------+ +-----------+ +-------+
- | | | | |
- | | | Pull repo locally |
- | |<--------------------------------------------------------------------|
- | | ---------------------\ | | |
- | | | Run Registrar tool |-| | |
- | | |--------------------| | | |
- | | | | |
- | | Read device configs | | |
- | |-------------------------->| | |
- | | | | |
- | | | Read device list | |
- | | |<----------------------------| |
- | | | | |
- | | Write auth keys | | |
- | |<--------------------------| | |
- | | | | |
- | | | Update device entries | |
- | | |---------------------------->| |
- | | ----------------------\ | | |
- | | | Registrar tool done |-| | |
- | | |---------------------| | | |
- | | | | |
- | Install private key | | | |
- |<------------------------| | | |
- | | | | |
- | | Push changes | | |
- | |-------------------------------------------------------------------->|
- | | | | |
-
-
-### Source
-
-Use with [ASCII Sequence Diagram Creator](https://textart.io/sequence#)
-
-
-object Device Local Registrar Registry Repo
-Repo -> Local: Pull repo locally
-note left of Registrar: Run Registrar tool
-Local -> Registrar: Read device configs
-Registry -> Registrar: Read device list
-Registrar -> Local: Write auth keys
-Registrar -> Registry: Update device entries
-note left of Registrar: Registrar tool done
-Local -> Device: Install private key
-Local -> Repo: Push changes
-
diff --git a/docs/service.md b/docs/service.md
index 02bf501c79..4bace1de28 100644
--- a/docs/service.md
+++ b/docs/service.md
@@ -4,7 +4,7 @@ Many functions of DAQ require a standard GCP service account, rather than person
Once created, there's a limited set of permissions that can be granted to enable various bits
and pieces of functionality.
-Each individual install of DAQ should have it's own service account. The accound name is
+Each individual install of DAQ should have its own service account. The account name is
assumed to be unique, and having multiple installs with the same account will cause confusion
and unpredictable results.
diff --git a/docs/soak_report.md b/docs/soak_report.md
index 7e99b37e7b..748bfeb93c 100644
--- a/docs/soak_report.md
+++ b/docs/soak_report.md
@@ -9,11 +9,12 @@ Source: local
|base.startup.dhcp|2|0|
|base.switch.ping|0|2|
|base.target.ping|2|0|
-|security.ports.nmap|2|0|
+|security.nmap.ports|2|0|
+|security.nmap.http|2|0|
|categories|pass|skip|
|---|---|---|
-|Other|6|2|
+|Other|8|2|
|missing tests|count|
|---|---|
diff --git a/docs/troubleshooting.md b/docs/troubleshooting.md
index 4a8fa1ba09..05aca2c9d6 100644
--- a/docs/troubleshooting.md
+++ b/docs/troubleshooting.md
@@ -9,7 +9,7 @@ mailing list, and use it as the primary source of troubleshooting.
email somebody directly, but will likely result in a slower response time.
* The `inst/cmdrun.log` file contains a copy of the console output from DAQ.
* This file should be attached to communications about resolving DAQ issues.
- * It's not necessary to include any assocaited `local/system.yaml` file, since the
+ * It's not necessary to include any associated `local/system.yaml` file, since the
contents of that are already included.
* Make sure everything is running properly using the internal simulation setup
before tackling anything to do with external switches or physical devices.
@@ -29,12 +29,17 @@ a summary of all test results.
* The determination of _PASS_ vs. _FAIL_ is one of policy, not a technical
consideration. If the question is "Is it OK if this tests fails or not?" then
you need to contact whomever is responsible for policy, not DAQ-proper.
- * The reports are _optionally_ available trough the _optionally_ configured
+ * The reports are _optionally_ available through the _optionally_ configured
GCP instance, but that's only relevant after the basics are working.
-* Capturing a complete zip of the `inst/` directory should encompass all the
-state neesary to diagnose/debug problems, so simply captuing that and sending
-it along would be sufficient in most cases. Be wary of file size, as `inst/`
-can collect cruft over time and occasionally need to be cleaned.
+* Running `bin/techsupport.sh` will create a zipped techsupport file that
+ contains all configuration, packet captures and runtime logs of a run.
+ Sending that file is sufficient in most cases. Be wary of file
+ size, as `inst/` might have large pcap files or older files that can be
+ trimmed to get more manageable file sizes for email attachments.
+* Unless you are developing for DAQ and want the latest code, ensure that you
+ are on the latest stable software version tracked by the git tag `release_stable`.
+* If a test run blocks or errors out, try running `bin/troubleshoot` to detect
+ some common misconfiguration and setup-related issues.
## Test-Specific
@@ -82,4 +87,4 @@ directory.
* Filter results for the device's MAC address with something like:
tcpdump -en -r testing.pacp ether host de:vi:ce:ma:ca:dr .
* There is no one-size-fits-all guidance here, because what is expected is
- extremeley test-specific.
+ extremely test-specific.
diff --git a/docs/validator.md b/docs/validator.md
deleted file mode 100644
index 80ab8c4b24..0000000000
--- a/docs/validator.md
+++ /dev/null
@@ -1,152 +0,0 @@
-# Validator Setup
-
-The `validator` is a sub-component of DAQ that can be used to validate JSON files or stream against a schema
-defined by the standard [JSON Schema](https://json-schema.org/) format. The validator does not itself specify
-any policy, i.e. which schema to use when, rather just a mechanism to test and validate.
-
-The "schema set" is a configurable variable, and the system maps various events to different sub-schemas within
-that set. Direct file-based validations run against an explicitly specified sub-schema, while the dynamic PubSub
-validator dynamically chooses the sub-schema based off of message parameters. There's currently two schemas
-available, defined in the `schemas/` subdirectory:
-* `simple`, which is really just there to make sure the system works.
-* [`UDMI`](../schemas/udmi/README.md), which is a building-oriented schema for data collection.
-
-## Validation Mechanisms
-
-There are several different ways to run the validator depending on the specific objective:
-* Local File Validation
-* Integration Testing
-* PubSub Stream Validation
-
-### Local File Validation
-
-Local file validation runs the code against a set of local schemas and inputs. The example below shows
-validating one schema file against one specific test input.
-Specifying a directory, rather than a specific schema or input, will run against the entire set.
-An output file is generated that has details about the schema validation result.
-
-
-~/daq$ validator/bin/validate schemas/simple/simple.json schemas/simple/simple.tests/example.json
-Executing validator schemas/simple/simple.json schemas/simple/simple.tests/example.json...
-Running schema simple.json in /home/user/daq/schemas/simple
-Validating example.json against simple.json
-Validation complete, exit 0
-~/daq$
-
-
-### Integration Testing
-
-The `validator/bin/test` script runs a regression suite of all schemas against all tests.
-This must pass before any PR can be approved. If there is any failure, a bunch of diagnostic
-information will be included about what exactly went wrong.
-
-
-~/daq/validator$ bin/test
-
-BUILD SUCCESSFUL in 3s
-2 actionable tasks: 2 executed
-
-BUILD SUCCESSFUL in 3s
-2 actionable tasks: 2 executed
-Validating empty.json against config.json
-Validating errors.json against config.json
-…
-Validating example.json against state.json
-Validating error.json against simple.json
-Validating example.json against simple.json
-
-Done with validation.
-
-
-### PubSub Stream Validation
-
-Validating a live PubSub stream requires more setup, but ultimately most closely reflects what an
-actual system would be doing during operation. The [DAQ PubSub Documentation](pubsub.md) details
-how to set this up. It uses the same underlying schema files as the techniques above, but routes
-it though a live stream in the cloud.
-
-Streaming validation validates a stream of messages pulled from a GCP PubSub topic.
-There are three configuration values required in the `local/system.yaml` file to make it work:
-* `gcp_cred`: The service account credentials, as per the general [DAQ Firebase setup](firebase.md).
-* `gcp_topic`: The _PubSub_ (not MQTT) topic name.
-* `schema_path`: Indicates which schema to validate against.
-
-You will need to add full Project Editor permissions for the service account.
-E.g., to validate messages on the `projects/gcp-account/topics/telemetry` topic,
-there should be something like:
-
-
-~/daq$ fgrep gcp_ local/system.conf
-gcp_cred=local/gcp-project-ce6716521378.json
-gcp_topic=telemetry
-schema_path=schemas/abacab/
-
-
-Running `bin/validate` will parse the configuration file and automatically start
-verifying PubSub messages against the indicated schema.
-The execution output has a link to a location in the Firestore setup
-where schema results will be stored, along with a local directory of results.
-
-
-~/daq$ bin/validate
-Using credentials from /home/user/daq/local/gcp-project-ce6716521378.json
-
-BUILD SUCCESSFUL in 3s
-2 actionable tasks: 2 executed
-Executing validator /home/user/daq/validator/schemas/abacab/ pubsub:telemetry_topic...
-Running schema . in /home/user/daq/validator/schemas/abacab
-Ignoring subfolders []
-Results will be uploaded to https://console.cloud.google.com/firestore/data/registries/?project=gcp-project
-Also found in such directories as /home/user/daq/validator/schemas/abacab/out
-Connecting to pubsub topic telemetry
-Entering pubsub message loop on projects/gcp-project/subscriptions/daq-validator
-Success validating out/pointset_FCU_09_INT_NE_07.json
-Success validating out/pointset_FCU_07_EXT_SW_06.json
-Error validating out/logentry_TCE01_01_NE_Controls.json: DeviceId TCE01_01_NE_Controls must match pattern ^([a-z][_a-z0-9-]*[a-z0-9]|[A-Z][_A-Z0-9-]*[A-Z0-9])$
-Success validating out/logentry_FCU_01_NE_08.json
-Error validating out/pointset_TCE01_01_NE_Controls.json: DeviceId TCE01_01_NE_Controls must match pattern ^([a-z][_a-z0-9-]*[a-z0-9]|[A-Z][_A-Z0-9-]*[A-Z0-9])$
-Success validating out/logentry_FCU_01_SE_04.json
-…
-
-
-## Site Validation
-
-Following on from individual-device validation, it is possible to validate against an entire building model
-This is a WIP provisional feature. But, roughly speaking, it looks like this:
-
-
-~/daq$ export GOOGLE_APPLICATION_CREDENTIALS=local/essential-monkey.json
-~/daq$ validator/bin/validate schemas/udmi pubsub:topic dev site_model/
-
-
-* `schemas/udmi` is the schema to validate against.
-* `pubsub:topic` points to the pub-sub topic stream to validate.
-* `dev` is an arbitrary designator for running different clients against the same project.
-* `site_model/` is a directory containing the requisite building model.
-
-Output from a site validation run will be in `validations/metadata_report.json`.
-
-### Types and Topics
-
-When using the
-[GCP Cloud IoT Core MQTT Bridge](https://cloud.google.com/iot/docs/how-tos/mqtt-bridge#publishing_telemetry_events)
-there are multiple ways the subschema used during validation is chosen.
-* All messages have their attributes validated against the `.../attributes.json` schema. These attributes are
-automatically defined server-side by the MQTT Client ID and Topic, and are not explicitly included in any message payload.
-* A [device event message](https://cloud.google.com/iot/docs/how-tos/mqtt-bridge#publishing_telemetry_events)
-is validated against the sub-schema indicated by the MQTT topic `subFolder`. E.g., the MQTT
-topic `/devices/{device-id}/events/pointset` will be validated against `.../pointset.json`.
-* [Device state messages](https://cloud.google.com/iot/docs/how-tos/config/getting-state#reporting_device_state)
-are validated against the `.../state.json` schema on `/devices/{device-id}/state` MQTT topic.
-* (There currently is no stream validation of
-[device config messages](https://cloud.google.com/iot/docs/how-tos/config/configuring-devices#mqtt), which are sent on the
-`/devices/{device-id}/config` topic.)
-
-See this handy-dandy table:
-
-| Type | Category | subFolder | MQTT Topic | Schema File |
-|----------|----------|-----------|----------------------------------------|---------------|
-| state | state | _n/a_ | `/devices/{device_id}/state` | state.json |
-| config | config | _n/a_ | `/devices/{device-id}/config` | config.json |
-| pointset | event | pointset | `/devices/{device-id}/events/pointset` | pointset.json |
-| logentry | event | logentry | `/devices/{device-id}/events/logentry` | logentry.json |
diff --git a/etc/MININET_VERSION b/etc/MININET_VERSION
new file mode 100644
index 0000000000..2357edf889
--- /dev/null
+++ b/etc/MININET_VERSION
@@ -0,0 +1 @@
+2.3.0d6
diff --git a/etc/UDMI_VERSION b/etc/UDMI_VERSION
new file mode 100644
index 0000000000..9084fa2f71
--- /dev/null
+++ b/etc/UDMI_VERSION
@@ -0,0 +1 @@
+1.1.0
diff --git a/etc/docker_images.txt b/etc/docker_images.txt
index 193e733424..f8a184f0ab 100644
--- a/etc/docker_images.txt
+++ b/etc/docker_images.txt
@@ -1,23 +1,28 @@
-daqf/aardvark 34718b2f3fd5
-daqf/default 3ac95db36ee4
-daqf/faucet 45c13344a8ed
-daqf/faux1 ecff07f12534
-daqf/faux2 39914ae11741
-daqf/gauge 1431053cf25e
-daqf/networking af56b0732100
-daqf/switch 67954aca8dce
-daqf/test_bacext 363b6d476ac8
-daqf/test_bacnet 073a0eb5529f
-daqf/test_brute 700d986d5e83
-daqf/test_discover ad34b17b41e6
-daqf/test_fail c9a7e6b43bd0
-daqf/test_hold cb120980c658
-daqf/test_macoui a828288c855b
-daqf/test_mudgee d4ed15ef1dfc
-daqf/test_nmap 78aa5def41e5
-daqf/test_pass 74167ef0df55
-daqf/test_password 471bd1290918
-daqf/test_ping 5618e0243643
-daqf/test_switch 47585fc0876e
-daqf/test_tls 9c5f28b74fed
-daqf/test_udmi fc13d4c80b0d
+daqf/aardvark 6fb0f6c52222
+daqf/default f8652a12fdd8
+daqf/faucet 1ec12d632685
+daqf/faux1 3d8f075bf6de
+daqf/faux2 58b756b90505
+daqf/gauge ace0ffe33b8f
+daqf/networking 4f25f942b538
+daqf/switch b2113d0aa5d9
+daqf/test_bacext daa0a06c718e
+daqf/test_bacnet 6a3c93c4decc
+daqf/test_brute aa76b01d5eed
+daqf/test_discover 0ca76d766349
+daqf/test_fail 8ef4103069a5
+daqf/test_hold 5c923cd1a464
+daqf/test_macoui a605473e0f8d
+daqf/test_manual 8026fdd99a5b
+daqf/test_mudgee 189aa0b635fd
+daqf/test_network 557df3ae19f9
+daqf/test_nmap 8ceb63e71c79
+daqf/test_ntp a5b21e0039e6
+daqf/test_pass 62dd10381336
+daqf/test_password d318555d2d3e
+daqf/test_ping fe8e4dd5ddc2
+daqf/test_ssh 054efbf1b3c3
+daqf/test_switch e5bb16e85362
+daqf/test_tls 17de1ebf13ce
+daqf/test_udmi b6d5381f32f0
+daqf/usi 348d54ddbb7c
diff --git a/etc/docker_images.ver b/etc/docker_images.ver
index 26ca594609..158c747293 100644
--- a/etc/docker_images.ver
+++ b/etc/docker_images.ver
@@ -1 +1 @@
-1.5.1
+1.9.5
diff --git a/firebase/functions/index.js b/firebase/functions/index.js
index f17a3d2c6c..4db0fd5e53 100644
--- a/firebase/functions/index.js
+++ b/firebase/functions/index.js
@@ -123,7 +123,7 @@ function handleTestResult(origin, siteName, message) {
const deviceDoc = originDoc.collection('device').doc(message.device_id);
const updates = [
- originDoc.set({ 'updated': timestamp }),
+ originDoc.set({ 'updated': timestamp }, { merge: true }),
siteDoc.set({ 'updated': timestamp }),
portDoc.set({ 'updated': timestamp }),
deviceDoc.set({ 'updated': timestamp })
@@ -147,7 +147,7 @@ function handleTestResult(origin, siteName, message) {
}
console.log('Test Result: ', timestamp, origin, siteName, message.port,
- message.runid, message.name, message.device_id, message.state);
+ message.runid, message.daq_run_id, message.name, message.device_id, message.state);
const runDoc = originDoc.collection('runid').doc(message.runid);
const lastDoc = originDoc.collection('last').doc(message.name);
const resultDoc = runDoc.collection('test').doc(message.name);
@@ -168,6 +168,7 @@ function handleTestResult(origin, siteName, message) {
}
return Promise.all([
runDoc.set({ 'updated': timestamp,
+ 'daq_run_id': message.daq_run_id,
'last_name': message.name
}, { merge: true }),
resultDoc.set(message),
@@ -193,17 +194,22 @@ function handleTestResult(origin, siteName, message) {
function handleHeartbeat(origin, message) {
const timestamp = new Date().toJSON();
const originDoc = db.collection('origin').doc(origin);
- console.log('heartbeat', timestamp, origin)
+ console.log('heartbeat', timestamp, origin, message)
const heartbeatDoc = originDoc.collection('runner').doc('heartbeat');
return Promise.all([
- originDoc.set({ 'updated': timestamp }),
+ originDoc.set({
+ 'updated': timestamp,
+ 'version': message.version
+ }),
heartbeatDoc.get().then((result) => {
const current = result.data();
- if (!current || !current.message || current.message.timestamp < message.timestamp)
+ const defined = current && current.message && current.message.timestamp;
+ if (!defined || current.message.timestamp < message.timestamp) {
return heartbeatDoc.set({
'updated': timestamp,
message
});
+ }
})
]);
}
diff --git a/firebase/functions/package-lock.json b/firebase/functions/package-lock.json
index 3d7970d0d1..d39919d4fe 100644
--- a/firebase/functions/package-lock.json
+++ b/firebase/functions/package-lock.json
@@ -14,79 +14,163 @@
"integrity": "sha512-88h74TMQ6wXChPA6h9Q3E1Jg6TkTHep2+k63OWg3s0ozyGVMeY+TTOti7PFPzq5RhszQPQOoCi59es4MaRvgCw=="
},
"@firebase/component": {
- "version": "0.1.13",
- "resolved": "https://registry.npmjs.org/@firebase/component/-/component-0.1.13.tgz",
- "integrity": "sha512-DuSIM96NQkE3Yo77IOa5BWw8VBdvCR5cbMLNiFT4X3dTU15Dm0zHjncQHt/6rQpABGNYWAfOCJmSU1v6vc3DFA==",
+ "version": "0.1.18",
+ "resolved": "https://registry.npmjs.org/@firebase/component/-/component-0.1.18.tgz",
+ "integrity": "sha512-c8gd1k/e0sbBTR0xkLIYUN8nVkA0zWxcXGIvdfYtGEsNw6n7kh5HkcxKXOPB8S7bcPpqZkGgBIfvd94IyG2gaQ==",
"requires": {
- "@firebase/util": "0.2.48",
- "tslib": "1.11.1"
+ "@firebase/util": "0.3.1",
+ "tslib": "^1.11.1"
}
},
"@firebase/database": {
- "version": "0.6.4",
- "resolved": "https://registry.npmjs.org/@firebase/database/-/database-0.6.4.tgz",
- "integrity": "sha512-m3jaElEEXhr3a9D+M/kbDuRCQG5EmrnSqyEq7iNk3s5ankIrALid0AYm2RZF764F/DIeMFtAzng4EyyEqsaQlQ==",
+ "version": "0.6.11",
+ "resolved": "https://registry.npmjs.org/@firebase/database/-/database-0.6.11.tgz",
+ "integrity": "sha512-QOHhB7+CdjVhEXG9CyX0roA9ARJcEuwbozz0Bix+ULuZqjQ58KUFHMH1apW6EEiUP22d/mYD7dNXsUGshjL9PA==",
"requires": {
"@firebase/auth-interop-types": "0.1.5",
- "@firebase/component": "0.1.13",
- "@firebase/database-types": "0.5.1",
- "@firebase/logger": "0.2.5",
- "@firebase/util": "0.2.48",
+ "@firebase/component": "0.1.18",
+ "@firebase/database-types": "0.5.2",
+ "@firebase/logger": "0.2.6",
+ "@firebase/util": "0.3.1",
"faye-websocket": "0.11.3",
- "tslib": "1.11.1"
+ "tslib": "^1.11.1"
}
},
"@firebase/database-types": {
- "version": "0.5.1",
- "resolved": "https://registry.npmjs.org/@firebase/database-types/-/database-types-0.5.1.tgz",
- "integrity": "sha512-onQxom1ZBYBJ648w/VNRzUewovEDAH7lvnrrpCd69ukkyrMk6rGEO/PQ9BcNEbhlNtukpsqRS0oNOFlHs0FaSA==",
+ "version": "0.5.2",
+ "resolved": "https://registry.npmjs.org/@firebase/database-types/-/database-types-0.5.2.tgz",
+ "integrity": "sha512-ap2WQOS3LKmGuVFKUghFft7RxXTyZTDr0Xd8y2aqmWsbJVjgozi0huL/EUMgTjGFrATAjcf2A7aNs8AKKZ2a8g==",
"requires": {
"@firebase/app-types": "0.6.1"
}
},
"@firebase/logger": {
- "version": "0.2.5",
- "resolved": "https://registry.npmjs.org/@firebase/logger/-/logger-0.2.5.tgz",
- "integrity": "sha512-qqw3m0tWs/qrg7axTZG/QZq24DIMdSY6dGoWuBn08ddq7+GLF5HiqkRj71XznYeUUbfRq5W9C/PSFnN4JxX+WA=="
+ "version": "0.2.6",
+ "resolved": "https://registry.npmjs.org/@firebase/logger/-/logger-0.2.6.tgz",
+ "integrity": "sha512-KIxcUvW/cRGWlzK9Vd2KB864HlUnCfdTH0taHE0sXW5Xl7+W68suaeau1oKNEqmc3l45azkd4NzXTCWZRZdXrw=="
},
"@firebase/util": {
- "version": "0.2.48",
- "resolved": "https://registry.npmjs.org/@firebase/util/-/util-0.2.48.tgz",
- "integrity": "sha512-6Wzq6IBF//3mrMTmTQ+JmceM0PMQpxV2GVfXhZn/4sMMkkhB0MA908nPDnatoHwUKyWE3BMw+uTLkyBnkuTu5A==",
+ "version": "0.3.1",
+ "resolved": "https://registry.npmjs.org/@firebase/util/-/util-0.3.1.tgz",
+ "integrity": "sha512-zjVd9rfL08dRRdZILFn1RZTHb1euCcnD9N/9P56gdBcm2bvT5XsCC4G6t5toQBpE/H/jYe5h6MZMqfLu3EQLXw==",
"requires": {
- "tslib": "1.11.1"
+ "tslib": "^1.11.1"
}
},
"@google-cloud/common": {
- "version": "2.4.0",
- "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-2.4.0.tgz",
- "integrity": "sha512-zWFjBS35eI9leAHhjfeOYlK5Plcuj/77EzstnrJIZbKgF/nkqjcQuGiMCpzCwOfPyUbz8ZaEOYgbHa759AKbjg==",
+ "version": "3.3.2",
+ "resolved": "https://registry.npmjs.org/@google-cloud/common/-/common-3.3.2.tgz",
+ "integrity": "sha512-W7JRLBEJWYtZQQuGQX06U6GBOSLrSrlvZxv6kGNwJtFrusu6AVgZltQ9Pajuz9Dh9aSXy9aTnBcyxn2/O0EGUw==",
"optional": true,
"requires": {
- "@google-cloud/projectify": "^1.0.0",
- "@google-cloud/promisify": "^1.0.0",
- "arrify": "^2.0.0",
- "duplexify": "^3.6.0",
+ "@google-cloud/projectify": "^2.0.0",
+ "@google-cloud/promisify": "^2.0.0",
+ "arrify": "^2.0.1",
+ "duplexify": "^4.1.1",
"ent": "^2.2.0",
"extend": "^3.0.2",
- "google-auth-library": "^5.5.0",
- "retry-request": "^4.0.0",
- "teeny-request": "^6.0.0"
- }
- },
- "@google-cloud/firestore": {
- "version": "3.8.4",
- "resolved": "https://registry.npmjs.org/@google-cloud/firestore/-/firestore-3.8.4.tgz",
- "integrity": "sha512-LCZeqB6goNKzD5G/wcoqWaQ2uf3FV/dtU5OSypqOWl+vHMTEVh1ap2H21JXaEydxq53lCayGfqjhDQzs0J3Qew==",
- "optional": true,
- "requires": {
- "deep-equal": "^2.0.0",
- "functional-red-black-tree": "^1.0.1",
- "google-gax": "^1.13.0",
- "readable-stream": "^3.4.0",
- "through2": "^3.0.0"
+ "google-auth-library": "^6.0.0",
+ "retry-request": "^4.1.1",
+ "teeny-request": "^7.0.0"
},
"dependencies": {
+ "bignumber.js": {
+ "version": "9.0.0",
+ "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.0.tgz",
+ "integrity": "sha512-t/OYhhJ2SD+YGBQcjY8GzzDHEk9f3nerxjtfa6tlMXfe7frs/WozhvCNoGvpM0P3bNf3Gq5ZRMlGr5f3r4/N8A==",
+ "optional": true
+ },
+ "duplexify": {
+ "version": "4.1.1",
+ "resolved": "https://registry.npmjs.org/duplexify/-/duplexify-4.1.1.tgz",
+ "integrity": "sha512-DY3xVEmVHTv1wSzKNbwoU6nVjzI369Y6sPoqfYr0/xlx3IdX2n94xIszTcjPO8W8ZIv0Wb0PXNcjuZyT4wiICA==",
+ "optional": true,
+ "requires": {
+ "end-of-stream": "^1.4.1",
+ "inherits": "^2.0.3",
+ "readable-stream": "^3.1.1",
+ "stream-shift": "^1.0.0"
+ }
+ },
+ "gaxios": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.1.0.tgz",
+ "integrity": "sha512-DDTn3KXVJJigtz+g0J3vhcfbDbKtAroSTxauWsdnP57sM5KZ3d2c/3D9RKFJ86s43hfw6WULg6TXYw/AYiBlpA==",
+ "optional": true,
+ "requires": {
+ "abort-controller": "^3.0.0",
+ "extend": "^3.0.2",
+ "https-proxy-agent": "^5.0.0",
+ "is-stream": "^2.0.0",
+ "node-fetch": "^2.3.0"
+ }
+ },
+ "gcp-metadata": {
+ "version": "4.1.4",
+ "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.4.tgz",
+ "integrity": "sha512-5J/GIH0yWt/56R3dNaNWPGQ/zXsZOddYECfJaqxFWgrZ9HC2Kvc5vl9upOgUUHKzURjAVf2N+f6tEJiojqXUuA==",
+ "optional": true,
+ "requires": {
+ "gaxios": "^3.0.0",
+ "json-bigint": "^1.0.0"
+ }
+ },
+ "google-auth-library": {
+ "version": "6.0.6",
+ "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.6.tgz",
+ "integrity": "sha512-fWYdRdg55HSJoRq9k568jJA1lrhg9i2xgfhVIMJbskUmbDpJGHsbv9l41DGhCDXM21F9Kn4kUwdysgxSYBYJUw==",
+ "optional": true,
+ "requires": {
+ "arrify": "^2.0.0",
+ "base64-js": "^1.3.0",
+ "ecdsa-sig-formatter": "^1.0.11",
+ "fast-text-encoding": "^1.0.0",
+ "gaxios": "^3.0.0",
+ "gcp-metadata": "^4.1.0",
+ "gtoken": "^5.0.0",
+ "jws": "^4.0.0",
+ "lru-cache": "^6.0.0"
+ }
+ },
+ "google-p12-pem": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.2.tgz",
+ "integrity": "sha512-tbjzndQvSIHGBLzHnhDs3cL4RBjLbLXc2pYvGH+imGVu5b4RMAttUTdnmW2UH0t11QeBTXZ7wlXPS7hrypO/tg==",
+ "optional": true,
+ "requires": {
+ "node-forge": "^0.9.0"
+ }
+ },
+ "gtoken": {
+ "version": "5.0.3",
+ "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.0.3.tgz",
+ "integrity": "sha512-Nyd1wZCMRc2dj/mAD0LlfQLcAO06uKdpKJXvK85SGrF5+5+Bpfil9u/2aw35ltvEHjvl0h5FMKN5knEU+9JrOg==",
+ "optional": true,
+ "requires": {
+ "gaxios": "^3.0.0",
+ "google-p12-pem": "^3.0.0",
+ "jws": "^4.0.0",
+ "mime": "^2.2.0"
+ }
+ },
+ "json-bigint": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz",
+ "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==",
+ "optional": true,
+ "requires": {
+ "bignumber.js": "^9.0.0"
+ }
+ },
+ "lru-cache": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
+ "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
+ "optional": true,
+ "requires": {
+ "yallist": "^4.0.0"
+ }
+ },
"readable-stream": {
"version": "3.6.0",
"resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
@@ -97,6 +181,200 @@
"string_decoder": "^1.1.1",
"util-deprecate": "^1.0.1"
}
+ },
+ "yallist": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
+ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
+ "optional": true
+ }
+ }
+ },
+ "@google-cloud/firestore": {
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/@google-cloud/firestore/-/firestore-4.2.0.tgz",
+ "integrity": "sha512-YCiKaTYCbXSoEvZ8cTmpgg4ebAvmFUOu3hj/aX+lHiOK7LsoFVi4jgNknogSqIiv04bxAysTBodpgn8XoZ4l5g==",
+ "optional": true,
+ "requires": {
+ "fast-deep-equal": "^3.1.1",
+ "functional-red-black-tree": "^1.0.1",
+ "google-gax": "^2.2.0"
+ },
+ "dependencies": {
+ "@grpc/grpc-js": {
+ "version": "1.1.5",
+ "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.1.5.tgz",
+ "integrity": "sha512-2huf5z85TdZI4nLmJQ9Zdfd+6vmIyBDs7B4L71bTaHKA9pRsGKAH24XaktMk/xneKJIqAgeIZtg1cyivVZtvrg==",
+ "optional": true,
+ "requires": {
+ "@grpc/proto-loader": "^0.6.0-pre14",
+ "@types/node": "^12.12.47",
+ "google-auth-library": "^6.0.0",
+ "semver": "^6.2.0"
+ },
+ "dependencies": {
+ "@grpc/proto-loader": {
+ "version": "0.6.0-pre9",
+ "resolved": "https://registry.npmjs.org/@grpc/proto-loader/-/proto-loader-0.6.0-pre9.tgz",
+ "integrity": "sha512-oM+LjpEjNzW5pNJjt4/hq1HYayNeQT+eGrOPABJnYHv7TyNPDNzkQ76rDYZF86X5swJOa4EujEMzQ9iiTdPgww==",
+ "optional": true,
+ "requires": {
+ "@types/long": "^4.0.1",
+ "lodash.camelcase": "^4.3.0",
+ "long": "^4.0.0",
+ "protobufjs": "^6.9.0",
+ "yargs": "^15.3.1"
+ }
+ }
+ }
+ },
+ "@types/node": {
+ "version": "12.12.54",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-12.12.54.tgz",
+ "integrity": "sha512-ge4xZ3vSBornVYlDnk7yZ0gK6ChHf/CHB7Gl1I0Jhah8DDnEQqBzgohYG4FX4p81TNirSETOiSyn+y1r9/IR6w==",
+ "optional": true
+ },
+ "bignumber.js": {
+ "version": "9.0.0",
+ "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.0.tgz",
+ "integrity": "sha512-t/OYhhJ2SD+YGBQcjY8GzzDHEk9f3nerxjtfa6tlMXfe7frs/WozhvCNoGvpM0P3bNf3Gq5ZRMlGr5f3r4/N8A==",
+ "optional": true
+ },
+ "gaxios": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.1.0.tgz",
+ "integrity": "sha512-DDTn3KXVJJigtz+g0J3vhcfbDbKtAroSTxauWsdnP57sM5KZ3d2c/3D9RKFJ86s43hfw6WULg6TXYw/AYiBlpA==",
+ "optional": true,
+ "requires": {
+ "abort-controller": "^3.0.0",
+ "extend": "^3.0.2",
+ "https-proxy-agent": "^5.0.0",
+ "is-stream": "^2.0.0",
+ "node-fetch": "^2.3.0"
+ }
+ },
+ "gcp-metadata": {
+ "version": "4.1.4",
+ "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.4.tgz",
+ "integrity": "sha512-5J/GIH0yWt/56R3dNaNWPGQ/zXsZOddYECfJaqxFWgrZ9HC2Kvc5vl9upOgUUHKzURjAVf2N+f6tEJiojqXUuA==",
+ "optional": true,
+ "requires": {
+ "gaxios": "^3.0.0",
+ "json-bigint": "^1.0.0"
+ }
+ },
+ "google-auth-library": {
+ "version": "6.0.6",
+ "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.6.tgz",
+ "integrity": "sha512-fWYdRdg55HSJoRq9k568jJA1lrhg9i2xgfhVIMJbskUmbDpJGHsbv9l41DGhCDXM21F9Kn4kUwdysgxSYBYJUw==",
+ "optional": true,
+ "requires": {
+ "arrify": "^2.0.0",
+ "base64-js": "^1.3.0",
+ "ecdsa-sig-formatter": "^1.0.11",
+ "fast-text-encoding": "^1.0.0",
+ "gaxios": "^3.0.0",
+ "gcp-metadata": "^4.1.0",
+ "gtoken": "^5.0.0",
+ "jws": "^4.0.0",
+ "lru-cache": "^6.0.0"
+ }
+ },
+ "google-gax": {
+ "version": "2.7.0",
+ "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-2.7.0.tgz",
+ "integrity": "sha512-0dBATy8mMVlfOBrT85Q+NzBpZ4OJZUMrPI9wJULpiIDq2w1zlN30Duor+fQUcMEjanYEc72G58M4iUVve0jfXw==",
+ "optional": true,
+ "requires": {
+ "@grpc/grpc-js": "~1.1.1",
+ "@grpc/proto-loader": "^0.5.1",
+ "@types/long": "^4.0.0",
+ "abort-controller": "^3.0.0",
+ "duplexify": "^3.6.0",
+ "google-auth-library": "^6.0.0",
+ "is-stream-ended": "^0.1.4",
+ "lodash.at": "^4.6.0",
+ "lodash.has": "^4.5.2",
+ "node-fetch": "^2.6.0",
+ "protobufjs": "^6.9.0",
+ "retry-request": "^4.0.0",
+ "semver": "^6.0.0",
+ "walkdir": "^0.4.0"
+ }
+ },
+ "google-p12-pem": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.2.tgz",
+ "integrity": "sha512-tbjzndQvSIHGBLzHnhDs3cL4RBjLbLXc2pYvGH+imGVu5b4RMAttUTdnmW2UH0t11QeBTXZ7wlXPS7hrypO/tg==",
+ "optional": true,
+ "requires": {
+ "node-forge": "^0.9.0"
+ }
+ },
+ "gtoken": {
+ "version": "5.0.3",
+ "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.0.3.tgz",
+ "integrity": "sha512-Nyd1wZCMRc2dj/mAD0LlfQLcAO06uKdpKJXvK85SGrF5+5+Bpfil9u/2aw35ltvEHjvl0h5FMKN5knEU+9JrOg==",
+ "optional": true,
+ "requires": {
+ "gaxios": "^3.0.0",
+ "google-p12-pem": "^3.0.0",
+ "jws": "^4.0.0",
+ "mime": "^2.2.0"
+ }
+ },
+ "json-bigint": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz",
+ "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==",
+ "optional": true,
+ "requires": {
+ "bignumber.js": "^9.0.0"
+ }
+ },
+ "lru-cache": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
+ "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
+ "optional": true,
+ "requires": {
+ "yallist": "^4.0.0"
+ }
+ },
+ "protobufjs": {
+ "version": "6.10.1",
+ "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.1.tgz",
+ "integrity": "sha512-pb8kTchL+1Ceg4lFd5XUpK8PdWacbvV5SK2ULH2ebrYtl4GjJmS24m6CKME67jzV53tbJxHlnNOSqQHbTsR9JQ==",
+ "optional": true,
+ "requires": {
+ "@protobufjs/aspromise": "^1.1.2",
+ "@protobufjs/base64": "^1.1.2",
+ "@protobufjs/codegen": "^2.0.4",
+ "@protobufjs/eventemitter": "^1.1.0",
+ "@protobufjs/fetch": "^1.1.0",
+ "@protobufjs/float": "^1.0.2",
+ "@protobufjs/inquire": "^1.1.0",
+ "@protobufjs/path": "^1.1.2",
+ "@protobufjs/pool": "^1.1.0",
+ "@protobufjs/utf8": "^1.1.0",
+ "@types/long": "^4.0.1",
+ "@types/node": "^13.7.0",
+ "long": "^4.0.0"
+ },
+ "dependencies": {
+ "@types/node": {
+ "version": "13.13.15",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-13.13.15.tgz",
+ "integrity": "sha512-kwbcs0jySLxzLsa2nWUAGOd/s21WU1jebrEdtzhsj1D4Yps1EOuyI1Qcu+FD56dL7NRNIJtDDjcqIG22NwkgLw==",
+ "optional": true
+ }
+ }
+ },
+ "yallist": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
+ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
+ "optional": true
}
}
},
@@ -109,85 +387,68 @@
}
},
"@google-cloud/paginator": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-2.0.3.tgz",
- "integrity": "sha512-kp/pkb2p/p0d8/SKUu4mOq8+HGwF8NPzHWkj+VKrIPQPyMRw8deZtrO/OcSiy9C/7bpfU5Txah5ltUNfPkgEXg==",
- "optional": true,
+ "version": "3.0.4",
+ "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-3.0.4.tgz",
+ "integrity": "sha512-fKI+jYQdV1F9jtG6tSRro3ilNSeBWVmTzxc8Z0kiPRXcj8eshh9fiF8TtxfDefyUKgTdWgHpzGBwLbZ/OGikJg==",
"requires": {
"arrify": "^2.0.0",
"extend": "^3.0.2"
}
},
"@google-cloud/precise-date": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/@google-cloud/precise-date/-/precise-date-2.0.1.tgz",
- "integrity": "sha512-uXrLK/1rYx6pWNHL5U8NurHwmqLX7CwDFuJtRoaZe9lhe8RU7AJS67CMsMvHB0OziCcBAiKdAFzHm9zljI2nKQ=="
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/@google-cloud/precise-date/-/precise-date-2.0.3.tgz",
+ "integrity": "sha512-+SDJ3ZvGkF7hzo6BGa8ZqeK3F6Z4+S+KviC9oOK+XCs3tfMyJCh/4j93XIWINgMMDIh9BgEvlw4306VxlXIlYA=="
},
"@google-cloud/projectify": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-1.0.4.tgz",
- "integrity": "sha512-ZdzQUN02eRsmTKfBj9FDL0KNDIFNjBn/d6tHQmA/+FImH5DO6ZV8E7FzxMgAUiVAUq41RFAkb25p1oHOZ8psfg==",
- "optional": true
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.1.tgz",
+ "integrity": "sha512-ZDG38U/Yy6Zr21LaR3BTiiLtpJl6RkPS/JwoRT453G+6Q1DhlV0waNf8Lfu+YVYGIIxgKnLayJRfYlFJfiI8iQ=="
},
"@google-cloud/promisify": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-1.0.4.tgz",
- "integrity": "sha512-VccZDcOql77obTnFh0TbNED/6ZbbmHDf8UMNnzO1d5g9V0Htfm4k5cllY8P1tJsRKC3zWYGRLaViiupcgVjBoQ==",
- "optional": true
+ "version": "2.0.2",
+ "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.2.tgz",
+ "integrity": "sha512-EvuabjzzZ9E2+OaYf+7P9OAiiwbTxKYL0oGLnREQd+Su2NTQBpomkdlkBowFvyWsaV0d1sSGxrKpSNcrhPqbxg=="
},
"@google-cloud/pubsub": {
- "version": "2.1.0",
- "resolved": "https://registry.npmjs.org/@google-cloud/pubsub/-/pubsub-2.1.0.tgz",
- "integrity": "sha512-9k4ucPR4X9/BKu1ht9RfXAqGpQzLZOGYpGgoq9Cnxhp9SDjAXkgIKN02pYCXZDdoLng25Mf+xkMnc3AfzJimnA==",
+ "version": "2.5.0",
+ "resolved": "https://registry.npmjs.org/@google-cloud/pubsub/-/pubsub-2.5.0.tgz",
+ "integrity": "sha512-7bbbQqa+LSTopVjt20EZ8maO6rEpbO7v8EvDImHMsbRS30HJ5+kClbaQTRvhNzhc1qy221A1GbHPHMCQ/U5E3Q==",
"requires": {
"@google-cloud/paginator": "^3.0.0",
"@google-cloud/precise-date": "^2.0.0",
"@google-cloud/projectify": "^2.0.0",
"@google-cloud/promisify": "^2.0.0",
+ "@opentelemetry/api": "^0.10.0",
+ "@opentelemetry/tracing": "^0.10.0",
"@types/duplexify": "^3.6.0",
"@types/long": "^4.0.0",
"arrify": "^2.0.0",
"extend": "^3.0.2",
"google-auth-library": "^6.0.0",
- "google-gax": "^2.1.0",
+ "google-gax": "^2.7.0",
"is-stream-ended": "^0.1.4",
"lodash.snakecase": "^4.1.1",
- "p-defer": "^3.0.0",
- "protobufjs": "^6.8.1"
+ "p-defer": "^3.0.0"
},
"dependencies": {
- "@google-cloud/paginator": {
- "version": "3.0.0",
- "resolved": "https://registry.npmjs.org/@google-cloud/paginator/-/paginator-3.0.0.tgz",
- "integrity": "sha512-iPdxTujlZQlMGNLHPtYoVwRu8IuLFr6y0GJwsX9hKULMgqGXrP/z0MV4ROGpRAkNE1FIfa1aDfNlwZHfF2z4bQ==",
- "requires": {
- "arrify": "^2.0.0",
- "extend": "^3.0.2"
- }
- },
- "@google-cloud/projectify": {
- "version": "2.0.0",
- "resolved": "https://registry.npmjs.org/@google-cloud/projectify/-/projectify-2.0.0.tgz",
- "integrity": "sha512-7wZ+m4N3Imtb5afOPfqNFyj9cKrlfVQ+t5YRxLS7tUpn8Pn/i7QuVubZRTXllaWjO4T5t/gm/r2x7oy5ajjvFQ=="
- },
- "@google-cloud/promisify": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/@google-cloud/promisify/-/promisify-2.0.1.tgz",
- "integrity": "sha512-82EQzwrNauw1fkbUSr3f+50Bcq7g4h0XvLOk8C5e9ABkXYHei7ZPi9tiMMD7Vh3SfcdH97d1ibJ3KBWp2o1J+w=="
- },
"@grpc/grpc-js": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.0.4.tgz",
- "integrity": "sha512-Qawt6HUrEmljQMPWnLnIXpcjelmtIAydi3M9awiG02WWJ1CmIvFEx4IOC1EsWUWUlabOGksRbpfvoIeZKFTNXw==",
+ "version": "1.1.3",
+ "resolved": "https://registry.npmjs.org/@grpc/grpc-js/-/grpc-js-1.1.3.tgz",
+ "integrity": "sha512-HtOsk2YUofBcm1GkPqGzb6pwHhv+74eC2CUO229USIDKRtg30ycbZmqC+HdNtY3nHqoc9IgcRlntFgopyQoYCA==",
"requires": {
- "google-auth-library": "^6.0.0",
"semver": "^6.2.0"
}
},
+ "bignumber.js": {
+ "version": "9.0.0",
+ "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.0.tgz",
+ "integrity": "sha512-t/OYhhJ2SD+YGBQcjY8GzzDHEk9f3nerxjtfa6tlMXfe7frs/WozhvCNoGvpM0P3bNf3Gq5ZRMlGr5f3r4/N8A=="
+ },
"gaxios": {
- "version": "3.0.3",
- "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.0.3.tgz",
- "integrity": "sha512-PkzQludeIFhd535/yucALT/Wxyj/y2zLyrMwPcJmnLHDugmV49NvAi/vb+VUq/eWztATZCNcb8ue+ywPG+oLuw==",
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.1.0.tgz",
+ "integrity": "sha512-DDTn3KXVJJigtz+g0J3vhcfbDbKtAroSTxauWsdnP57sM5KZ3d2c/3D9RKFJ86s43hfw6WULg6TXYw/AYiBlpA==",
"requires": {
"abort-controller": "^3.0.0",
"extend": "^3.0.2",
@@ -197,18 +458,18 @@
}
},
"gcp-metadata": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.0.tgz",
- "integrity": "sha512-r57SV28+olVsflPlKyVig3Muo/VDlcsObMtvDGOEtEJXj+DDE8bEl0coIkXh//hbkSDTvo+f5lbihZOndYXQQQ==",
+ "version": "4.1.4",
+ "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.4.tgz",
+ "integrity": "sha512-5J/GIH0yWt/56R3dNaNWPGQ/zXsZOddYECfJaqxFWgrZ9HC2Kvc5vl9upOgUUHKzURjAVf2N+f6tEJiojqXUuA==",
"requires": {
"gaxios": "^3.0.0",
- "json-bigint": "^0.3.0"
+ "json-bigint": "^1.0.0"
}
},
"google-auth-library": {
- "version": "6.0.1",
- "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.1.tgz",
- "integrity": "sha512-NWEM9W0o+fmUJMK/wEuJ1vAc8H/JAseOWB8tjOAAkz8yobU+5IDtO/rPCbbRwFF1obIOCe0lj1pkq9ld2OFZeg==",
+ "version": "6.0.6",
+ "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.6.tgz",
+ "integrity": "sha512-fWYdRdg55HSJoRq9k568jJA1lrhg9i2xgfhVIMJbskUmbDpJGHsbv9l41DGhCDXM21F9Kn4kUwdysgxSYBYJUw==",
"requires": {
"arrify": "^2.0.0",
"base64-js": "^1.3.0",
@@ -218,15 +479,15 @@
"gcp-metadata": "^4.1.0",
"gtoken": "^5.0.0",
"jws": "^4.0.0",
- "lru-cache": "^5.0.0"
+ "lru-cache": "^6.0.0"
}
},
"google-gax": {
- "version": "2.5.0",
- "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-2.5.0.tgz",
- "integrity": "sha512-Xqh+rinq93qSGOcs5aQdlrwBUR+/9AaFArLCvSGnx7Mye9p4u0dC98r2TO7wB4m1W138Swd6UPYGQyBg9BM/4g==",
+ "version": "2.7.0",
+ "resolved": "https://registry.npmjs.org/google-gax/-/google-gax-2.7.0.tgz",
+ "integrity": "sha512-0dBATy8mMVlfOBrT85Q+NzBpZ4OJZUMrPI9wJULpiIDq2w1zlN30Duor+fQUcMEjanYEc72G58M4iUVve0jfXw==",
"requires": {
- "@grpc/grpc-js": "~1.0.0",
+ "@grpc/grpc-js": "~1.1.1",
"@grpc/proto-loader": "^0.5.1",
"@types/long": "^4.0.0",
"abort-controller": "^3.0.0",
@@ -240,85 +501,102 @@
"retry-request": "^4.0.0",
"semver": "^6.0.0",
"walkdir": "^0.4.0"
- },
- "dependencies": {
- "protobufjs": {
- "version": "6.9.0",
- "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.9.0.tgz",
- "integrity": "sha512-LlGVfEWDXoI/STstRDdZZKb/qusoAWUnmLg9R8OLSO473mBLWHowx8clbX5/+mKDEI+v7GzjoK9tRPZMMcoTrg==",
- "requires": {
- "@protobufjs/aspromise": "^1.1.2",
- "@protobufjs/base64": "^1.1.2",
- "@protobufjs/codegen": "^2.0.4",
- "@protobufjs/eventemitter": "^1.1.0",
- "@protobufjs/fetch": "^1.1.0",
- "@protobufjs/float": "^1.0.2",
- "@protobufjs/inquire": "^1.1.0",
- "@protobufjs/path": "^1.1.2",
- "@protobufjs/pool": "^1.1.0",
- "@protobufjs/utf8": "^1.1.0",
- "@types/long": "^4.0.1",
- "@types/node": "^13.7.0",
- "long": "^4.0.0"
- }
- }
}
},
"google-p12-pem": {
- "version": "3.0.1",
- "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.1.tgz",
- "integrity": "sha512-VlQgtozgNVVVcYTXS36eQz4PXPt9gIPqLOhHN0QiV6W6h4qSCNVKPtKC5INtJsaHHF2r7+nOIa26MJeJMTaZEQ==",
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.2.tgz",
+ "integrity": "sha512-tbjzndQvSIHGBLzHnhDs3cL4RBjLbLXc2pYvGH+imGVu5b4RMAttUTdnmW2UH0t11QeBTXZ7wlXPS7hrypO/tg==",
"requires": {
"node-forge": "^0.9.0"
}
},
"gtoken": {
- "version": "5.0.1",
- "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.0.1.tgz",
- "integrity": "sha512-33w4FNDkUcyIOq/TqyC+drnKdI4PdXmWp9lZzssyEQKuvu9ZFN3KttaSnDKo52U3E51oujVGop93mKxmqO8HHg==",
+ "version": "5.0.3",
+ "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.0.3.tgz",
+ "integrity": "sha512-Nyd1wZCMRc2dj/mAD0LlfQLcAO06uKdpKJXvK85SGrF5+5+Bpfil9u/2aw35ltvEHjvl0h5FMKN5knEU+9JrOg==",
"requires": {
"gaxios": "^3.0.0",
"google-p12-pem": "^3.0.0",
"jws": "^4.0.0",
"mime": "^2.2.0"
}
+ },
+ "json-bigint": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz",
+ "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==",
+ "requires": {
+ "bignumber.js": "^9.0.0"
+ }
+ },
+ "lru-cache": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
+ "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
+ "requires": {
+ "yallist": "^4.0.0"
+ }
+ },
+ "protobufjs": {
+ "version": "6.10.1",
+ "resolved": "https://registry.npmjs.org/protobufjs/-/protobufjs-6.10.1.tgz",
+ "integrity": "sha512-pb8kTchL+1Ceg4lFd5XUpK8PdWacbvV5SK2ULH2ebrYtl4GjJmS24m6CKME67jzV53tbJxHlnNOSqQHbTsR9JQ==",
+ "requires": {
+ "@protobufjs/aspromise": "^1.1.2",
+ "@protobufjs/base64": "^1.1.2",
+ "@protobufjs/codegen": "^2.0.4",
+ "@protobufjs/eventemitter": "^1.1.0",
+ "@protobufjs/fetch": "^1.1.0",
+ "@protobufjs/float": "^1.0.2",
+ "@protobufjs/inquire": "^1.1.0",
+ "@protobufjs/path": "^1.1.2",
+ "@protobufjs/pool": "^1.1.0",
+ "@protobufjs/utf8": "^1.1.0",
+ "@types/long": "^4.0.1",
+ "@types/node": "^13.7.0",
+ "long": "^4.0.0"
+ }
+ },
+ "yallist": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
+ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
}
}
},
"@google-cloud/storage": {
- "version": "4.7.0",
- "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-4.7.0.tgz",
- "integrity": "sha512-f0guAlbeg7Z0m3gKjCfBCu7FG9qS3M3oL5OQQxlvGoPtK7/qg3+W+KQV73O2/sbuS54n0Kh2mvT5K2FWzF5vVQ==",
+ "version": "5.2.0",
+ "resolved": "https://registry.npmjs.org/@google-cloud/storage/-/storage-5.2.0.tgz",
+ "integrity": "sha512-zxHXZajtVA0Qx9IOnDUDb76mtKn5M20LKV/phmnVos7foozG9YZ6yYod90pRC/GgP3eOgxNYdt6KQcapssPsFw==",
"optional": true,
"requires": {
- "@google-cloud/common": "^2.1.1",
- "@google-cloud/paginator": "^2.0.0",
- "@google-cloud/promisify": "^1.0.0",
+ "@google-cloud/common": "^3.3.0",
+ "@google-cloud/paginator": "^3.0.0",
+ "@google-cloud/promisify": "^2.0.0",
"arrify": "^2.0.0",
"compressible": "^2.0.12",
"concat-stream": "^2.0.0",
- "date-and-time": "^0.13.0",
+ "date-and-time": "^0.14.0",
"duplexify": "^3.5.0",
"extend": "^3.0.2",
"gaxios": "^3.0.0",
- "gcs-resumable-upload": "^2.2.4",
+ "gcs-resumable-upload": "^3.1.0",
"hash-stream-validation": "^0.2.2",
"mime": "^2.2.0",
"mime-types": "^2.0.8",
"onetime": "^5.1.0",
- "p-limit": "^2.2.0",
+ "p-limit": "^3.0.1",
"pumpify": "^2.0.0",
- "readable-stream": "^3.4.0",
"snakeize": "^0.1.0",
"stream-events": "^1.0.1",
- "through2": "^3.0.0",
"xdg-basedir": "^4.0.0"
},
"dependencies": {
"gaxios": {
- "version": "3.0.3",
- "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.0.3.tgz",
- "integrity": "sha512-PkzQludeIFhd535/yucALT/Wxyj/y2zLyrMwPcJmnLHDugmV49NvAi/vb+VUq/eWztATZCNcb8ue+ywPG+oLuw==",
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.1.0.tgz",
+ "integrity": "sha512-DDTn3KXVJJigtz+g0J3vhcfbDbKtAroSTxauWsdnP57sM5KZ3d2c/3D9RKFJ86s43hfw6WULg6TXYw/AYiBlpA==",
"optional": true,
"requires": {
"abort-controller": "^3.0.0",
@@ -328,15 +606,13 @@
"node-fetch": "^2.3.0"
}
},
- "readable-stream": {
- "version": "3.6.0",
- "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
- "integrity": "sha512-BViHy7LKeTz4oNnkcLJ+lVSL6vpiFeX6/d3oSH8zCW7UxP2onchk+vTGB143xuFjHS3deTgkKoXXymXqymiIdA==",
+ "p-limit": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-3.0.2.tgz",
+ "integrity": "sha512-iwqZSOoWIW+Ew4kAGUlN16J4M7OB3ysMLSZtnhmqx7njIHFPlxWBX8xo3lVTyFVq6mI/lL9qt2IsN1sHwaxJkg==",
"optional": true,
"requires": {
- "inherits": "^2.0.3",
- "string_decoder": "^1.1.1",
- "util-deprecate": "^1.0.1"
+ "p-try": "^2.0.0"
}
}
}
@@ -358,6 +634,57 @@
"protobufjs": "^6.8.6"
}
},
+ "@opentelemetry/api": {
+ "version": "0.10.2",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-0.10.2.tgz",
+ "integrity": "sha512-GtpMGd6vkzDMYcpu2t9LlhEgMy/SzBwRnz48EejlRArYqZzqSzAsKmegUK7zHgl+EOIaK9mKHhnRaQu3qw20cA==",
+ "requires": {
+ "@opentelemetry/context-base": "^0.10.2"
+ }
+ },
+ "@opentelemetry/context-base": {
+ "version": "0.10.2",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/context-base/-/context-base-0.10.2.tgz",
+ "integrity": "sha512-hZNKjKOYsckoOEgBziGMnBcX0M7EtstnCmwz5jZUOUYwlZ+/xxX6z3jPu1XVO2Jivk0eLfuP9GP+vFD49CMetw=="
+ },
+ "@opentelemetry/core": {
+ "version": "0.10.2",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-0.10.2.tgz",
+ "integrity": "sha512-DhkiTp5eje2zTGd+HAIKWpGE6IR6lq7tUpYt4nnkhOi6Hq9WQAANVDCWEZEbYOw57LkdXbE50FZ/kMvHDm450Q==",
+ "requires": {
+ "@opentelemetry/api": "^0.10.2",
+ "@opentelemetry/context-base": "^0.10.2",
+ "semver": "^7.1.3"
+ },
+ "dependencies": {
+ "semver": {
+ "version": "7.3.2",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.2.tgz",
+ "integrity": "sha512-OrOb32TeeambH6UrhtShmF7CRDqhL6/5XpPNp2DuRH6+9QLw/orhp72j87v8Qa1ScDkvrrBNpZcDejAirJmfXQ=="
+ }
+ }
+ },
+ "@opentelemetry/resources": {
+ "version": "0.10.2",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-0.10.2.tgz",
+ "integrity": "sha512-5JGC2TPSAIHth615IURt+sSsTljY43zTfJD0JE9PHC6ipZPiQ0dpQDZOrLn8NAMfOHY1jeWwpIuLASjqbXUfuw==",
+ "requires": {
+ "@opentelemetry/api": "^0.10.2",
+ "@opentelemetry/core": "^0.10.2",
+ "gcp-metadata": "^3.5.0"
+ }
+ },
+ "@opentelemetry/tracing": {
+ "version": "0.10.2",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/tracing/-/tracing-0.10.2.tgz",
+ "integrity": "sha512-mNAhARn4dEdOjTa9OdysjI4fRHMbvr4YSbPuH7jhkyPzgoa+DnvnbY3GGpEay6kpuYJsrW8Ef9OIKAV/GndhbQ==",
+ "requires": {
+ "@opentelemetry/api": "^0.10.2",
+ "@opentelemetry/context-base": "^0.10.2",
+ "@opentelemetry/core": "^0.10.2",
+ "@opentelemetry/resources": "^0.10.2"
+ }
+ },
"@protobufjs/aspromise": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/@protobufjs/aspromise/-/aspromise-1.1.2.tgz",
@@ -427,6 +754,12 @@
"@types/node": "*"
}
},
+ "@types/color-name": {
+ "version": "1.1.1",
+ "resolved": "https://registry.npmjs.org/@types/color-name/-/color-name-1.1.1.tgz",
+ "integrity": "sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ==",
+ "optional": true
+ },
"@types/connect": {
"version": "3.4.33",
"resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.33.tgz",
@@ -454,9 +787,9 @@
}
},
"@types/express-serve-static-core": {
- "version": "4.17.7",
- "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.7.tgz",
- "integrity": "sha512-EMgTj/DF9qpgLXyc+Btimg+XoH7A2liE8uKul8qSmMTHCeNYzydDKFdsJskDvw42UsesCnhO63dO0Grbj8J4Dw==",
+ "version": "4.17.9",
+ "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.9.tgz",
+ "integrity": "sha512-DG0BYg6yO+ePW+XoDENYz8zhNGC3jDDEpComMYn7WJc4mY1Us8Rw9ax2YhJXxpyk2SF47PQAoQ0YyVT1a0bEkA==",
"requires": {
"@types/node": "*",
"@types/qs": "*",
@@ -477,9 +810,9 @@
"integrity": "sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w=="
},
"@types/mime": {
- "version": "2.0.2",
- "resolved": "https://registry.npmjs.org/@types/mime/-/mime-2.0.2.tgz",
- "integrity": "sha512-4kPlzbljFcsttWEq6aBW0OZe6BDajAmyvr2xknBG92tejQnvdGtT9+kXSZ580DqpxY9qG2xeQVF9Dq0ymUTo5Q=="
+ "version": "2.0.3",
+ "resolved": "https://registry.npmjs.org/@types/mime/-/mime-2.0.3.tgz",
+ "integrity": "sha512-Jus9s4CDbqwocc5pOAnh8ShfrnMcPHuJYzVcSUU7lrh8Ni5HuIqX3oilL86p3dlTrk0LzHRCgA/GQ7uNCw6l2Q=="
},
"@types/node": {
"version": "13.9.5",
@@ -487,9 +820,9 @@
"integrity": "sha512-hkzMMD3xu6BrJpGVLeQ3htQQNAcOrJjX7WFmtK8zWQpz2UJf13LCFF2ALA7c9OVdvc2vQJeDdjfR35M0sBCxvw=="
},
"@types/qs": {
- "version": "6.9.3",
- "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.3.tgz",
- "integrity": "sha512-7s9EQWupR1fTc2pSMtXRQ9w9gLOcrJn+h7HOXw4evxyvVqMi4f+q7d2tnFe3ng3SNHjtK+0EzGMGFUQX4/AQRA=="
+ "version": "6.9.4",
+ "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.4.tgz",
+ "integrity": "sha512-+wYo+L6ZF6BMoEjtf8zB2esQsqdV6WsjRK/GP9WOgLPrq87PbNWgIxS76dS5uvl/QXtHGakZmwTznIfcPXcKlQ=="
},
"@types/range-parser": {
"version": "1.2.3",
@@ -497,9 +830,9 @@
"integrity": "sha512-ewFXqrQHlFsgc09MK5jP5iR7vumV/BYayNC6PgJO2LPe8vrnNFyjQjSppfEngITi0qvfKtzFvgKymGheFM9UOA=="
},
"@types/serve-static": {
- "version": "1.13.4",
- "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.13.4.tgz",
- "integrity": "sha512-jTDt0o/YbpNwZbQmE/+2e+lfjJEJJR0I3OFaKQKPWkASkCoW3i6fsUnqudSMcNAfbtmADGu8f4MV4q+GqULmug==",
+ "version": "1.13.5",
+ "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.13.5.tgz",
+ "integrity": "sha512-6M64P58N+OXjU432WoLLBQxbA0LRGBCRm7aAGQJ+SMC1IMl0dgRVi9EFfoDcS2a7Xogygk/eGN94CfwU9UF7UQ==",
"requires": {
"@types/express-serve-static-core": "*",
"@types/mime": "*"
@@ -530,12 +863,22 @@
"debug": "4"
}
},
- "array-filter": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/array-filter/-/array-filter-1.0.0.tgz",
- "integrity": "sha1-uveeYubvTCpMC4MSMtr/7CUfnYM=",
+ "ansi-regex": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz",
+ "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==",
"optional": true
},
+ "ansi-styles": {
+ "version": "4.2.1",
+ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz",
+ "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==",
+ "optional": true,
+ "requires": {
+ "@types/color-name": "^1.1.1",
+ "color-convert": "^2.0.1"
+ }
+ },
"array-flatten": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/array-flatten/-/array-flatten-1.1.1.tgz",
@@ -543,17 +886,8 @@
},
"arrify": {
"version": "2.0.1",
- "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz",
- "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug=="
- },
- "available-typed-arrays": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.2.tgz",
- "integrity": "sha512-XWX3OX8Onv97LMk/ftVyBibpGwY5a8SmuxZPzeOxqmuEqUCOM9ZE+uIaD1VNJ5QnvU2UQusvmKbuM1FR8QWGfQ==",
- "optional": true,
- "requires": {
- "array-filter": "^1.0.0"
- }
+ "resolved": "https://registry.npmjs.org/arrify/-/arrify-2.0.1.tgz",
+ "integrity": "sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug=="
},
"base64-js": {
"version": "1.3.1",
@@ -613,6 +947,38 @@
"resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.0.tgz",
"integrity": "sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg=="
},
+ "camelcase": {
+ "version": "5.3.1",
+ "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz",
+ "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==",
+ "optional": true
+ },
+ "cliui": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz",
+ "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==",
+ "optional": true,
+ "requires": {
+ "string-width": "^4.2.0",
+ "strip-ansi": "^6.0.0",
+ "wrap-ansi": "^6.2.0"
+ }
+ },
+ "color-convert": {
+ "version": "2.0.1",
+ "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
+ "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
+ "optional": true,
+ "requires": {
+ "color-name": "~1.1.4"
+ }
+ },
+ "color-name": {
+ "version": "1.1.4",
+ "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
+ "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
+ "optional": true
+ },
"compressible": {
"version": "2.0.18",
"resolved": "https://registry.npmjs.org/compressible/-/compressible-2.0.18.tgz",
@@ -712,9 +1078,9 @@
"optional": true
},
"date-and-time": {
- "version": "0.13.1",
- "resolved": "https://registry.npmjs.org/date-and-time/-/date-and-time-0.13.1.tgz",
- "integrity": "sha512-/Uge9DJAT+s+oAcDxtBhyR8+sKjUnZbYmyhbmWjTHNtX7B7oWD8YyYdeXcBRbwSj6hVvj+IQegJam7m7czhbFw==",
+ "version": "0.14.0",
+ "resolved": "https://registry.npmjs.org/date-and-time/-/date-and-time-0.14.0.tgz",
+ "integrity": "sha512-0wY8b90XjQkRxv3XGT8k1ffyDQOf4+T+2hiWp7rwYgoEn8OyYDsHZdnVrPlzxbwjLUY66mVBXr59eKOwpSV7lw==",
"optional": true
},
"debug": {
@@ -725,43 +1091,11 @@
"ms": "^2.1.1"
}
},
- "deep-equal": {
- "version": "2.0.3",
- "resolved": "https://registry.npmjs.org/deep-equal/-/deep-equal-2.0.3.tgz",
- "integrity": "sha512-Spqdl4H+ky45I9ByyJtXteOm9CaIrPmnIPmOhrkKGNYWeDgCvJ8jNYVCTjChxW4FqGuZnLHADc8EKRMX6+CgvA==",
- "optional": true,
- "requires": {
- "es-abstract": "^1.17.5",
- "es-get-iterator": "^1.1.0",
- "is-arguments": "^1.0.4",
- "is-date-object": "^1.0.2",
- "is-regex": "^1.0.5",
- "isarray": "^2.0.5",
- "object-is": "^1.1.2",
- "object-keys": "^1.1.1",
- "object.assign": "^4.1.0",
- "regexp.prototype.flags": "^1.3.0",
- "side-channel": "^1.0.2",
- "which-boxed-primitive": "^1.0.1",
- "which-collection": "^1.0.1",
- "which-typed-array": "^1.1.2"
- },
- "dependencies": {
- "isarray": {
- "version": "2.0.5",
- "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz",
- "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==",
- "optional": true
- }
- }
- },
- "define-properties": {
- "version": "1.1.3",
- "resolved": "https://registry.npmjs.org/define-properties/-/define-properties-1.1.3.tgz",
- "integrity": "sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ==",
- "requires": {
- "object-keys": "^1.0.12"
- }
+ "decamelize": {
+ "version": "1.2.0",
+ "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz",
+ "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA=",
+ "optional": true
},
"depd": {
"version": "1.1.2",
@@ -814,6 +1148,12 @@
"resolved": "https://registry.npmjs.org/ee-first/-/ee-first-1.1.1.tgz",
"integrity": "sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0="
},
+ "emoji-regex": {
+ "version": "8.0.0",
+ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
+ "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
+ "optional": true
+ },
"encodeurl": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-1.0.2.tgz",
@@ -833,57 +1173,6 @@
"integrity": "sha1-6WQhkyWiHQX0RGai9obtbOX13R0=",
"optional": true
},
- "es-abstract": {
- "version": "1.17.5",
- "resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.17.5.tgz",
- "integrity": "sha512-BR9auzDbySxOcfog0tLECW8l28eRGpDpU3Dm3Hp4q/N+VtLTmyj4EUN088XZWQDW/hzj6sYRDXeOFsaAODKvpg==",
- "requires": {
- "es-to-primitive": "^1.2.1",
- "function-bind": "^1.1.1",
- "has": "^1.0.3",
- "has-symbols": "^1.0.1",
- "is-callable": "^1.1.5",
- "is-regex": "^1.0.5",
- "object-inspect": "^1.7.0",
- "object-keys": "^1.1.1",
- "object.assign": "^4.1.0",
- "string.prototype.trimleft": "^2.1.1",
- "string.prototype.trimright": "^2.1.1"
- }
- },
- "es-get-iterator": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/es-get-iterator/-/es-get-iterator-1.1.0.tgz",
- "integrity": "sha512-UfrmHuWQlNMTs35e1ypnvikg6jCz3SK8v8ImvmDsh36fCVUR1MqoFDiyn0/k52C8NqO3YsO8Oe0azeesNuqSsQ==",
- "optional": true,
- "requires": {
- "es-abstract": "^1.17.4",
- "has-symbols": "^1.0.1",
- "is-arguments": "^1.0.4",
- "is-map": "^2.0.1",
- "is-set": "^2.0.1",
- "is-string": "^1.0.5",
- "isarray": "^2.0.5"
- },
- "dependencies": {
- "isarray": {
- "version": "2.0.5",
- "resolved": "https://registry.npmjs.org/isarray/-/isarray-2.0.5.tgz",
- "integrity": "sha512-xHjhDr3cNBK0BzdUJSPXZntQUx/mwMS5Rw4A7lPJ90XGAO6ISP/ePDNuo0vhqOZU+UD5JoodwCAAoZQd3FeAKw==",
- "optional": true
- }
- }
- },
- "es-to-primitive": {
- "version": "1.2.1",
- "resolved": "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz",
- "integrity": "sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA==",
- "requires": {
- "is-callable": "^1.1.4",
- "is-date-object": "^1.0.1",
- "is-symbol": "^1.0.2"
- }
- },
"escape-html": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/escape-html/-/escape-html-1.0.3.tgz",
@@ -961,6 +1250,12 @@
"resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz",
"integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g=="
},
+ "fast-deep-equal": {
+ "version": "3.1.3",
+ "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz",
+ "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==",
+ "optional": true
+ },
"fast-text-encoding": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/fast-text-encoding/-/fast-text-encoding-1.0.1.tgz",
@@ -1003,36 +1298,42 @@
}
}
},
+ "find-up": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
+ "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
+ "optional": true,
+ "requires": {
+ "locate-path": "^5.0.0",
+ "path-exists": "^4.0.0"
+ }
+ },
"firebase-admin": {
- "version": "8.12.1",
- "resolved": "https://registry.npmjs.org/firebase-admin/-/firebase-admin-8.12.1.tgz",
- "integrity": "sha512-DZ4Q7QQJYaO2BhnhZLrhL+mGRTCLS5WrxjbJtuKGmbKRBepwMhx++EQA5yhnGnIXgDHnp5SrZnVKygNdXtH8BQ==",
- "requires": {
- "@firebase/database": "^0.6.0",
- "@google-cloud/firestore": "^3.0.0",
- "@google-cloud/storage": "^4.1.2",
- "@types/node": "^8.10.59",
+ "version": "9.1.1",
+ "resolved": "https://registry.npmjs.org/firebase-admin/-/firebase-admin-9.1.1.tgz",
+ "integrity": "sha512-HkzY9yN/kOe1EQgjheURAQ4pFBerI54TBL0+nj1fwzKnAnGCpcI73Bbwx99Pk3u2x4rj6bDcsZfz9bA8y7DWtQ==",
+ "requires": {
+ "@firebase/database": "^0.6.10",
+ "@firebase/database-types": "^0.5.2",
+ "@google-cloud/firestore": "^4.0.0",
+ "@google-cloud/storage": "^5.0.0",
+ "@types/node": "^10.10.0",
"dicer": "^0.3.0",
- "jsonwebtoken": "8.1.0",
- "node-forge": "0.7.4"
+ "jsonwebtoken": "^8.5.1",
+ "node-forge": "^0.9.1"
},
"dependencies": {
"@types/node": {
- "version": "8.10.61",
- "resolved": "https://registry.npmjs.org/@types/node/-/node-8.10.61.tgz",
- "integrity": "sha512-l+zSbvT8TPRaCxL1l9cwHCb0tSqGAGcjPJFItGGYat5oCTiq1uQQKYg5m7AF1mgnEBzFXGLJ2LRmNjtreRX76Q=="
- },
- "node-forge": {
- "version": "0.7.4",
- "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-0.7.4.tgz",
- "integrity": "sha512-8Df0906+tq/omxuCZD6PqhPaQDYuyJ1d+VITgxoIA8zvQd1ru+nMJcDChHH324MWitIgbVkAkQoGEEVJNpn/PA=="
+ "version": "10.17.28",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-10.17.28.tgz",
+ "integrity": "sha512-dzjES1Egb4c1a89C7lKwQh8pwjYmlOAG9dW1pBgxEk57tMrLnssOfEthz8kdkNaBd7lIqQx7APm5+mZ619IiCQ=="
}
}
},
"firebase-functions": {
- "version": "3.7.0",
- "resolved": "https://registry.npmjs.org/firebase-functions/-/firebase-functions-3.7.0.tgz",
- "integrity": "sha512-+ROj2Gs2/KyM+T8jYo7AKaHynFsN49sXbgZMll3zuGa9/8oiDsXp9e1Iy2JMkFmSZg67jeYw5Ue2OSpz0XiqFQ==",
+ "version": "3.11.0",
+ "resolved": "https://registry.npmjs.org/firebase-functions/-/firebase-functions-3.11.0.tgz",
+ "integrity": "sha512-i1uMhZ/M6i5SCI3ulKo7EWX0/LD+I5o6N+sk0HbOWfzyWfOl0iJTvQkR3BVDcjrlhPVC4xG1bDTLxd+DTkLqaw==",
"requires": {
"@types/express": "4.17.3",
"cors": "^2.8.5",
@@ -1040,12 +1341,6 @@
"lodash": "^4.17.14"
}
},
- "foreach": {
- "version": "2.0.5",
- "resolved": "https://registry.npmjs.org/foreach/-/foreach-2.0.5.tgz",
- "integrity": "sha1-C+4AUBiusmDQo6865ljdATbsG5k=",
- "optional": true
- },
"forwarded": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.1.2.tgz",
@@ -1056,11 +1351,6 @@
"resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz",
"integrity": "sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac="
},
- "function-bind": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz",
- "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A=="
- },
"functional-red-black-tree": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz",
@@ -1089,19 +1379,119 @@
}
},
"gcs-resumable-upload": {
- "version": "2.3.3",
- "resolved": "https://registry.npmjs.org/gcs-resumable-upload/-/gcs-resumable-upload-2.3.3.tgz",
- "integrity": "sha512-sf896I5CC/1AxeaGfSFg3vKMjUq/r+A3bscmVzZm10CElyRanN0XwPu/MxeIO4LSP+9uF6yKzXvNsaTsMXUG6Q==",
+ "version": "3.1.1",
+ "resolved": "https://registry.npmjs.org/gcs-resumable-upload/-/gcs-resumable-upload-3.1.1.tgz",
+ "integrity": "sha512-RS1osvAicj9+MjCc6jAcVL1Pt3tg7NK2C2gXM5nqD1Gs0klF2kj5nnAFSBy97JrtslMIQzpb7iSuxaG8rFWd2A==",
"optional": true,
"requires": {
"abort-controller": "^3.0.0",
"configstore": "^5.0.0",
- "gaxios": "^2.0.0",
- "google-auth-library": "^5.0.0",
+ "extend": "^3.0.2",
+ "gaxios": "^3.0.0",
+ "google-auth-library": "^6.0.0",
"pumpify": "^2.0.0",
"stream-events": "^1.0.4"
+ },
+ "dependencies": {
+ "bignumber.js": {
+ "version": "9.0.0",
+ "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.0.0.tgz",
+ "integrity": "sha512-t/OYhhJ2SD+YGBQcjY8GzzDHEk9f3nerxjtfa6tlMXfe7frs/WozhvCNoGvpM0P3bNf3Gq5ZRMlGr5f3r4/N8A==",
+ "optional": true
+ },
+ "gaxios": {
+ "version": "3.1.0",
+ "resolved": "https://registry.npmjs.org/gaxios/-/gaxios-3.1.0.tgz",
+ "integrity": "sha512-DDTn3KXVJJigtz+g0J3vhcfbDbKtAroSTxauWsdnP57sM5KZ3d2c/3D9RKFJ86s43hfw6WULg6TXYw/AYiBlpA==",
+ "optional": true,
+ "requires": {
+ "abort-controller": "^3.0.0",
+ "extend": "^3.0.2",
+ "https-proxy-agent": "^5.0.0",
+ "is-stream": "^2.0.0",
+ "node-fetch": "^2.3.0"
+ }
+ },
+ "gcp-metadata": {
+ "version": "4.1.4",
+ "resolved": "https://registry.npmjs.org/gcp-metadata/-/gcp-metadata-4.1.4.tgz",
+ "integrity": "sha512-5J/GIH0yWt/56R3dNaNWPGQ/zXsZOddYECfJaqxFWgrZ9HC2Kvc5vl9upOgUUHKzURjAVf2N+f6tEJiojqXUuA==",
+ "optional": true,
+ "requires": {
+ "gaxios": "^3.0.0",
+ "json-bigint": "^1.0.0"
+ }
+ },
+ "google-auth-library": {
+ "version": "6.0.6",
+ "resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-6.0.6.tgz",
+ "integrity": "sha512-fWYdRdg55HSJoRq9k568jJA1lrhg9i2xgfhVIMJbskUmbDpJGHsbv9l41DGhCDXM21F9Kn4kUwdysgxSYBYJUw==",
+ "optional": true,
+ "requires": {
+ "arrify": "^2.0.0",
+ "base64-js": "^1.3.0",
+ "ecdsa-sig-formatter": "^1.0.11",
+ "fast-text-encoding": "^1.0.0",
+ "gaxios": "^3.0.0",
+ "gcp-metadata": "^4.1.0",
+ "gtoken": "^5.0.0",
+ "jws": "^4.0.0",
+ "lru-cache": "^6.0.0"
+ }
+ },
+ "google-p12-pem": {
+ "version": "3.0.2",
+ "resolved": "https://registry.npmjs.org/google-p12-pem/-/google-p12-pem-3.0.2.tgz",
+ "integrity": "sha512-tbjzndQvSIHGBLzHnhDs3cL4RBjLbLXc2pYvGH+imGVu5b4RMAttUTdnmW2UH0t11QeBTXZ7wlXPS7hrypO/tg==",
+ "optional": true,
+ "requires": {
+ "node-forge": "^0.9.0"
+ }
+ },
+ "gtoken": {
+ "version": "5.0.3",
+ "resolved": "https://registry.npmjs.org/gtoken/-/gtoken-5.0.3.tgz",
+ "integrity": "sha512-Nyd1wZCMRc2dj/mAD0LlfQLcAO06uKdpKJXvK85SGrF5+5+Bpfil9u/2aw35ltvEHjvl0h5FMKN5knEU+9JrOg==",
+ "optional": true,
+ "requires": {
+ "gaxios": "^3.0.0",
+ "google-p12-pem": "^3.0.0",
+ "jws": "^4.0.0",
+ "mime": "^2.2.0"
+ }
+ },
+ "json-bigint": {
+ "version": "1.0.0",
+ "resolved": "https://registry.npmjs.org/json-bigint/-/json-bigint-1.0.0.tgz",
+ "integrity": "sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==",
+ "optional": true,
+ "requires": {
+ "bignumber.js": "^9.0.0"
+ }
+ },
+ "lru-cache": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
+ "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
+ "optional": true,
+ "requires": {
+ "yallist": "^4.0.0"
+ }
+ },
+ "yallist": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
+ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==",
+ "optional": true
+ }
}
},
+ "get-caller-file": {
+ "version": "2.0.5",
+ "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
+ "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
+ "optional": true
+ },
"google-auth-library": {
"version": "5.10.1",
"resolved": "https://registry.npmjs.org/google-auth-library/-/google-auth-library-5.10.1.tgz",
@@ -1165,19 +1555,6 @@
"mime": "^2.2.0"
}
},
- "has": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz",
- "integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==",
- "requires": {
- "function-bind": "^1.1.1"
- }
- },
- "has-symbols": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.1.tgz",
- "integrity": "sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg=="
- },
"hash-stream-validation": {
"version": "0.2.3",
"resolved": "https://registry.npmjs.org/hash-stream-validation/-/hash-stream-validation-0.2.3.tgz",
@@ -1267,44 +1644,10 @@
"resolved": "https://registry.npmjs.org/ipaddr.js/-/ipaddr.js-1.9.1.tgz",
"integrity": "sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g=="
},
- "is-arguments": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/is-arguments/-/is-arguments-1.0.4.tgz",
- "integrity": "sha512-xPh0Rmt8NE65sNzvyUmWgI1tz3mKq74lGA0mL8LYZcoIzKOzDh6HmrYm3d18k60nHerC8A9Km8kYu87zfSFnLA==",
- "optional": true
- },
- "is-bigint": {
- "version": "1.0.0",
- "resolved": "https://registry.npmjs.org/is-bigint/-/is-bigint-1.0.0.tgz",
- "integrity": "sha512-t5mGUXC/xRheCK431ylNiSkGGpBp8bHENBcENTkDT6ppwPzEVxNGZRvgvmOEfbWkFhA7D2GEuE2mmQTr78sl2g==",
- "optional": true
- },
- "is-boolean-object": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/is-boolean-object/-/is-boolean-object-1.0.1.tgz",
- "integrity": "sha512-TqZuVwa/sppcrhUCAYkGBk7w0yxfQQnxq28fjkO53tnK9FQXmdwz2JS5+GjsWQ6RByES1K40nI+yDic5c9/aAQ==",
- "optional": true
- },
- "is-callable": {
- "version": "1.2.0",
- "resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.0.tgz",
- "integrity": "sha512-pyVD9AaGLxtg6srb2Ng6ynWJqkHU9bEM087AKck0w8QwDarTfNcpIYoU8x8Hv2Icm8u6kFJM18Dag8lyqGkviw=="
- },
- "is-date-object": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/is-date-object/-/is-date-object-1.0.2.tgz",
- "integrity": "sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g=="
- },
- "is-map": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/is-map/-/is-map-2.0.1.tgz",
- "integrity": "sha512-T/S49scO8plUiAOA2DBTBG3JHpn1yiw0kRp6dgiZ0v2/6twi5eiB0rHtHFH9ZIrvlWc6+4O+m4zg5+Z833aXgw==",
- "optional": true
- },
- "is-number-object": {
- "version": "1.0.4",
- "resolved": "https://registry.npmjs.org/is-number-object/-/is-number-object-1.0.4.tgz",
- "integrity": "sha512-zohwelOAur+5uXtk8O3GPQ1eAcu4ZX3UwxQhUlfFFMNpUd83gXgjbhJh6HmB6LUNV/ieOLQuDwJO3dWJosUeMw==",
+ "is-fullwidth-code-point": {
+ "version": "3.0.0",
+ "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
+ "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
"optional": true
},
"is-obj": {
@@ -1313,20 +1656,6 @@
"integrity": "sha512-drqDG3cbczxxEJRoOXcOjtdp1J/lyp1mNn0xaznRs8+muBhgQcrnbspox5X5fOw0HnMnbfDzvnEMEtqDEJEo8w==",
"optional": true
},
- "is-regex": {
- "version": "1.1.0",
- "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.1.0.tgz",
- "integrity": "sha512-iI97M8KTWID2la5uYXlkbSDQIg4F6o1sYboZKKTDpnDQMLtUL86zxhgDet3Q2SriaYsyGqZ6Mn2SjbRKeLHdqw==",
- "requires": {
- "has-symbols": "^1.0.1"
- }
- },
- "is-set": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/is-set/-/is-set-2.0.1.tgz",
- "integrity": "sha512-eJEzOtVyenDs1TMzSQ3kU3K+E0GUS9sno+F0OBT97xsgcJsF9nXMBtkT9/kut5JEpM7oL7X/0qxR17K3mcwIAA==",
- "optional": true
- },
"is-stream": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.0.tgz",
@@ -1337,50 +1666,12 @@
"resolved": "https://registry.npmjs.org/is-stream-ended/-/is-stream-ended-0.1.4.tgz",
"integrity": "sha512-xj0XPvmr7bQFTvirqnFr50o0hQIh6ZItDqloxt5aJrR4NQsYeSsyFQERYGCAzfindAcnKjINnwEEgLx4IqVzQw=="
},
- "is-string": {
- "version": "1.0.5",
- "resolved": "https://registry.npmjs.org/is-string/-/is-string-1.0.5.tgz",
- "integrity": "sha512-buY6VNRjhQMiF1qWDouloZlQbRhDPCebwxSjxMjxgemYT46YMd2NR0/H+fBhEfWX4A/w9TBJ+ol+okqJKFE6vQ==",
- "optional": true
- },
- "is-symbol": {
- "version": "1.0.3",
- "resolved": "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.3.tgz",
- "integrity": "sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ==",
- "requires": {
- "has-symbols": "^1.0.1"
- }
- },
- "is-typed-array": {
- "version": "1.1.3",
- "resolved": "https://registry.npmjs.org/is-typed-array/-/is-typed-array-1.1.3.tgz",
- "integrity": "sha512-BSYUBOK/HJibQ30wWkWold5txYwMUXQct9YHAQJr8fSwvZoiglcqB0pd7vEN23+Tsi9IUEjztdOSzl4qLVYGTQ==",
- "optional": true,
- "requires": {
- "available-typed-arrays": "^1.0.0",
- "es-abstract": "^1.17.4",
- "foreach": "^2.0.5",
- "has-symbols": "^1.0.1"
- }
- },
"is-typedarray": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz",
"integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=",
"optional": true
},
- "is-weakmap": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/is-weakmap/-/is-weakmap-2.0.1.tgz",
- "integrity": "sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA==",
- "optional": true
- },
- "is-weakset": {
- "version": "2.0.1",
- "resolved": "https://registry.npmjs.org/is-weakset/-/is-weakset-2.0.1.tgz",
- "integrity": "sha512-pi4vhbhVHGLxohUw7PhGsueT4vRGFoXhP7+RGN0jKIv9+8PWYCQTqtADngrxOm2g46hoH0+g8uZZBzMrvVGDmw==",
- "optional": true
- },
"isarray": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz",
@@ -1395,11 +1686,11 @@
}
},
"jsonwebtoken": {
- "version": "8.1.0",
- "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-8.1.0.tgz",
- "integrity": "sha1-xjl80uX9WD1lwAeoPce7eOaYK4M=",
+ "version": "8.5.1",
+ "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-8.5.1.tgz",
+ "integrity": "sha512-XjwVfRS6jTMsqYs0EsuJ4LGxXV14zQybNd4L2r0UvbVnSF9Af8x7p5MzbJ90Ioz/9TI41/hTCvznF/loiSzn8w==",
"requires": {
- "jws": "^3.1.4",
+ "jws": "^3.2.2",
"lodash.includes": "^4.3.0",
"lodash.isboolean": "^3.0.3",
"lodash.isinteger": "^4.0.4",
@@ -1407,8 +1698,8 @@
"lodash.isplainobject": "^4.0.6",
"lodash.isstring": "^4.0.1",
"lodash.once": "^4.0.0",
- "ms": "^2.0.0",
- "xtend": "^4.0.1"
+ "ms": "^2.1.1",
+ "semver": "^5.6.0"
},
"dependencies": {
"jwa": {
@@ -1429,6 +1720,11 @@
"jwa": "^1.4.1",
"safe-buffer": "^5.0.1"
}
+ },
+ "semver": {
+ "version": "5.7.1",
+ "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.1.tgz",
+ "integrity": "sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ=="
}
}
},
@@ -1451,10 +1747,19 @@
"safe-buffer": "^5.0.1"
}
},
+ "locate-path": {
+ "version": "5.0.0",
+ "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz",
+ "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
+ "optional": true,
+ "requires": {
+ "p-locate": "^4.1.0"
+ }
+ },
"lodash": {
- "version": "4.17.15",
- "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz",
- "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A=="
+ "version": "4.17.20",
+ "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.20.tgz",
+ "integrity": "sha512-PlhdFcillOINfeV7Ni6oF1TAEayyZBoZ8bcshTHqOYJYlrqzRK5hagpagky5o4HfCzzd1TRkXPMFq6cKk9rGmA=="
},
"lodash.at": {
"version": "4.6.0",
@@ -1597,37 +1902,6 @@
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
"integrity": "sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM="
},
- "object-inspect": {
- "version": "1.7.0",
- "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.7.0.tgz",
- "integrity": "sha512-a7pEHdh1xKIAgTySUGgLMx/xwDZskN1Ud6egYYN3EdRW4ZMPNEDUTF+hwy2LUC+Bl+SyLXANnwz/jyh/qutKUw=="
- },
- "object-is": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/object-is/-/object-is-1.1.2.tgz",
- "integrity": "sha512-5lHCz+0uufF6wZ7CRFWJN3hp8Jqblpgve06U5CMQ3f//6iDjPr2PEo9MWCjEssDsa+UZEL4PkFpr+BMop6aKzQ==",
- "optional": true,
- "requires": {
- "define-properties": "^1.1.3",
- "es-abstract": "^1.17.5"
- }
- },
- "object-keys": {
- "version": "1.1.1",
- "resolved": "https://registry.npmjs.org/object-keys/-/object-keys-1.1.1.tgz",
- "integrity": "sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA=="
- },
- "object.assign": {
- "version": "4.1.0",
- "resolved": "https://registry.npmjs.org/object.assign/-/object.assign-4.1.0.tgz",
- "integrity": "sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w==",
- "requires": {
- "define-properties": "^1.1.2",
- "function-bind": "^1.1.1",
- "has-symbols": "^1.0.0",
- "object-keys": "^1.0.11"
- }
- },
"on-finished": {
"version": "2.3.0",
"resolved": "https://registry.npmjs.org/on-finished/-/on-finished-2.3.0.tgz",
@@ -1645,9 +1919,9 @@
}
},
"onetime": {
- "version": "5.1.0",
- "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.0.tgz",
- "integrity": "sha512-5NcSkPHhwTVFIQN+TUqXoS5+dlElHXdpAWu9I0HP20YOtIi+aZ0Ct82jdlILDxjLEAWwvm+qj1m6aEtsDVmm6Q==",
+ "version": "5.1.2",
+ "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz",
+ "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==",
"optional": true,
"requires": {
"mimic-fn": "^2.1.0"
@@ -1667,6 +1941,15 @@
"p-try": "^2.0.0"
}
},
+ "p-locate": {
+ "version": "4.1.0",
+ "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz",
+ "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
+ "optional": true,
+ "requires": {
+ "p-limit": "^2.2.0"
+ }
+ },
"p-try": {
"version": "2.2.0",
"resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz",
@@ -1678,6 +1961,12 @@
"resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz",
"integrity": "sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ=="
},
+ "path-exists": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
+ "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
+ "optional": true
+ },
"path-to-regexp": {
"version": "0.1.7",
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz",
@@ -1812,15 +2101,17 @@
}
}
},
- "regexp.prototype.flags": {
- "version": "1.3.0",
- "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.3.0.tgz",
- "integrity": "sha512-2+Q0C5g951OlYlJz6yu5/M33IcsESLlLfsyIaLJaG4FA2r4yP8MvVMJUUP/fVBkSpbbbZlS5gynbEWLipiiXiQ==",
- "optional": true,
- "requires": {
- "define-properties": "^1.1.3",
- "es-abstract": "^1.17.0-next.1"
- }
+ "require-directory": {
+ "version": "2.1.1",
+ "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
+ "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=",
+ "optional": true
+ },
+ "require-main-filename": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz",
+ "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg==",
+ "optional": true
},
"retry-request": {
"version": "4.1.1",
@@ -1904,21 +2195,17 @@
"send": "0.17.1"
}
},
+ "set-blocking": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
+ "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc=",
+ "optional": true
+ },
"setprototypeof": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/setprototypeof/-/setprototypeof-1.1.1.tgz",
"integrity": "sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw=="
},
- "side-channel": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.2.tgz",
- "integrity": "sha512-7rL9YlPHg7Ancea1S96Pa8/QWb4BtXL/TZvS6B8XFetGBeuhAsfmUspK6DokBeZ64+Kj9TCNRD/30pVz1BvQNA==",
- "optional": true,
- "requires": {
- "es-abstract": "^1.17.0-next.1",
- "object-inspect": "^1.7.0"
- }
- },
"signal-exit": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz",
@@ -1955,42 +2242,15 @@
"resolved": "https://registry.npmjs.org/streamsearch/-/streamsearch-0.1.2.tgz",
"integrity": "sha1-gIudDlb8Jz2Am6VzOOkpkZoanxo="
},
- "string.prototype.trimend": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/string.prototype.trimend/-/string.prototype.trimend-1.0.1.tgz",
- "integrity": "sha512-LRPxFUaTtpqYsTeNKaFOw3R4bxIzWOnbQ837QfBylo8jIxtcbK/A/sMV7Q+OAV/vWo+7s25pOE10KYSjaSO06g==",
- "requires": {
- "define-properties": "^1.1.3",
- "es-abstract": "^1.17.5"
- }
- },
- "string.prototype.trimleft": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/string.prototype.trimleft/-/string.prototype.trimleft-2.1.2.tgz",
- "integrity": "sha512-gCA0tza1JBvqr3bfAIFJGqfdRTyPae82+KTnm3coDXkZN9wnuW3HjGgN386D7hfv5CHQYCI022/rJPVlqXyHSw==",
- "requires": {
- "define-properties": "^1.1.3",
- "es-abstract": "^1.17.5",
- "string.prototype.trimstart": "^1.0.0"
- }
- },
- "string.prototype.trimright": {
- "version": "2.1.2",
- "resolved": "https://registry.npmjs.org/string.prototype.trimright/-/string.prototype.trimright-2.1.2.tgz",
- "integrity": "sha512-ZNRQ7sY3KroTaYjRS6EbNiiHrOkjihL9aQE/8gfQ4DtAC/aEBRHFJa44OmoWxGGqXuJlfKkZW4WcXErGr+9ZFg==",
- "requires": {
- "define-properties": "^1.1.3",
- "es-abstract": "^1.17.5",
- "string.prototype.trimend": "^1.0.0"
- }
- },
- "string.prototype.trimstart": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.1.tgz",
- "integrity": "sha512-XxZn+QpvrBI1FOcg6dIpxUPgWCPuNXvMD72aaRaUQv1eD4e/Qy8i/hFTe0BUmD60p/QA6bh1avmuPTfNjqVWRw==",
+ "string-width": {
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz",
+ "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==",
+ "optional": true,
"requires": {
- "define-properties": "^1.1.3",
- "es-abstract": "^1.17.5"
+ "emoji-regex": "^8.0.0",
+ "is-fullwidth-code-point": "^3.0.0",
+ "strip-ansi": "^6.0.0"
}
},
"string_decoder": {
@@ -2008,6 +2268,15 @@
}
}
},
+ "strip-ansi": {
+ "version": "6.0.0",
+ "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz",
+ "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==",
+ "optional": true,
+ "requires": {
+ "ansi-regex": "^5.0.0"
+ }
+ },
"stubs": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/stubs/-/stubs-3.0.0.tgz",
@@ -2015,16 +2284,16 @@
"optional": true
},
"teeny-request": {
- "version": "6.0.3",
- "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-6.0.3.tgz",
- "integrity": "sha512-TZG/dfd2r6yeji19es1cUIwAlVD8y+/svB1kAC2Y0bjEyysrfbO8EZvJBRwIE6WkwmUoB7uvWLwTIhJbMXZ1Dw==",
+ "version": "7.0.0",
+ "resolved": "https://registry.npmjs.org/teeny-request/-/teeny-request-7.0.0.tgz",
+ "integrity": "sha512-kWD3sdGmIix6w7c8ZdVKxWq+3YwVPGWz+Mq0wRZXayEKY/YHb63b8uphfBzcFDmyq8frD9+UTc3wLyOhltRbtg==",
"optional": true,
"requires": {
"http-proxy-agent": "^4.0.0",
"https-proxy-agent": "^5.0.0",
"node-fetch": "^2.2.0",
"stream-events": "^1.0.5",
- "uuid": "^7.0.0"
+ "uuid": "^8.0.0"
}
},
"through2": {
@@ -2041,9 +2310,9 @@
"integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw=="
},
"tslib": {
- "version": "1.11.1",
- "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.11.1.tgz",
- "integrity": "sha512-aZW88SY8kQbU7gpV19lN24LtXh/yD4ZZg6qieAJDDg+YBsJcSmLGK9QpnUjAKVG/xefmvJGd1WUmfpT/g6AJGA=="
+ "version": "1.13.0",
+ "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.13.0.tgz",
+ "integrity": "sha512-i/6DQjL8Xf3be4K/E6Wgpekn5Qasl1usyw++dAA35Ue5orEn65VIxOA+YvNNl9HV3qv70T7CNwjODHZrLwvd1Q=="
},
"type-is": {
"version": "1.6.18",
@@ -2094,9 +2363,9 @@
"integrity": "sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM="
},
"uuid": {
- "version": "7.0.3",
- "resolved": "https://registry.npmjs.org/uuid/-/uuid-7.0.3.tgz",
- "integrity": "sha512-DPSke0pXhTZgoF/d+WSt2QaKMCFSfx7QegxEWT+JOuHF5aWrKEn0G+ztjuJg/gG8/ItK+rbPCD/yNv8yyih6Cg==",
+ "version": "8.3.0",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.0.tgz",
+ "integrity": "sha512-fX6Z5o4m6XsXBdli9g7DtWgAx+osMsRRZFKma1mIUsLCz6vRvv+pz5VNbyu9UEDzpMWulZfvpgb/cmDXVulYFQ==",
"optional": true
},
"vary": {
@@ -2124,43 +2393,21 @@
"resolved": "https://registry.npmjs.org/websocket-extensions/-/websocket-extensions-0.1.4.tgz",
"integrity": "sha512-OqedPIGOfsDlo31UNwYbCFMSaO9m9G/0faIHj5/dZFDMFqPTcx6UwqyOy3COEaEOg/9VsGIpdqn62W5KhoKSpg=="
},
- "which-boxed-primitive": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/which-boxed-primitive/-/which-boxed-primitive-1.0.1.tgz",
- "integrity": "sha512-7BT4TwISdDGBgaemWU0N0OU7FeAEJ9Oo2P1PHRm/FCWoEi2VLWC9b6xvxAA3C/NMpxg3HXVgi0sMmGbNUbNepQ==",
- "optional": true,
- "requires": {
- "is-bigint": "^1.0.0",
- "is-boolean-object": "^1.0.0",
- "is-number-object": "^1.0.3",
- "is-string": "^1.0.4",
- "is-symbol": "^1.0.2"
- }
- },
- "which-collection": {
- "version": "1.0.1",
- "resolved": "https://registry.npmjs.org/which-collection/-/which-collection-1.0.1.tgz",
- "integrity": "sha512-W8xeTUwaln8i3K/cY1nGXzdnVZlidBcagyNFtBdD5kxnb4TvGKR7FfSIS3mYpwWS1QUCutfKz8IY8RjftB0+1A==",
- "optional": true,
- "requires": {
- "is-map": "^2.0.1",
- "is-set": "^2.0.1",
- "is-weakmap": "^2.0.1",
- "is-weakset": "^2.0.1"
- }
+ "which-module": {
+ "version": "2.0.0",
+ "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz",
+ "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=",
+ "optional": true
},
- "which-typed-array": {
- "version": "1.1.2",
- "resolved": "https://registry.npmjs.org/which-typed-array/-/which-typed-array-1.1.2.tgz",
- "integrity": "sha512-KT6okrd1tE6JdZAy3o2VhMoYPh3+J6EMZLyrxBQsZflI1QCZIxMrIYLkosd8Twf+YfknVIHmYQPgJt238p8dnQ==",
+ "wrap-ansi": {
+ "version": "6.2.0",
+ "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz",
+ "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==",
"optional": true,
"requires": {
- "available-typed-arrays": "^1.0.2",
- "es-abstract": "^1.17.5",
- "foreach": "^2.0.5",
- "function-bind": "^1.1.1",
- "has-symbols": "^1.0.1",
- "is-typed-array": "^1.1.3"
+ "ansi-styles": "^4.0.0",
+ "string-width": "^4.1.0",
+ "strip-ansi": "^6.0.0"
}
},
"wrappy": {
@@ -2189,12 +2436,48 @@
"xtend": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz",
- "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="
+ "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==",
+ "optional": true
+ },
+ "y18n": {
+ "version": "4.0.0",
+ "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz",
+ "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==",
+ "optional": true
},
"yallist": {
"version": "3.1.1",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz",
"integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g=="
+ },
+ "yargs": {
+ "version": "15.4.1",
+ "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz",
+ "integrity": "sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A==",
+ "optional": true,
+ "requires": {
+ "cliui": "^6.0.0",
+ "decamelize": "^1.2.0",
+ "find-up": "^4.1.0",
+ "get-caller-file": "^2.0.1",
+ "require-directory": "^2.1.1",
+ "require-main-filename": "^2.0.0",
+ "set-blocking": "^2.0.0",
+ "string-width": "^4.2.0",
+ "which-module": "^2.0.0",
+ "y18n": "^4.0.0",
+ "yargs-parser": "^18.1.2"
+ }
+ },
+ "yargs-parser": {
+ "version": "18.1.3",
+ "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz",
+ "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==",
+ "optional": true,
+ "requires": {
+ "camelcase": "^5.0.0",
+ "decamelize": "^1.2.0"
+ }
}
}
}
diff --git a/firebase/functions/package.json b/firebase/functions/package.json
index b4eebe0523..3e506ecf77 100644
--- a/firebase/functions/package.json
+++ b/firebase/functions/package.json
@@ -9,13 +9,13 @@
"logs": "firebase functions:log"
},
"engines": {
- "node": "8"
+ "node": "10"
},
"dependencies": {
- "@google-cloud/pubsub": "2.1.0",
+ "@google-cloud/pubsub": "2.5.0",
"@google-cloud/iot": "1.8.0",
- "firebase-admin": "8.12.1",
- "firebase-functions": "3.7.0",
+ "firebase-admin": "9.1.1",
+ "firebase-functions": "3.11.0",
"extend": "3.0.2"
},
"private": true
diff --git a/firebase/public/config.html b/firebase/public/config.html
index f94216c57f..438013214b 100644
--- a/firebase/public/config.html
+++ b/firebase/public/config.html
@@ -11,8 +11,8 @@
-
-
+
+
diff --git a/firebase/public/index.html b/firebase/public/index.html
index d9ce546c59..c4928cd090 100644
--- a/firebase/public/index.html
+++ b/firebase/public/index.html
@@ -31,14 +31,14 @@
Filters:
Device ID
-
- Site
-
-
Origin
+
+ Site
+
+
Port
@@ -52,10 +52,10 @@
Filters:
-
Sites
-
Origins
+
Sites
+
Users
@@ -84,4 +84,4 @@ Users
if (typeof daq_deploy_version !== 'undefined') {
document.getElementById('deploy-version').innerHTML = daq_deploy_version;
}
-
\ No newline at end of file
+
diff --git a/firebase/public/main.js b/firebase/public/main.js
index 09f4237a5e..a6eeee087f 100644
--- a/firebase/public/main.js
+++ b/firebase/public/main.js
@@ -8,11 +8,6 @@ const display_columns = [];
const display_rows = [];
const row_timestamps = {};
-const data_state = {};
-
-let last_result_time_sec = 0;
-let heartbeatTimestamp = 0;
-
const origin_id = getQueryParam('origin');
const site_name = getQueryParam('site');
const port_id = getQueryParam('port');
@@ -21,8 +16,13 @@ const device_id = getQueryParam('device');
const run_id = getQueryParam('runid');
const from = getQueryParam('from');
const to = getQueryParam('to');
+
+const data_state = {};
+let last_result_time_sec = 0;
+let heartbeatTimestamp = 0;
var db;
-var activePorts = [];
+var activePorts = new Set();
+
document.addEventListener('DOMContentLoaded', () => {
db = firebase.firestore();
const settings = {
@@ -289,7 +289,7 @@ function watcherAdd(ref, collection, limit, handler) {
}, (e) => console.error(e));
}
-function listSites(db) {
+function listSites() {
const linkGroup = document.querySelector('#listings .sites');
db.collection('site').get().then((snapshot) => {
snapshot.forEach((site_doc) => {
@@ -303,21 +303,31 @@ function listSites(db) {
}).catch((e) => statusUpdate('registry list error', e));
}
-function listOrigins(db) {
- const linkGroup = document.querySelector('#listings .origins');
+function addOrigin(originId) {
+ db.collection('origin').doc(originId).get().then((result) => {
+ const linkGroup = document.querySelector('#listings .origins');
+ const originLink = document.createElement('a');
+ originLink.setAttribute('href', '/?origin=' + originId);
+ originLink.innerHTML = originId;
+ linkGroup.appendChild(originLink);
+ const originInfo = document.createElement('span');
+ const version = result.data() && result.data().version;
+ const updated = result.data() && result.data().updated;
+ originInfo.innerHTML = ` ${version}, ${updated}`;
+ linkGroup.appendChild(originInfo);
+ linkGroup.appendChild(document.createElement('p'));
+ });
+}
+
+function listOrigins() {
db.collection('origin').get().then((snapshot) => {
snapshot.forEach((originDoc) => {
- const origin = originDoc.id;
- const originLink = document.createElement('a');
- originLink.setAttribute('href', '/?origin=' + origin);
- originLink.innerHTML = origin;
- linkGroup.appendChild(originLink);
- linkGroup.appendChild(document.createElement('p'));
+ addOrigin(originDoc.id);
});
}).catch((e) => statusUpdate('origin list error', e));
}
-function listUsers(db) {
+function listUsers() {
const link_group = document.querySelector('#listings .users');
db.collection('users').get().then((snapshot) => {
snapshot.forEach((user_doc) => {
@@ -354,9 +364,9 @@ function dashboardSetup() {
triggerOrigin(db, origin_id);
} else {
document.getElementById('listings').classList.add('active');
- listSites(db);
- listOrigins(db);
- listUsers(db);
+ listOrigins();
+ listSites();
+ listUsers();
}
return origin_id;
diff --git a/firebase/public/protos.hash b/firebase/public/protos.hash
index bab39e76c2..4ddf9485f8 100644
--- a/firebase/public/protos.hash
+++ b/firebase/public/protos.hash
@@ -1 +1 @@
-b7a56a30dafe26576d6bdef00dfb57dc07a016ac proto/system_config.proto
+f76de649c75ed722febfc0750c53672f22af5ab1 proto/system_config.proto
diff --git a/firebase/public/protos.html b/firebase/public/protos.html
index d24bed4958..f4cf44b2e7 100644
--- a/firebase/public/protos.html
+++ b/firebase/public/protos.html
@@ -198,6 +198,14 @@ Table of Contents
M SwitchSetup
+
+ M USISetup
+
+
+
+
+ E RunTriggerType
+
@@ -430,10 +438,10 @@ DaqConfig
- fail_hook
+ topology_hook
string
- Hook for failure diagnostics.
+ Hook for device topology updates.
@@ -478,6 +486,27 @@ DaqConfig
Set time between port disconnect and host tests shutdown
+
+ usi_setup
+ USISetup
+
+ USI url
+
+
+
+ run_trigger_type
+ RunTriggerType
+
+ Configures events that trigger a DAQ run
+
+
+
+ debug_mode
+ bool
+
+ verbose output
+
+
@@ -613,7 +642,14 @@ SwitchSetup
lo_port
int32
- Local port of open flow controller
+ Local port of DAQ OpenFlow controller
+
+
+
+ alt_port
+ int32
+
+ Local port for an alternate OpenFlow controller
@@ -679,7 +715,61 @@ SwitchSetup
+ USISetup
+ USI paramters
+
+
+
+ Field Type Label Description
+
+
+
+
+ url
+ string
+
+
+
+
+
+ rpc_timeout_sec
+ int32
+
+
+
+
+
+
+
+
+
+
+
+
+
+ RunTriggerType
+
+
+
+ Name Number Description
+
+
+
+
+ PORT
+ 0
+
+
+
+
+ VLAN
+ 1
+
+
+
+
+
diff --git a/libs/proto/system_config_pb2.py b/libs/proto/system_config_pb2.py
index e4746e687f..f2784ab726 100644
--- a/libs/proto/system_config_pb2.py
+++ b/libs/proto/system_config_pb2.py
@@ -1,7 +1,9 @@
-# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: daq/proto/system_config.proto
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
@@ -18,9 +20,34 @@
package='',
syntax='proto3',
serialized_options=None,
- serialized_pb=b'\n\x1d\x64\x61q/proto/system_config.proto\"\xfb\x07\n\tDaqConfig\x12\x18\n\x10site_description\x18\x01 \x01(\t\x12\x18\n\x10monitor_scan_sec\x18\x02 \x01(\x05\x12\x1b\n\x13\x64\x65\x66\x61ult_timeout_sec\x18\x03 \x01(\x05\x12\x12\n\nsettle_sec\x18& \x01(\x05\x12\x11\n\tbase_conf\x18\x04 \x01(\t\x12\x11\n\tsite_path\x18\x05 \x01(\t\x12\x1f\n\x17initial_dhcp_lease_time\x18\x06 \x01(\t\x12\x17\n\x0f\x64hcp_lease_time\x18\x07 \x01(\t\x12\x19\n\x11\x64hcp_response_sec\x18\' \x01(\x05\x12\x1e\n\x16long_dhcp_response_sec\x18\x08 \x01(\x05\x12\"\n\x0cswitch_setup\x18\t \x01(\x0b\x32\x0c.SwitchSetup\x12\x12\n\nhost_tests\x18\x10 \x01(\t\x12\x13\n\x0b\x62uild_tests\x18$ \x01(\x08\x12\x11\n\trun_limit\x18\x11 \x01(\x05\x12\x11\n\tfail_mode\x18\x12 \x01(\x08\x12\x13\n\x0bsingle_shot\x18\" \x01(\x08\x12\x15\n\rresult_linger\x18\x13 \x01(\x08\x12\x0f\n\x07no_test\x18\x14 \x01(\x08\x12\x11\n\tkeep_hold\x18( \x01(\x08\x12\x14\n\x0c\x64\x61q_loglevel\x18\x15 \x01(\t\x12\x18\n\x10mininet_loglevel\x18\x16 \x01(\t\x12\x13\n\x0b\x66inish_hook\x18# \x01(\t\x12\x10\n\x08gcp_cred\x18\x17 \x01(\t\x12\x11\n\tgcp_topic\x18\x18 \x01(\t\x12\x13\n\x0bschema_path\x18\x19 \x01(\t\x12\x11\n\tmud_files\x18\x1a \x01(\t\x12\x14\n\x0c\x64\x65vice_specs\x18\x1b \x01(\t\x12\x13\n\x0btest_config\x18\x1c \x01(\t\x12\x19\n\x11port_debounce_sec\x18\x1d \x01(\x05\x12\x11\n\tfail_hook\x18\x1e \x01(\t\x12\x17\n\x0f\x64\x65vice_template\x18\x1f \x01(\t\x12\x14\n\x0csite_reports\x18 \x01(\t\x12\x1f\n\x17run_data_retention_days\x18! 
\x01(\x02\x12.\n\ninterfaces\x18% \x03(\x0b\x32\x1a.DaqConfig.InterfacesEntry\x12/\n\x0b\x66\x61il_module\x18/ \x03(\x0b\x32\x1a.DaqConfig.FailModuleEntry\x12\x1d\n\x15port_flap_timeout_sec\x18\x30 \x01(\x05\x1a=\n\x0fInterfacesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x19\n\x05value\x18\x02 \x01(\x0b\x32\n.Interface:\x02\x38\x01\x1a\x31\n\x0f\x46\x61ilModuleEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xe2\x01\n\x0bSwitchSetup\x12\x11\n\tctrl_intf\x18\t \x01(\t\x12\x0f\n\x07ip_addr\x18\x0b \x01(\t\x12\x13\n\x0buplink_port\x18\r \x01(\x05\x12\x0f\n\x07lo_port\x18\x0e \x01(\x05\x12\x0f\n\x07lo_addr\x18\x0f \x01(\t\x12\x11\n\tmods_addr\x18\x10 \x01(\t\x12\x0f\n\x07of_dpid\x18) \x01(\t\x12\x11\n\tdata_intf\x18* \x01(\t\x12\x0e\n\x06\x65xt_br\x18+ \x01(\t\x12\r\n\x05model\x18, \x01(\t\x12\x10\n\x08username\x18- \x01(\t\x12\x10\n\x08password\x18. \x01(\t\"\'\n\tInterface\x12\x0c\n\x04opts\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\x05\x62\x06proto3'
+ serialized_pb=_b('\n\x1d\x64\x61q/proto/system_config.proto\"\xdc\x08\n\tDaqConfig\x12\x18\n\x10site_description\x18\x01 \x01(\t\x12\x18\n\x10monitor_scan_sec\x18\x02 \x01(\x05\x12\x1b\n\x13\x64\x65\x66\x61ult_timeout_sec\x18\x03 \x01(\x05\x12\x12\n\nsettle_sec\x18& \x01(\x05\x12\x11\n\tbase_conf\x18\x04 \x01(\t\x12\x11\n\tsite_path\x18\x05 \x01(\t\x12\x1f\n\x17initial_dhcp_lease_time\x18\x06 \x01(\t\x12\x17\n\x0f\x64hcp_lease_time\x18\x07 \x01(\t\x12\x19\n\x11\x64hcp_response_sec\x18\' \x01(\x05\x12\x1e\n\x16long_dhcp_response_sec\x18\x08 \x01(\x05\x12\"\n\x0cswitch_setup\x18\t \x01(\x0b\x32\x0c.SwitchSetup\x12\x12\n\nhost_tests\x18\x10 \x01(\t\x12\x13\n\x0b\x62uild_tests\x18$ \x01(\x08\x12\x11\n\trun_limit\x18\x11 \x01(\x05\x12\x11\n\tfail_mode\x18\x12 \x01(\x08\x12\x13\n\x0bsingle_shot\x18\" \x01(\x08\x12\x15\n\rresult_linger\x18\x13 \x01(\x08\x12\x0f\n\x07no_test\x18\x14 \x01(\x08\x12\x11\n\tkeep_hold\x18( \x01(\x08\x12\x14\n\x0c\x64\x61q_loglevel\x18\x15 \x01(\t\x12\x18\n\x10mininet_loglevel\x18\x16 \x01(\t\x12\x13\n\x0b\x66inish_hook\x18# \x01(\t\x12\x10\n\x08gcp_cred\x18\x17 \x01(\t\x12\x11\n\tgcp_topic\x18\x18 \x01(\t\x12\x13\n\x0bschema_path\x18\x19 \x01(\t\x12\x11\n\tmud_files\x18\x1a \x01(\t\x12\x14\n\x0c\x64\x65vice_specs\x18\x1b \x01(\t\x12\x13\n\x0btest_config\x18\x1c \x01(\t\x12\x19\n\x11port_debounce_sec\x18\x1d \x01(\x05\x12\x15\n\rtopology_hook\x18\x1e \x01(\t\x12\x17\n\x0f\x64\x65vice_template\x18\x1f \x01(\t\x12\x14\n\x0csite_reports\x18 \x01(\t\x12\x1f\n\x17run_data_retention_days\x18! 
\x01(\x02\x12.\n\ninterfaces\x18% \x03(\x0b\x32\x1a.DaqConfig.InterfacesEntry\x12/\n\x0b\x66\x61il_module\x18/ \x03(\x0b\x32\x1a.DaqConfig.FailModuleEntry\x12\x1d\n\x15port_flap_timeout_sec\x18\x30 \x01(\x05\x12\x1c\n\tusi_setup\x18\x31 \x01(\x0b\x32\t.USISetup\x12)\n\x10run_trigger_type\x18\x32 \x01(\x0e\x32\x0f.RunTriggerType\x12\x12\n\ndebug_mode\x18\x33 \x01(\x08\x1a=\n\x0fInterfacesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x19\n\x05value\x18\x02 \x01(\x0b\x32\n.Interface:\x02\x38\x01\x1a\x31\n\x0f\x46\x61ilModuleEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"0\n\x08USISetup\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\x17\n\x0frpc_timeout_sec\x18\x02 \x01(\x05\"\xf4\x01\n\x0bSwitchSetup\x12\x11\n\tctrl_intf\x18\t \x01(\t\x12\x0f\n\x07ip_addr\x18\x0b \x01(\t\x12\x13\n\x0buplink_port\x18\r \x01(\x05\x12\x0f\n\x07lo_port\x18\x0e \x01(\x05\x12\x10\n\x08\x61lt_port\x18\x10 \x01(\x05\x12\x0f\n\x07lo_addr\x18\x12 \x01(\t\x12\x11\n\tmods_addr\x18\x14 \x01(\t\x12\x0f\n\x07of_dpid\x18) \x01(\t\x12\x11\n\tdata_intf\x18* \x01(\t\x12\x0e\n\x06\x65xt_br\x18+ \x01(\t\x12\r\n\x05model\x18, \x01(\t\x12\x10\n\x08username\x18- \x01(\t\x12\x10\n\x08password\x18. \x01(\t\"\'\n\tInterface\x12\x0c\n\x04opts\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\x05*$\n\x0eRunTriggerType\x12\x08\n\x04PORT\x10\x00\x12\x08\n\x04VLAN\x10\x01\x62\x06proto3')
)
+_RUNTRIGGERTYPE = _descriptor.EnumDescriptor(
+ name='RunTriggerType',
+ full_name='RunTriggerType',
+ filename=None,
+ file=DESCRIPTOR,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='PORT', index=0, number=0,
+ serialized_options=None,
+ type=None),
+ _descriptor.EnumValueDescriptor(
+ name='VLAN', index=1, number=1,
+ serialized_options=None,
+ type=None),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=1490,
+ serialized_end=1526,
+)
+_sym_db.RegisterEnumDescriptor(_RUNTRIGGERTYPE)
+
+RunTriggerType = enum_type_wrapper.EnumTypeWrapper(_RUNTRIGGERTYPE)
+PORT = 0
+VLAN = 1
@@ -34,7 +61,7 @@
_descriptor.FieldDescriptor(
name='key', full_name='DaqConfig.InterfacesEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
@@ -51,14 +78,14 @@
nested_types=[],
enum_types=[
],
- serialized_options=b'8\001',
+ serialized_options=_b('8\001'),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
- serialized_start=941,
- serialized_end=1002,
+ serialized_start=1038,
+ serialized_end=1099,
)
_DAQCONFIG_FAILMODULEENTRY = _descriptor.Descriptor(
@@ -71,14 +98,14 @@
_descriptor.FieldDescriptor(
name='key', full_name='DaqConfig.FailModuleEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='DaqConfig.FailModuleEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
@@ -88,14 +115,14 @@
nested_types=[],
enum_types=[
],
- serialized_options=b'8\001',
+ serialized_options=_b('8\001'),
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
- serialized_start=1004,
- serialized_end=1053,
+ serialized_start=1101,
+ serialized_end=1150,
)
_DAQCONFIG = _descriptor.Descriptor(
@@ -108,7 +135,7 @@
_descriptor.FieldDescriptor(
name='site_description', full_name='DaqConfig.site_description', index=0,
number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
@@ -136,28 +163,28 @@
_descriptor.FieldDescriptor(
name='base_conf', full_name='DaqConfig.base_conf', index=4,
number=4, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='site_path', full_name='DaqConfig.site_path', index=5,
number=5, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='initial_dhcp_lease_time', full_name='DaqConfig.initial_dhcp_lease_time', index=6,
number=6, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='dhcp_lease_time', full_name='DaqConfig.dhcp_lease_time', index=7,
number=7, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
@@ -185,7 +212,7 @@
_descriptor.FieldDescriptor(
name='host_tests', full_name='DaqConfig.host_tests', index=11,
number=16, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
@@ -241,63 +268,63 @@
_descriptor.FieldDescriptor(
name='daq_loglevel', full_name='DaqConfig.daq_loglevel', index=19,
number=21, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='mininet_loglevel', full_name='DaqConfig.mininet_loglevel', index=20,
number=22, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='finish_hook', full_name='DaqConfig.finish_hook', index=21,
number=35, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='gcp_cred', full_name='DaqConfig.gcp_cred', index=22,
number=23, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='gcp_topic', full_name='DaqConfig.gcp_topic', index=23,
number=24, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='schema_path', full_name='DaqConfig.schema_path', index=24,
number=25, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='mud_files', full_name='DaqConfig.mud_files', index=25,
number=26, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='device_specs', full_name='DaqConfig.device_specs', index=26,
number=27, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='test_config', full_name='DaqConfig.test_config', index=27,
number=28, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
@@ -309,23 +336,23 @@
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
- name='fail_hook', full_name='DaqConfig.fail_hook', index=29,
+ name='topology_hook', full_name='DaqConfig.topology_hook', index=29,
number=30, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='device_template', full_name='DaqConfig.device_template', index=30,
number=31, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='site_reports', full_name='DaqConfig.site_reports', index=31,
number=32, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
@@ -357,6 +384,27 @@
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='usi_setup', full_name='DaqConfig.usi_setup', index=36,
+ number=49, type=11, cpp_type=10, label=1,
+ has_default_value=False, default_value=None,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='run_trigger_type', full_name='DaqConfig.run_trigger_type', index=37,
+ number=50, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='debug_mode', full_name='DaqConfig.debug_mode', index=38,
+ number=51, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
@@ -370,7 +418,45 @@
oneofs=[
],
serialized_start=34,
- serialized_end=1053,
+ serialized_end=1150,
+)
+
+
+_USISETUP = _descriptor.Descriptor(
+ name='USISetup',
+ full_name='USISetup',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='url', full_name='USISetup.url', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='rpc_timeout_sec', full_name='USISetup.rpc_timeout_sec', index=1,
+ number=2, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=1152,
+ serialized_end=1200,
)
@@ -384,14 +470,14 @@
_descriptor.FieldDescriptor(
name='ctrl_intf', full_name='SwitchSetup.ctrl_intf', index=0,
number=9, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ip_addr', full_name='SwitchSetup.ip_addr', index=1,
number=11, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
@@ -410,58 +496,65 @@
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
- name='lo_addr', full_name='SwitchSetup.lo_addr', index=4,
- number=15, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ name='alt_port', full_name='SwitchSetup.alt_port', index=4,
+ number=16, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
- name='mods_addr', full_name='SwitchSetup.mods_addr', index=5,
- number=16, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ name='lo_addr', full_name='SwitchSetup.lo_addr', index=5,
+ number=18, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
- name='of_dpid', full_name='SwitchSetup.of_dpid', index=6,
+ name='mods_addr', full_name='SwitchSetup.mods_addr', index=6,
+ number=20, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=_b("").decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR),
+ _descriptor.FieldDescriptor(
+ name='of_dpid', full_name='SwitchSetup.of_dpid', index=7,
number=41, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
- name='data_intf', full_name='SwitchSetup.data_intf', index=7,
+ name='data_intf', full_name='SwitchSetup.data_intf', index=8,
number=42, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
- name='ext_br', full_name='SwitchSetup.ext_br', index=8,
+ name='ext_br', full_name='SwitchSetup.ext_br', index=9,
number=43, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
- name='model', full_name='SwitchSetup.model', index=9,
+ name='model', full_name='SwitchSetup.model', index=10,
number=44, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
- name='username', full_name='SwitchSetup.username', index=10,
+ name='username', full_name='SwitchSetup.username', index=11,
number=45, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
- name='password', full_name='SwitchSetup.password', index=11,
+ name='password', full_name='SwitchSetup.password', index=12,
number=46, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
@@ -477,8 +570,8 @@
extension_ranges=[],
oneofs=[
],
- serialized_start=1056,
- serialized_end=1282,
+ serialized_start=1203,
+ serialized_end=1447,
)
@@ -492,7 +585,7 @@
_descriptor.FieldDescriptor(
name='opts', full_name='Interface.opts', index=0,
number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
+ has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
@@ -515,8 +608,8 @@
extension_ranges=[],
oneofs=[
],
- serialized_start=1284,
- serialized_end=1323,
+ serialized_start=1449,
+ serialized_end=1488,
)
_DAQCONFIG_INTERFACESENTRY.fields_by_name['value'].message_type = _INTERFACE
@@ -525,46 +618,57 @@
_DAQCONFIG.fields_by_name['switch_setup'].message_type = _SWITCHSETUP
_DAQCONFIG.fields_by_name['interfaces'].message_type = _DAQCONFIG_INTERFACESENTRY
_DAQCONFIG.fields_by_name['fail_module'].message_type = _DAQCONFIG_FAILMODULEENTRY
+_DAQCONFIG.fields_by_name['usi_setup'].message_type = _USISETUP
+_DAQCONFIG.fields_by_name['run_trigger_type'].enum_type = _RUNTRIGGERTYPE
DESCRIPTOR.message_types_by_name['DaqConfig'] = _DAQCONFIG
+DESCRIPTOR.message_types_by_name['USISetup'] = _USISETUP
DESCRIPTOR.message_types_by_name['SwitchSetup'] = _SWITCHSETUP
DESCRIPTOR.message_types_by_name['Interface'] = _INTERFACE
+DESCRIPTOR.enum_types_by_name['RunTriggerType'] = _RUNTRIGGERTYPE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-DaqConfig = _reflection.GeneratedProtocolMessageType('DaqConfig', (_message.Message,), {
+DaqConfig = _reflection.GeneratedProtocolMessageType('DaqConfig', (_message.Message,), dict(
- 'InterfacesEntry' : _reflection.GeneratedProtocolMessageType('InterfacesEntry', (_message.Message,), {
- 'DESCRIPTOR' : _DAQCONFIG_INTERFACESENTRY,
- '__module__' : 'daq.proto.system_config_pb2'
+ InterfacesEntry = _reflection.GeneratedProtocolMessageType('InterfacesEntry', (_message.Message,), dict(
+ DESCRIPTOR = _DAQCONFIG_INTERFACESENTRY,
+ __module__ = 'daq.proto.system_config_pb2'
# @@protoc_insertion_point(class_scope:DaqConfig.InterfacesEntry)
- })
+ ))
,
- 'FailModuleEntry' : _reflection.GeneratedProtocolMessageType('FailModuleEntry', (_message.Message,), {
- 'DESCRIPTOR' : _DAQCONFIG_FAILMODULEENTRY,
- '__module__' : 'daq.proto.system_config_pb2'
+ FailModuleEntry = _reflection.GeneratedProtocolMessageType('FailModuleEntry', (_message.Message,), dict(
+ DESCRIPTOR = _DAQCONFIG_FAILMODULEENTRY,
+ __module__ = 'daq.proto.system_config_pb2'
# @@protoc_insertion_point(class_scope:DaqConfig.FailModuleEntry)
- })
+ ))
,
- 'DESCRIPTOR' : _DAQCONFIG,
- '__module__' : 'daq.proto.system_config_pb2'
+ DESCRIPTOR = _DAQCONFIG,
+ __module__ = 'daq.proto.system_config_pb2'
# @@protoc_insertion_point(class_scope:DaqConfig)
- })
+ ))
_sym_db.RegisterMessage(DaqConfig)
_sym_db.RegisterMessage(DaqConfig.InterfacesEntry)
_sym_db.RegisterMessage(DaqConfig.FailModuleEntry)
-SwitchSetup = _reflection.GeneratedProtocolMessageType('SwitchSetup', (_message.Message,), {
- 'DESCRIPTOR' : _SWITCHSETUP,
- '__module__' : 'daq.proto.system_config_pb2'
+USISetup = _reflection.GeneratedProtocolMessageType('USISetup', (_message.Message,), dict(
+ DESCRIPTOR = _USISETUP,
+ __module__ = 'daq.proto.system_config_pb2'
+ # @@protoc_insertion_point(class_scope:USISetup)
+ ))
+_sym_db.RegisterMessage(USISetup)
+
+SwitchSetup = _reflection.GeneratedProtocolMessageType('SwitchSetup', (_message.Message,), dict(
+ DESCRIPTOR = _SWITCHSETUP,
+ __module__ = 'daq.proto.system_config_pb2'
# @@protoc_insertion_point(class_scope:SwitchSetup)
- })
+ ))
_sym_db.RegisterMessage(SwitchSetup)
-Interface = _reflection.GeneratedProtocolMessageType('Interface', (_message.Message,), {
- 'DESCRIPTOR' : _INTERFACE,
- '__module__' : 'daq.proto.system_config_pb2'
+Interface = _reflection.GeneratedProtocolMessageType('Interface', (_message.Message,), dict(
+ DESCRIPTOR = _INTERFACE,
+ __module__ = 'daq.proto.system_config_pb2'
# @@protoc_insertion_point(class_scope:Interface)
- })
+ ))
_sym_db.RegisterMessage(Interface)
diff --git a/libs/proto/usi_pb2.py b/libs/proto/usi_pb2.py
new file mode 100644
index 0000000000..780ac2e1c2
--- /dev/null
+++ b/libs/proto/usi_pb2.py
@@ -0,0 +1,486 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler. DO NOT EDIT!
+# source: usi.proto
+
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+ name='usi.proto',
+ package='usi',
+ syntax='proto3',
+ serialized_options=b'\n\004grpcB\010USIProtoP\001',
+ create_key=_descriptor._internal_create_key,
+ serialized_pb=b'\n\tusi.proto\x12\x03usi\"\'\n\x14SwitchActionResponse\x12\x0f\n\x07success\x18\x01 \x01(\x08\"\xc9\x01\n\rPowerResponse\x12!\n\x19\x63urrent_power_consumption\x18\x01 \x01(\x02\x12\x1d\n\x15max_power_consumption\x18\x02 \x01(\x02\x12$\n\x0bpoe_support\x18\x03 \x01(\x0e\x32\x0f.usi.POESupport\x12\"\n\npoe_status\x18\x04 \x01(\x0e\x32\x0e.usi.POEStatus\x12,\n\x0fpoe_negotiation\x18\x05 \x01(\x0e\x32\x13.usi.POENegotiation\"]\n\x11InterfaceResponse\x12$\n\x0blink_status\x18\x01 \x01(\x0e\x32\x0f.usi.LinkStatus\x12\x12\n\nlink_speed\x18\x02 \x01(\x05\x12\x0e\n\x06\x64uplex\x18\x03 \x01(\t\"w\n\nSwitchInfo\x12\x0f\n\x07ip_addr\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65vice_port\x18\x03 \x01(\x05\x12\x1f\n\x05model\x18\x04 \x01(\x0e\x32\x10.usi.SwitchModel\x12\x10\n\x08username\x18\x05 \x01(\t\x12\x10\n\x08password\x18\x06 \x01(\t*F\n\x0bSwitchModel\x12\x17\n\x13\x41LLIED_TELESIS_X230\x10\x00\x12\x0e\n\nCISCO_9300\x10\x01\x12\x0e\n\nOVS_SWITCH\x10\x02*\x1e\n\nLinkStatus\x12\x06\n\x02UP\x10\x00\x12\x08\n\x04\x44OWN\x10\x01*\'\n\nPOESupport\x12\x0b\n\x07\x45NABLED\x10\x00\x12\x0c\n\x08\x44ISABLED\x10\x01*1\n\tPOEStatus\x12\x06\n\x02ON\x10\x00\x12\x07\n\x03OFF\x10\x01\x12\t\n\x05\x46\x41ULT\x10\x02\x12\x08\n\x04\x44\x45NY\x10\x03*C\n\x0ePOENegotiation\x12\x17\n\x13NEGOTIATION_ENABLED\x10\x00\x12\x18\n\x14NEGOTIATION_DISABLED\x10\x01\x32\xef\x01\n\nUSIService\x12\x31\n\x08GetPower\x12\x0f.usi.SwitchInfo\x1a\x12.usi.PowerResponse\"\x00\x12\x39\n\x0cGetInterface\x12\x0f.usi.SwitchInfo\x1a\x16.usi.InterfaceResponse\"\x00\x12:\n\ndisconnect\x12\x0f.usi.SwitchInfo\x1a\x19.usi.SwitchActionResponse\"\x00\x12\x37\n\x07\x63onnect\x12\x0f.usi.SwitchInfo\x1a\x19.usi.SwitchActionResponse\"\x00\x42\x12\n\x04grpcB\x08USIProtoP\x01\x62\x06proto3'
+)
+
+_SWITCHMODEL = _descriptor.EnumDescriptor(
+ name='SwitchModel',
+ full_name='usi.SwitchModel',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='ALLIED_TELESIS_X230', index=0, number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='CISCO_9300', index=1, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='OVS_SWITCH', index=2, number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=479,
+ serialized_end=549,
+)
+_sym_db.RegisterEnumDescriptor(_SWITCHMODEL)
+
+SwitchModel = enum_type_wrapper.EnumTypeWrapper(_SWITCHMODEL)
+_LINKSTATUS = _descriptor.EnumDescriptor(
+ name='LinkStatus',
+ full_name='usi.LinkStatus',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='UP', index=0, number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='DOWN', index=1, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=551,
+ serialized_end=581,
+)
+_sym_db.RegisterEnumDescriptor(_LINKSTATUS)
+
+LinkStatus = enum_type_wrapper.EnumTypeWrapper(_LINKSTATUS)
+_POESUPPORT = _descriptor.EnumDescriptor(
+ name='POESupport',
+ full_name='usi.POESupport',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='ENABLED', index=0, number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='DISABLED', index=1, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=583,
+ serialized_end=622,
+)
+_sym_db.RegisterEnumDescriptor(_POESUPPORT)
+
+POESupport = enum_type_wrapper.EnumTypeWrapper(_POESUPPORT)
+_POESTATUS = _descriptor.EnumDescriptor(
+ name='POEStatus',
+ full_name='usi.POEStatus',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='ON', index=0, number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='OFF', index=1, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='FAULT', index=2, number=2,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='DENY', index=3, number=3,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=624,
+ serialized_end=673,
+)
+_sym_db.RegisterEnumDescriptor(_POESTATUS)
+
+POEStatus = enum_type_wrapper.EnumTypeWrapper(_POESTATUS)
+_POENEGOTIATION = _descriptor.EnumDescriptor(
+ name='POENegotiation',
+ full_name='usi.POENegotiation',
+ filename=None,
+ file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
+ values=[
+ _descriptor.EnumValueDescriptor(
+ name='NEGOTIATION_ENABLED', index=0, number=0,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ _descriptor.EnumValueDescriptor(
+ name='NEGOTIATION_DISABLED', index=1, number=1,
+ serialized_options=None,
+ type=None,
+ create_key=_descriptor._internal_create_key),
+ ],
+ containing_type=None,
+ serialized_options=None,
+ serialized_start=675,
+ serialized_end=742,
+)
+_sym_db.RegisterEnumDescriptor(_POENEGOTIATION)
+
+POENegotiation = enum_type_wrapper.EnumTypeWrapper(_POENEGOTIATION)
+ALLIED_TELESIS_X230 = 0
+CISCO_9300 = 1
+OVS_SWITCH = 2
+UP = 0
+DOWN = 1
+ENABLED = 0
+DISABLED = 1
+ON = 0
+OFF = 1
+FAULT = 2
+DENY = 3
+NEGOTIATION_ENABLED = 0
+NEGOTIATION_DISABLED = 1
+
+
+
+_SWITCHACTIONRESPONSE = _descriptor.Descriptor(
+ name='SwitchActionResponse',
+ full_name='usi.SwitchActionResponse',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='success', full_name='usi.SwitchActionResponse.success', index=0,
+ number=1, type=8, cpp_type=7, label=1,
+ has_default_value=False, default_value=False,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=18,
+ serialized_end=57,
+)
+
+
+_POWERRESPONSE = _descriptor.Descriptor(
+ name='PowerResponse',
+ full_name='usi.PowerResponse',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='current_power_consumption', full_name='usi.PowerResponse.current_power_consumption', index=0,
+ number=1, type=2, cpp_type=6, label=1,
+ has_default_value=False, default_value=float(0),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='max_power_consumption', full_name='usi.PowerResponse.max_power_consumption', index=1,
+ number=2, type=2, cpp_type=6, label=1,
+ has_default_value=False, default_value=float(0),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='poe_support', full_name='usi.PowerResponse.poe_support', index=2,
+ number=3, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='poe_status', full_name='usi.PowerResponse.poe_status', index=3,
+ number=4, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='poe_negotiation', full_name='usi.PowerResponse.poe_negotiation', index=4,
+ number=5, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=60,
+ serialized_end=261,
+)
+
+
+_INTERFACERESPONSE = _descriptor.Descriptor(
+ name='InterfaceResponse',
+ full_name='usi.InterfaceResponse',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='link_status', full_name='usi.InterfaceResponse.link_status', index=0,
+ number=1, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='link_speed', full_name='usi.InterfaceResponse.link_speed', index=1,
+ number=2, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='duplex', full_name='usi.InterfaceResponse.duplex', index=2,
+ number=3, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=263,
+ serialized_end=356,
+)
+
+
+_SWITCHINFO = _descriptor.Descriptor(
+ name='SwitchInfo',
+ full_name='usi.SwitchInfo',
+ filename=None,
+ file=DESCRIPTOR,
+ containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[
+ _descriptor.FieldDescriptor(
+ name='ip_addr', full_name='usi.SwitchInfo.ip_addr', index=0,
+ number=1, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='device_port', full_name='usi.SwitchInfo.device_port', index=1,
+ number=3, type=5, cpp_type=1, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='model', full_name='usi.SwitchInfo.model', index=2,
+ number=4, type=14, cpp_type=8, label=1,
+ has_default_value=False, default_value=0,
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='username', full_name='usi.SwitchInfo.username', index=3,
+ number=5, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ _descriptor.FieldDescriptor(
+ name='password', full_name='usi.SwitchInfo.password', index=4,
+ number=6, type=9, cpp_type=9, label=1,
+ has_default_value=False, default_value=b"".decode('utf-8'),
+ message_type=None, enum_type=None, containing_type=None,
+ is_extension=False, extension_scope=None,
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
+ ],
+ extensions=[
+ ],
+ nested_types=[],
+ enum_types=[
+ ],
+ serialized_options=None,
+ is_extendable=False,
+ syntax='proto3',
+ extension_ranges=[],
+ oneofs=[
+ ],
+ serialized_start=358,
+ serialized_end=477,
+)
+
+_POWERRESPONSE.fields_by_name['poe_support'].enum_type = _POESUPPORT
+_POWERRESPONSE.fields_by_name['poe_status'].enum_type = _POESTATUS
+_POWERRESPONSE.fields_by_name['poe_negotiation'].enum_type = _POENEGOTIATION
+_INTERFACERESPONSE.fields_by_name['link_status'].enum_type = _LINKSTATUS
+_SWITCHINFO.fields_by_name['model'].enum_type = _SWITCHMODEL
+DESCRIPTOR.message_types_by_name['SwitchActionResponse'] = _SWITCHACTIONRESPONSE
+DESCRIPTOR.message_types_by_name['PowerResponse'] = _POWERRESPONSE
+DESCRIPTOR.message_types_by_name['InterfaceResponse'] = _INTERFACERESPONSE
+DESCRIPTOR.message_types_by_name['SwitchInfo'] = _SWITCHINFO
+DESCRIPTOR.enum_types_by_name['SwitchModel'] = _SWITCHMODEL
+DESCRIPTOR.enum_types_by_name['LinkStatus'] = _LINKSTATUS
+DESCRIPTOR.enum_types_by_name['POESupport'] = _POESUPPORT
+DESCRIPTOR.enum_types_by_name['POEStatus'] = _POESTATUS
+DESCRIPTOR.enum_types_by_name['POENegotiation'] = _POENEGOTIATION
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+SwitchActionResponse = _reflection.GeneratedProtocolMessageType('SwitchActionResponse', (_message.Message,), {
+ 'DESCRIPTOR' : _SWITCHACTIONRESPONSE,
+ '__module__' : 'usi_pb2'
+ # @@protoc_insertion_point(class_scope:usi.SwitchActionResponse)
+ })
+_sym_db.RegisterMessage(SwitchActionResponse)
+
+PowerResponse = _reflection.GeneratedProtocolMessageType('PowerResponse', (_message.Message,), {
+ 'DESCRIPTOR' : _POWERRESPONSE,
+ '__module__' : 'usi_pb2'
+ # @@protoc_insertion_point(class_scope:usi.PowerResponse)
+ })
+_sym_db.RegisterMessage(PowerResponse)
+
+InterfaceResponse = _reflection.GeneratedProtocolMessageType('InterfaceResponse', (_message.Message,), {
+ 'DESCRIPTOR' : _INTERFACERESPONSE,
+ '__module__' : 'usi_pb2'
+ # @@protoc_insertion_point(class_scope:usi.InterfaceResponse)
+ })
+_sym_db.RegisterMessage(InterfaceResponse)
+
+SwitchInfo = _reflection.GeneratedProtocolMessageType('SwitchInfo', (_message.Message,), {
+ 'DESCRIPTOR' : _SWITCHINFO,
+ '__module__' : 'usi_pb2'
+ # @@protoc_insertion_point(class_scope:usi.SwitchInfo)
+ })
+_sym_db.RegisterMessage(SwitchInfo)
+
+
+DESCRIPTOR._options = None
+
+_USISERVICE = _descriptor.ServiceDescriptor(
+ name='USIService',
+ full_name='usi.USIService',
+ file=DESCRIPTOR,
+ index=0,
+ serialized_options=None,
+ create_key=_descriptor._internal_create_key,
+ serialized_start=745,
+ serialized_end=984,
+ methods=[
+ _descriptor.MethodDescriptor(
+ name='GetPower',
+ full_name='usi.USIService.GetPower',
+ index=0,
+ containing_service=None,
+ input_type=_SWITCHINFO,
+ output_type=_POWERRESPONSE,
+ serialized_options=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.MethodDescriptor(
+ name='GetInterface',
+ full_name='usi.USIService.GetInterface',
+ index=1,
+ containing_service=None,
+ input_type=_SWITCHINFO,
+ output_type=_INTERFACERESPONSE,
+ serialized_options=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.MethodDescriptor(
+ name='disconnect',
+ full_name='usi.USIService.disconnect',
+ index=2,
+ containing_service=None,
+ input_type=_SWITCHINFO,
+ output_type=_SWITCHACTIONRESPONSE,
+ serialized_options=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+ _descriptor.MethodDescriptor(
+ name='connect',
+ full_name='usi.USIService.connect',
+ index=3,
+ containing_service=None,
+ input_type=_SWITCHINFO,
+ output_type=_SWITCHACTIONRESPONSE,
+ serialized_options=None,
+ create_key=_descriptor._internal_create_key,
+ ),
+])
+_sym_db.RegisterServiceDescriptor(_USISERVICE)
+
+DESCRIPTOR.services_by_name['USIService'] = _USISERVICE
+
+# @@protoc_insertion_point(module_scope)
diff --git a/libs/proto/usi_pb2_grpc.py b/libs/proto/usi_pb2_grpc.py
new file mode 100644
index 0000000000..c8e57501c9
--- /dev/null
+++ b/libs/proto/usi_pb2_grpc.py
@@ -0,0 +1,161 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
+import grpc
+
+import usi_pb2 as usi__pb2
+
+
+class USIServiceStub(object):
+ """Missing associated documentation comment in .proto file."""
+
+ def __init__(self, channel):
+ """Constructor.
+
+ Args:
+ channel: A grpc.Channel.
+ """
+ self.GetPower = channel.unary_unary(
+ '/usi.USIService/GetPower',
+ request_serializer=usi__pb2.SwitchInfo.SerializeToString,
+ response_deserializer=usi__pb2.PowerResponse.FromString,
+ )
+ self.GetInterface = channel.unary_unary(
+ '/usi.USIService/GetInterface',
+ request_serializer=usi__pb2.SwitchInfo.SerializeToString,
+ response_deserializer=usi__pb2.InterfaceResponse.FromString,
+ )
+ self.disconnect = channel.unary_unary(
+ '/usi.USIService/disconnect',
+ request_serializer=usi__pb2.SwitchInfo.SerializeToString,
+ response_deserializer=usi__pb2.SwitchActionResponse.FromString,
+ )
+ self.connect = channel.unary_unary(
+ '/usi.USIService/connect',
+ request_serializer=usi__pb2.SwitchInfo.SerializeToString,
+ response_deserializer=usi__pb2.SwitchActionResponse.FromString,
+ )
+
+
+class USIServiceServicer(object):
+ """Missing associated documentation comment in .proto file."""
+
+ def GetPower(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def GetInterface(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def disconnect(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+ def connect(self, request, context):
+ """Missing associated documentation comment in .proto file."""
+ context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+ context.set_details('Method not implemented!')
+ raise NotImplementedError('Method not implemented!')
+
+
+def add_USIServiceServicer_to_server(servicer, server):
+ rpc_method_handlers = {
+ 'GetPower': grpc.unary_unary_rpc_method_handler(
+ servicer.GetPower,
+ request_deserializer=usi__pb2.SwitchInfo.FromString,
+ response_serializer=usi__pb2.PowerResponse.SerializeToString,
+ ),
+ 'GetInterface': grpc.unary_unary_rpc_method_handler(
+ servicer.GetInterface,
+ request_deserializer=usi__pb2.SwitchInfo.FromString,
+ response_serializer=usi__pb2.InterfaceResponse.SerializeToString,
+ ),
+ 'disconnect': grpc.unary_unary_rpc_method_handler(
+ servicer.disconnect,
+ request_deserializer=usi__pb2.SwitchInfo.FromString,
+ response_serializer=usi__pb2.SwitchActionResponse.SerializeToString,
+ ),
+ 'connect': grpc.unary_unary_rpc_method_handler(
+ servicer.connect,
+ request_deserializer=usi__pb2.SwitchInfo.FromString,
+ response_serializer=usi__pb2.SwitchActionResponse.SerializeToString,
+ ),
+ }
+ generic_handler = grpc.method_handlers_generic_handler(
+ 'usi.USIService', rpc_method_handlers)
+ server.add_generic_rpc_handlers((generic_handler,))
+
+
+ # This class is part of an EXPERIMENTAL API.
+class USIService(object):
+ """Missing associated documentation comment in .proto file."""
+
+ @staticmethod
+ def GetPower(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/usi.USIService/GetPower',
+ usi__pb2.SwitchInfo.SerializeToString,
+ usi__pb2.PowerResponse.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def GetInterface(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/usi.USIService/GetInterface',
+ usi__pb2.SwitchInfo.SerializeToString,
+ usi__pb2.InterfaceResponse.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def disconnect(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/usi.USIService/disconnect',
+ usi__pb2.SwitchInfo.SerializeToString,
+ usi__pb2.SwitchActionResponse.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
+
+ @staticmethod
+ def connect(request,
+ target,
+ options=(),
+ channel_credentials=None,
+ call_credentials=None,
+ compression=None,
+ wait_for_ready=None,
+ timeout=None,
+ metadata=None):
+ return grpc.experimental.unary_unary(request, target, '/usi.USIService/connect',
+ usi__pb2.SwitchInfo.SerializeToString,
+ usi__pb2.SwitchActionResponse.FromString,
+ options, channel_credentials,
+ call_credentials, compression, wait_for_ready, timeout, metadata)
diff --git a/mudacl/build.gradle b/mudacl/build.gradle
index 13f3fd4017..f3667604bb 100644
--- a/mudacl/build.gradle
+++ b/mudacl/build.gradle
@@ -5,12 +5,12 @@ buildscript {
}
}
dependencies {
- classpath "com.github.jengelman.gradle.plugins:shadow:5.2.0"
+ classpath "com.github.jengelman.gradle.plugins:shadow:6.0.0"
}
}
plugins {
- id 'com.github.johnrengelman.shadow' version '5.2.0'
+ id 'com.github.johnrengelman.shadow' version '6.0.0'
id 'java'
id 'maven'
}
@@ -32,7 +32,7 @@ repositories {
}
dependencies {
- compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.11.0'
- compile group: 'com.fasterxml.jackson.dataformat', name: 'jackson-dataformat-yaml', version: '2.11.0'
+ compile group: 'com.fasterxml.jackson.core', name: 'jackson-databind', version: '2.11.1'
+ compile group: 'com.fasterxml.jackson.dataformat', name: 'jackson-dataformat-yaml', version: '2.11.1'
testCompile group: 'junit', name: 'junit', version: '4.13'
}
diff --git a/mudacl/gradle/wrapper/gradle-wrapper.properties b/mudacl/gradle/wrapper/gradle-wrapper.properties
index 16871c71a0..567aa53d89 100644
--- a/mudacl/gradle/wrapper/gradle-wrapper.properties
+++ b/mudacl/gradle/wrapper/gradle-wrapper.properties
@@ -3,4 +3,4 @@ distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-6.6.1-all.zip
diff --git a/proto/system_config.proto b/proto/system_config.proto
index a0cbfbda48..966a5b780a 100644
--- a/proto/system_config.proto
+++ b/proto/system_config.proto
@@ -94,8 +94,8 @@ message DaqConfig {
// Set port-debounce for flaky connections. Zero to disable.
int32 port_debounce_sec = 29;
- // Hook for failure diagnostics.
- string fail_hook = 30;
+ // Hook for device topology updates.
+ string topology_hook = 30;
// Directory of defaults for new devices.
string device_template = 31;
@@ -114,8 +114,29 @@ message DaqConfig {
// Set time between port disconnect and host tests shutdown
int32 port_flap_timeout_sec = 48;
+
+ // USI server URL
+ USISetup usi_setup = 49;
+
+ // Configures events that trigger a DAQ run
+ RunTriggerType run_trigger_type = 50;
+
+ // verbose output
+ bool debug_mode = 51;
}
+enum RunTriggerType {
+ PORT = 0;
+ VLAN = 1;
+}
+
+/**
+ * USI parameters
+**/
+message USISetup {
+ string url = 1;
+ int32 rpc_timeout_sec = 2;
+}
/*
* System configuraiton of the access switch. This is used by the system
@@ -131,14 +152,17 @@ message SwitchSetup {
// Dataplane uplink port
int32 uplink_port = 13;
- // Local port of open flow controller
+ // Local port of DAQ OpenFlow controller
int32 lo_port = 14;
+ // Local port for an alternate OpenFlow controller
+ int32 alt_port = 16;
+
// IP address and subnet for local control plane interface
- string lo_addr = 15;
+ string lo_addr = 18;
// IP address template and subnet for module ip addresses
- string mods_addr = 16;
+ string mods_addr = 20;
// Dataplane id of external OpenFlow switch
string of_dpid = 41;
diff --git a/pubber/.idea/codeStyles/codeStyleConfig.xml b/pubber/.idea/codeStyles/codeStyleConfig.xml
deleted file mode 100644
index c79f34ced8..0000000000
--- a/pubber/.idea/codeStyles/codeStyleConfig.xml
+++ /dev/null
@@ -1,5 +0,0 @@
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/dictionaries/peringknife.xml b/pubber/.idea/dictionaries/peringknife.xml
deleted file mode 100644
index 1f2f3fc05a..0000000000
--- a/pubber/.idea/dictionaries/peringknife.xml
+++ /dev/null
@@ -1,7 +0,0 @@
-
-
-
- pubber
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/encodings.xml b/pubber/.idea/encodings.xml
deleted file mode 100644
index 15a15b218a..0000000000
--- a/pubber/.idea/encodings.xml
+++ /dev/null
@@ -1,4 +0,0 @@
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/gradle.xml b/pubber/.idea/gradle.xml
deleted file mode 100644
index a931762ec9..0000000000
--- a/pubber/.idea/gradle.xml
+++ /dev/null
@@ -1,19 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/jarRepositories.xml b/pubber/.idea/jarRepositories.xml
deleted file mode 100644
index 6f70f42344..0000000000
--- a/pubber/.idea/jarRepositories.xml
+++ /dev/null
@@ -1,30 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_bugsnag_bugsnag_3_6_1.xml b/pubber/.idea/libraries/Gradle__com_bugsnag_bugsnag_3_6_1.xml
deleted file mode 100644
index a61dc7e59a..0000000000
--- a/pubber/.idea/libraries/Gradle__com_bugsnag_bugsnag_3_6_1.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_annotations_2_10_3.xml b/pubber/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_annotations_2_10_3.xml
deleted file mode 100644
index 940abc9cd6..0000000000
--- a/pubber/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_annotations_2_10_3.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_core_2_10_3.xml b/pubber/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_core_2_10_3.xml
deleted file mode 100644
index c39a1aad89..0000000000
--- a/pubber/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_core_2_10_3.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_databind_2_10_3.xml b/pubber/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_databind_2_10_3.xml
deleted file mode 100644
index 401e4470cc..0000000000
--- a/pubber/.idea/libraries/Gradle__com_fasterxml_jackson_core_jackson_databind_2_10_3.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_google_api_api_common_1_1_0.xml b/pubber/.idea/libraries/Gradle__com_google_api_api_common_1_1_0.xml
deleted file mode 100644
index 6a37163770..0000000000
--- a/pubber/.idea/libraries/Gradle__com_google_api_api_common_1_1_0.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_google_api_client_google_api_client_1_22_0.xml b/pubber/.idea/libraries/Gradle__com_google_api_client_google_api_client_1_22_0.xml
deleted file mode 100644
index f7052b657f..0000000000
--- a/pubber/.idea/libraries/Gradle__com_google_api_client_google_api_client_1_22_0.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_google_api_gax_1_8_1.xml b/pubber/.idea/libraries/Gradle__com_google_api_gax_1_8_1.xml
deleted file mode 100644
index 5afd4e53b5..0000000000
--- a/pubber/.idea/libraries/Gradle__com_google_api_gax_1_8_1.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_google_api_gax_grpc_0_25_1.xml b/pubber/.idea/libraries/Gradle__com_google_api_gax_grpc_0_25_1.xml
deleted file mode 100644
index 7dd2f70770..0000000000
--- a/pubber/.idea/libraries/Gradle__com_google_api_gax_grpc_0_25_1.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_google_api_grpc_proto_google_cloud_logging_v2_0_1_20.xml b/pubber/.idea/libraries/Gradle__com_google_api_grpc_proto_google_cloud_logging_v2_0_1_20.xml
deleted file mode 100644
index f653b75d6a..0000000000
--- a/pubber/.idea/libraries/Gradle__com_google_api_grpc_proto_google_cloud_logging_v2_0_1_20.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_google_api_grpc_proto_google_common_protos_0_1_20.xml b/pubber/.idea/libraries/Gradle__com_google_api_grpc_proto_google_common_protos_0_1_20.xml
deleted file mode 100644
index 3ab192cf7d..0000000000
--- a/pubber/.idea/libraries/Gradle__com_google_api_grpc_proto_google_common_protos_0_1_20.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_google_api_grpc_proto_google_iam_v1_0_1_20.xml b/pubber/.idea/libraries/Gradle__com_google_api_grpc_proto_google_iam_v1_0_1_20.xml
deleted file mode 100644
index 831f72e025..0000000000
--- a/pubber/.idea/libraries/Gradle__com_google_api_grpc_proto_google_iam_v1_0_1_20.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_google_apis_google_api_services_cloudiot_v1_rev20170922_1_22_0.xml b/pubber/.idea/libraries/Gradle__com_google_apis_google_api_services_cloudiot_v1_rev20170922_1_22_0.xml
deleted file mode 100644
index 409ca08042..0000000000
--- a/pubber/.idea/libraries/Gradle__com_google_apis_google_api_services_cloudiot_v1_rev20170922_1_22_0.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_google_auth_google_auth_library_credentials_0_8_0.xml b/pubber/.idea/libraries/Gradle__com_google_auth_google_auth_library_credentials_0_8_0.xml
deleted file mode 100644
index 19f8e9622f..0000000000
--- a/pubber/.idea/libraries/Gradle__com_google_auth_google_auth_library_credentials_0_8_0.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_google_auth_google_auth_library_oauth2_http_0_8_0.xml b/pubber/.idea/libraries/Gradle__com_google_auth_google_auth_library_oauth2_http_0_8_0.xml
deleted file mode 100644
index a9aba07437..0000000000
--- a/pubber/.idea/libraries/Gradle__com_google_auth_google_auth_library_oauth2_http_0_8_0.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_google_auto_value_auto_value_1_2.xml b/pubber/.idea/libraries/Gradle__com_google_auto_value_auto_value_1_2.xml
deleted file mode 100644
index aeea865ae3..0000000000
--- a/pubber/.idea/libraries/Gradle__com_google_auto_value_auto_value_1_2.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_google_cloud_google_cloud_core_1_7_0.xml b/pubber/.idea/libraries/Gradle__com_google_cloud_google_cloud_core_1_7_0.xml
deleted file mode 100644
index 49ddf929e7..0000000000
--- a/pubber/.idea/libraries/Gradle__com_google_cloud_google_cloud_core_1_7_0.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_google_cloud_google_cloud_core_grpc_1_7_0.xml b/pubber/.idea/libraries/Gradle__com_google_cloud_google_cloud_core_grpc_1_7_0.xml
deleted file mode 100644
index f09079becb..0000000000
--- a/pubber/.idea/libraries/Gradle__com_google_cloud_google_cloud_core_grpc_1_7_0.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_google_cloud_google_cloud_logging_1_7_0.xml b/pubber/.idea/libraries/Gradle__com_google_cloud_google_cloud_logging_1_7_0.xml
deleted file mode 100644
index 42054ea1de..0000000000
--- a/pubber/.idea/libraries/Gradle__com_google_cloud_google_cloud_logging_1_7_0.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_google_code_findbugs_jsr305_3_0_0.xml b/pubber/.idea/libraries/Gradle__com_google_code_findbugs_jsr305_3_0_0.xml
deleted file mode 100644
index c6616f41e4..0000000000
--- a/pubber/.idea/libraries/Gradle__com_google_code_findbugs_jsr305_3_0_0.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_google_code_gson_gson_2_7.xml b/pubber/.idea/libraries/Gradle__com_google_code_gson_gson_2_7.xml
deleted file mode 100644
index cbe1b3266b..0000000000
--- a/pubber/.idea/libraries/Gradle__com_google_code_gson_gson_2_7.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_google_errorprone_error_prone_annotations_2_0_19.xml b/pubber/.idea/libraries/Gradle__com_google_errorprone_error_prone_annotations_2_0_19.xml
deleted file mode 100644
index b4cd21969d..0000000000
--- a/pubber/.idea/libraries/Gradle__com_google_errorprone_error_prone_annotations_2_0_19.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_google_guava_guava_22_0.xml b/pubber/.idea/libraries/Gradle__com_google_guava_guava_22_0.xml
deleted file mode 100644
index 4c947ec6df..0000000000
--- a/pubber/.idea/libraries/Gradle__com_google_guava_guava_22_0.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_google_http_client_google_http_client_1_22_0.xml b/pubber/.idea/libraries/Gradle__com_google_http_client_google_http_client_1_22_0.xml
deleted file mode 100644
index 6c259c21e1..0000000000
--- a/pubber/.idea/libraries/Gradle__com_google_http_client_google_http_client_1_22_0.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_google_http_client_google_http_client_jackson2_1_22_0.xml b/pubber/.idea/libraries/Gradle__com_google_http_client_google_http_client_jackson2_1_22_0.xml
deleted file mode 100644
index b4ec53cbea..0000000000
--- a/pubber/.idea/libraries/Gradle__com_google_http_client_google_http_client_jackson2_1_22_0.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_google_instrumentation_instrumentation_api_0_4_3.xml b/pubber/.idea/libraries/Gradle__com_google_instrumentation_instrumentation_api_0_4_3.xml
deleted file mode 100644
index 07c6748fa9..0000000000
--- a/pubber/.idea/libraries/Gradle__com_google_instrumentation_instrumentation_api_0_4_3.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_google_j2objc_j2objc_annotations_1_1.xml b/pubber/.idea/libraries/Gradle__com_google_j2objc_j2objc_annotations_1_1.xml
deleted file mode 100644
index ab45264c2d..0000000000
--- a/pubber/.idea/libraries/Gradle__com_google_j2objc_j2objc_annotations_1_1.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_google_oauth_client_google_oauth_client_1_22_0.xml b/pubber/.idea/libraries/Gradle__com_google_oauth_client_google_oauth_client_1_22_0.xml
deleted file mode 100644
index 8549a6371c..0000000000
--- a/pubber/.idea/libraries/Gradle__com_google_oauth_client_google_oauth_client_1_22_0.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_google_protobuf_protobuf_java_3_3_1.xml b/pubber/.idea/libraries/Gradle__com_google_protobuf_protobuf_java_3_3_1.xml
deleted file mode 100644
index e294c29fa5..0000000000
--- a/pubber/.idea/libraries/Gradle__com_google_protobuf_protobuf_java_3_3_1.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_google_protobuf_protobuf_java_util_3_3_1.xml b/pubber/.idea/libraries/Gradle__com_google_protobuf_protobuf_java_util_3_3_1.xml
deleted file mode 100644
index 6866f74bf0..0000000000
--- a/pubber/.idea/libraries/Gradle__com_google_protobuf_protobuf_java_util_3_3_1.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_hazelcast_hazelcast_3_5_4.xml b/pubber/.idea/libraries/Gradle__com_hazelcast_hazelcast_3_5_4.xml
deleted file mode 100644
index 6b097056b4..0000000000
--- a/pubber/.idea/libraries/Gradle__com_hazelcast_hazelcast_3_5_4.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_librato_metrics_librato_java_2_1_0.xml b/pubber/.idea/libraries/Gradle__com_librato_metrics_librato_java_2_1_0.xml
deleted file mode 100644
index d3ab2fa027..0000000000
--- a/pubber/.idea/libraries/Gradle__com_librato_metrics_librato_java_2_1_0.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_librato_metrics_metrics_librato_5_1_0.xml b/pubber/.idea/libraries/Gradle__com_librato_metrics_metrics_librato_5_1_0.xml
deleted file mode 100644
index c588bf6754..0000000000
--- a/pubber/.idea/libraries/Gradle__com_librato_metrics_metrics_librato_5_1_0.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__com_sun_xml_bind_jaxb_impl_2_3_2.xml b/pubber/.idea/libraries/Gradle__com_sun_xml_bind_jaxb_impl_2_3_2.xml
deleted file mode 100644
index 35a975da46..0000000000
--- a/pubber/.idea/libraries/Gradle__com_sun_xml_bind_jaxb_impl_2_3_2.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__commons_codec_commons_codec_1_10.xml b/pubber/.idea/libraries/Gradle__commons_codec_commons_codec_1_10.xml
deleted file mode 100644
index c84796132f..0000000000
--- a/pubber/.idea/libraries/Gradle__commons_codec_commons_codec_1_10.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__commons_logging_commons_logging_1_1_1.xml b/pubber/.idea/libraries/Gradle__commons_logging_commons_logging_1_1_1.xml
deleted file mode 100644
index b9fb75155c..0000000000
--- a/pubber/.idea/libraries/Gradle__commons_logging_commons_logging_1_1_1.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_dropwizard_metrics_metrics_core_3_2_2.xml b/pubber/.idea/libraries/Gradle__io_dropwizard_metrics_metrics_core_3_2_2.xml
deleted file mode 100644
index f2223bcf6b..0000000000
--- a/pubber/.idea/libraries/Gradle__io_dropwizard_metrics_metrics_core_3_2_2.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_dropwizard_metrics_metrics_jvm_3_2_2.xml b/pubber/.idea/libraries/Gradle__io_dropwizard_metrics_metrics_jvm_3_2_2.xml
deleted file mode 100644
index a4fb7c1897..0000000000
--- a/pubber/.idea/libraries/Gradle__io_dropwizard_metrics_metrics_jvm_3_2_2.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_grpc_grpc_auth_1_6_1.xml b/pubber/.idea/libraries/Gradle__io_grpc_grpc_auth_1_6_1.xml
deleted file mode 100644
index 8308d1661d..0000000000
--- a/pubber/.idea/libraries/Gradle__io_grpc_grpc_auth_1_6_1.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_grpc_grpc_context_1_6_1.xml b/pubber/.idea/libraries/Gradle__io_grpc_grpc_context_1_6_1.xml
deleted file mode 100644
index 42b3336f05..0000000000
--- a/pubber/.idea/libraries/Gradle__io_grpc_grpc_context_1_6_1.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_grpc_grpc_core_1_6_1.xml b/pubber/.idea/libraries/Gradle__io_grpc_grpc_core_1_6_1.xml
deleted file mode 100644
index f4249256ab..0000000000
--- a/pubber/.idea/libraries/Gradle__io_grpc_grpc_core_1_6_1.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_grpc_grpc_netty_1_6_1.xml b/pubber/.idea/libraries/Gradle__io_grpc_grpc_netty_1_6_1.xml
deleted file mode 100644
index 3b8ead556e..0000000000
--- a/pubber/.idea/libraries/Gradle__io_grpc_grpc_netty_1_6_1.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_grpc_grpc_protobuf_1_6_1.xml b/pubber/.idea/libraries/Gradle__io_grpc_grpc_protobuf_1_6_1.xml
deleted file mode 100644
index 95d433c45d..0000000000
--- a/pubber/.idea/libraries/Gradle__io_grpc_grpc_protobuf_1_6_1.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_grpc_grpc_protobuf_lite_1_6_1.xml b/pubber/.idea/libraries/Gradle__io_grpc_grpc_protobuf_lite_1_6_1.xml
deleted file mode 100644
index 9a3dd1b89b..0000000000
--- a/pubber/.idea/libraries/Gradle__io_grpc_grpc_protobuf_lite_1_6_1.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_grpc_grpc_stub_1_6_1.xml b/pubber/.idea/libraries/Gradle__io_grpc_grpc_stub_1_6_1.xml
deleted file mode 100644
index f282c5df26..0000000000
--- a/pubber/.idea/libraries/Gradle__io_grpc_grpc_stub_1_6_1.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_jsonwebtoken_jjwt_0_7_0.xml b/pubber/.idea/libraries/Gradle__io_jsonwebtoken_jjwt_0_7_0.xml
deleted file mode 100644
index c255c18f27..0000000000
--- a/pubber/.idea/libraries/Gradle__io_jsonwebtoken_jjwt_0_7_0.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_moquette_moquette_broker_0_10.xml b/pubber/.idea/libraries/Gradle__io_moquette_moquette_broker_0_10.xml
deleted file mode 100644
index 0751597837..0000000000
--- a/pubber/.idea/libraries/Gradle__io_moquette_moquette_broker_0_10.xml
+++ /dev/null
@@ -1,9 +0,0 @@
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_buffer_4_1_14_Final.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_buffer_4_1_14_Final.xml
deleted file mode 100644
index 30fb818235..0000000000
--- a/pubber/.idea/libraries/Gradle__io_netty_netty_buffer_4_1_14_Final.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_codec_4_1_14_Final.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_codec_4_1_14_Final.xml
deleted file mode 100644
index 97cd8c9cb9..0000000000
--- a/pubber/.idea/libraries/Gradle__io_netty_netty_codec_4_1_14_Final.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_codec_http2_4_1_14_Final.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_codec_http2_4_1_14_Final.xml
deleted file mode 100644
index 54abbbe3aa..0000000000
--- a/pubber/.idea/libraries/Gradle__io_netty_netty_codec_http2_4_1_14_Final.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_codec_http_4_1_14_Final.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_codec_http_4_1_14_Final.xml
deleted file mode 100644
index 58ac06ba5b..0000000000
--- a/pubber/.idea/libraries/Gradle__io_netty_netty_codec_http_4_1_14_Final.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_codec_mqtt_4_1_12_Final.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_codec_mqtt_4_1_12_Final.xml
deleted file mode 100644
index 68118cd002..0000000000
--- a/pubber/.idea/libraries/Gradle__io_netty_netty_codec_mqtt_4_1_12_Final.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_codec_socks_4_1_14_Final.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_codec_socks_4_1_14_Final.xml
deleted file mode 100644
index 614e913e87..0000000000
--- a/pubber/.idea/libraries/Gradle__io_netty_netty_codec_socks_4_1_14_Final.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_common_4_1_14_Final.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_common_4_1_14_Final.xml
deleted file mode 100644
index 1a98e17a91..0000000000
--- a/pubber/.idea/libraries/Gradle__io_netty_netty_common_4_1_14_Final.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_handler_4_1_14_Final.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_handler_4_1_14_Final.xml
deleted file mode 100644
index 9298ca6e14..0000000000
--- a/pubber/.idea/libraries/Gradle__io_netty_netty_handler_4_1_14_Final.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_handler_proxy_4_1_14_Final.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_handler_proxy_4_1_14_Final.xml
deleted file mode 100644
index ca38677084..0000000000
--- a/pubber/.idea/libraries/Gradle__io_netty_netty_handler_proxy_4_1_14_Final.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_resolver_4_1_14_Final.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_resolver_4_1_14_Final.xml
deleted file mode 100644
index b70942e76b..0000000000
--- a/pubber/.idea/libraries/Gradle__io_netty_netty_resolver_4_1_14_Final.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_tcnative_boringssl_static_2_0_3_Final.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_tcnative_boringssl_static_2_0_3_Final.xml
deleted file mode 100644
index e00975f2d7..0000000000
--- a/pubber/.idea/libraries/Gradle__io_netty_netty_tcnative_boringssl_static_2_0_3_Final.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_transport_4_1_14_Final.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_transport_4_1_14_Final.xml
deleted file mode 100644
index f055031276..0000000000
--- a/pubber/.idea/libraries/Gradle__io_netty_netty_transport_4_1_14_Final.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_transport_native_epoll_4_1_12_Final_linux_x86_64.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_transport_native_epoll_4_1_12_Final_linux_x86_64.xml
deleted file mode 100644
index 467269eee3..0000000000
--- a/pubber/.idea/libraries/Gradle__io_netty_netty_transport_native_epoll_4_1_12_Final_linux_x86_64.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_netty_netty_transport_native_unix_common_4_1_12_Final.xml b/pubber/.idea/libraries/Gradle__io_netty_netty_transport_native_unix_common_4_1_12_Final.xml
deleted file mode 100644
index a3b889ee62..0000000000
--- a/pubber/.idea/libraries/Gradle__io_netty_netty_transport_native_unix_common_4_1_12_Final.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__io_opencensus_opencensus_api_0_5_1.xml b/pubber/.idea/libraries/Gradle__io_opencensus_opencensus_api_0_5_1.xml
deleted file mode 100644
index 5cdfe84133..0000000000
--- a/pubber/.idea/libraries/Gradle__io_opencensus_opencensus_api_0_5_1.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__javax_activation_javax_activation_api_1_2_0.xml b/pubber/.idea/libraries/Gradle__javax_activation_javax_activation_api_1_2_0.xml
deleted file mode 100644
index f480add6e2..0000000000
--- a/pubber/.idea/libraries/Gradle__javax_activation_javax_activation_api_1_2_0.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__javax_xml_bind_jaxb_api_2_3_1.xml b/pubber/.idea/libraries/Gradle__javax_xml_bind_jaxb_api_2_3_1.xml
deleted file mode 100644
index 434a174d24..0000000000
--- a/pubber/.idea/libraries/Gradle__javax_xml_bind_jaxb_api_2_3_1.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__joda_time_joda_time_2_9_7.xml b/pubber/.idea/libraries/Gradle__joda_time_joda_time_2_9_7.xml
deleted file mode 100644
index f45e0d77bd..0000000000
--- a/pubber/.idea/libraries/Gradle__joda_time_joda_time_2_9_7.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__junit_junit_4_13.xml b/pubber/.idea/libraries/Gradle__junit_junit_4_13.xml
deleted file mode 100644
index 0cef6bc81e..0000000000
--- a/pubber/.idea/libraries/Gradle__junit_junit_4_13.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__org_apache_httpcomponents_httpclient_4_0_1.xml b/pubber/.idea/libraries/Gradle__org_apache_httpcomponents_httpclient_4_0_1.xml
deleted file mode 100644
index 80d562c2fb..0000000000
--- a/pubber/.idea/libraries/Gradle__org_apache_httpcomponents_httpclient_4_0_1.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__org_apache_httpcomponents_httpcore_4_0_1.xml b/pubber/.idea/libraries/Gradle__org_apache_httpcomponents_httpcore_4_0_1.xml
deleted file mode 100644
index e203c6b668..0000000000
--- a/pubber/.idea/libraries/Gradle__org_apache_httpcomponents_httpcore_4_0_1.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__org_codehaus_mojo_animal_sniffer_annotations_1_14.xml b/pubber/.idea/libraries/Gradle__org_codehaus_mojo_animal_sniffer_annotations_1_14.xml
deleted file mode 100644
index 72ee118d97..0000000000
--- a/pubber/.idea/libraries/Gradle__org_codehaus_mojo_animal_sniffer_annotations_1_14.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__org_eclipse_paho_org_eclipse_paho_client_mqttv3_1_1_0.xml b/pubber/.idea/libraries/Gradle__org_eclipse_paho_org_eclipse_paho_client_mqttv3_1_1_0.xml
deleted file mode 100644
index 01970270a7..0000000000
--- a/pubber/.idea/libraries/Gradle__org_eclipse_paho_org_eclipse_paho_client_mqttv3_1_1_0.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__org_hamcrest_hamcrest_core_1_3.xml b/pubber/.idea/libraries/Gradle__org_hamcrest_hamcrest_core_1_3.xml
deleted file mode 100644
index 8262f729c2..0000000000
--- a/pubber/.idea/libraries/Gradle__org_hamcrest_hamcrest_core_1_3.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__org_json_json_20160810.xml b/pubber/.idea/libraries/Gradle__org_json_json_20160810.xml
deleted file mode 100644
index 64dc62f3d4..0000000000
--- a/pubber/.idea/libraries/Gradle__org_json_json_20160810.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__org_mockito_mockito_core_1_10_19.xml b/pubber/.idea/libraries/Gradle__org_mockito_mockito_core_1_10_19.xml
deleted file mode 100644
index 6aa377ef4f..0000000000
--- a/pubber/.idea/libraries/Gradle__org_mockito_mockito_core_1_10_19.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__org_objenesis_objenesis_2_1.xml b/pubber/.idea/libraries/Gradle__org_objenesis_objenesis_2_1.xml
deleted file mode 100644
index e6b52ad30d..0000000000
--- a/pubber/.idea/libraries/Gradle__org_objenesis_objenesis_2_1.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__org_slf4j_slf4j_api_1_7_25.xml b/pubber/.idea/libraries/Gradle__org_slf4j_slf4j_api_1_7_25.xml
deleted file mode 100644
index dd23f3e0c1..0000000000
--- a/pubber/.idea/libraries/Gradle__org_slf4j_slf4j_api_1_7_25.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__org_slf4j_slf4j_simple_1_7_5.xml b/pubber/.idea/libraries/Gradle__org_slf4j_slf4j_simple_1_7_5.xml
deleted file mode 100644
index 586ac1e599..0000000000
--- a/pubber/.idea/libraries/Gradle__org_slf4j_slf4j_simple_1_7_5.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/libraries/Gradle__org_threeten_threetenbp_1_3_3.xml b/pubber/.idea/libraries/Gradle__org_threeten_threetenbp_1_3_3.xml
deleted file mode 100644
index 0fcafe29d0..0000000000
--- a/pubber/.idea/libraries/Gradle__org_threeten_threetenbp_1_3_3.xml
+++ /dev/null
@@ -1,11 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/modules.xml b/pubber/.idea/modules.xml
deleted file mode 100644
index f4ca1e7a1b..0000000000
--- a/pubber/.idea/modules.xml
+++ /dev/null
@@ -1,8 +0,0 @@
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/modules/datafmt.iml b/pubber/.idea/modules/datafmt.iml
deleted file mode 100644
index baed4f134d..0000000000
--- a/pubber/.idea/modules/datafmt.iml
+++ /dev/null
@@ -1,21 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/uiDesigner.xml b/pubber/.idea/uiDesigner.xml
deleted file mode 100644
index e96534fb27..0000000000
--- a/pubber/.idea/uiDesigner.xml
+++ /dev/null
@@ -1,124 +0,0 @@
-
-
-
-
- -
-
-
- -
-
-
- -
-
-
- -
-
-
- -
-
-
-
-
-
- -
-
-
-
-
-
- -
-
-
-
-
-
- -
-
-
-
-
-
- -
-
-
-
-
- -
-
-
-
-
- -
-
-
-
-
- -
-
-
-
-
- -
-
-
-
-
- -
-
-
-
-
- -
-
-
- -
-
-
-
-
- -
-
-
-
-
- -
-
-
-
-
- -
-
-
-
-
- -
-
-
-
-
- -
-
-
- -
-
-
- -
-
-
- -
-
-
- -
-
-
-
-
- -
-
-
- -
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/.idea/vcs.xml b/pubber/.idea/vcs.xml
deleted file mode 100644
index 26b269dd99..0000000000
--- a/pubber/.idea/vcs.xml
+++ /dev/null
@@ -1,61 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/bin/build b/pubber/bin/build
deleted file mode 100755
index a2bd6be40a..0000000000
--- a/pubber/bin/build
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash -e
-
-rundir=$(dirname $0)
-cd $rundir/..
-
-echo Running in $PWD
-
-rm -rf build
-
-./gradlew build
-./gradlew shadow
diff --git a/pubber/bin/keygen b/pubber/bin/keygen
deleted file mode 100755
index 1b3cf60d67..0000000000
--- a/pubber/bin/keygen
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/bash -e
-
-ROOT=$(realpath $(dirname $0)/../..)
-cd $ROOT
-
-TARGET_PREFIX=local/rsa_
-
-PUBLIC_CERT=${TARGET_PREFIX}cert.pem
-PRIVATE_CERT=${TARGET_PREFIX}private.pem
-PRIVATE_KEY=${TARGET_PREFIX}private.pkcs8
-
-if [ -f $PUBLIC_CERT ]; then
- echo $PUBLIC_CERT already exists, exiting.
- false
-fi
-if [ -f $PRIVATE_CERT ]; then
- echo $PRIVATE_CERT already exists, exiting.
- false
-fi
-if [ -f $PRIVATE_KEY ]; then
- echo $PRIVATE_KEY already exists, exiting.
- false
-fi
-
-openssl req -x509 -nodes -newkey rsa:2048 -keyout $PRIVATE_CERT -days 1000000 -out $PUBLIC_CERT -subj "/CN=unused"
-openssl pkcs8 -topk8 -inform PEM -outform DER -in $PRIVATE_CERT -nocrypt > $PRIVATE_KEY
diff --git a/pubber/bin/run b/pubber/bin/run
deleted file mode 100755
index ee14f351aa..0000000000
--- a/pubber/bin/run
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/bash -e
-
-ROOT=$(realpath $(dirname $0)/../..)
-cd $ROOT
-
-conf_file=local/pubber.json
-
-if [ ! -f $conf_file ]; then
- echo Pubber config file not found: $(realpath $conf_file)
- false
-fi
-
-java -jar pubber/build/libs/pubber-1.0-SNAPSHOT-all.jar $conf_file
diff --git a/pubber/build.gradle b/pubber/build.gradle
deleted file mode 100644
index 1bdfb151a3..0000000000
--- a/pubber/build.gradle
+++ /dev/null
@@ -1,54 +0,0 @@
-buildscript {
- repositories {
- maven {
- url "https://plugins.gradle.org/m2/"
- }
- }
- dependencies {
- classpath "com.github.jengelman.gradle.plugins:shadow:5.2.0"
- }
-}
-
-plugins {
- id 'com.github.johnrengelman.shadow' version '5.2.0'
- id 'java'
- id 'maven'
-}
-
-group 'daq-pubber'
-version '1.0-SNAPSHOT'
-
-sourceCompatibility = 1.8
-
-jar {
- manifest {
- attributes 'Main-Class': 'daq.pubber.Pubber'
- }
-}
-
-repositories {
- mavenCentral()
- mavenLocal()
- jcenter()
-}
-
-dependencies {
- compile group: 'org.slf4j', name: 'slf4j-simple', version:'1.7.5'
- compile 'io.jsonwebtoken:jjwt:0.7.0'
- compile 'javax.xml.bind:jaxb-api:2.3.1'
- compile 'com.sun.xml.bind:jaxb-impl:2.3.2'
- compile 'com.google.guava:guava:22.0'
- compile 'com.google.cloud:google-cloud-logging:1.7.0'
- compile('com.google.api-client:google-api-client:1.22.0') {
- exclude group: 'com.google.guava', module: 'guava-jdk5'
- }
- compile 'com.fasterxml.jackson.core:jackson-databind:2.11.0'
- compile('com.google.apis:google-api-services-cloudiot:v1-rev20170922-1.22.0') {
- exclude group: 'com.google.guava', module: 'guava-jdk5'
- }
- compile 'joda-time:joda-time:2.9.7'
- compile 'org.eclipse.paho:org.eclipse.paho.client.mqttv3:1.1.0'
- compile 'io.moquette:moquette-broker:0.10'
- testCompile group: 'junit', name: 'junit', version: '4.13'
- testCompile 'org.mockito:mockito-core:1.10.19'
-}
diff --git a/pubber/gradle/wrapper/gradle-wrapper.jar b/pubber/gradle/wrapper/gradle-wrapper.jar
deleted file mode 100644
index 01b8bf6b1f..0000000000
Binary files a/pubber/gradle/wrapper/gradle-wrapper.jar and /dev/null differ
diff --git a/pubber/gradle/wrapper/gradle-wrapper.properties b/pubber/gradle/wrapper/gradle-wrapper.properties
deleted file mode 100644
index f07e1a85bc..0000000000
--- a/pubber/gradle/wrapper/gradle-wrapper.properties
+++ /dev/null
@@ -1,6 +0,0 @@
-#Tue Feb 11 09:15:14 PST 2020
-distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-all.zip
-distributionBase=GRADLE_USER_HOME
-distributionPath=wrapper/dists
-zipStorePath=wrapper/dists
-zipStoreBase=GRADLE_USER_HOME
diff --git a/pubber/gradlew b/pubber/gradlew
deleted file mode 100755
index 4453ccea33..0000000000
--- a/pubber/gradlew
+++ /dev/null
@@ -1,172 +0,0 @@
-#!/usr/bin/env sh
-
-##############################################################################
-##
-## Gradle start up script for UN*X
-##
-##############################################################################
-
-# Attempt to set APP_HOME
-# Resolve links: $0 may be a link
-PRG="$0"
-# Need this for relative symlinks.
-while [ -h "$PRG" ] ; do
- ls=`ls -ld "$PRG"`
- link=`expr "$ls" : '.*-> \(.*\)$'`
- if expr "$link" : '/.*' > /dev/null; then
- PRG="$link"
- else
- PRG=`dirname "$PRG"`"/$link"
- fi
-done
-SAVED="`pwd`"
-cd "`dirname \"$PRG\"`/" >/dev/null
-APP_HOME="`pwd -P`"
-cd "$SAVED" >/dev/null
-
-APP_NAME="Gradle"
-APP_BASE_NAME=`basename "$0"`
-
-# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
-DEFAULT_JVM_OPTS=""
-
-# Use the maximum available, or set MAX_FD != -1 to use that value.
-MAX_FD="maximum"
-
-warn ( ) {
- echo "$*"
-}
-
-die ( ) {
- echo
- echo "$*"
- echo
- exit 1
-}
-
-# OS specific support (must be 'true' or 'false').
-cygwin=false
-msys=false
-darwin=false
-nonstop=false
-case "`uname`" in
- CYGWIN* )
- cygwin=true
- ;;
- Darwin* )
- darwin=true
- ;;
- MINGW* )
- msys=true
- ;;
- NONSTOP* )
- nonstop=true
- ;;
-esac
-
-CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
-
-# Determine the Java command to use to start the JVM.
-if [ -n "$JAVA_HOME" ] ; then
- if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
- # IBM's JDK on AIX uses strange locations for the executables
- JAVACMD="$JAVA_HOME/jre/sh/java"
- else
- JAVACMD="$JAVA_HOME/bin/java"
- fi
- if [ ! -x "$JAVACMD" ] ; then
- die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
-
-Please set the JAVA_HOME variable in your environment to match the
-location of your Java installation."
- fi
-else
- JAVACMD="java"
- which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
-
-Please set the JAVA_HOME variable in your environment to match the
-location of your Java installation."
-fi
-
-# Increase the maximum file descriptors if we can.
-if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
- MAX_FD_LIMIT=`ulimit -H -n`
- if [ $? -eq 0 ] ; then
- if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
- MAX_FD="$MAX_FD_LIMIT"
- fi
- ulimit -n $MAX_FD
- if [ $? -ne 0 ] ; then
- warn "Could not set maximum file descriptor limit: $MAX_FD"
- fi
- else
- warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
- fi
-fi
-
-# For Darwin, add options to specify how the application appears in the dock
-if $darwin; then
- GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
-fi
-
-# For Cygwin, switch paths to Windows format before running java
-if $cygwin ; then
- APP_HOME=`cygpath --path --mixed "$APP_HOME"`
- CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
- JAVACMD=`cygpath --unix "$JAVACMD"`
-
- # We build the pattern for arguments to be converted via cygpath
- ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
- SEP=""
- for dir in $ROOTDIRSRAW ; do
- ROOTDIRS="$ROOTDIRS$SEP$dir"
- SEP="|"
- done
- OURCYGPATTERN="(^($ROOTDIRS))"
- # Add a user-defined pattern to the cygpath arguments
- if [ "$GRADLE_CYGPATTERN" != "" ] ; then
- OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
- fi
- # Now convert the arguments - kludge to limit ourselves to /bin/sh
- i=0
- for arg in "$@" ; do
- CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
- CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
-
- if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
- eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
- else
- eval `echo args$i`="\"$arg\""
- fi
- i=$((i+1))
- done
- case $i in
- (0) set -- ;;
- (1) set -- "$args0" ;;
- (2) set -- "$args0" "$args1" ;;
- (3) set -- "$args0" "$args1" "$args2" ;;
- (4) set -- "$args0" "$args1" "$args2" "$args3" ;;
- (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
- (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
- (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
- (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
- (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
- esac
-fi
-
-# Escape application args
-save ( ) {
- for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
- echo " "
-}
-APP_ARGS=$(save "$@")
-
-# Collect all arguments for the java command, following the shell quoting and substitution rules
-eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
-
-# by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong
-if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then
- cd "$(dirname "$0")"
-fi
-
-exec "$JAVACMD" "$@"
diff --git a/pubber/local b/pubber/local
deleted file mode 120000
index 0a4de1e828..0000000000
--- a/pubber/local
+++ /dev/null
@@ -1 +0,0 @@
-../local/
\ No newline at end of file
diff --git a/pubber/pubber.iml b/pubber/pubber.iml
deleted file mode 100644
index e0065ccd31..0000000000
--- a/pubber/pubber.iml
+++ /dev/null
@@ -1,92 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
\ No newline at end of file
diff --git a/pubber/settings.gradle b/pubber/settings.gradle
deleted file mode 100644
index 8b13789179..0000000000
--- a/pubber/settings.gradle
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/pubber/src/main/java/daq/pubber/AbstractPoint.java b/pubber/src/main/java/daq/pubber/AbstractPoint.java
deleted file mode 100644
index 44bd581c68..0000000000
--- a/pubber/src/main/java/daq/pubber/AbstractPoint.java
+++ /dev/null
@@ -1,15 +0,0 @@
-package daq.pubber;
-
-import daq.udmi.Message.PointData;
-import daq.udmi.Message.PointState;
-
-public interface AbstractPoint {
-
- String getName();
-
- PointData getData();
-
- void updateData();
-
- PointState getState();
-}
diff --git a/pubber/src/main/java/daq/pubber/Configuration.java b/pubber/src/main/java/daq/pubber/Configuration.java
deleted file mode 100644
index 7c362781ef..0000000000
--- a/pubber/src/main/java/daq/pubber/Configuration.java
+++ /dev/null
@@ -1,17 +0,0 @@
-package daq.pubber;
-
-/**
- */
-public class Configuration {
- public String bridgeHostname = "mqtt.googleapis.com";
- public String bridgePort = "443";
- public String projectId;
- public String cloudRegion;
- public String registryId;
- public String gatewayId;
- public String deviceId;
- public String keyFile = "local/rsa_private.pkcs8";
- public byte[] keyBytes;
- public String algorithm = "RS256";
- public Object extraField;
-}
diff --git a/pubber/src/main/java/daq/pubber/GatewayError.java b/pubber/src/main/java/daq/pubber/GatewayError.java
deleted file mode 100644
index fcbc4954c9..0000000000
--- a/pubber/src/main/java/daq/pubber/GatewayError.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package daq.pubber;
-
-public class GatewayError {
- public String error_type;
- public String description;
- public String device_id;
- public MqttMessageInfo mqtt_message_info;
-
- public static class MqttMessageInfo {
- public String message_type;
- public String topic;
- public String packet_id;
- }
-}
diff --git a/pubber/src/main/java/daq/pubber/JwtAuthorization.java b/pubber/src/main/java/daq/pubber/JwtAuthorization.java
deleted file mode 100644
index cd17be9ba7..0000000000
--- a/pubber/src/main/java/daq/pubber/JwtAuthorization.java
+++ /dev/null
@@ -1,11 +0,0 @@
-package daq.pubber;
-
-import java.util.Arrays;
-
-public class JwtAuthorization {
- public String authorization;
-
- public JwtAuthorization(String jwtToken) {
- authorization = jwtToken;
- }
-}
diff --git a/pubber/src/main/java/daq/pubber/MqttPublisher.java b/pubber/src/main/java/daq/pubber/MqttPublisher.java
deleted file mode 100644
index 9358ee3546..0000000000
--- a/pubber/src/main/java/daq/pubber/MqttPublisher.java
+++ /dev/null
@@ -1,364 +0,0 @@
-package daq.pubber;
-
-import com.fasterxml.jackson.annotation.JsonInclude;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.SerializationFeature;
-import com.fasterxml.jackson.databind.util.ISO8601DateFormat;
-import com.google.common.base.Preconditions;
-import com.google.common.cache.CacheLoader;
-import com.google.common.cache.RemovalNotification;
-import io.jsonwebtoken.JwtBuilder;
-import io.jsonwebtoken.Jwts;
-import io.jsonwebtoken.SignatureAlgorithm;
-import org.eclipse.paho.client.mqttv3.*;
-import org.eclipse.paho.client.mqttv3.persist.MemoryPersistence;
-import org.joda.time.DateTime;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.nio.charset.StandardCharsets;
-import java.security.KeyFactory;
-import java.security.PrivateKey;
-import java.security.spec.PKCS8EncodedKeySpec;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.*;
-import java.util.concurrent.atomic.AtomicInteger;
-import java.util.function.Consumer;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-
-/**
- * Handle publishing sensor data to a Cloud IoT MQTT endpoint.
- */
-public class MqttPublisher {
-
- private static final Logger LOG = LoggerFactory.getLogger(MqttPublisher.class);
-
- private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper()
- .disable(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS)
- .setDateFormat(new ISO8601DateFormat())
- .setSerializationInclusion(JsonInclude.Include.NON_NULL);
-
- // Indicate if this message should be a MQTT 'retained' message.
- private static final boolean SHOULD_RETAIN = false;
-
- private static final int MQTT_QOS = 1;
- private static final String CONFIG_UPDATE_TOPIC_FMT = "/devices/%s/config";
- private static final String ERRORS_TOPIC_FMT = "/devices/%s/errors";
- private static final String UNUSED_ACCOUNT_NAME = "unused";
- private static final int INITIALIZE_TIME_MS = 20000;
-
- private static final String MESSAGE_TOPIC_FORMAT = "/devices/%s/%s";
- private static final String BROKER_URL_FORMAT = "ssl://%s:%s";
- private static final String CLIENT_ID_FORMAT = "projects/%s/locations/%s/registries/%s/devices/%s";
- private static final int PUBLISH_THREAD_COUNT = 10;
- private static final String HANDLER_KEY_FORMAT = "%s/%s";
-
- private final Semaphore connectionLock = new Semaphore(1);
-
- private final Map mqttClients = new ConcurrentHashMap<>();
-
- private final ExecutorService publisherExecutor =
- Executors.newFixedThreadPool(PUBLISH_THREAD_COUNT);
-
- private final Configuration configuration;
- private final String registryId;
-
- private final AtomicInteger publishCounter = new AtomicInteger(0);
- private final AtomicInteger errorCounter = new AtomicInteger(0);
- private final AtomicInteger expiredCounter = new AtomicInteger(0);
- private final Map> handlers = new ConcurrentHashMap<>();
- private final Map> handlersType = new ConcurrentHashMap<>();
- private final Consumer onError;
-
- MqttPublisher(Configuration configuration, Consumer onError) {
- this.configuration = configuration;
- this.registryId = configuration.registryId;
- this.onError = onError;
- validateCloudIoTOptions();
- }
-
- void publish(String deviceId, String topic, Object data) {
- Preconditions.checkNotNull(deviceId, "publish deviceId");
- LOG.debug("Publishing in background " + registryId + "/" + deviceId);
- publisherExecutor.submit(() -> publishCore(deviceId, topic, data));
- }
-
- private void publishCore(String deviceId, String topic, Object data) {
- try {
- String payload = OBJECT_MAPPER.writeValueAsString(data);
- sendMessage(deviceId, getMessageTopic(deviceId, topic), payload.getBytes());
- LOG.debug("Publishing complete " + registryId + "/" + deviceId);
- } catch (Exception e) {
- errorCounter.incrementAndGet();
- LOG.warn(String.format("Publish failed for %s: %s", deviceId, e));
- if (configuration.gatewayId == null) {
- closeDeviceClient(deviceId);
- } else {
- close();
- }
- }
- }
-
- private void closeDeviceClient(String deviceId) {
- MqttClient removed = mqttClients.remove(deviceId);
- if (removed != null) {
- try {
- removed.close();
- } catch (Exception e) {
- LOG.error("Error closing MQTT client: " + e.toString());
- }
- }
- }
-
- void close() {
- Set clients = mqttClients.keySet();
- for (String client : clients) {
- closeDeviceClient(client);
- }
- }
-
- long clientCount() {
- return mqttClients.size();
- }
-
- private void validateCloudIoTOptions() {
- try {
- checkNotNull(configuration.bridgeHostname, "bridgeHostname");
- checkNotNull(configuration.bridgePort, "bridgePort");
- checkNotNull(configuration.projectId, "projectId");
- checkNotNull(configuration.cloudRegion, "cloudRegion");
- checkNotNull(configuration.keyBytes, "keyBytes");
- checkNotNull(configuration.algorithm, "algorithm");
- } catch (Exception e) {
- throw new IllegalStateException("Invalid Cloud IoT Options", e);
- }
- }
-
- private MqttClient newBoundClient(String deviceId) {
- try {
- String gatewayId = configuration.gatewayId;
- LOG.debug("Connecting through gateway " + gatewayId);
- MqttClient mqttClient = getConnectedClient(gatewayId);
- String topic = String.format("/devices/%s/attach", deviceId);
- String payload = "";
- LOG.info("Publishing attach message to topic " + topic);
- mqttClient.publish(topic, payload.getBytes(StandardCharsets.UTF_8.name()), MQTT_QOS, SHOULD_RETAIN);
- return mqttClient;
- } catch (Exception e) {
- throw new RuntimeException("While binding client " + deviceId, e);
- }
- }
-
- private MqttClient newMqttClient(String deviceId) {
- try {
- Preconditions.checkNotNull(registryId, "registryId is null");
- Preconditions.checkNotNull(deviceId, "deviceId is null");
- MqttClient mqttClient = new MqttClient(getBrokerUrl(), getClientId(deviceId),
- new MemoryPersistence());
- return mqttClient;
- } catch (Exception e) {
- errorCounter.incrementAndGet();
- throw new RuntimeException("Creating new MQTT client " + deviceId, e);
- }
- }
-
- private MqttClient connectMqttClient(String deviceId) {
- try {
- if (!connectionLock.tryAcquire(INITIALIZE_TIME_MS, TimeUnit.MILLISECONDS)) {
- throw new RuntimeException("Timeout waiting for connection lock");
- }
- MqttClient mqttClient = newMqttClient(deviceId);
- if (mqttClient.isConnected()) {
- return mqttClient;
- }
- LOG.info("Attempting connection to " + registryId + ":" + deviceId);
-
- mqttClient.setCallback(new MqttCallbackHandler(deviceId));
- mqttClient.setTimeToWait(INITIALIZE_TIME_MS);
-
- MqttConnectOptions options = new MqttConnectOptions();
- options.setMqttVersion(MqttConnectOptions.MQTT_VERSION_3_1_1);
- options.setUserName(UNUSED_ACCOUNT_NAME);
- options.setMaxInflight(PUBLISH_THREAD_COUNT * 2);
- options.setConnectionTimeout(INITIALIZE_TIME_MS);
-
- options.setPassword(createJwt());
-
- mqttClient.connect(options);
-
- subscribeToUpdates(mqttClient, deviceId);
- return mqttClient;
- } catch (Exception e) {
- throw new RuntimeException("While connecting mqtt client " + deviceId, e);
- } finally {
- connectionLock.release();
- }
- }
-
- private char[] createJwt() throws Exception {
- return createJwt(configuration.projectId, configuration.keyBytes, configuration.algorithm)
- .toCharArray();
- }
-
- private String getClientId(String deviceId) {
- // Create our MQTT client. The mqttClientId is a unique string that identifies this device. For
- // Google Cloud IoT, it must be in the format below.
- return String.format(CLIENT_ID_FORMAT, configuration.projectId, configuration.cloudRegion,
- registryId, deviceId);
- }
-
- private String getBrokerUrl() {
- // Build the connection string for Google's Cloud IoT MQTT server. Only SSL connections are
- // accepted. For server authentication, the JVM's root certificates are used.
- return String.format(BROKER_URL_FORMAT, configuration.bridgeHostname, configuration.bridgePort);
- }
-
- private String getMessageTopic(String deviceId, String topic) {
- return String.format(MESSAGE_TOPIC_FORMAT, deviceId, topic);
- }
-
- private void subscribeToUpdates(MqttClient client, String deviceId) {
- subscribeTopic(client, String.format(CONFIG_UPDATE_TOPIC_FMT, deviceId));
- subscribeTopic(client, String.format(ERRORS_TOPIC_FMT, deviceId));
- }
-
- private void subscribeTopic(MqttClient client, String updateTopic) {
- try {
- client.subscribe(updateTopic);
- } catch (MqttException e) {
- throw new RuntimeException("While subscribing to MQTT topic " + updateTopic, e);
- }
- }
-
- public PublisherStats getStatistics() {
- return new PublisherStats();
- }
-
- @SuppressWarnings("unchecked")
- public void registerHandler(String deviceId, String mqttTopic,
- Consumer handler, Class messageType) {
- String key = getHandlerKey(getMessageTopic(deviceId, mqttTopic));
- if (handler == null) {
- handlers.remove(key);
- handlersType.remove(key);
- } else if (handlers.put(key, (Consumer) handler) == null) {
- handlersType.put(key, (Class) messageType);
- } else {
- throw new IllegalStateException("Overwriting existing handler for " + key);
- }
- }
-
- private String getHandlerKey(String configTopic) {
- return String.format(HANDLER_KEY_FORMAT, registryId, configTopic);
- }
-
- public void connect(String deviceId) {
- getConnectedClient(deviceId);
- }
-
- private class MqttCallbackHandler implements MqttCallback {
-
- private final String deviceId;
-
- MqttCallbackHandler(String deviceId) {
- this.deviceId = deviceId;
- }
-
- /**
- * @see MqttCallback#connectionLost(Throwable)
- */
- public void connectionLost(Throwable cause) {
- LOG.warn("MQTT Connection Lost", cause);
- }
-
- /**
- * @see MqttCallback#deliveryComplete(IMqttDeliveryToken)
- */
- public void deliveryComplete(IMqttDeliveryToken token) {
- }
-
- /**
- * @see MqttCallback#messageArrived(String, MqttMessage)
- */
- public void messageArrived(String topic, MqttMessage message) {
- String handlerKey = getHandlerKey(topic);
- Consumer handler = handlers.get(handlerKey);
- Class type = handlersType.get(handlerKey);
- if (handler == null) {
- onError.accept(new RuntimeException("No registered handler for " + handlerKey));
- } else if (message.toString().length() == 0) {
- LOG.warn("Received message is empty for " + handlerKey);
- handler.accept(null);
- } else {
- try {
- handler.accept(OBJECT_MAPPER.readValue(message.toString(), type));
- } catch (Exception e) {
- onError.accept(e);
- }
- }
- }
- }
-
- private void sendMessage(String deviceId, String mqttTopic,
- byte[] mqttMessage) throws Exception {
- LOG.debug("Sending message to " + mqttTopic);
- getConnectedClient(deviceId).publish(mqttTopic, mqttMessage, MQTT_QOS, SHOULD_RETAIN);
- publishCounter.incrementAndGet();
- }
-
- private MqttClient getConnectedClient(String deviceId) {
- try {
- String gatewayId = configuration.gatewayId;
- if (gatewayId != null && !gatewayId.equals(deviceId)) {
- return mqttClients.computeIfAbsent(deviceId, this::newBoundClient);
- }
- return mqttClients.computeIfAbsent(deviceId, this::connectMqttClient);
- } catch (Exception e) {
- throw new RuntimeException("While getting mqtt client " + deviceId + ": " + e.toString(), e);
- }
- }
-
- /** Load a PKCS8 encoded keyfile from the given path. */
- private PrivateKey loadKeyBytes(byte[] keyBytes, String algorithm) throws Exception {
- try {
- PKCS8EncodedKeySpec spec = new PKCS8EncodedKeySpec(keyBytes);
- KeyFactory kf = KeyFactory.getInstance(algorithm);
- return kf.generatePrivate(spec);
- } catch (Exception e) {
- throw new IllegalArgumentException("Loading key bytes", e);
- }
- }
-
- /** Create a Cloud IoT JWT for the given project id, signed with the given private key */
- protected String createJwt(String projectId, byte[] privateKeyBytes, String algorithm)
- throws Exception {
- DateTime now = new DateTime();
- // Create a JWT to authenticate this device. The device will be disconnected after the token
- // expires, and will have to reconnect with a new token. The audience field should always be set
- // to the GCP project id.
- JwtBuilder jwtBuilder =
- Jwts.builder()
- .setIssuedAt(now.toDate())
- .setExpiration(now.plusMinutes(60).toDate())
- .setAudience(projectId);
-
- if (algorithm.equals("RS256")) {
- PrivateKey privateKey = loadKeyBytes(privateKeyBytes, "RSA");
- return jwtBuilder.signWith(SignatureAlgorithm.RS256, privateKey).compact();
- } else if (algorithm.equals("ES256")) {
- PrivateKey privateKey = loadKeyBytes(privateKeyBytes, "EC");
- return jwtBuilder.signWith(SignatureAlgorithm.ES256, privateKey).compact();
- } else {
- throw new IllegalArgumentException(
- "Invalid algorithm " + algorithm + ". Should be one of 'RS256' or 'ES256'.");
- }
- }
-
- public class PublisherStats {
- public long clientCount = mqttClients.size();
- public int publishCount = publishCounter.getAndSet(0);
- public int errorCount = errorCounter.getAndSet(0);
- }
-}
diff --git a/pubber/src/main/java/daq/pubber/Pubber.java b/pubber/src/main/java/daq/pubber/Pubber.java
deleted file mode 100644
index 5f28a08efa..0000000000
--- a/pubber/src/main/java/daq/pubber/Pubber.java
+++ /dev/null
@@ -1,282 +0,0 @@
-package daq.pubber;
-
-import com.fasterxml.jackson.annotation.JsonInclude;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Preconditions;
-import daq.udmi.Entry;
-import daq.udmi.Message;
-import daq.udmi.Message.Pointset;
-import daq.udmi.Message.PointsetState;
-import daq.udmi.Message.State;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.File;
-import java.nio.file.Files;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.Date;
-import java.util.HashSet;
-import java.util.Set;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.ScheduledFuture;
-import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
-
-public class Pubber {
-
- private static final Logger LOG = LoggerFactory.getLogger(Pubber.class);
- private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper()
- .setSerializationInclusion(JsonInclude.Include.NON_NULL);
-
- private static final String POINTSET_TOPIC = "events/pointset";
- private static final String SYSTEM_TOPIC = "events/system";
- private static final String STATE_TOPIC = "state";
- private static final String CONFIG_TOPIC = "config";
- private static final String ERROR_TOPIC = "errors";
-
- private static final int MIN_REPORT_MS = 200;
- private static final int DEFAULT_REPORT_MS = 5000;
- private static final int CONFIG_WAIT_TIME_MS = 10000;
- private static final int STATE_THROTTLE_MS = 1500;
- private static final String CONFIG_ERROR_STATUS_KEY = "config_error";
- private static final int LOGGING_MOD_COUNT = 10;
-
- private final ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
-
- private final Configuration configuration;
- private final AtomicInteger messageDelayMs = new AtomicInteger(DEFAULT_REPORT_MS);
- private final CountDownLatch configLatch = new CountDownLatch(1);
-
- private final State deviceState = new State();
- private final Pointset devicePoints = new Pointset();
- private final Set allPoints = new HashSet<>();
-
- private MqttPublisher mqttPublisher;
- private ScheduledFuture> scheduledFuture;
- private long lastStateTimeMs;
- private int sendCount;
-
- public static void main(String[] args) throws Exception {
- if (args.length != 1) {
- throw new IllegalArgumentException("Expected [configPath] as argument");
- }
- Pubber pubber = new Pubber(args[0]);
- pubber.initialize();
- pubber.startConnection();
- LOG.info("Done with main");
- }
-
- private Pubber(String configFile) {
- File configurationFile = new File(configFile);
- LOG.info("Reading configuration from " + configurationFile.getAbsolutePath());
- try {
- configuration = OBJECT_MAPPER.readValue(configurationFile, Configuration.class);
- } catch (Exception e) {
- throw new RuntimeException("While reading configuration file " + configurationFile.getAbsolutePath(), e);
- }
- info(String.format("Starting instance for project %s registry %s",
- configuration.projectId, configuration.registryId));
-
- initializeDevice();
- addPoint(new RandomPoint("superimposition_reading", 0, 100, "Celsius"));
- addPoint(new RandomPoint("recalcitrant_angle", 0, 360, "deg" ));
- addPoint(new RandomPoint("faulty_finding", 1, 1, "truth"));
- }
-
- private void initializeDevice() {
- deviceState.system.make_model = "DAQ_pubber";
- deviceState.system.firmware.version = "v1";
- deviceState.pointset = new PointsetState();
- devicePoints.extraField = configuration.extraField;
- }
-
- private synchronized void maybeRestartExecutor(int intervalMs) {
- if (scheduledFuture == null || intervalMs != messageDelayMs.get()) {
- cancelExecutor();
- messageDelayMs.set(intervalMs);
- startExecutor();
- }
- }
-
- private synchronized void startExecutor() {
- Preconditions.checkState(scheduledFuture == null);
- int delay = messageDelayMs.get();
- LOG.info("Starting executor with send message delay " + delay);
- scheduledFuture = executor
- .scheduleAtFixedRate(this::sendMessages, delay, delay, TimeUnit.MILLISECONDS);
- }
-
- private synchronized void cancelExecutor() {
- if (scheduledFuture != null) {
- scheduledFuture.cancel(false);
- scheduledFuture = null;
- }
- }
-
- private void sendMessages() {
- try {
- sendDeviceMessage(configuration.deviceId);
- updatePoints();
- if (sendCount % LOGGING_MOD_COUNT == 0) {
- publishLogMessage(configuration.deviceId,"Sent " + sendCount + " messages");
- }
- sendCount++;
- } catch (Exception e) {
- LOG.error("Fatal error during execution", e);
- terminate();
- }
- }
-
- private void updatePoints() {
- allPoints.forEach(AbstractPoint::updateData);
- }
-
- private void terminate() {
- try {
- info("Terminating");
- mqttPublisher.close();
- cancelExecutor();
- } catch (Exception e) {
- info("Error terminating: " + e.getMessage());
- }
- }
-
- private void startConnection() throws InterruptedException {
- connect();
- boolean result = configLatch.await(CONFIG_WAIT_TIME_MS, TimeUnit.MILLISECONDS);
- LOG.info("synchronized start config result " + result);
- if (!result) {
- mqttPublisher.close();
- }
- }
-
- private void addPoint(AbstractPoint point) {
- String pointName = point.getName();
- if (devicePoints.points.put(pointName, point.getData()) != null) {
- throw new IllegalStateException("Duplicate pointName " + pointName);
- }
- deviceState.pointset.points.put(pointName, point.getState());
- allPoints.add(point);
- }
-
- private void initialize() {
- Preconditions.checkState(mqttPublisher == null, "mqttPublisher already defined");
- Preconditions.checkNotNull(configuration.keyFile, "configuration keyFile not defined");
- System.err.println("Loading device key file from " + configuration.keyFile);
- configuration.keyBytes = getFileBytes(configuration.keyFile);
- mqttPublisher = new MqttPublisher(configuration, this::reportError);
- if (configuration.gatewayId != null) {
- mqttPublisher.registerHandler(configuration.gatewayId, CONFIG_TOPIC,
- this::configHandler, Message.Config.class);
- mqttPublisher.registerHandler(configuration.gatewayId, ERROR_TOPIC,
- this::errorHandler, GatewayError.class);
- }
- mqttPublisher.registerHandler(configuration.deviceId, CONFIG_TOPIC,
- this::configHandler, Message.Config.class);
- }
-
- private void connect() {
- try {
- mqttPublisher.connect(configuration.deviceId);
- LOG.info("Connection complete.");
- } catch (Exception e) {
- LOG.error("Connection error", e);
- LOG.error("Forcing termination");
- System.exit(-1);
- }
- }
-
- private void reportError(Exception toReport) {
- if (toReport != null) {
- LOG.error("Error receiving message: " + toReport);
- Entry report = new Entry(toReport);
- deviceState.system.statuses.put(CONFIG_ERROR_STATUS_KEY, report);
- publishStateMessage(configuration.deviceId);
- } else {
- Entry previous = deviceState.system.statuses.remove(CONFIG_ERROR_STATUS_KEY);
- if (previous != null) {
- publishStateMessage(configuration.deviceId);
- }
- }
- }
-
- private void info(String msg) {
- LOG.info(msg);
- }
-
- private void configHandler(Message.Config config) {
- try {
- info("Received new config " + config);
- final int actualInterval;
- if (config != null) {
- Integer reportInterval = config.system == null ? null : config.system.report_interval_ms;
- actualInterval = Integer.max(MIN_REPORT_MS,
- reportInterval == null ? DEFAULT_REPORT_MS : reportInterval);
- deviceState.system.last_config = config.timestamp;
- } else {
- actualInterval = DEFAULT_REPORT_MS;
- }
- maybeRestartExecutor(actualInterval);
- configLatch.countDown();
- publishStateMessage(configuration.deviceId);
- reportError(null);
- } catch (Exception e) {
- reportError(e);
- }
- }
-
- private void errorHandler(GatewayError error) {
- // TODO: Handle error and give up on device.
- info(String.format("%s for %s: %s",
- error.error_type, error.device_id, error.description));
- }
-
- private byte[] getFileBytes(String dataFile) {
- Path dataPath = Paths.get(dataFile);
- try {
- return Files.readAllBytes(dataPath);
- } catch (Exception e) {
- throw new RuntimeException("While getting data from " + dataPath.toAbsolutePath(), e);
- }
- }
-
- private void sendDeviceMessage(String deviceId) {
- if (mqttPublisher.clientCount() == 0) {
- LOG.error("No connected clients, exiting.");
- System.exit(-2);
- }
- info(String.format("Sending test message for %s/%s", configuration.registryId, deviceId));
- devicePoints.timestamp = new Date();
- mqttPublisher.publish(deviceId, POINTSET_TOPIC, devicePoints);
- }
-
- private void publishLogMessage(String deviceId, String logMessage) {
- info(String.format("Sending log message for %s/%s", configuration.registryId, deviceId));
- Message.SystemEvent systemEvent = new Message.SystemEvent();
- systemEvent.logentries.add(new Entry(logMessage));
- mqttPublisher.publish(deviceId, SYSTEM_TOPIC, systemEvent);
- }
-
- private void publishStateMessage(String deviceId) {
- lastStateTimeMs = sleepUntil(lastStateTimeMs + STATE_THROTTLE_MS);
- info("Sending state message for device " + deviceId);
- deviceState.timestamp = new Date();
- mqttPublisher.publish(deviceId, STATE_TOPIC, deviceState);
- }
-
- private long sleepUntil(long targetTimeMs) {
- long currentTime = System.currentTimeMillis();
- long delay = targetTimeMs - currentTime;
- try {
- if (delay > 0) {
- Thread.sleep(delay);
- }
- return System.currentTimeMillis();
- } catch (Exception e) {
- throw new RuntimeException("While sleeping for " + delay, e);
- }
- }
-}
diff --git a/pubber/src/main/java/daq/pubber/RandomPoint.java b/pubber/src/main/java/daq/pubber/RandomPoint.java
deleted file mode 100644
index dcf0ff0a03..0000000000
--- a/pubber/src/main/java/daq/pubber/RandomPoint.java
+++ /dev/null
@@ -1,42 +0,0 @@
-package daq.pubber;
-
-import daq.udmi.Message.PointData;
-import daq.udmi.Message.PointState;
-
-public class RandomPoint implements AbstractPoint {
-
- private final String name;
- private final double min;
- private final double max;
- private final PointData data = new PointData();
- private final PointState state = new PointState();
-
- public RandomPoint(String name, double min, double max, String units) {
- this.name = name;
- this.min = min;
- this.max = max;
- this.state.fault = max == min;
- this.state.units = units;
- updateData();
- }
-
- @Override
- public void updateData() {
- data.present_value = Math.round(Math.random() * (max - min) + min);
- }
-
- @Override
- public PointState getState() {
- return state;
- }
-
- @Override
- public String getName() {
- return name;
- }
-
- @Override
- public PointData getData() {
- return data;
- }
-}
diff --git a/pubber/src/main/java/daq/udmi/Entry.java b/pubber/src/main/java/daq/udmi/Entry.java
deleted file mode 100644
index 25201edc72..0000000000
--- a/pubber/src/main/java/daq/udmi/Entry.java
+++ /dev/null
@@ -1,26 +0,0 @@
-package daq.udmi;
-
-import java.io.ByteArrayOutputStream;
-import java.io.PrintStream;
-import java.util.Date;
-
-public class Entry {
- public String message;
- public String detail;
- public String category = "com.acme.pubber";
- public Integer level = 500;
- public Date timestamp = new Date();
-
- public Entry(String message) {
- this.message = message;
- }
-
- public Entry(Exception e) {
- message = e.toString();
- ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
- e.printStackTrace(new PrintStream(outputStream));
- detail = outputStream.toString();
- category = e.getStackTrace()[0].getClassName();
- level = 800;
- }
-}
diff --git a/pubber/src/main/java/daq/udmi/Message.java b/pubber/src/main/java/daq/udmi/Message.java
deleted file mode 100644
index c15f9c842c..0000000000
--- a/pubber/src/main/java/daq/udmi/Message.java
+++ /dev/null
@@ -1,76 +0,0 @@
-package daq.udmi;
-
-import java.util.ArrayList;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-@SuppressWarnings("unused")
-public class Message {
-
- public static class State extends UdmiBase {
- public SystemState system = new SystemState();
- public PointsetState pointset;
- }
-
- public static class Config extends UdmiBase {
- public SystemConfig system;
- public PointsetConfig pointset;
- public GatewayConfig gateway;
- }
-
- public static class Pointset extends UdmiBase {
- public Map points = new HashMap<>();
- public Object extraField;
- }
-
- public static class SystemEvent extends UdmiBase {
- public List logentries = new ArrayList<>();
- }
-
- public static class PointsetState {
- public Map points = new HashMap<>();
- }
-
- public static class PointsetConfig {
- public Map points = new HashMap<>();
- }
-
- public static class PointConfig {
- }
-
- public static class GatewayConfig {
- public List proxy_ids;
- }
-
- public static class SystemState {
- public String make_model;
- public Bundle firmware = new Bundle();
- public boolean operational;
- public Date last_config;
- public Map statuses = new HashMap<>();
- }
-
- public static class SystemConfig {
- public Integer report_interval_ms;
- }
-
- public static class PointData {
- public Object present_value;
- }
-
- public static class PointState {
- public String units;
- public Boolean fault;
- }
-
- public static class Bundle {
- public String version;
- }
-
- public static class UdmiBase {
- public Integer version = 1;
- public Date timestamp = new Date();
- }
-}
diff --git a/resources/device_types/deltacontrols_o3-din-cpu/module_config.json b/resources/device_types/deltacontrols_o3-din-cpu/module_config.json
index 066bf35abb..6859207787 100644
--- a/resources/device_types/deltacontrols_o3-din-cpu/module_config.json
+++ b/resources/device_types/deltacontrols_o3-din-cpu/module_config.json
@@ -15,9 +15,6 @@
"bacnet": {
"enabled": true
},
- "macoui": {
- "enabled": true
- },
"mudgee": {
"enabled": true
},
diff --git a/resources/device_types/distech_ecy-s1000/module_config.json b/resources/device_types/distech_ecy-s1000/module_config.json
index d790899c5c..09e9fd37b9 100644
--- a/resources/device_types/distech_ecy-s1000/module_config.json
+++ b/resources/device_types/distech_ecy-s1000/module_config.json
@@ -15,9 +15,6 @@
"bacnet": {
"enabled": true
},
- "macoui": {
- "enabled": true
- },
"mudgee": {
"enabled": true
},
diff --git a/resources/setups/baseline/module_config.json b/resources/setups/baseline/module_config.json
index 8ebd56cc57..5999aa59b6 100644
--- a/resources/setups/baseline/module_config.json
+++ b/resources/setups/baseline/module_config.json
@@ -1,15 +1,16 @@
{
"modules": {
"ipaddr": {
- "timeout_sec": 300
+ "enabled": false,
+ "timeout_sec": 900,
+ "port_flap_timeout_sec": 20,
+ "dhcp_ranges": [{"start": "192.168.0.1", "end": "192.168.255.254", "prefix_length": 16}]
},
"pass": {
"enabled": true
},
"nmap": {
- "enabled": true
- },
- "macoui": {
+ "timeout_sec": 600,
"enabled": true
},
"switch": {
@@ -19,6 +20,7 @@
"enabled": true
},
"password": {
+ "dictionary_dir": "resources/faux",
"enabled": true
},
"bacext": {
diff --git a/resources/setups/qualification/device_module_config.json b/resources/setups/qualification/device_module_config.json
index 6019735d2b..131348b856 100644
--- a/resources/setups/qualification/device_module_config.json
+++ b/resources/setups/qualification/device_module_config.json
@@ -30,9 +30,6 @@
"ipaddr": {
"timeout_sec": 120
},
- "macoui": {
- "enabled": true
- },
"mudgee": {
"enabled": true
},
diff --git a/resources/setups/qualification/device_type_module_config.json b/resources/setups/qualification/device_type_module_config.json
index f183d5527c..0401a128b7 100644
--- a/resources/setups/qualification/device_type_module_config.json
+++ b/resources/setups/qualification/device_type_module_config.json
@@ -24,10 +24,10 @@
"ipaddr": {
"timeout_sec": 120
},
- "macoui": {
+ "mudgee": {
"enabled": true
},
- "mudgee": {
+ "network": {
"enabled": true
},
"nmap": {
diff --git a/resources/setups/qualification/system_module_config.json b/resources/setups/qualification/system_module_config.json
index de3cc56461..1cc6fd9835 100644
--- a/resources/setups/qualification/system_module_config.json
+++ b/resources/setups/qualification/system_module_config.json
@@ -15,10 +15,10 @@
"ipaddr": {
"timeout_sec": 120
},
- "macoui": {
+ "mudgee": {
"enabled": true
},
- "mudgee": {
+ "network": {
"enabled": true
},
"nmap": {
@@ -90,6 +90,11 @@
"required": "pass",
"expected": "Required Pass"
},
+ "connection.network.ntp_update": {
+ "category": "Network Time",
+ "required": "pass",
+ "expected": "Required Pass"
+ },
"connection.network.communication_type": {
"category": "Communication",
"required": "info",
diff --git a/resources/setups/remediation/device_module_config.json b/resources/setups/remediation/device_module_config.json
index 976761c762..1d14c6f7aa 100644
--- a/resources/setups/remediation/device_module_config.json
+++ b/resources/setups/remediation/device_module_config.json
@@ -26,10 +26,10 @@
"ipaddr": {
"timeout_sec": 120
},
- "macoui": {
+ "mudgee": {
"enabled": true
},
- "mudgee": {
+ "network": {
"enabled": true
},
"nmap": {
diff --git a/resources/setups/remediation/system_module_config.json b/resources/setups/remediation/system_module_config.json
index 17e7793758..e7542de45d 100644
--- a/resources/setups/remediation/system_module_config.json
+++ b/resources/setups/remediation/system_module_config.json
@@ -15,10 +15,10 @@
"ipaddr": {
"timeout_sec": 120
},
- "macoui": {
+ "mudgee": {
"enabled": true
},
- "mudgee": {
+ "network": {
"enabled": true
},
"nmap": {
@@ -86,6 +86,11 @@
"required": "pass",
"expected": "Recommended Pass"
},
+ "connection.network.ntp_update": {
+ "category": "Network Time",
+ "required": "pass",
+ "expected": "Recommended Pass"
+ },
"connection.network.communication_type": {
"category": "Communication",
"required": "info",
diff --git a/resources/test_site/devices/AHU-1/metadata.json b/resources/test_site/devices/AHU-1/metadata.json
index cdee5cc055..09eb5ce94d 100644
--- a/resources/test_site/devices/AHU-1/metadata.json
+++ b/resources/test_site/devices/AHU-1/metadata.json
@@ -1,13 +1,13 @@
{
"pointset": {
"points": {
- "filter_alarm_pressure_status": {
+ "faulty_finding": {
"units": "Bars"
},
- "filter_differential_pressure_sensor": {
+ "recalcitrant_angle": {
"units": "Degrees-Celsius"
},
- "filter_differential_pressure_setpoint": {
+ "superimposition_reading": {
"units": "No-units"
}
}
diff --git a/resources/test_site/mac_addrs/3c5ab41e8f0b/module_config.json b/resources/test_site/mac_addrs/3c5ab41e8f0b/module_config.json
index c999dd7e16..bb981518eb 100644
--- a/resources/test_site/mac_addrs/3c5ab41e8f0b/module_config.json
+++ b/resources/test_site/mac_addrs/3c5ab41e8f0b/module_config.json
@@ -3,10 +3,6 @@
"modules": {
"hold": {
"enabled": true
- },
- "macoui": {
- "enabled": true,
- "timeout_sec": 1
}
}
}
diff --git a/resources/test_site/module_config.json b/resources/test_site/module_config.json
index 4d672b67aa..03f7d504d0 100644
--- a/resources/test_site/module_config.json
+++ b/resources/test_site/module_config.json
@@ -17,6 +17,9 @@
},
"manual": {
"enabled": true
+ },
+ "ssh": {
+ "enabled": false
}
},
"process": {
@@ -44,7 +47,7 @@
"category": "Security",
"expected": "Recommended"
},
- "manual.test.travis": {
+ "manual.test.name": {
"required": "pass",
"category": "Security",
"expected": "Recommended",
diff --git a/schemas/simple/simple.json b/schemas/simple/simple.json
deleted file mode 100644
index fa31b8e8d0..0000000000
--- a/schemas/simple/simple.json
+++ /dev/null
@@ -1,27 +0,0 @@
-{
- "type" : "object",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "properties" : {
- "rectangle" : {"$ref" : "#/definitions/Rectangle" }
- },
- "required": [
- "rectangle"
- ],
- "definitions" : {
- "size" : {
- "type" : "number",
- "minimum" : 0
- },
- "Rectangle" : {
- "type" : "object",
- "required": [
- "a",
- "b"
- ],
- "properties" : {
- "a" : {"$ref" : "#/definitions/size"},
- "b" : {"$ref" : "#/definitions/size"}
- }
- }
- }
-}
\ No newline at end of file
diff --git a/schemas/simple/simple.tests/error.json b/schemas/simple/simple.tests/error.json
deleted file mode 100644
index 361ce8baae..0000000000
--- a/schemas/simple/simple.tests/error.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "rectangle" : {
- "a" : -4,
- "b" : 5
- }
-}
diff --git a/schemas/simple/simple.tests/error.out b/schemas/simple/simple.tests/error.out
deleted file mode 100644
index d5320ed8a3..0000000000
--- a/schemas/simple/simple.tests/error.out
+++ /dev/null
@@ -1,4 +0,0 @@
-Validating 1 schemas
- Validating 1 files against simple.json
- Against input simple.tests/error.json
- #/rectangle/a: -4 is not greater or equal to 0
diff --git a/schemas/simple/simple.tests/example.json b/schemas/simple/simple.tests/example.json
deleted file mode 100644
index e6751b6099..0000000000
--- a/schemas/simple/simple.tests/example.json
+++ /dev/null
@@ -1,6 +0,0 @@
-{
- "rectangle" : {
- "a" : 4,
- "b" : 5
- }
-}
diff --git a/schemas/simple/simple.tests/example.out b/schemas/simple/simple.tests/example.out
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/schemas/simple/simple.tests/simple.json b/schemas/simple/simple.tests/simple.json
deleted file mode 100644
index fa31b8e8d0..0000000000
--- a/schemas/simple/simple.tests/simple.json
+++ /dev/null
@@ -1,27 +0,0 @@
-{
- "type" : "object",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "properties" : {
- "rectangle" : {"$ref" : "#/definitions/Rectangle" }
- },
- "required": [
- "rectangle"
- ],
- "definitions" : {
- "size" : {
- "type" : "number",
- "minimum" : 0
- },
- "Rectangle" : {
- "type" : "object",
- "required": [
- "a",
- "b"
- ],
- "properties" : {
- "a" : {"$ref" : "#/definitions/size"},
- "b" : {"$ref" : "#/definitions/size"}
- }
- }
- }
-}
\ No newline at end of file
diff --git a/schemas/simple/simple.tests/simple.out b/schemas/simple/simple.tests/simple.out
deleted file mode 100644
index 85d5807ab3..0000000000
--- a/schemas/simple/simple.tests/simple.out
+++ /dev/null
@@ -1,4 +0,0 @@
-Validating 1 schemas
- Validating 1 files against simple.json
- Against input simple.tests/simple.json
- #: required key [rectangle] not found
diff --git a/schemas/udmi/README.md b/schemas/udmi/README.md
deleted file mode 100644
index b0c450fa94..0000000000
--- a/schemas/udmi/README.md
+++ /dev/null
@@ -1,163 +0,0 @@
-# UDMI Schema
-
-The Universal Device Management Interface (UDMI) provides a high-level specification for the
-management and operation of physical IoT systems. This data is typically exchanged
-with a cloud entity that can maintain a "digital twin" or "shadow device" in the cloud.
-Nominally meant for use with [Googe's Cloud IoT Core](https://cloud.google.com/iot/docs/),
-as a schema it can be applied to any set of data or hosting setup. Additionally, the schema
-has provisions for basic telemetry ingestion, such as datapoint streaming from an IoT device.
-
-By deisgn, this schema is intended to be:
-* U niversal: Apply to all subsystems in a building, not a singular vertical solution.
-* D evice: Operations on an IoT _device_, a managed entity in physical space.
-* M anagement: Focus on device _management_, rather than command & control.
-* I nterface: Define an interface specification, rather than a client-library or
-RPC mechanism.
-
-See the associated [UDMI Tech Stack](TECH_STACK.md) for details about transport mechanism
-outside of the core schema definition. For questions and discussion pertaining to this topic,
-please join/monitor the
-[daq-users@googlegroups.com](https://groups.google.com/forum/#!forum/daq-users) email list
-
-## Use Cases
-
-The essence behind UDMI is an automated mechanism for IoT system management. Many current
-systems require direct-to-device access, such as through a web browser or telnet/ssh session.
-These techniques do not scale to robust managed ecosystems since they rely too heavily on
-manual operation (aren't automated), and increase the security exposure of the system
-(since they need to expose these management ports).
-
-UDMI is intended to support a few primary use-cases:
-* _Telemetry Ingestion_: Ingest device data points in a standardized format.
-* [_Gateway Proxy_](docs/gateway.md): Proxy data/connection for non-UDMI devices,
-allowing adaptation to legacy systems.
-* _On-Prem Actuation_: Ability to effect on-prem device behavior.
-* _Device Testability_: e.g. Trigger a fake alarm to test reporting mechanims.
-* _Commissioning Tools_: Streamline complete system setup and install.
-* _Operational Diagnostics_: Make it easy for system operators to diagnoe basic faults.
-* _Status and Logging_: Report system operational metrics to hosting infrastructure.
-* _Key Rotation_: Manage encryption keys and certificates in accordance with best practice.
-* _Credential Exchange_: Bootstrap higher-layer authentication to restricted resources.
-* _Firmware Updates_: Initiate, monitor, and track firmware updates across an entire fleet
-of devices.
-* _On-Prem Discovery_: Enumerate and on-prem devices to aid setup or anomaly detection.
-
-All these situations are conceptually about _management_ of devices, which is conceptually
-different than the _control_ or _operation_. These concepts are similar to the _management_,
-_control_, and _data_ planes of
-[Software Defined Networks](https://queue.acm.org/detail.cfm?id=2560327).
-Once operational, the system should be able to operate completely autonomoulsy from the
-management capabilities, which are only required to diagnose or tweak system behavior.
-
-## Design Philiosphy
-
-In order to provide for management automation, UDMI strives for the following principles:
-* Secure and Authenticated: Requires a propertly secure and authenticated channel
-from the device to managing infrastructure.
-* Declarative Specification: The schema describes the _desired_ state of the system,
-relying on the underlying mechanisms to match actual state with desired state. This is
-conceptually similar to Kubernetes-style configuraiton files.
-* Minimal Elegant Design: Initially underspecified, with an eye towards making it easy to
-add new capabilities in the future. It is easier to add something than it is to remove it.
-* Reduced Choices: In the long run, choice leads to more work
-to implement, and more ambiguity. Strive towards having only _one_ way of doing each thing.
-* Structure and Clarity: This is not a "compressed" format and not designed for
-very large structures or high-bandwidth streams.
-* Property Names: Uses snake_case convention for property names.
-* Resource Names: Overall structure (when flattened to paths), follows the
-[API Resource Names guidline](https://cloud.google.com/apis/design/resource_names).
-
-## Schema Structure
-
-Schemas are broken down into several top-level sub-schemas that are invoked for
-different aspects of device management:
-* Device _state_ ([example](state.tests/example.json)), sent from device to cloud,
-defined by [state.json ](state.json). There is one current _state_ per device,
-which is considered sticky until a new state message is sent.
-is comprised of several subsections (e.g. _system_ or _pointset_) that describe the
-relevant sub-state components.
-* Device _config_ ([example](config.tests/example.json)), passed from cloud to device,
-defined by [config.json ](config.json). There is one active _config_ per device,
-which is considered current until a new config is recevied.
-* Message _envelope_ ([example](envelope.tests/example.json)) for server-side
-attributes of received messages, defined by [envelope.json ](envelope.json). This is
-automatically generated by the transport layer and is then available for server-side
-processing.
-* Device _metadata_ ([example](metadata.tests/example.json)) stored in the cloud about a device,
-but not directly available to or on the device, defined by [metadata.json ](metadata.json).
-This is essentially a specification about how the device should be configured or
-expectations about what the device should be doing.
-* Streaming device telemetry, which can take on several different forms, depending on the intended
-use, e.g.:
- * Streaming _pointset_ ([example](pointset.tests/example.json)) from device to cloud,
- defined by [pointset.json ](pointset.json). _pointset_ is used for delivering a
- set of data point telemetry.
- * Core _system_ messages ([example](system.tests/example.json)) from devices, such as log
- entries and access logs, defined by [system.json ](system.json).
- * Local _discover_ messages ([example](discover.tests/example.json)) that show the
- results of local scans or probes to determine which devices are on the local network,
- defined by [discover.json ](discover.json).
-
-A device client implementation will typically only be aware of the _state_, _config_, and
-one or more telemetry messages (e.g. _pointset_), while all others are meant for the supporting
-infrastructure. Additionally, the _state_ and _config_ parts are comprised of several distinct
-subsections (e.g. _system_, _pointset_, or _gateway_) that relate to various bits of functionality.
-
-## Validation
-
-To verify correct operation of a real system, follow the instructions outlined in the
-[validator subsystem docs](../../docs/validator.md), which provides for a suitable
-communication channel. Additional sample messages are easy to include in the regression
-suite if there are new cases to test.
-
-## Message Detail Notes
-
-### State Message
-
-* See notes below about 'State status' fields.
-* There is an implicit minimum update interval of _one second_ applied to state updates, and it
-is considered an error to update device state more often than that.
-* `last_config` should be the timestamp _from_ the `timestamp` field of the last successfully
-parsed `config` message.
-
-### Config Message
-
-* `sample_rate_sec`: Sampling rate for the system, which should proactively send an
-update (e.g. _pointset_, _logentry_, _discover_ message) at this interval.
-* `sample_limit_sec`: Minimum time between sample updates. Updates that happen faster than this time
-(e.g. due to _cov_ events) should be coalesced so that only the most recent update is sent.
-* `fix_value`: Fix a value to be used during diagnostics and operational use. Should
-override any operational values, but not override alarm conditions.
-* `min_loglevel`: Indicates the minimum loglevel for reporting log messages below which log entries
-should not be sent. See note below for a description of the level value.
-
-### Logentry Message
-
-* See notes below about 'logentry entries' fields.
-
-### State status and logentry entries fields
-
-The State and System/logentry messages both have `status` and `entries` sub-fields, respectivly, that
-follow the same structure.
-* State `status` entries represent 'sticky' conditions that persist until the situation is cleared,
-e.g. "device disconnected".
-* A `statuses` entry is a map of 'sticky' conditions that are keyed on a value that can be
-used to manage updates by a particular (device dependent) subsystem.
-* Logentry `entries` fields are transitory event that happen, e.g. "connection failed".
-* The log `entries` field is an array that can be used to collaesce multiple log updates into
-one message.
-* Config parse errors should be represented as a system-level device state `status` entry.
-* The `message` field sould be a one-line representation of the triggering condition.
-* The `detail` field can be multi-line and include more detail, e.g. a complete program
-stack-trace.
-* The `category` field is a device-specific representation of which sub-system the message comes
-from. In a Java environment, for example, it would be the fully qualified path name of the Class
-triggering the message.
-* A `status` or `statuses` `timestamp` field should be the timestamp the condition was triggered,
-or most recently updated. It might be different than the top-level message `timestamp` if the
-condition is not checked often or is sticky until it's cleared.
-* A logentry `entries` `timestamp` field is the time that the event occured, which is potentially
-different than the top-level `timestamp` field (which is when the log was sent).
-* The status `level` should conform to the numerical
-[Stackdriver LogEntry](https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#logseverity)
-levels. The `DEFAULT` value of 0 is not allowed (lowest value is 100, maximum 800).
diff --git a/schemas/udmi/TECH_STACK.md b/schemas/udmi/TECH_STACK.md
deleted file mode 100644
index 86d3c0ab4e..0000000000
--- a/schemas/udmi/TECH_STACK.md
+++ /dev/null
@@ -1,46 +0,0 @@
-# UDMI Technology Stack
-
-The complete UDMI specificaiton (super set of the base schema), specifies a complete
-technology stack for compliant IoT devices.
-
-# Core Requirements
-
-* [Google Cloud's MQTT Protocol Bridge](https://cloud.google.com/iot/docs/how-tos/mqtt-bridge).
- * This is _not_ the same as a generic MQTT Broker, but it is compatible with standard client-side libraries.
- * Other transports (non-Google MQTT, CoAP, etc...) are acceptable with prior approval.
- * Connected to a specific Cloud IoT Registry designated for each site-specific project.
-* Utilizes the MQTT Topic table listed below.
-* JSON encoding following the core [UDMI Schema](README.md), specifying the semantic structure of the data.
-* Passes the [DAQ Validation Tool](../../docs/validator.md) for all requirements.
-
-# MQTT Topic Table
-
-| Type | Category | subFolder | MQTT Topic | Schema File |
-|----------|----------|-----------|----------------------------------------|---------------|
-| state | state | _n/a_ | `/devices/{device_id}/state` | state.json |
-| config | config | _n/a_ | `/devices/{device-id}/config` | config.json |
-| pointset | event | pointset | `/devices/{device-id}/events/pointset` | pointset.json |
-| system | event | system | `/devices/{device-id}/events/system` | system.json |
-
-# Backend Systems
-
-Any backend system (in a GCP project) should adhere to the following guidelines:
-* All messages to/from the devices should conform to the UDMI schema payloads (pass validation).
-* All exchanges with the devices should go through a PubSub topic:
- * The _state_ and _event_ messages are published to a topic configured through the IoT Core registry.
- * If necessary, any _config_ or _command_ messages should go through a PubSub topic, and then converted to the requisite Cloud IoT
- config write using a simple cloud function.
-* To make data persistent, it can be written to a back-end database, e.g. Firestore. See the `device_telemetry` and
- `device_state` [example cloud functions](../../firebase/functions/index.js) for details.
-* A similar function called `device_config` shows how PubSub can be used to update the Cloud IoT configuration.
-
-A config push can be tested with something like:
-
-```
-gcloud pubsub topics publish target \
- --attribute subFolder=config,deviceId=AHU-1,projectId=bos-daq-testing,cloudRegion=us-central1,deviceRegistryId=registrar_test \
- --message '{"version": 1, "timestamp": "2019-01-17T14:02:29.364Z"}'
-```
-
-The reason for the redirection of any data through a PubSub topic is so that the Cloud IoT registry, if necessary,
-can be housed in a different cloud project from the backend applications.
diff --git a/schemas/udmi/config.json b/schemas/udmi/config.json
deleted file mode 100644
index 73f19b0b4b..0000000000
--- a/schemas/udmi/config.json
+++ /dev/null
@@ -1,33 +0,0 @@
-{
- "title": "Device Config Schema",
- "type": "object",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "additionalProperties": false,
- "required": [
- "timestamp",
- "version"
- ],
- "properties": {
- "timestamp": {
- "type": "string",
- "format": "date-time"
- },
- "version": {
- "enum": [
- 1
- ]
- },
- "system": {
- "$ref": "file:config_system.json#"
- },
- "gateway": {
- "$ref": "file:config_gateway.json#"
- },
- "localnet": {
- "$ref": "file:config_localnet.json#"
- },
- "pointset": {
- "$ref": "file:config_pointset.json#"
- }
- }
-}
diff --git a/schemas/udmi/config.tests/empty.json b/schemas/udmi/config.tests/empty.json
deleted file mode 100644
index 2c63c08510..0000000000
--- a/schemas/udmi/config.tests/empty.json
+++ /dev/null
@@ -1,2 +0,0 @@
-{
-}
diff --git a/schemas/udmi/config.tests/empty.out b/schemas/udmi/config.tests/empty.out
deleted file mode 100644
index c9aa2803fc..0000000000
--- a/schemas/udmi/config.tests/empty.out
+++ /dev/null
@@ -1,6 +0,0 @@
-Validating 1 schemas
- Validating 1 files against config.json
- Against input config.tests/empty.json
- #: 2 schema violations found
- #: required key [timestamp] not found
- #: required key [version] not found
diff --git a/schemas/udmi/config.tests/errors.json b/schemas/udmi/config.tests/errors.json
deleted file mode 100644
index 407a7c9eb0..0000000000
--- a/schemas/udmi/config.tests/errors.json
+++ /dev/null
@@ -1,30 +0,0 @@
-{
- "version": 1,
- "timestamp": "2018-08-26T21:39:29.364Z",
- "properties": {
- "make_model": "com.yoyodine.flux_capacitor",
- "whoowhoo": true,
- "release": "231_rev_8"
- },
- "type": "config",
- "system": {
- },
- "points": {
- },
- "pointset": {
- "sample_rate_sec": "5",
- "version": 1,
- "id": "miXeD_CaSE",
- "timestamp": "2018-08-26T21:39:29.364Z",
- "properties": {
- "device_id": "33895507",
- "object_name": "UK-BRH-XX_AHU-001",
- },
- "points": {
- "return_air_temperature_sensor": {
- "object_type": "analog_input",
- "units": "Degrees Celsius"
- }
- }
- }
-}
diff --git a/schemas/udmi/config.tests/errors.out b/schemas/udmi/config.tests/errors.out
deleted file mode 100644
index 80edaa341f..0000000000
--- a/schemas/udmi/config.tests/errors.out
+++ /dev/null
@@ -1,16 +0,0 @@
-Validating 1 schemas
- Validating 1 files against config.json
- Against input config.tests/errors.json
- #: 10 schema violations found
- #/pointset: 7 schema violations found
- #/pointset/points/return_air_temperature_sensor: 2 schema violations found
- #/pointset/points/return_air_temperature_sensor: extraneous key [object_type] is not permitted
- #/pointset/points/return_air_temperature_sensor: extraneous key [units] is not permitted
- #/pointset/sample_rate_sec: expected type: Number, found: String
- #/pointset: extraneous key [id] is not permitted
- #/pointset: extraneous key [properties] is not permitted
- #/pointset: extraneous key [timestamp] is not permitted
- #/pointset: extraneous key [version] is not permitted
- #: extraneous key [points] is not permitted
- #: extraneous key [properties] is not permitted
- #: extraneous key [type] is not permitted
diff --git a/schemas/udmi/config.tests/example.json b/schemas/udmi/config.tests/example.json
deleted file mode 100644
index 164fb358b2..0000000000
--- a/schemas/udmi/config.tests/example.json
+++ /dev/null
@@ -1,18 +0,0 @@
-{
- "version": 1,
- "timestamp": "2018-08-26T21:39:29.364Z",
- "system": {
- "min_loglevel": 500
- },
- "pointset": {
- "sample_limit_sec": 2,
- "sample_rate_sec": 500,
- "points": {
- "return_air_temperature_sensor": {
- },
- "nexus_sensor": {
- "fix_value": 21.1
- }
- }
- }
-}
diff --git a/schemas/udmi/config.tests/example.out b/schemas/udmi/config.tests/example.out
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/schemas/udmi/config.tests/fcu.json b/schemas/udmi/config.tests/fcu.json
deleted file mode 100644
index 03380428c8..0000000000
--- a/schemas/udmi/config.tests/fcu.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "version": 1,
- "timestamp": "2019-01-17T14:02:29.364Z",
- "system": {
- "max_update_ms": 50000,
- "min_loglevel": 500
- },
- "pointset": {
- "points": {
- "space_temperature_sensor": {
- },
- "fan_run_status": {
- "fix_value": true
- },
- "fan_run_enable": {
- "fix_value": false
- },
- "chilled_water_valve_percentage_command": {
- "min_update_ms": 1000
- }
- }
- }
-}
diff --git a/schemas/udmi/config.tests/fcu.out b/schemas/udmi/config.tests/fcu.out
deleted file mode 100644
index f81283bace..0000000000
--- a/schemas/udmi/config.tests/fcu.out
+++ /dev/null
@@ -1,6 +0,0 @@
-Validating 1 schemas
- Validating 1 files against config.json
- Against input config.tests/fcu.json
- #: 2 schema violations found
- #/pointset/points/chilled_water_valve_percentage_command: extraneous key [min_update_ms] is not permitted
- #/system: extraneous key [max_update_ms] is not permitted
diff --git a/schemas/udmi/config.tests/gateway.json b/schemas/udmi/config.tests/gateway.json
deleted file mode 100644
index 294aabb2ce..0000000000
--- a/schemas/udmi/config.tests/gateway.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "version": 1,
- "timestamp": "2018-08-26T21:39:29.364Z",
- "gateway": {
- "proxy_ids": [ "AHU-123", "SMS-81", "991" ]
- }
-}
diff --git a/schemas/udmi/config.tests/gateway.out b/schemas/udmi/config.tests/gateway.out
deleted file mode 100644
index 1f85426706..0000000000
--- a/schemas/udmi/config.tests/gateway.out
+++ /dev/null
@@ -1,4 +0,0 @@
-Validating 1 schemas
- Validating 1 files against config.json
- Against input config.tests/gateway.json
- #/gateway/proxy_ids/2: string [991] does not match pattern ^[A-Z]{3}-[1-9][0-9]{0,2}$
diff --git a/schemas/udmi/config.tests/proxy.json b/schemas/udmi/config.tests/proxy.json
deleted file mode 100644
index 6f56202964..0000000000
--- a/schemas/udmi/config.tests/proxy.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "version": 1,
- "timestamp": "2018-08-26T21:39:29.364Z",
- "system": {
- "min_loglevel": 500
- },
- "localnet": {
- "subsystem": {
- "bacnet": {
- "local_id": "0x78ce1900"
- }
- }
- },
- "pointset": {
- "sample_limit_sec": 2,
- "sample_rate_sec": 500,
- "points": {
- "return_air_temperature_sensor": {
- "ref": "BV23.present_value"
- }
- }
- }
-}
diff --git a/schemas/udmi/config.tests/proxy.out b/schemas/udmi/config.tests/proxy.out
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/schemas/udmi/config.tests/rotate.json b/schemas/udmi/config.tests/rotate.json
deleted file mode 100644
index 652a698ba0..0000000000
--- a/schemas/udmi/config.tests/rotate.json
+++ /dev/null
@@ -1,16 +0,0 @@
-{
- "version": 1,
- "timestamp": "2018-08-26T21:39:29.364Z",
- "system": {
- "auth_key": {
- "private": "pkcs8:MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQD2PpibhV7vs1ZqGXsV3bCW2p1+WScg6QUNQQb1Ua8pjwIrOQzPfTROpAlxuBAlSbC+aDIz/NrAF0E7tNJx5N8Zk0ekIoqCEVGx/0XuJtyvSYESBclCD7bD3d6KUHcVOK/7hVo7nVnrEjjmihdsz1TSqxmIiNcSe55xboqtJBJLMb9yE646Y/P/kRKCOurR73h3a1N5ipVgpflyMVEW0z/B6GPTc4FRMCAv/6+Mp7v9kjZ/rJa7VwgSMLl/AJ1xyiH3ScQN2dBTCxeGlOu2Ed4v8Rse3OKbOyIbiXQqPeOdys7+CdAtng7qgDLQzinA1r+1YDeSgpLIEHnsnXHBBz1zAgMBAAECggEACm8ilkxzdmK6AarsvUN0jR/cVpM+hjV5glNEETcR3gfyRpSXR199zOtEfy4WeQ4DUD4oCm3ncwZJPFdwJ2VUUsgb3AeeqN5FAO7ZLrs01LSfpHzcR1FVJD2NhXzdXufVBSpkZWxIeB6AjLxDO9gZNwgK/+8UdfMJBrNxat7Ba7AtrYCaAcqh8ewsoGNJte9OC1ubLSvw5p/88XaBYyhN2MLrrOvv7hezsxVakUquPK0xCekV+ScK+6ezrtZVIkvg2ozlF2cffHRoQEjBDju/qQD3dsBAkqol7Lw25KntrM+wBSyCwD04eFzICeDYUBER20SeKEzYRCOek5TgKIeb8QKBgQD/syGWNfE7ZnlswdXYTaHCgCEgQg4N4wwqUva81agAuJbXYXPiBLq0qDiieyyPIT7qU/uCF+R25OJAl/228cVbkhyXa4TAjM5EAAuHeyOwJi+ZBE+c2Mo4Z4mJoXjLzNSvF+ytRQjoAXiErZ4+Kl6wI7zgeIA+SsA0Yy2qStJSKwKBgQD2iJ9bL0FtC/HkkgiCOblLJ1nSiMiTbDAcm9KbeeRDRVpbsYzdVkOoq1s5z6M6MdWVFIqmXL1ROlVyfesG5Dk3AbssbBt0qiF5ZXEF7N33Bqft/LW2U3mdwLVfQLJwtRZ/Uu+yGJ0y7tCEIdCsuaYRkNtZmSIU+ZcwUMr5ks5F2QKBgQC8R6mmkqfDhmxIod4VvOwsbO53c0wn+5RcoeRfHa/sf/9KLs8GkVbtaaTyN2KTLfbG0voyS+eFiHn0+DXw/MvG5qq48Im6wrOrLKFQrGKV9Tg9IwiARL16lPqYZlatMnE1UJeM6nVpaJPWloUb31UDu/z7CJ/dvmsS6Cia6Sc/KwKBgQC3LK/qmmPsV/G0uORljeok8uoESeltz/a3avfC2YBGk2MdugBF/HBtBFTV0XcgKCLfj9Gs5j8r+UG/vdtA1ZXFTx35VaHYvwf8IOknU+VgQ6vGYfvJqqA0HBkm2vU6VPKQS9kY5Lz4OQTpCA76Jz5C0vSH0AXIu+If3gfSA8gLkQKBgGcnKKp2lgIWh76ed4fpHD1dLH9naSF9GvxBEVAxfhfndMSx/hpkmbkO+78FPSDGqPLL5pc72davdrJfRg8aGi5+e2qb67y0Kd0+vlnUY/xv970/LEKDZmNhQreLTDo/wPpOSW75B6GjPhfNdc5znDUUibn6RMqyYcVOm8bLpqkZ"
- }
- },
- "pointset": {
- "points": {
- "return_air_temperature_sensor": {
- "fix_value": 21.1
- }
- }
- }
-}
diff --git a/schemas/udmi/config.tests/rotate.out b/schemas/udmi/config.tests/rotate.out
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/schemas/udmi/config.tests/smartprimus.json b/schemas/udmi/config.tests/smartprimus.json
deleted file mode 100644
index 971290d1d5..0000000000
--- a/schemas/udmi/config.tests/smartprimus.json
+++ /dev/null
@@ -1,21 +0,0 @@
-{
- "version": 1,
- "timestamp": "2019-01-17T14:02:29.364Z",
- "system": {
- },
- "pointset": {
- "sample_rate_sec": 2,
- "points": {
- "space_temperature_sensor": {
- },
- "fan_run_status": {
- "fix_value": true
- },
- "fan_run_enable": {
- "fix_value": false
- },
- "chilled_water_valve_percentage_command": {
- }
- }
- }
-}
diff --git a/schemas/udmi/config.tests/smartprimus.out b/schemas/udmi/config.tests/smartprimus.out
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/schemas/udmi/config_gateway.json b/schemas/udmi/config_gateway.json
deleted file mode 100644
index be0ba44063..0000000000
--- a/schemas/udmi/config_gateway.json
+++ /dev/null
@@ -1,18 +0,0 @@
-{
- "title": "Gateway Config Snippet",
- "type": "object",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "additionalProperties": false,
- "required": [
- "proxy_ids"
- ],
- "properties": {
- "proxy_ids": {
- "type": "array",
- "items": {
- "type": "string",
- "pattern": "^[A-Z]{3}-[1-9][0-9]{0,2}$"
- }
- }
- }
-}
diff --git a/schemas/udmi/config_localnet.json b/schemas/udmi/config_localnet.json
deleted file mode 100644
index 4177f240d6..0000000000
--- a/schemas/udmi/config_localnet.json
+++ /dev/null
@@ -1,27 +0,0 @@
-{
- "title": "Proxy Device Config Snippet",
- "type": "object",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "additionalProperties": false,
- "properties": {
- "subsystem": {
- "type": "object",
- "patternProperties": {
- "^[a-z0-9-]+$": {
- "additionalProperties": false,
- "properties": {
- "local_id": {
- "type": "string"
- }
- },
- "required": [
- "local_id"
- ]
- }
- }
- }
- },
- "required": [
- "subsystem"
- ]
-}
diff --git a/schemas/udmi/config_pointset.json b/schemas/udmi/config_pointset.json
deleted file mode 100644
index a7ec32f936..0000000000
--- a/schemas/udmi/config_pointset.json
+++ /dev/null
@@ -1,34 +0,0 @@
-{
- "title": "pointset config snippet",
- "type": "object",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "additionalProperties": false,
- "properties": {
- "sample_limit_sec": {
- "type": "number",
- "minimum": 1,
- "maximum": 86400
- },
- "sample_rate_sec": {
- "type": "number",
- "minimum": 1,
- "maximum": 86400
- },
- "points": {
- "additionalProperties": false,
- "patternProperties": {
- "^[a-z][a-z0-9]*(_[a-z0-9]+)*$": {
- "additionalProperties": false,
- "properties": {
- "ref": {
- "type": "string"
- },
- "fix_value": {
- "type": ["number", "string", "boolean"]
- }
- }
- }
- }
- }
- }
-}
diff --git a/schemas/udmi/config_system.json b/schemas/udmi/config_system.json
deleted file mode 100644
index 10a3da3ba2..0000000000
--- a/schemas/udmi/config_system.json
+++ /dev/null
@@ -1,25 +0,0 @@
-{
- "title": "System Config snippet",
- "type": "object",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "additionalProperties": false,
- "properties": {
- "min_loglevel": {
- "type": "number",
- "minimum": 100,
- "maximum": 800
- },
- "auth_key": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "private": {
- "type": "string"
- }
- },
- "required": [
- "private"
- ]
- }
- }
-}
diff --git a/schemas/udmi/discover.json b/schemas/udmi/discover.json
deleted file mode 100644
index 8abd70e24c..0000000000
--- a/schemas/udmi/discover.json
+++ /dev/null
@@ -1,52 +0,0 @@
-{
- "title": "Device discover schema",
- "type": "object",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "additionalProperties": false,
- "properties": {
- "timestamp": {
- "type": "string",
- "format": "date-time"
- },
- "version": {
- "enum": [
- 1
- ]
- },
- "protocol": {
- "type": "string"
- },
- "local_id": {
- "type": "string"
- },
- "points": {
- "additionalProperties": false,
- "patternProperties": {
- "^[a-z][a-z0-9]*(_[a-z0-9]+)*$": {
- "$ref": "#/definitions/point_property_names"
- }
- }
- }
- },
- "required": [
- "timestamp",
- "version",
- "protocol",
- "local_id",
- "points"
- ],
- "definitions": {
- "point_property_names": {
- "propertyNames": {
- "oneOf": [
- {
- "enum": [
- "units",
- "present_value"
- ]
- }
- ]
- }
- }
- }
-}
diff --git a/schemas/udmi/discover.tests/empty.json b/schemas/udmi/discover.tests/empty.json
deleted file mode 100644
index 2c63c08510..0000000000
--- a/schemas/udmi/discover.tests/empty.json
+++ /dev/null
@@ -1,2 +0,0 @@
-{
-}
diff --git a/schemas/udmi/discover.tests/empty.out b/schemas/udmi/discover.tests/empty.out
deleted file mode 100644
index f9fdc0c692..0000000000
--- a/schemas/udmi/discover.tests/empty.out
+++ /dev/null
@@ -1,9 +0,0 @@
-Validating 1 schemas
- Validating 1 files against discover.json
- Against input discover.tests/empty.json
- #: 5 schema violations found
- #: required key [local_id] not found
- #: required key [points] not found
- #: required key [protocol] not found
- #: required key [timestamp] not found
- #: required key [version] not found
diff --git a/schemas/udmi/discover.tests/errors.json b/schemas/udmi/discover.tests/errors.json
deleted file mode 100644
index aa3540a596..0000000000
--- a/schemas/udmi/discover.tests/errors.json
+++ /dev/null
@@ -1,31 +0,0 @@
-{
- "version": 1,
- "timestamp": "2018-08-26T21:39:29.364Z",
- "id": "sneakyCASE",
- "properties": {
- "$comment": "Common error cases for target telemetry."
- },
- "points": {
- "bad_entity_name_": {
- "present_value": 21.30108642578125
- },
- "yoyo_motion_sensor": {
- "bad_property_name": true
- },
- "bad___sensor": {
- "present_value": 21.30108642578125
- },
- "missing_present_value": {
- },
- "old_properties": {
- "properties": {
- "present_value": true
- }
- },
- "magic_voice_recognizer": {
- "present_value": {
- "present_value": true
- }
- }
- }
-}
diff --git a/schemas/udmi/discover.tests/errors.out b/schemas/udmi/discover.tests/errors.out
deleted file mode 100644
index 2a972276f9..0000000000
--- a/schemas/udmi/discover.tests/errors.out
+++ /dev/null
@@ -1,15 +0,0 @@
-Validating 1 schemas
- Validating 1 files against discover.json
- Against input discover.tests/errors.json
- #: 8 schema violations found
- #/points: 4 schema violations found
- #/points/old_properties/properties: #: 0 subschemas matched instead of one
- #/points/old_properties/properties: properties is not a valid enum value
- #/points/yoyo_motion_sensor/bad_property_name: #: 0 subschemas matched instead of one
- #/points/yoyo_motion_sensor/bad_property_name: bad_property_name is not a valid enum value
- #/points: extraneous key [bad___sensor] is not permitted
- #/points: extraneous key [bad_entity_name_] is not permitted
- #: extraneous key [id] is not permitted
- #: extraneous key [properties] is not permitted
- #: required key [local_id] not found
- #: required key [protocol] not found
diff --git a/schemas/udmi/discover.tests/example.json b/schemas/udmi/discover.tests/example.json
deleted file mode 100644
index 9852ad3818..0000000000
--- a/schemas/udmi/discover.tests/example.json
+++ /dev/null
@@ -1,18 +0,0 @@
-{
- "version": 1,
- "timestamp": "2018-08-26T21:39:29.364Z",
- "local_id": "92EA09",
- "protocol": "bacnet",
- "points": {
- "reading_value": {
- "units": "C",
- "present_value": 21.30108642578125
- },
- "yoyo_motion_sensor": {
- "present_value": true
- },
- "enum_value": {
- "present_value": "hello"
- }
- }
-}
diff --git a/schemas/udmi/discover.tests/example.out b/schemas/udmi/discover.tests/example.out
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/schemas/udmi/docs/gateway.md b/schemas/udmi/docs/gateway.md
deleted file mode 100644
index 33f31a0152..0000000000
--- a/schemas/udmi/docs/gateway.md
+++ /dev/null
@@ -1,120 +0,0 @@
-# Device Gateway
-
-The _gateway_ functionality is used for systems that have legacy, heritage,
-or traditional devices that do not communicate directly to the cloud using
-the [UDMI specification](../README.md). For example, an older BacNET based
-system could use a gateway to translate on-prem communications into UDMI.
-
-The
-[Google Clout IoT Core Gateway Documentation](https://cloud.google.com/iot/docs/how-tos/gateways)
-for an overview of the cloud-side implementation of a gateway. UDMI, then,
-specifies an additional layer of specification around the associated
-message formats.
-
-Conceptually, there are two types of
-entities involved: the _gateway device_, and the _proxied device_. Both of
-these are 'devices' in the sense that they have an entry in a cloud registry
-and have device-level UDMI data, but they have fundamentally different roles.
-
-The process of _discovery_, which is where something discovers other devices
-on the local network, is conceptually related but functionally distinct and
-separate than a gateway.
-
-## Gateway Operation
-
-There are two modes for gateway operation: _static_ and _dynamic_. In the
-_dynamic_ mode, the gateway functionality if configured dynamically through
-gateway _config_ messages, which tell it the local devices it should proxy
-for. In a _static_ gateway configuraiton, the gateway will be statically
-configured to proxy a set of devices, essentally ignoring any information
-in the associated _config_ block.
-
-The general sequence of events for gateway operation is:
-1. Optional metadata specifies configuration paramaters that should be used
-at install time to properly (manually) setup the device.
-2. (_dynamic_ only) On startup, the gateway connects to the cloud and receives a configuration
-block that details which _proxy devices_ the gateway should proxy for.
-4. Gateway 'attaches' (Cloud IoT terminology) to the proxied devices,
-receiving a configuration block for each proxied device. Any attch errors are
-indicated in the gateway _status_ block and sent along as a _logentry_ event.
-5. (_dynamic_ only) The proxied device's _config_ block specifies any local connection
-parameters for the proxied device, e.g. the BacNET device id.
-6. The gateway proxies communication to/from the device, translating between
-native (e.g. BacNET) communications and UDMI-based messages.
-
-### config
-
-The [gateway config block](../config.tests/gateway.json)
-simply specifies the list of target proxy devices.
-On a config update, the gateway is responsible for handling any change in
-this list (added or removed devices). The details of proxied devices are
-kept to a minimum here (IDs only) to avoid overflowing the allowed block
-size in cases where there are a large number of devices.
-
-### state
-
-Any attach errors, e.g. the gateway can not successfully attach to the target
-device, should be reported in the [gateway state](../state.tests/gateway.json)
-and a _logentry_ message used to detail the
-nature of the problem. If the gateway can attach successfully, any other
-errors, e.g. the inability to communicate with the device over the local
-network, should be indicated as part of the proxy device status block.
-
-### telemetry
-
-Telemety from the gateway would primarily consist of standard
-[_logentry_](../logentry.tests/logentry.json) messages, which
-provide a running comentary about gateway operation. Specificaly, if there
-is an error attaching, then there should be appropriate logging to help
-diagnose the problem.
-
-### metadata
-
-The gateway [metadata block](../metadata.tests/gateway.json) specifies
-any information necessary either for the
-initial (manual) configuration of the device or ongoing validation of
-operation. E.g., if a gateway device has a unique MAC address used for
-local communications, it would be indicated here.
-
-## Proxy Device Operation
-
-Proxy devices are those that have a logical cloud device entry (in a registry),
-and are associated (bound) to a particular gateway. On-prem, the device
-itself talks a local protocol (e.g. BacNET), but does not have a direct
-cloud connection.
-
-### config
-
-[Proxy device config blocks](../config.tests/proxy.json) contain a special
-_localnet_ section that
-specifies information required by the gateway to contact the local device.
-E.g., the fact that a device is 'BacNET' and also the device's BacNET object
-ID. Based on this, the gateway can communicate with the target device and proxy
-all other messages.
-
-Additionally, the gateway is responsible for proxying all other supported
-operations of the config bundle. E.g., if a _pointset_ 'force_value" parameter
-is specified, the gateway would need to convert that into the local protocol
-and trigger the required functionality.
-
-### state
-
-There is no gateway-specific _state_ information, but similarly to _config_ the
-gateway is responsible for proxying all relevant state from the local device
-into the proxied device's state block. E.g., if the device is in an alarm
-state, then the gateway would have to transform that from the local format
-into the appropriate UDMI message.
-
-### telemetry
-
-Telemetry is handled similarly, with the gateway responsible for proxying data
-from local devices through to UDMI. In many cases, this would be translating
-specific device points into a [_pointset_ message](../pointset.tests/example.json).
-
-### metadata
-
-A [proxy device metadata section](../metadata.tests/proxy.json) describes
-_localnet_ with the presence of the
-device on a local network. This can/should be used for initial programming
-and configuration of the device, or to validate proper device configuration.
-The gateway implementation itself would not directly deal with this block.
diff --git a/schemas/udmi/envelope.json b/schemas/udmi/envelope.json
deleted file mode 100644
index 9458e08ba5..0000000000
--- a/schemas/udmi/envelope.json
+++ /dev/null
@@ -1,39 +0,0 @@
-{
- "title": "Message envelope schema",
- "additionalProperties": true,
- "properties": {
- "deviceId": {
- "type": "string",
- "pattern": "^[A-Z]{2,6}-[0-9]{1,6}$"
- },
- "deviceNumId": {
- "type": "string",
- "pattern": "^[0-9]+$"
- },
- "deviceRegistryId": {
- "type": "string",
- "pattern": "^[a-zA-Z][-a-zA-Z0-9._+~%]*[a-zA-Z0-9]$"
- },
- "projectId": {
- "type": "string",
- "pattern": "^([.a-z]+:)?[a-z][-a-z0-9]*[a-z0-9]$"
- },
- "subFolder": {
- "enum": [
- "config",
- "discover",
- "system",
- "metadata",
- "pointset",
- "state"
- ]
- }
- },
- "required": [
- "projectId",
- "deviceRegistryId",
- "deviceNumId",
- "deviceId",
- "subFolder"
- ]
-}
diff --git a/schemas/udmi/envelope.tests/empty.json b/schemas/udmi/envelope.tests/empty.json
deleted file mode 100644
index 2c63c08510..0000000000
--- a/schemas/udmi/envelope.tests/empty.json
+++ /dev/null
@@ -1,2 +0,0 @@
-{
-}
diff --git a/schemas/udmi/envelope.tests/empty.out b/schemas/udmi/envelope.tests/empty.out
deleted file mode 100644
index d4d84fce5c..0000000000
--- a/schemas/udmi/envelope.tests/empty.out
+++ /dev/null
@@ -1,9 +0,0 @@
-Validating 1 schemas
- Validating 1 files against envelope.json
- Against input envelope.tests/empty.json
- #: 5 schema violations found
- #: required key [deviceId] not found
- #: required key [deviceNumId] not found
- #: required key [deviceRegistryId] not found
- #: required key [projectId] not found
- #: required key [subFolder] not found
diff --git a/schemas/udmi/envelope.tests/errors1.json b/schemas/udmi/envelope.tests/errors1.json
deleted file mode 100644
index ff059c78e0..0000000000
--- a/schemas/udmi/envelope.tests/errors1.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "deviceRegistryId": "test/registry",
- "deviceNumId": "921302198324X",
- "deviceId": "fcu-1"
-}
diff --git a/schemas/udmi/envelope.tests/errors1.out b/schemas/udmi/envelope.tests/errors1.out
deleted file mode 100644
index 601887a26a..0000000000
--- a/schemas/udmi/envelope.tests/errors1.out
+++ /dev/null
@@ -1,9 +0,0 @@
-Validating 1 schemas
- Validating 1 files against envelope.json
- Against input envelope.tests/errors1.json
- #: 5 schema violations found
- #/deviceId: string [fcu-1] does not match pattern ^[A-Z]{2,6}-[0-9]{1,6}$
- #/deviceNumId: string [921302198324X] does not match pattern ^[0-9]+$
- #/deviceRegistryId: string [test/registry] does not match pattern ^[a-zA-Z][-a-zA-Z0-9._+~%]*[a-zA-Z0-9]$
- #: required key [projectId] not found
- #: required key [subFolder] not found
diff --git a/schemas/udmi/envelope.tests/errors2.json b/schemas/udmi/envelope.tests/errors2.json
deleted file mode 100644
index 9cbb163ade..0000000000
--- a/schemas/udmi/envelope.tests/errors2.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "deviceRegistryId": "test-registry",
- "deviceNumId": "-9213923812",
- "deviceId": "FCUs_02_NW_12"
-}
diff --git a/schemas/udmi/envelope.tests/errors2.out b/schemas/udmi/envelope.tests/errors2.out
deleted file mode 100644
index 0c02908e03..0000000000
--- a/schemas/udmi/envelope.tests/errors2.out
+++ /dev/null
@@ -1,8 +0,0 @@
-Validating 1 schemas
- Validating 1 files against envelope.json
- Against input envelope.tests/errors2.json
- #: 4 schema violations found
- #/deviceId: string [FCUs_02_NW_12] does not match pattern ^[A-Z]{2,6}-[0-9]{1,6}$
- #/deviceNumId: string [-9213923812] does not match pattern ^[0-9]+$
- #: required key [projectId] not found
- #: required key [subFolder] not found
diff --git a/schemas/udmi/envelope.tests/example.json b/schemas/udmi/envelope.tests/example.json
deleted file mode 100644
index b48f45a49a..0000000000
--- a/schemas/udmi/envelope.tests/example.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "projectId": "daq-test-suite",
- "deviceRegistryId": "test_registry",
- "deviceNumId": "921302198324",
- "deviceId": "FCU-2",
- "subFolder": "pointset"
-}
diff --git a/schemas/udmi/envelope.tests/example.out b/schemas/udmi/envelope.tests/example.out
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/schemas/udmi/envelope.tests/example2.json b/schemas/udmi/envelope.tests/example2.json
deleted file mode 100644
index 572406ce32..0000000000
--- a/schemas/udmi/envelope.tests/example2.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "projectId": "daq-test-suite",
- "deviceRegistryId": "test-registry",
- "deviceNumId": "23812",
- "deviceId": "FCU-002",
- "subFolder": "system",
-}
diff --git a/schemas/udmi/envelope.tests/example2.out b/schemas/udmi/envelope.tests/example2.out
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/schemas/udmi/envelope.tests/lgtw.json b/schemas/udmi/envelope.tests/lgtw.json
deleted file mode 100644
index 8e7b88cbd9..0000000000
--- a/schemas/udmi/envelope.tests/lgtw.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "projectId": "daq-test-suite",
- "deviceRegistryId": "test_registry",
- "deviceNumId": "921302198324",
- "deviceId": "LGTW-2",
- "subFolder": "discover"
-}
diff --git a/schemas/udmi/envelope.tests/lgtw.out b/schemas/udmi/envelope.tests/lgtw.out
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/schemas/udmi/metadata.json b/schemas/udmi/metadata.json
deleted file mode 100644
index e01dbe16cc..0000000000
--- a/schemas/udmi/metadata.json
+++ /dev/null
@@ -1,41 +0,0 @@
-{
- "title": "Device metadata schema",
- "type": "object",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "additionalProperties": false,
- "required": [
- "timestamp",
- "version",
- "system"
- ],
- "properties": {
- "timestamp": {
- "type": "string",
- "format": "date-time"
- },
- "version": {
- "enum": [
- 1
- ]
- },
- "hash": {
- "type": "string",
- "pattern": "^[0-9a-z]{8}$"
- },
- "cloud": {
- "$ref": "file:metadata_cloud.json#"
- },
- "system": {
- "$ref": "file:metadata_system.json#"
- },
- "gateway": {
- "$ref": "file:metadata_gateway.json#"
- },
- "localnet": {
- "$ref": "file:metadata_localnet.json#"
- },
- "pointset": {
- "$ref": "file:metadata_pointset.json#"
- }
- }
-}
diff --git a/schemas/udmi/metadata.tests/empty.json b/schemas/udmi/metadata.tests/empty.json
deleted file mode 100644
index 0967ef424b..0000000000
--- a/schemas/udmi/metadata.tests/empty.json
+++ /dev/null
@@ -1 +0,0 @@
-{}
diff --git a/schemas/udmi/metadata.tests/empty.out b/schemas/udmi/metadata.tests/empty.out
deleted file mode 100644
index 159f05c268..0000000000
--- a/schemas/udmi/metadata.tests/empty.out
+++ /dev/null
@@ -1,7 +0,0 @@
-Validating 1 schemas
- Validating 1 files against metadata.json
- Against input metadata.tests/empty.json
- #: 3 schema violations found
- #: required key [system] not found
- #: required key [timestamp] not found
- #: required key [version] not found
diff --git a/schemas/udmi/metadata.tests/errors.json b/schemas/udmi/metadata.tests/errors.json
deleted file mode 100644
index 395b19e45f..0000000000
--- a/schemas/udmi/metadata.tests/errors.json
+++ /dev/null
@@ -1,25 +0,0 @@
-{
- "version": 1,
- "timestamp": "2018-08-26T21:39:29.364Z",
- "system": {
- "guid": "bim://04aEp5ymD_$u5IxhJN2aGi",
- "location": {
- "site": "New Zealand"
- },
- "physical_tag": {
- "asset": {
- "site": "US-SFO-XYY_Noope!",
- "name": "AHU-A01_extension11-option"
- }
- }
- },
- "pointset": {
- "rabbits": true,
- "points": {
- "return_air_temperature_sensor": {
- "units": "Celsius",
- "monkeys": "elephants"
- }
- }
- }
-}
diff --git a/schemas/udmi/metadata.tests/errors.out b/schemas/udmi/metadata.tests/errors.out
deleted file mode 100644
index c315c2fd40..0000000000
--- a/schemas/udmi/metadata.tests/errors.out
+++ /dev/null
@@ -1,16 +0,0 @@
-Validating 1 schemas
- Validating 1 files against metadata.json
- Against input metadata.tests/errors.json
- #: 8 schema violations found
- #/pointset: 3 schema violations found
- #/pointset/points/return_air_temperature_sensor: 2 schema violations found
- #/pointset/points/return_air_temperature_sensor/units: Celsius is not a valid enum value
- #/pointset/points/return_air_temperature_sensor: extraneous key [monkeys] is not permitted
- #/pointset: extraneous key [rabbits] is not permitted
- #/system: 5 schema violations found
- #/system/location/site: string [New Zealand] does not match pattern ^[A-Z]{2}-[A-Z]{3}-[A-Z0-9]{2,9}$
- #/system/physical_tag/asset: 3 schema violations found
- #/system/physical_tag/asset/name: string [AHU-A01_extension11-option] does not match pattern ^[A-Z]{2,6}-[0-9]{1,6}$
- #/system/physical_tag/asset/site: string [US-SFO-XYY_Noope!] does not match pattern ^[A-Z]{2}-[A-Z]{3}-[A-Z0-9]{2,9}$
- #/system/physical_tag/asset: required key [guid] not found
- #/system: extraneous key [guid] is not permitted
diff --git a/schemas/udmi/metadata.tests/example.json b/schemas/udmi/metadata.tests/example.json
deleted file mode 100644
index 4f1df64175..0000000000
--- a/schemas/udmi/metadata.tests/example.json
+++ /dev/null
@@ -1,31 +0,0 @@
-{
- "version": 1,
- "timestamp": "2018-08-26T21:39:29.364Z",
- "system": {
- "location": {
- "site": "US-SFO-XYY",
- "section": "NW-2F",
- "position": {
- "x": 10,
- "y": 20
- }
- },
- "physical_tag": {
- "asset": {
- "guid": "bim://04aEp5ymD_$u5IxhJN2aGi",
- "site": "US-SFO-XYY",
- "name": "AHU-1"
- }
- },
- "aux": {
- "suffix": "extention11-optional",
- }
- },
- "pointset": {
- "points": {
- "return_air_temperature_sensor": {
- "units": "Degrees-Celsius"
- }
- }
- }
-}
diff --git a/schemas/udmi/metadata.tests/example.out b/schemas/udmi/metadata.tests/example.out
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/schemas/udmi/metadata.tests/example2.out b/schemas/udmi/metadata.tests/example2.out
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/schemas/udmi/metadata.tests/gateway.json b/schemas/udmi/metadata.tests/gateway.json
deleted file mode 100644
index 2da945023e..0000000000
--- a/schemas/udmi/metadata.tests/gateway.json
+++ /dev/null
@@ -1,41 +0,0 @@
-{
- "version": 1,
- "timestamp": "2018-08-26T21:39:29.364Z",
- "system": {
- "location": {
- "site": "US-SFO-XYY",
- "section": "NW-2F",
- "position": {
- "x": 10,
- "y": 20
- }
- },
- "physical_tag": {
- "asset": {
- "guid": "bim://04aEp5ymD_$u5IxhJN2aGi",
- "site": "US-SFO-XYY",
- "name": "AHU-01"
- }
- }
- },
- "cloud": {
- "auth_type": "RS256",
- },
- "gateway": {
- "proxy_ids": ["AHU-22"]
- },
- "pointset": {
- "points": {
- "return_air_temperature_sensor": {
- "units": "Degrees-Celsius"
- }
- }
- },
- "localnet": {
- "subsystem": {
- "bacnet": {
- "local_id": "0x991132ec"
- }
- }
- }
-}
diff --git a/schemas/udmi/metadata.tests/gateway.out b/schemas/udmi/metadata.tests/gateway.out
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/schemas/udmi/metadata.tests/proxy.json b/schemas/udmi/metadata.tests/proxy.json
deleted file mode 100644
index bbc522af3c..0000000000
--- a/schemas/udmi/metadata.tests/proxy.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
- "version": 1,
- "timestamp": "2018-08-26T21:39:29.364Z",
- "system": {
- "location": {
- "site": "US-SFO-XYY",
- "section": "NW-2F",
- "position": {
- "x": 10,
- "y": 20
- }
- },
- "physical_tag": {
- "asset": {
- "guid": "bim://04aEp5ymD_$u5IxhJN2aGi",
- "site": "US-SFO-XYY",
- "name": "AHU-1",
- }
- },
- "aux": {
- "suffix": "extention11-optional"
- }
- },
- "localnet": {
- "subsystem": {
- "bacnet": {
- "local_id": "0x82eecd"
- }
- }
- },
- "pointset": {
- "points": {
- "return_air_temperature_sensor": {
- "units": "Degrees-Celsius",
- "ref": "BV23.present_value"
- }
- }
- },
- "gateway": {
- "subsystem": "bacnet",
- "gateway_id": "GAT-123"
- }
-}
diff --git a/schemas/udmi/metadata.tests/proxy.out b/schemas/udmi/metadata.tests/proxy.out
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/schemas/udmi/metadata.tests/toomany.json b/schemas/udmi/metadata.tests/toomany.json
deleted file mode 100644
index 6c5b9671e0..0000000000
--- a/schemas/udmi/metadata.tests/toomany.json
+++ /dev/null
@@ -1,2035 +0,0 @@
-{
- "system": {
- "location": {
- "site": "UK-LON-S2"
- },
- "physical_tag": {
- "asset": {
- "name": "UK-LON-S2_LTGW-3",
- "guid": "ifc://27UivR75r3481CVsBDvlfl"
- }
- }
- },
- "pointset": {
- "points": {
- "bw4_group_brightness12": {
- "units": "Percent"
- },
- "bw4_group_brightness13": {
- "units": "Percent"
- },
- "bw4_group_brightness10": {
- "units": "Percent"
- },
- "bw4_group_brightness11": {
- "units": "Percent"
- },
- "bw4_group_brightness16": {
- "units": "Percent"
- },
- "bw4_group_brightness14": {
- "units": "Percent"
- },
- "bw4_group_brightness15": {
- "units": "Percent"
- },
- "bw4_light_level1": {
- "units": "Luxes"
- },
- "bw4_light_level2": {
- "units": "Luxes"
- },
- "bw4_light_level7": {
- "units": "Luxes"
- },
- "bw4_light_level8": {
- "units": "Luxes"
- },
- "bw4_light_level9": {
- "units": "Luxes"
- },
- "bw4_light_level3": {
- "units": "Luxes"
- },
- "bw4_light_level4": {
- "units": "Luxes"
- },
- "bw4_light_level5": {
- "units": "Luxes"
- },
- "bw4_light_level6": {
- "units": "Luxes"
- },
- "bw5_lamp_brightness61": {
- "units": "Percent"
- },
- "bw5_lamp_brightness62": {
- "units": "Percent"
- },
- "bw5_lamp_brightness63": {
- "units": "Percent"
- },
- "bw5_lamp_brightness64": {
- "units": "Percent"
- },
- "bw5_lamp_brightness60": {
- "units": "Percent"
- },
- "bw6_lamp_brightness63": {
- "units": "Percent"
- },
- "bw6_lamp_brightness62": {
- "units": "Percent"
- },
- "bw6_lamp_brightness61": {
- "units": "Percent"
- },
- "bw6_lamp_brightness60": {
- "units": "Percent"
- },
- "bw6_lamp_brightness64": {
- "units": "Percent"
- },
- "bw4_lamp_brightness60": {
- "units": "Percent"
- },
- "bw4_occupancy5": {
- "units": "No-units"
- },
- "bw5_lamp_brightness58": {
- "units": "Percent"
- },
- "bw4_lamp_brightness61": {
- "units": "Percent"
- },
- "bw4_occupancy6": {
- "units": "No-units"
- },
- "bw5_lamp_brightness59": {
- "units": "Percent"
- },
- "bw4_occupancy7": {
- "units": "No-units"
- },
- "bw4_occupancy8": {
- "units": "No-units"
- },
- "bw4_lamp_brightness64": {
- "units": "Percent"
- },
- "bw4_occupancy9": {
- "units": "No-units"
- },
- "bw5_lamp_brightness54": {
- "units": "Percent"
- },
- "bw5_lamp_brightness55": {
- "units": "Percent"
- },
- "bw4_lamp_brightness62": {
- "units": "Percent"
- },
- "bw5_lamp_brightness56": {
- "units": "Percent"
- },
- "bw4_lamp_brightness63": {
- "units": "Percent"
- },
- "bw5_lamp_brightness57": {
- "units": "Percent"
- },
- "bw5_lamp_brightness50": {
- "units": "Percent"
- },
- "bw5_lamp_brightness51": {
- "units": "Percent"
- },
- "bw5_lamp_brightness52": {
- "units": "Percent"
- },
- "bw5_lamp_brightness53": {
- "units": "Percent"
- },
- "bw4_occupancy1": {
- "units": "No-units"
- },
- "bw4_occupancy2": {
- "units": "No-units"
- },
- "bw4_occupancy3": {
- "units": "No-units"
- },
- "bw4_occupancy4": {
- "units": "No-units"
- },
- "bw1_group_brightness16": {
- "units": "Percent"
- },
- "bw1_group_brightness15": {
- "units": "Percent"
- },
- "bw1_group_brightness12": {
- "units": "Percent"
- },
- "bw1_group_brightness11": {
- "units": "Percent"
- },
- "bw1_group_brightness14": {
- "units": "Percent"
- },
- "bw1_group_brightness13": {
- "units": "Percent"
- },
- "bw1_group_brightness10": {
- "units": "Percent"
- },
- "bw5_lamp_brightness47": {
- "units": "Percent"
- },
- "bw5_lamp_brightness48": {
- "units": "Percent"
- },
- "bw5_lamp_brightness9": {
- "units": "Percent"
- },
- "bw5_lamp_brightness49": {
- "units": "Percent"
- },
- "bw5_lamp_brightness43": {
- "units": "Percent"
- },
- "bw6_occupancy7": {
- "units": "No-units"
- },
- "bw5_lamp_brightness44": {
- "units": "Percent"
- },
- "bw6_occupancy8": {
- "units": "No-units"
- },
- "bw5_lamp_brightness45": {
- "units": "Percent"
- },
- "bw6_occupancy9": {
- "units": "No-units"
- },
- "bw5_lamp_brightness46": {
- "units": "Percent"
- },
- "bw6_occupancy3": {
- "units": "No-units"
- },
- "bw2_lamp_brightness64": {
- "units": "Percent"
- },
- "bw5_lamp_brightness40": {
- "units": "Percent"
- },
- "bw6_occupancy4": {
- "units": "No-units"
- },
- "bw5_lamp_brightness41": {
- "units": "Percent"
- },
- "bw6_occupancy5": {
- "units": "No-units"
- },
- "bw5_lamp_brightness42": {
- "units": "Percent"
- },
- "bw6_occupancy6": {
- "units": "No-units"
- },
- "bw2_lamp_brightness61": {
- "units": "Percent"
- },
- "bw2_lamp_brightness60": {
- "units": "Percent"
- },
- "bw2_lamp_brightness63": {
- "units": "Percent"
- },
- "bw6_occupancy1": {
- "units": "No-units"
- },
- "bw2_lamp_brightness62": {
- "units": "Percent"
- },
- "bw6_occupancy2": {
- "units": "No-units"
- },
- "bw3_lamp_brightness6": {
- "units": "Percent"
- },
- "bw5_lamp_brightness36": {
- "units": "Percent"
- },
- "bw3_lamp_brightness5": {
- "units": "Percent"
- },
- "bw5_lamp_brightness37": {
- "units": "Percent"
- },
- "bw3_lamp_brightness4": {
- "units": "Percent"
- },
- "bw5_lamp_brightness38": {
- "units": "Percent"
- },
- "bw3_lamp_brightness3": {
- "units": "Percent"
- },
- "bw5_lamp_brightness39": {
- "units": "Percent"
- },
- "bw2_lamp_brightness58": {
- "units": "Percent"
- },
- "bw3_lamp_brightness2": {
- "units": "Percent"
- },
- "bw5_lamp_brightness32": {
- "units": "Percent"
- },
- "bw2_lamp_brightness57": {
- "units": "Percent"
- },
- "bw3_lamp_brightness1": {
- "units": "Percent"
- },
- "bw5_lamp_brightness33": {
- "units": "Percent"
- },
- "bw5_lamp_brightness34": {
- "units": "Percent"
- },
- "bw6_lamp_brightness29": {
- "units": "Percent"
- },
- "bw2_lamp_brightness59": {
- "units": "Percent"
- },
- "bw5_lamp_brightness35": {
- "units": "Percent"
- },
- "bw6_lamp_brightness28": {
- "units": "Percent"
- },
- "bw2_lamp_brightness54": {
- "units": "Percent"
- },
- "bw2_lamp_brightness53": {
- "units": "Percent"
- },
- "bw2_lamp_brightness56": {
- "units": "Percent"
- },
- "bw5_lamp_brightness30": {
- "units": "Percent"
- },
- "bw2_lamp_brightness55": {
- "units": "Percent"
- },
- "bw5_lamp_brightness31": {
- "units": "Percent"
- },
- "bw2_lamp_brightness50": {
- "units": "Percent"
- },
- "bw2_lamp_brightness52": {
- "units": "Percent"
- },
- "bw2_lamp_brightness51": {
- "units": "Percent"
- },
- "bw2_light_level1": {
- "units": "Luxes"
- },
- "bw3_lamp_brightness55": {
- "units": "Percent"
- },
- "bw6_lamp_brightness30": {
- "units": "Percent"
- },
- "bw2_light_level2": {
- "units": "Luxes"
- },
- "bw3_lamp_brightness54": {
- "units": "Percent"
- },
- "bw2_light_level3": {
- "units": "Luxes"
- },
- "bw3_lamp_brightness53": {
- "units": "Percent"
- },
- "bw2_light_level4": {
- "units": "Luxes"
- },
- "bw3_lamp_brightness52": {
- "units": "Percent"
- },
- "bw2_light_level5": {
- "units": "Luxes"
- },
- "bw3_lamp_brightness51": {
- "units": "Percent"
- },
- "bw6_light_level1": {
- "units": "Luxes"
- },
- "bw2_light_level6": {
- "units": "Luxes"
- },
- "bw3_lamp_brightness50": {
- "units": "Percent"
- },
- "bw6_light_level2": {
- "units": "Luxes"
- },
- "bw2_light_level7": {
- "units": "Luxes"
- },
- "bw6_light_level3": {
- "units": "Luxes"
- },
- "bw2_light_level8": {
- "units": "Luxes"
- },
- "bw6_light_level4": {
- "units": "Luxes"
- },
- "bw6_lamp_brightness38": {
- "units": "Percent"
- },
- "bw6_light_level5": {
- "units": "Luxes"
- },
- "bw6_lamp_brightness37": {
- "units": "Percent"
- },
- "bw6_light_level6": {
- "units": "Luxes"
- },
- "bw6_lamp_brightness36": {
- "units": "Percent"
- },
- "bw6_light_level7": {
- "units": "Luxes"
- },
- "bw6_lamp_brightness35": {
- "units": "Percent"
- },
- "bw6_light_level8": {
- "units": "Luxes"
- },
- "bw3_lamp_brightness59": {
- "units": "Percent"
- },
- "bw5_lamp_brightness29": {
- "units": "Percent"
- },
- "bw6_lamp_brightness34": {
- "units": "Percent"
- },
- "bw6_light_level9": {
- "units": "Luxes"
- },
- "bw3_lamp_brightness58": {
- "units": "Percent"
- },
- "bw6_lamp_brightness33": {
- "units": "Percent"
- },
- "bw3_lamp_brightness57": {
- "units": "Percent"
- },
- "bw6_lamp_brightness32": {
- "units": "Percent"
- },
- "bw3_lamp_brightness56": {
- "units": "Percent"
- },
- "bw6_lamp_brightness31": {
- "units": "Percent"
- },
- "bw5_lamp_brightness25": {
- "units": "Percent"
- },
- "bw5_lamp_brightness26": {
- "units": "Percent"
- },
- "bw5_lamp_brightness27": {
- "units": "Percent"
- },
- "bw5_lamp_brightness28": {
- "units": "Percent"
- },
- "bw2_lamp_brightness47": {
- "units": "Percent"
- },
- "bw5_lamp_brightness21": {
- "units": "Percent"
- },
- "bw2_lamp_brightness46": {
- "units": "Percent"
- },
- "bw5_lamp_brightness22": {
- "units": "Percent"
- },
- "bw6_lamp_brightness19": {
- "units": "Percent"
- },
- "bw2_lamp_brightness49": {
- "units": "Percent"
- },
- "bw5_lamp_brightness23": {
- "units": "Percent"
- },
- "bw6_lamp_brightness18": {
- "units": "Percent"
- },
- "bw2_lamp_brightness48": {
- "units": "Percent"
- },
- "bw5_lamp_brightness24": {
- "units": "Percent"
- },
- "bw6_lamp_brightness17": {
- "units": "Percent"
- },
- "bw2_lamp_brightness43": {
- "units": "Percent"
- },
- "bw2_lamp_brightness42": {
- "units": "Percent"
- },
- "bw2_lamp_brightness45": {
- "units": "Percent"
- },
- "bw2_lamp_brightness44": {
- "units": "Percent"
- },
- "bw5_lamp_brightness20": {
- "units": "Percent"
- },
- "bw2_lamp_brightness41": {
- "units": "Percent"
- },
- "bw2_lamp_brightness40": {
- "units": "Percent"
- },
- "bw3_lamp_brightness44": {
- "units": "Percent"
- },
- "bw3_lamp_brightness43": {
- "units": "Percent"
- },
- "bw3_lamp_brightness42": {
- "units": "Percent"
- },
- "bw3_lamp_brightness41": {
- "units": "Percent"
- },
- "bw3_lamp_brightness40": {
- "units": "Percent"
- },
- "bw6_lamp_brightness27": {
- "units": "Percent"
- },
- "bw6_lamp_brightness26": {
- "units": "Percent"
- },
- "bw6_lamp_brightness25": {
- "units": "Percent"
- },
- "bw3_lamp_brightness49": {
- "units": "Percent"
- },
- "bw6_lamp_brightness24": {
- "units": "Percent"
- },
- "bw3_lamp_brightness48": {
- "units": "Percent"
- },
- "bw5_lamp_brightness18": {
- "units": "Percent"
- },
- "bw6_lamp_brightness23": {
- "units": "Percent"
- },
- "bw3_lamp_brightness47": {
- "units": "Percent"
- },
- "bw5_lamp_brightness19": {
- "units": "Percent"
- },
- "bw6_lamp_brightness22": {
- "units": "Percent"
- },
- "bw3_lamp_brightness46": {
- "units": "Percent"
- },
- "bw6_lamp_brightness21": {
- "units": "Percent"
- },
- "bw3_lamp_brightness45": {
- "units": "Percent"
- },
- "bw6_lamp_brightness20": {
- "units": "Percent"
- },
- "bw5_lamp_brightness14": {
- "units": "Percent"
- },
- "bw2_lamp_brightness39": {
- "units": "Percent"
- },
- "bw5_lamp_brightness15": {
- "units": "Percent"
- },
- "bw5_lamp_brightness16": {
- "units": "Percent"
- },
- "bw5_lamp_brightness17": {
- "units": "Percent"
- },
- "bw2_lamp_brightness36": {
- "units": "Percent"
- },
- "bw5_lamp_brightness10": {
- "units": "Percent"
- },
- "bw2_lamp_brightness35": {
- "units": "Percent"
- },
- "bw5_lamp_brightness11": {
- "units": "Percent"
- },
- "bw2_lamp_brightness38": {
- "units": "Percent"
- },
- "bw5_lamp_brightness12": {
- "units": "Percent"
- },
- "bw2_lamp_brightness37": {
- "units": "Percent"
- },
- "bw5_lamp_brightness13": {
- "units": "Percent"
- },
- "bw2_lamp_brightness32": {
- "units": "Percent"
- },
- "bw2_lamp_brightness31": {
- "units": "Percent"
- },
- "bw2_lamp_brightness34": {
- "units": "Percent"
- },
- "bw2_lamp_brightness33": {
- "units": "Percent"
- },
- "bw2_lamp_brightness30": {
- "units": "Percent"
- },
- "bw6_lamp_brightness52": {
- "units": "Percent"
- },
- "bw3_light_level1": {
- "units": "Luxes"
- },
- "bw6_lamp_brightness51": {
- "units": "Percent"
- },
- "bw6_lamp_brightness50": {
- "units": "Percent"
- },
- "bw1_occupancy1": {
- "units": "No-units"
- },
- "bw1_occupancy5": {
- "units": "No-units"
- },
- "bw3_light_level8": {
- "units": "Luxes"
- },
- "bw1_occupancy4": {
- "units": "No-units"
- },
- "bw3_light_level9": {
- "units": "Luxes"
- },
- "bw6_lamp_brightness59": {
- "units": "Percent"
- },
- "bw1_occupancy3": {
- "units": "No-units"
- },
- "bw3_light_level6": {
- "units": "Luxes"
- },
- "bw6_lamp_brightness58": {
- "units": "Percent"
- },
- "bw1_occupancy2": {
- "units": "No-units"
- },
- "bw3_light_level7": {
- "units": "Luxes"
- },
- "bw6_lamp_brightness57": {
- "units": "Percent"
- },
- "bw1_occupancy9": {
- "units": "No-units"
- },
- "bw3_light_level4": {
- "units": "Luxes"
- },
- "bw6_lamp_brightness56": {
- "units": "Percent"
- },
- "bw1_occupancy8": {
- "units": "No-units"
- },
- "bw3_light_level5": {
- "units": "Luxes"
- },
- "bw6_lamp_brightness55": {
- "units": "Percent"
- },
- "bw1_occupancy7": {
- "units": "No-units"
- },
- "bw3_light_level2": {
- "units": "Luxes"
- },
- "bw6_lamp_brightness54": {
- "units": "Percent"
- },
- "bw1_occupancy6": {
- "units": "No-units"
- },
- "bw3_light_level3": {
- "units": "Luxes"
- },
- "bw6_lamp_brightness53": {
- "units": "Percent"
- },
- "bw2_lamp_brightness29": {
- "units": "Percent"
- },
- "bw2_lamp_brightness28": {
- "units": "Percent"
- },
- "bw2_lamp_brightness25": {
- "units": "Percent"
- },
- "bw2_lamp_brightness24": {
- "units": "Percent"
- },
- "bw2_lamp_brightness27": {
- "units": "Percent"
- },
- "bw2_lamp_brightness26": {
- "units": "Percent"
- },
- "bw6_lamp_brightness39": {
- "units": "Percent"
- },
- "bw2_lamp_brightness21": {
- "units": "Percent"
- },
- "bw2_lamp_brightness20": {
- "units": "Percent"
- },
- "bw2_lamp_brightness23": {
- "units": "Percent"
- },
- "bw2_lamp_brightness22": {
- "units": "Percent"
- },
- "bw6_lamp_brightness41": {
- "units": "Percent"
- },
- "bw6_lamp_brightness40": {
- "units": "Percent"
- },
- "bw3_lamp_brightness64": {
- "units": "Percent"
- },
- "bw3_lamp_brightness63": {
- "units": "Percent"
- },
- "bw3_lamp_brightness62": {
- "units": "Percent"
- },
- "bw3_lamp_brightness61": {
- "units": "Percent"
- },
- "bw3_lamp_brightness60": {
- "units": "Percent"
- },
- "bw6_lamp_brightness49": {
- "units": "Percent"
- },
- "bw6_lamp_brightness48": {
- "units": "Percent"
- },
- "bw6_lamp_brightness47": {
- "units": "Percent"
- },
- "bw6_lamp_brightness46": {
- "units": "Percent"
- },
- "bw6_lamp_brightness45": {
- "units": "Percent"
- },
- "bw3_lamp_brightness9": {
- "units": "Percent"
- },
- "bw6_lamp_brightness44": {
- "units": "Percent"
- },
- "bw3_lamp_brightness8": {
- "units": "Percent"
- },
- "bw6_lamp_brightness43": {
- "units": "Percent"
- },
- "bw3_lamp_brightness7": {
- "units": "Percent"
- },
- "bw6_lamp_brightness42": {
- "units": "Percent"
- },
- "bw1_lamp_brightness20": {
- "units": "Percent"
- },
- "bw1_lamp_brightness24": {
- "units": "Percent"
- },
- "bw1_group_brightness1": {
- "units": "Percent"
- },
- "bw1_lamp_brightness23": {
- "units": "Percent"
- },
- "bw1_group_brightness2": {
- "units": "Percent"
- },
- "bw1_lamp_brightness22": {
- "units": "Percent"
- },
- "bw1_group_brightness3": {
- "units": "Percent"
- },
- "bw1_lamp_brightness21": {
- "units": "Percent"
- },
- "bw1_lamp_brightness28": {
- "units": "Percent"
- },
- "bw1_lamp_brightness27": {
- "units": "Percent"
- },
- "bw1_lamp_brightness26": {
- "units": "Percent"
- },
- "bw1_lamp_brightness25": {
- "units": "Percent"
- },
- "bw1_lamp_brightness29": {
- "units": "Percent"
- },
- "bw1_lamp_brightness31": {
- "units": "Percent"
- },
- "bw1_light_level13": {
- "units": "Luxes"
- },
- "bw1_lamp_brightness30": {
- "units": "Percent"
- },
- "bw1_light_level14": {
- "units": "Luxes"
- },
- "bw1_light_level11": {
- "units": "Luxes"
- },
- "bw1_light_level12": {
- "units": "Luxes"
- },
- "bw1_lamp_brightness35": {
- "units": "Percent"
- },
- "bw1_lamp_brightness34": {
- "units": "Percent"
- },
- "bw1_lamp_brightness33": {
- "units": "Percent"
- },
- "bw1_light_level15": {
- "units": "Luxes"
- },
- "bw1_lamp_brightness32": {
- "units": "Percent"
- },
- "bw1_light_level16": {
- "units": "Luxes"
- },
- "bw1_lamp_brightness39": {
- "units": "Percent"
- },
- "bw1_lamp_brightness38": {
- "units": "Percent"
- },
- "bw1_lamp_brightness37": {
- "units": "Percent"
- },
- "bw1_lamp_brightness36": {
- "units": "Percent"
- },
- "bw1_light_level10": {
- "units": "Luxes"
- },
- "bw3_group_brightness10": {
- "units": "Percent"
- },
- "bw5_group_brightness15": {
- "units": "Percent"
- },
- "bw5_group_brightness16": {
- "units": "Percent"
- },
- "bw3_group_brightness12": {
- "units": "Percent"
- },
- "bw3_group_brightness11": {
- "units": "Percent"
- },
- "bw3_group_brightness14": {
- "units": "Percent"
- },
- "bw5_group_brightness11": {
- "units": "Percent"
- },
- "bw3_group_brightness13": {
- "units": "Percent"
- },
- "bw5_group_brightness12": {
- "units": "Percent"
- },
- "bw3_group_brightness16": {
- "units": "Percent"
- },
- "bw5_group_brightness13": {
- "units": "Percent"
- },
- "bw3_group_brightness15": {
- "units": "Percent"
- },
- "bw5_group_brightness14": {
- "units": "Percent"
- },
- "bw6_lamp_brightness16": {
- "units": "Percent"
- },
- "bw6_lamp_brightness15": {
- "units": "Percent"
- },
- "bw6_lamp_brightness14": {
- "units": "Percent"
- },
- "bw5_group_brightness10": {
- "units": "Percent"
- },
- "bw6_lamp_brightness13": {
- "units": "Percent"
- },
- "bw6_lamp_brightness12": {
- "units": "Percent"
- },
- "bw6_lamp_brightness11": {
- "units": "Percent"
- },
- "bw6_lamp_brightness10": {
- "units": "Percent"
- },
- "bw3_occupancy10": {
- "units": "No-units"
- },
- "bw6_occupancy16": {
- "units": "No-units"
- },
- "bw2_occupancy10": {
- "units": "No-units"
- },
- "bw2_occupancy11": {
- "units": "No-units"
- },
- "bw1_lamp_brightness4": {
- "units": "Percent"
- },
- "bw1_lamp_brightness13": {
- "units": "Percent"
- },
- "bw2_occupancy12": {
- "units": "No-units"
- },
- "bw3_occupancy14": {
- "units": "No-units"
- },
- "bw6_occupancy12": {
- "units": "No-units"
- },
- "bw1_lamp_brightness3": {
- "units": "Percent"
- },
- "bw1_lamp_brightness12": {
- "units": "Percent"
- },
- "bw2_occupancy13": {
- "units": "No-units"
- },
- "bw3_occupancy13": {
- "units": "No-units"
- },
- "bw6_occupancy13": {
- "units": "No-units"
- },
- "bw1_lamp_brightness2": {
- "units": "Percent"
- },
- "bw1_lamp_brightness11": {
- "units": "Percent"
- },
- "bw2_occupancy14": {
- "units": "No-units"
- },
- "bw3_occupancy12": {
- "units": "No-units"
- },
- "bw6_occupancy14": {
- "units": "No-units"
- },
- "bw1_lamp_brightness1": {
- "units": "Percent"
- },
- "bw1_lamp_brightness10": {
- "units": "Percent"
- },
- "bw2_occupancy15": {
- "units": "No-units"
- },
- "bw3_occupancy11": {
- "units": "No-units"
- },
- "bw6_occupancy15": {
- "units": "No-units"
- },
- "bw1_lamp_brightness17": {
- "units": "Percent"
- },
- "bw2_light_level9": {
- "units": "Luxes"
- },
- "bw1_lamp_brightness16": {
- "units": "Percent"
- },
- "bw1_lamp_brightness15": {
- "units": "Percent"
- },
- "bw1_lamp_brightness14": {
- "units": "Percent"
- },
- "bw1_lamp_brightness19": {
- "units": "Percent"
- },
- "bw1_lamp_brightness18": {
- "units": "Percent"
- },
- "bw1_group_brightness4": {
- "units": "Percent"
- },
- "bw2_occupancy16": {
- "units": "No-units"
- },
- "bw1_group_brightness5": {
- "units": "Percent"
- },
- "bw1_group_brightness6": {
- "units": "Percent"
- },
- "bw3_occupancy16": {
- "units": "No-units"
- },
- "bw6_occupancy10": {
- "units": "No-units"
- },
- "bw1_group_brightness7": {
- "units": "Percent"
- },
- "bw3_occupancy15": {
- "units": "No-units"
- },
- "bw6_occupancy11": {
- "units": "No-units"
- },
- "bw1_group_brightness8": {
- "units": "Percent"
- },
- "bw1_group_brightness9": {
- "units": "Percent"
- },
- "bw1_lamp_brightness64": {
- "units": "Percent"
- },
- "bw1_lamp_brightness63": {
- "units": "Percent"
- },
- "bw1_lamp_brightness62": {
- "units": "Percent"
- },
- "bw1_lamp_brightness61": {
- "units": "Percent"
- },
- "bw1_lamp_brightness8": {
- "units": "Percent"
- },
- "bw1_lamp_brightness7": {
- "units": "Percent"
- },
- "bw1_lamp_brightness6": {
- "units": "Percent"
- },
- "bw1_lamp_brightness5": {
- "units": "Percent"
- },
- "bw1_lamp_brightness60": {
- "units": "Percent"
- },
- "bw1_lamp_brightness9": {
- "units": "Percent"
- },
- "bw1_lamp_brightness42": {
- "units": "Percent"
- },
- "bw1_lamp_brightness41": {
- "units": "Percent"
- },
- "bw1_lamp_brightness40": {
- "units": "Percent"
- },
- "bw1_lamp_brightness46": {
- "units": "Percent"
- },
- "bw1_lamp_brightness45": {
- "units": "Percent"
- },
- "bw1_lamp_brightness44": {
- "units": "Percent"
- },
- "bw1_lamp_brightness43": {
- "units": "Percent"
- },
- "bw1_lamp_brightness49": {
- "units": "Percent"
- },
- "bw1_lamp_brightness48": {
- "units": "Percent"
- },
- "bw1_lamp_brightness47": {
- "units": "Percent"
- },
- "bw1_light_level3": {
- "units": "Luxes"
- },
- "bw1_light_level2": {
- "units": "Luxes"
- },
- "bw1_light_level1": {
- "units": "Luxes"
- },
- "bw1_light_level7": {
- "units": "Luxes"
- },
- "bw1_light_level6": {
- "units": "Luxes"
- },
- "bw1_light_level5": {
- "units": "Luxes"
- },
- "bw1_light_level4": {
- "units": "Luxes"
- },
- "bw1_lamp_brightness53": {
- "units": "Percent"
- },
- "bw1_lamp_brightness52": {
- "units": "Percent"
- },
- "bw1_lamp_brightness51": {
- "units": "Percent"
- },
- "bw1_lamp_brightness50": {
- "units": "Percent"
- },
- "bw1_lamp_brightness57": {
- "units": "Percent"
- },
- "bw1_lamp_brightness56": {
- "units": "Percent"
- },
- "bw1_lamp_brightness55": {
- "units": "Percent"
- },
- "bw1_lamp_brightness54": {
- "units": "Percent"
- },
- "bw1_lamp_brightness59": {
- "units": "Percent"
- },
- "bw1_light_level9": {
- "units": "Luxes"
- },
- "bw1_lamp_brightness58": {
- "units": "Percent"
- },
- "bw1_light_level8": {
- "units": "Luxes"
- },
- "bw4_lamp_brightness1": {
- "units": "Percent"
- },
- "bw4_lamp_brightness3": {
- "units": "Percent"
- },
- "bw4_lamp_brightness2": {
- "units": "Percent"
- },
- "bw4_lamp_brightness5": {
- "units": "Percent"
- },
- "bw4_lamp_brightness4": {
- "units": "Percent"
- },
- "bw4_lamp_brightness7": {
- "units": "Percent"
- },
- "bw4_lamp_brightness6": {
- "units": "Percent"
- },
- "bw5_occupancy8": {
- "units": "No-units"
- },
- "bw5_occupancy9": {
- "units": "No-units"
- },
- "bw5_occupancy6": {
- "units": "No-units"
- },
- "bw5_occupancy7": {
- "units": "No-units"
- },
- "bw5_occupancy1": {
- "units": "No-units"
- },
- "bw5_occupancy4": {
- "units": "No-units"
- },
- "bw5_occupancy5": {
- "units": "No-units"
- },
- "bw5_occupancy2": {
- "units": "No-units"
- },
- "bw5_occupancy3": {
- "units": "No-units"
- },
- "bw4_lamp_brightness9": {
- "units": "Percent"
- },
- "bw4_lamp_brightness8": {
- "units": "Percent"
- },
- "bw6_group_brightness16": {
- "units": "Percent"
- },
- "bw6_group_brightness7": {
- "units": "Percent"
- },
- "bw6_group_brightness14": {
- "units": "Percent"
- },
- "bw6_group_brightness8": {
- "units": "Percent"
- },
- "bw6_group_brightness15": {
- "units": "Percent"
- },
- "bw6_group_brightness9": {
- "units": "Percent"
- },
- "bw6_group_brightness12": {
- "units": "Percent"
- },
- "bw6_group_brightness13": {
- "units": "Percent"
- },
- "bw2_light_level15": {
- "units": "Luxes"
- },
- "bw6_group_brightness3": {
- "units": "Percent"
- },
- "bw2_light_level14": {
- "units": "Luxes"
- },
- "bw6_group_brightness4": {
- "units": "Percent"
- },
- "bw6_group_brightness5": {
- "units": "Percent"
- },
- "bw2_light_level16": {
- "units": "Luxes"
- },
- "bw6_group_brightness6": {
- "units": "Percent"
- },
- "bw2_light_level11": {
- "units": "Luxes"
- },
- "bw2_light_level10": {
- "units": "Luxes"
- },
- "bw2_light_level13": {
- "units": "Luxes"
- },
- "bw6_group_brightness1": {
- "units": "Percent"
- },
- "bw2_light_level12": {
- "units": "Luxes"
- },
- "bw6_group_brightness2": {
- "units": "Percent"
- },
- "bw2_group_brightness16": {
- "units": "Percent"
- },
- "bw2_group_brightness15": {
- "units": "Percent"
- },
- "bw6_group_brightness10": {
- "units": "Percent"
- },
- "bw2_group_brightness14": {
- "units": "Percent"
- },
- "bw6_group_brightness11": {
- "units": "Percent"
- },
- "bw2_group_brightness13": {
- "units": "Percent"
- },
- "bw2_group_brightness12": {
- "units": "Percent"
- },
- "bw2_group_brightness11": {
- "units": "Percent"
- },
- "bw2_group_brightness10": {
- "units": "Percent"
- },
- "bw5_group_brightness1": {
- "units": "Percent"
- },
- "bw5_group_brightness3": {
- "units": "Percent"
- },
- "bw5_group_brightness2": {
- "units": "Percent"
- },
- "bw5_group_brightness5": {
- "units": "Percent"
- },
- "bw5_group_brightness4": {
- "units": "Percent"
- },
- "bw5_group_brightness7": {
- "units": "Percent"
- },
- "bw5_group_brightness6": {
- "units": "Percent"
- },
- "bw5_group_brightness9": {
- "units": "Percent"
- },
- "bw5_group_brightness8": {
- "units": "Percent"
- },
- "bw4_light_level16": {
- "units": "Luxes"
- },
- "bw3_occupancy3": {
- "units": "No-units"
- },
- "bw3_occupancy2": {
- "units": "No-units"
- },
- "bw3_occupancy1": {
- "units": "No-units"
- },
- "bw5_light_level10": {
- "units": "Luxes"
- },
- "bw5_light_level11": {
- "units": "Luxes"
- },
- "bw5_light_level12": {
- "units": "Luxes"
- },
- "bw5_light_level13": {
- "units": "Luxes"
- },
- "bw5_light_level14": {
- "units": "Luxes"
- },
- "bw3_occupancy9": {
- "units": "No-units"
- },
- "bw3_occupancy8": {
- "units": "No-units"
- },
- "bw3_occupancy7": {
- "units": "No-units"
- },
- "bw3_occupancy6": {
- "units": "No-units"
- },
- "bw3_occupancy5": {
- "units": "No-units"
- },
- "bw3_occupancy4": {
- "units": "No-units"
- },
- "bw4_light_level12": {
- "units": "Luxes"
- },
- "bw4_light_level13": {
- "units": "Luxes"
- },
- "bw4_light_level14": {
- "units": "Luxes"
- },
- "bw4_light_level15": {
- "units": "Luxes"
- },
- "bw4_light_level10": {
- "units": "Luxes"
- },
- "bw4_light_level11": {
- "units": "Luxes"
- },
- "bw2_group_brightness5": {
- "units": "Percent"
- },
- "bw2_group_brightness6": {
- "units": "Percent"
- },
- "bw2_group_brightness3": {
- "units": "Percent"
- },
- "bw2_group_brightness4": {
- "units": "Percent"
- },
- "bw6_lamp_brightness1": {
- "units": "Percent"
- },
- "bw2_group_brightness9": {
- "units": "Percent"
- },
- "bw2_group_brightness7": {
- "units": "Percent"
- },
- "bw2_group_brightness8": {
- "units": "Percent"
- },
- "bw3_light_level10": {
- "units": "Luxes"
- },
- "bw6_lamp_brightness6": {
- "units": "Percent"
- },
- "bw6_lamp_brightness7": {
- "units": "Percent"
- },
- "bw3_light_level12": {
- "units": "Luxes"
- },
- "bw6_lamp_brightness8": {
- "units": "Percent"
- },
- "bw3_light_level11": {
- "units": "Luxes"
- },
- "bw6_lamp_brightness9": {
- "units": "Percent"
- },
- "bw3_light_level14": {
- "units": "Luxes"
- },
- "bw6_lamp_brightness2": {
- "units": "Percent"
- },
- "bw3_light_level13": {
- "units": "Luxes"
- },
- "bw6_lamp_brightness3": {
- "units": "Percent"
- },
- "bw3_light_level16": {
- "units": "Luxes"
- },
- "bw6_lamp_brightness4": {
- "units": "Percent"
- },
- "bw3_light_level15": {
- "units": "Luxes"
- },
- "bw6_lamp_brightness5": {
- "units": "Percent"
- },
- "bw2_lamp_brightness18": {
- "units": "Percent"
- },
- "bw2_lamp_brightness17": {
- "units": "Percent"
- },
- "bw2_lamp_brightness19": {
- "units": "Percent"
- },
- "bw2_lamp_brightness14": {
- "units": "Percent"
- },
- "bw2_lamp_brightness13": {
- "units": "Percent"
- },
- "bw2_lamp_brightness16": {
- "units": "Percent"
- },
- "bw2_lamp_brightness15": {
- "units": "Percent"
- },
- "bw2_lamp_brightness10": {
- "units": "Percent"
- },
- "bw2_lamp_brightness12": {
- "units": "Percent"
- },
- "bw2_lamp_brightness11": {
- "units": "Percent"
- },
- "bw2_lamp_brightness7": {
- "units": "Percent"
- },
- "bw2_lamp_brightness6": {
- "units": "Percent"
- },
- "bw2_lamp_brightness9": {
- "units": "Percent"
- },
- "bw2_lamp_brightness8": {
- "units": "Percent"
- },
- "bw3_lamp_brightness11": {
- "units": "Percent"
- },
- "bw3_lamp_brightness10": {
- "units": "Percent"
- },
- "bw3_lamp_brightness19": {
- "units": "Percent"
- },
- "bw3_lamp_brightness18": {
- "units": "Percent"
- },
- "bw3_lamp_brightness17": {
- "units": "Percent"
- },
- "bw3_lamp_brightness16": {
- "units": "Percent"
- },
- "bw2_group_brightness1": {
- "units": "Percent"
- },
- "bw3_lamp_brightness15": {
- "units": "Percent"
- },
- "bw6_light_level13": {
- "units": "Luxes"
- },
- "bw2_group_brightness2": {
- "units": "Percent"
- },
- "bw3_lamp_brightness14": {
- "units": "Percent"
- },
- "bw6_light_level12": {
- "units": "Luxes"
- },
- "bw3_lamp_brightness13": {
- "units": "Percent"
- },
- "bw6_light_level11": {
- "units": "Luxes"
- },
- "bw3_lamp_brightness12": {
- "units": "Percent"
- },
- "bw6_light_level10": {
- "units": "Luxes"
- },
- "bw2_occupancy2": {
- "units": "No-units"
- },
- "bw2_occupancy1": {
- "units": "No-units"
- },
- "bw4_lamp_brightness10": {
- "units": "Percent"
- },
- "bw2_occupancy8": {
- "units": "No-units"
- },
- "bw4_lamp_brightness13": {
- "units": "Percent"
- },
- "bw2_occupancy7": {
- "units": "No-units"
- },
- "bw4_lamp_brightness14": {
- "units": "Percent"
- },
- "bw4_lamp_brightness11": {
- "units": "Percent"
- },
- "bw2_occupancy9": {
- "units": "No-units"
- },
- "bw4_lamp_brightness12": {
- "units": "Percent"
- },
- "bw2_occupancy4": {
- "units": "No-units"
- },
- "bw4_lamp_brightness17": {
- "units": "Percent"
- },
- "bw2_occupancy3": {
- "units": "No-units"
- },
- "bw4_lamp_brightness18": {
- "units": "Percent"
- },
- "bw2_occupancy6": {
- "units": "No-units"
- },
- "bw4_lamp_brightness15": {
- "units": "Percent"
- },
- "bw2_occupancy5": {
- "units": "No-units"
- },
- "bw4_lamp_brightness16": {
- "units": "Percent"
- },
- "bw2_lamp_brightness3": {
- "units": "Percent"
- },
- "bw2_lamp_brightness2": {
- "units": "Percent"
- },
- "bw2_lamp_brightness5": {
- "units": "Percent"
- },
- "bw2_lamp_brightness4": {
- "units": "Percent"
- },
- "bw3_group_brightness1": {
- "units": "Percent"
- },
- "bw2_lamp_brightness1": {
- "units": "Percent"
- },
- "bw3_group_brightness6": {
- "units": "Percent"
- },
- "bw5_light_level15": {
- "units": "Luxes"
- },
- "bw3_group_brightness7": {
- "units": "Percent"
- },
- "bw5_light_level16": {
- "units": "Luxes"
- },
- "bw3_group_brightness8": {
- "units": "Percent"
- },
- "bw3_group_brightness9": {
- "units": "Percent"
- },
- "bw3_group_brightness2": {
- "units": "Percent"
- },
- "bw3_group_brightness3": {
- "units": "Percent"
- },
- "bw3_group_brightness4": {
- "units": "Percent"
- },
- "bw3_group_brightness5": {
- "units": "Percent"
- },
- "bw3_lamp_brightness33": {
- "units": "Percent"
- },
- "bw3_lamp_brightness32": {
- "units": "Percent"
- },
- "bw3_lamp_brightness31": {
- "units": "Percent"
- },
- "bw3_lamp_brightness30": {
- "units": "Percent"
- },
- "bw3_lamp_brightness39": {
- "units": "Percent"
- },
- "bw3_lamp_brightness38": {
- "units": "Percent"
- },
- "bw3_lamp_brightness37": {
- "units": "Percent"
- },
- "bw3_lamp_brightness36": {
- "units": "Percent"
- },
- "bw3_lamp_brightness35": {
- "units": "Percent"
- },
- "bw3_lamp_brightness34": {
- "units": "Percent"
- },
- "bw4_occupancy12": {
- "units": "No-units"
- },
- "bw5_occupancy14": {
- "units": "No-units"
- },
- "bw4_occupancy13": {
- "units": "No-units"
- },
- "bw5_occupancy13": {
- "units": "No-units"
- },
- "bw6_light_level16": {
- "units": "Luxes"
- },
- "bw4_occupancy10": {
- "units": "No-units"
- },
- "bw5_occupancy16": {
- "units": "No-units"
- },
- "bw6_light_level15": {
- "units": "Luxes"
- },
- "bw4_occupancy11": {
- "units": "No-units"
- },
- "bw5_occupancy15": {
- "units": "No-units"
- },
- "bw6_light_level14": {
- "units": "Luxes"
- },
- "bw1_occupancy14": {
- "units": "No-units"
- },
- "bw1_occupancy13": {
- "units": "No-units"
- },
- "bw1_occupancy16": {
- "units": "No-units"
- },
- "bw1_occupancy15": {
- "units": "No-units"
- },
- "bw1_occupancy10": {
- "units": "No-units"
- },
- "bw1_occupancy12": {
- "units": "No-units"
- },
- "bw1_occupancy11": {
- "units": "No-units"
- },
- "bw3_lamp_brightness22": {
- "units": "Percent"
- },
- "bw3_lamp_brightness21": {
- "units": "Percent"
- },
- "bw3_lamp_brightness20": {
- "units": "Percent"
- },
- "bw3_lamp_brightness29": {
- "units": "Percent"
- },
- "bw3_lamp_brightness28": {
- "units": "Percent"
- },
- "bw3_lamp_brightness27": {
- "units": "Percent"
- },
- "bw3_lamp_brightness26": {
- "units": "Percent"
- },
- "bw4_occupancy16": {
- "units": "No-units"
- },
- "bw5_occupancy10": {
- "units": "No-units"
- },
- "bw3_lamp_brightness25": {
- "units": "Percent"
- },
- "bw3_lamp_brightness24": {
- "units": "Percent"
- },
- "bw4_occupancy14": {
- "units": "No-units"
- },
- "bw5_occupancy12": {
- "units": "No-units"
- },
- "bw3_lamp_brightness23": {
- "units": "Percent"
- },
- "bw4_occupancy15": {
- "units": "No-units"
- },
- "bw5_occupancy11": {
- "units": "No-units"
- },
- "bw4_lamp_brightness42": {
- "units": "Percent"
- },
- "bw4_lamp_brightness43": {
- "units": "Percent"
- },
- "bw4_lamp_brightness40": {
- "units": "Percent"
- },
- "bw4_lamp_brightness41": {
- "units": "Percent"
- },
- "bw4_lamp_brightness46": {
- "units": "Percent"
- },
- "bw4_lamp_brightness47": {
- "units": "Percent"
- },
- "bw4_lamp_brightness44": {
- "units": "Percent"
- },
- "bw4_lamp_brightness45": {
- "units": "Percent"
- },
- "bw4_lamp_brightness48": {
- "units": "Percent"
- },
- "bw4_lamp_brightness49": {
- "units": "Percent"
- },
- "bw5_lamp_brightness3": {
- "units": "Percent"
- },
- "bw5_lamp_brightness4": {
- "units": "Percent"
- },
- "bw5_lamp_brightness1": {
- "units": "Percent"
- },
- "bw5_lamp_brightness2": {
- "units": "Percent"
- },
- "bw5_lamp_brightness7": {
- "units": "Percent"
- },
- "bw5_lamp_brightness8": {
- "units": "Percent"
- },
- "bw5_lamp_brightness5": {
- "units": "Percent"
- },
- "bw5_lamp_brightness6": {
- "units": "Percent"
- },
- "bw4_lamp_brightness50": {
- "units": "Percent"
- },
- "bw4_lamp_brightness53": {
- "units": "Percent"
- },
- "bw4_lamp_brightness54": {
- "units": "Percent"
- },
- "bw4_lamp_brightness51": {
- "units": "Percent"
- },
- "bw4_lamp_brightness52": {
- "units": "Percent"
- },
- "bw4_lamp_brightness57": {
- "units": "Percent"
- },
- "bw4_lamp_brightness58": {
- "units": "Percent"
- },
- "bw4_lamp_brightness55": {
- "units": "Percent"
- },
- "bw4_lamp_brightness56": {
- "units": "Percent"
- },
- "bw4_lamp_brightness59": {
- "units": "Percent"
- },
- "bw4_group_brightness8": {
- "units": "Percent"
- },
- "bw4_group_brightness7": {
- "units": "Percent"
- },
- "bw4_group_brightness6": {
- "units": "Percent"
- },
- "bw4_group_brightness5": {
- "units": "Percent"
- },
- "bw4_group_brightness4": {
- "units": "Percent"
- },
- "bw4_group_brightness3": {
- "units": "Percent"
- },
- "bw4_group_brightness2": {
- "units": "Percent"
- },
- "bw4_group_brightness1": {
- "units": "Percent"
- },
- "bw4_group_brightness9": {
- "units": "Percent"
- },
- "bw4_lamp_brightness20": {
- "units": "Percent"
- },
- "bw4_lamp_brightness21": {
- "units": "Percent"
- },
- "bw4_lamp_brightness24": {
- "units": "Percent"
- },
- "bw4_lamp_brightness25": {
- "units": "Percent"
- },
- "bw4_lamp_brightness22": {
- "units": "Percent"
- },
- "bw4_lamp_brightness23": {
- "units": "Percent"
- },
- "bw4_lamp_brightness28": {
- "units": "Percent"
- },
- "bw4_lamp_brightness29": {
- "units": "Percent"
- },
- "bw4_lamp_brightness26": {
- "units": "Percent"
- },
- "bw4_lamp_brightness27": {
- "units": "Percent"
- },
- "bw4_lamp_brightness19": {
- "units": "Percent"
- },
- "bw5_light_level2": {
- "units": "Luxes"
- },
- "bw5_light_level3": {
- "units": "Luxes"
- },
- "bw5_light_level1": {
- "units": "Luxes"
- },
- "bw5_light_level6": {
- "units": "Luxes"
- },
- "bw5_light_level7": {
- "units": "Luxes"
- },
- "bw5_light_level4": {
- "units": "Luxes"
- },
- "bw5_light_level5": {
- "units": "Luxes"
- },
- "bw5_light_level8": {
- "units": "Luxes"
- },
- "bw5_light_level9": {
- "units": "Luxes"
- },
- "bw4_lamp_brightness31": {
- "units": "Percent"
- },
- "bw4_lamp_brightness32": {
- "units": "Percent"
- },
- "bw4_lamp_brightness30": {
- "units": "Percent"
- },
- "bw4_lamp_brightness35": {
- "units": "Percent"
- },
- "bw4_lamp_brightness36": {
- "units": "Percent"
- },
- "bw4_lamp_brightness33": {
- "units": "Percent"
- },
- "bw4_lamp_brightness34": {
- "units": "Percent"
- },
- "bw4_lamp_brightness39": {
- "units": "Percent"
- },
- "bw4_lamp_brightness37": {
- "units": "Percent"
- },
- "bw4_lamp_brightness38": {
- "units": "Percent"
- }
- }
- },
- "version": 1,
- "timestamp": "2020-03-05T14:42:59.743Z"
-}
diff --git a/schemas/udmi/metadata.tests/toomany.out b/schemas/udmi/metadata.tests/toomany.out
deleted file mode 100644
index c4e33ac250..0000000000
--- a/schemas/udmi/metadata.tests/toomany.out
+++ /dev/null
@@ -1,6 +0,0 @@
-Validating 1 schemas
- Validating 1 files against metadata.json
- Against input metadata.tests/toomany.json
- #: 2 schema violations found
- #/pointset/points: maximum size: [150], found: [672]
- #/system/physical_tag/asset/name: string [UK-LON-S2_LTGW-3] does not match pattern ^[A-Z]{2,6}-[0-9]{1,6}$
diff --git a/schemas/udmi/metadata_cloud.json b/schemas/udmi/metadata_cloud.json
deleted file mode 100644
index 5ca41285a7..0000000000
--- a/schemas/udmi/metadata_cloud.json
+++ /dev/null
@@ -1,20 +0,0 @@
-{
- "title": "Cloud configuration metadata snippet",
- "type": "object",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "additionalProperties": false,
- "properties": {
- "auth_type": {
- "enum": [
- "RS256",
- "RS256_X506"
- ]
- },
- "is_gateway": {
- "type": "boolean"
- }
- },
- "required": [
- "auth_type"
- ]
-}
diff --git a/schemas/udmi/metadata_gateway.json b/schemas/udmi/metadata_gateway.json
deleted file mode 100644
index f15dee7344..0000000000
--- a/schemas/udmi/metadata_gateway.json
+++ /dev/null
@@ -1,27 +0,0 @@
-{
- "title": "Gateway metadata snippet",
- "type": "object",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "additionalProperties": false,
- "properties": {
- "gateway_id": {
- "type": "string",
- "pattern": "^[A-Z]{3}-[1-9][0-9]{0,2}$"
- },
- "subsystem": {
- "type": "string",
- "pattern": "^[a-z0-9-]+$"
- },
- "proxy_ids": {
- "type": "array",
- "items": {
- "type": "string",
- "pattern": "^[A-Z]{3}-[1-9][0-9]{0,2}$"
- }
- }
- },
- "oneOf": [
- { "required": ["gateway_id"] },
- { "required": ["proxy_ids"] }
- ]
-}
diff --git a/schemas/udmi/metadata_localnet.json b/schemas/udmi/metadata_localnet.json
deleted file mode 100644
index be7de102b5..0000000000
--- a/schemas/udmi/metadata_localnet.json
+++ /dev/null
@@ -1,27 +0,0 @@
-{
- "title": "Local network metadata snippet",
- "type": "object",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "additionalProperties": false,
- "properties": {
- "subsystem": {
- "type": "object",
- "patternProperties": {
- "^[a-z0-9-]+$": {
- "additionalProperties": false,
- "properties": {
- "local_id": {
- "type": "string"
- }
- },
- "required": [
- "local_id"
- ]
- }
- }
- }
- },
- "required": [
- "subsystem"
- ]
-}
diff --git a/schemas/udmi/metadata_pointset.json b/schemas/udmi/metadata_pointset.json
deleted file mode 100644
index 4d6ac05dce..0000000000
--- a/schemas/udmi/metadata_pointset.json
+++ /dev/null
@@ -1,28 +0,0 @@
-{
- "title": "Pointset metadata snippet",
- "type": "object",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "additionalProperties": false,
- "properties": {
- "points": {
- "additionalProperties": false,
- "maxProperties": 150,
- "patternProperties": {
- "^[a-z][a-z0-9]*(_[a-z0-9]+)*$": {
- "additionalProperties": false,
- "properties": {
- "units": {
- "$ref": "file:units.json#"
- },
- "ref": {
- "type": "string"
- }
- }
- }
- }
- }
- },
- "required": [
- "points"
- ]
-}
diff --git a/schemas/udmi/metadata_system.json b/schemas/udmi/metadata_system.json
deleted file mode 100644
index f1d48da2fd..0000000000
--- a/schemas/udmi/metadata_system.json
+++ /dev/null
@@ -1,86 +0,0 @@
-{
- "title": "System metadata snippet",
- "type": "object",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "additionalProperties": false,
- "properties": {
- "location": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "site": {
- "type": "string",
- "pattern": "^[A-Z]{2}-[A-Z]{3}-[A-Z0-9]{2,9}$"
- },
- "section": {
- "type": "string",
- "pattern": "^[A-Z0-9-]+$"
- },
- "position": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "x": {
- "type": "number"
- },
- "y": {
- "type": "number"
- }
- },
- "required": [
- "x",
- "y"
- ]
- }
- },
- "required": [
- "site"
- ]
- },
- "physical_tag": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "asset": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "guid": {
- "type": "string",
- "pattern": "^[a-z]+://[-0-9a-zA-Z_$]+$"
- },
- "site": {
- "type": "string",
- "pattern": "^[A-Z]{2}-[A-Z]{3}-[A-Z0-9]{2,9}$"
- },
- "name": {
- "type": "string",
- "pattern": "^[A-Z]{2,6}-[0-9]{1,6}$"
- }
- },
- "required": [
- "guid",
- "name"
- ]
- }
- },
- "required": [
- "asset"
- ]
- },
- "aux": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "suffix": {
- "type": "string",
- "pattern": "^[a-zA-Z0-9-]+$"
- }
- }
- }
- },
- "required": [
- "location",
- "physical_tag"
- ]
-}
diff --git a/schemas/udmi/pointset.json b/schemas/udmi/pointset.json
deleted file mode 100644
index b18d31eda8..0000000000
--- a/schemas/udmi/pointset.json
+++ /dev/null
@@ -1,47 +0,0 @@
-{
- "title": "Pointset telemetry schema",
- "type": "object",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "additionalProperties": false,
- "properties": {
- "timestamp": {
- "type": "string",
- "format": "date-time"
- },
- "version": {
- "enum": [
- 1
- ]
- },
- "points": {
- "additionalProperties": false,
- "patternProperties": {
- "^[a-z][a-z0-9]*(_[a-z0-9]+)*$": {
- "$ref": "#/definitions/point_property_names"
- }
- }
- }
- },
- "required": [
- "timestamp",
- "version",
- "points"
- ],
- "definitions": {
- "point_property_names": {
- "type": "object",
- "propertyNames": {
- "oneOf": [
- {
- "enum": [
- "present_value"
- ]
- }
- ]
- },
- "required": [
- "present_value"
- ]
- }
- }
-}
diff --git a/schemas/udmi/pointset.tests/empty.json b/schemas/udmi/pointset.tests/empty.json
deleted file mode 100644
index 2c63c08510..0000000000
--- a/schemas/udmi/pointset.tests/empty.json
+++ /dev/null
@@ -1,2 +0,0 @@
-{
-}
diff --git a/schemas/udmi/pointset.tests/empty.out b/schemas/udmi/pointset.tests/empty.out
deleted file mode 100644
index 5f645ca58a..0000000000
--- a/schemas/udmi/pointset.tests/empty.out
+++ /dev/null
@@ -1,7 +0,0 @@
-Validating 1 schemas
- Validating 1 files against pointset.json
- Against input pointset.tests/empty.json
- #: 3 schema violations found
- #: required key [points] not found
- #: required key [timestamp] not found
- #: required key [version] not found
diff --git a/schemas/udmi/pointset.tests/errors.json b/schemas/udmi/pointset.tests/errors.json
deleted file mode 100644
index 321ea7c4a5..0000000000
--- a/schemas/udmi/pointset.tests/errors.json
+++ /dev/null
@@ -1,37 +0,0 @@
-{
- "version": 1,
- "timestamp": "2018-08-26T21:39:29.364Z",
- "id": "sneakyCASE",
- "comment$string": "world",
- "properties": {
- "$comment": "Common error cases for target telemetry."
- },
- "points": {
- "comment$string": "world",
- "analogValue_1": {
- "present_value": true
- },
- "bad_entity_name_": {
- "present_value": 21.30108642578125
- },
- "guid": "ab9402fa-2c5a-42d1-b4f3-d40b440dea13",
- "yoyo_motion_sensor": {
- "bad_property_name": true
- },
- "bad_____sensor": {
- "present_value": true
- },
- "missing_present_value": {
- },
- "old_properties": {
- "properties": {
- "present_value": true
- }
- },
- "magic_voice_recognizer": {
- "present_value": {
- "present_value": true
- }
- }
- }
-}
diff --git a/schemas/udmi/pointset.tests/errors.out b/schemas/udmi/pointset.tests/errors.out
deleted file mode 100644
index e973a0e733..0000000000
--- a/schemas/udmi/pointset.tests/errors.out
+++ /dev/null
@@ -1,22 +0,0 @@
-Validating 1 schemas
- Validating 1 files against pointset.json
- Against input pointset.tests/errors.json
- #: 13 schema violations found
- #/points: 10 schema violations found
- #/points/guid: expected type: JSONObject, found: String
- #/points/missing_present_value: required key [present_value] not found
- #/points/old_properties: 2 schema violations found
- #/points/old_properties/properties: #: 0 subschemas matched instead of one
- #/points/old_properties/properties: properties is not a valid enum value
- #/points/old_properties: required key [present_value] not found
- #/points/yoyo_motion_sensor: 2 schema violations found
- #/points/yoyo_motion_sensor/bad_property_name: #: 0 subschemas matched instead of one
- #/points/yoyo_motion_sensor/bad_property_name: bad_property_name is not a valid enum value
- #/points/yoyo_motion_sensor: required key [present_value] not found
- #/points: extraneous key [analogValue_1] is not permitted
- #/points: extraneous key [bad_____sensor] is not permitted
- #/points: extraneous key [bad_entity_name_] is not permitted
- #/points: extraneous key [comment$string] is not permitted
- #: extraneous key [comment$string] is not permitted
- #: extraneous key [id] is not permitted
- #: extraneous key [properties] is not permitted
diff --git a/schemas/udmi/pointset.tests/example.json b/schemas/udmi/pointset.tests/example.json
deleted file mode 100644
index 7af10743e5..0000000000
--- a/schemas/udmi/pointset.tests/example.json
+++ /dev/null
@@ -1,18 +0,0 @@
-{
- "version": 1,
- "timestamp": "2018-08-26T21:39:29.364Z",
- "points": {
- "reading_value": {
- "present_value": 21.30108642578125
- },
- "nexus_sensor": {
- "present_value": 21.1
- },
- "yoyo_motion_sensor": {
- "present_value": true
- },
- "enum_value": {
- "present_value": "hello"
- }
- }
-}
diff --git a/schemas/udmi/pointset.tests/example.out b/schemas/udmi/pointset.tests/example.out
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/schemas/udmi/pointset.tests/fcu.json b/schemas/udmi/pointset.tests/fcu.json
deleted file mode 100644
index 9b45dfbfe6..0000000000
--- a/schemas/udmi/pointset.tests/fcu.json
+++ /dev/null
@@ -1,18 +0,0 @@
-{
- "version": 1,
- "timestamp": "2019-01-17T14:02:29.364Z",
- "points": {
- "space_temperature_sensor": {
- "present_value": 21.30108642578125
- },
- "fan_run_status": {
- "present_value": true
- },
- "fan_run_enable": {
- "present_value": false
- },
- "chilled_water_valve_percentage_command": {
- "present_value": 76
- }
- }
-}
diff --git a/schemas/udmi/pointset.tests/fcu.out b/schemas/udmi/pointset.tests/fcu.out
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/schemas/udmi/pointset.tests/smartprimus.json b/schemas/udmi/pointset.tests/smartprimus.json
deleted file mode 100644
index 9b45dfbfe6..0000000000
--- a/schemas/udmi/pointset.tests/smartprimus.json
+++ /dev/null
@@ -1,18 +0,0 @@
-{
- "version": 1,
- "timestamp": "2019-01-17T14:02:29.364Z",
- "points": {
- "space_temperature_sensor": {
- "present_value": 21.30108642578125
- },
- "fan_run_status": {
- "present_value": true
- },
- "fan_run_enable": {
- "present_value": false
- },
- "chilled_water_valve_percentage_command": {
- "present_value": 76
- }
- }
-}
diff --git a/schemas/udmi/pointset.tests/smartprimus.out b/schemas/udmi/pointset.tests/smartprimus.out
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/schemas/udmi/properties.json b/schemas/udmi/properties.json
deleted file mode 100644
index 8b4375c07a..0000000000
--- a/schemas/udmi/properties.json
+++ /dev/null
@@ -1,29 +0,0 @@
-{
- "title": "Device Properties Schema",
- "type": "object",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "additionalProperties": false,
- "required": [
- "key_type",
- "version",
- "connect"
- ],
- "properties": {
- "key_type": {
- "enum": [
- "RSA_PEM",
- "RSA_X509_PEM"
- ]
- },
- "version": {
- "enum": [
- 1
- ]
- },
- "connect": {
- "enum": [
- "direct"
- ]
- }
- }
-}
diff --git a/schemas/udmi/state.json b/schemas/udmi/state.json
deleted file mode 100644
index 117881e3cb..0000000000
--- a/schemas/udmi/state.json
+++ /dev/null
@@ -1,31 +0,0 @@
-{
- "title": "Device State schema",
- "type": "object",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "additionalProperties": false,
- "required": [
- "timestamp",
- "version",
- "system"
- ],
- "properties": {
- "timestamp": {
- "type": "string",
- "format": "date-time"
- },
- "version": {
- "enum": [
- 1
- ]
- },
- "system": {
- "$ref": "file:state_system.json#"
- },
- "gateway": {
- "$ref": "file:state_gateway.json#"
- },
- "pointset": {
- "$ref": "file:state_pointset.json#"
- }
- }
-}
diff --git a/schemas/udmi/state.tests/empty.json b/schemas/udmi/state.tests/empty.json
deleted file mode 100644
index 2c63c08510..0000000000
--- a/schemas/udmi/state.tests/empty.json
+++ /dev/null
@@ -1,2 +0,0 @@
-{
-}
diff --git a/schemas/udmi/state.tests/empty.out b/schemas/udmi/state.tests/empty.out
deleted file mode 100644
index 09d7bf88cf..0000000000
--- a/schemas/udmi/state.tests/empty.out
+++ /dev/null
@@ -1,7 +0,0 @@
-Validating 1 schemas
- Validating 1 files against state.json
- Against input state.tests/empty.json
- #: 3 schema violations found
- #: required key [system] not found
- #: required key [timestamp] not found
- #: required key [version] not found
diff --git a/schemas/udmi/state.tests/errors.json b/schemas/udmi/state.tests/errors.json
deleted file mode 100644
index 5977f68e90..0000000000
--- a/schemas/udmi/state.tests/errors.json
+++ /dev/null
@@ -1,43 +0,0 @@
-{
- "version": 1,
- "timestamp": "2018-08-26T21:39:29.364Z",
- "id": "monkey_brains",
- "system": {
- "device_id": "33895507",
- "device_status": "ok",
- "object_name": "UK-BRH-XX_AHU-001",
- "fling": "hello",
- "firmware": {
- "revision": "should be version"
- },
- "system_status": "Operational",
- "statuses": {
- "default": {
- "timestamp": "2018-08-26T21:39:30.364Z",
- "level": 30
- }
- },
- "status": "hunky-dory"
- },
- "status": [
- {
- "level": 30
- }
- ],
- "pointset": {
- "points": {
- "return_air_temperature_sensor": {
- "object_type": "analog_input",
- "instance_number": 4,
- "cov_increment": 0.300000011920929,
- "deadband": 0,
- "rapt": "hello",
- "high_limit": 0,
- "low_limit": 0,
- "resolution": 0.04952822998166084,
- "units": "Degrees Celsius",
- "status": "it's working!"
- }
- }
- }
-}
diff --git a/schemas/udmi/state.tests/errors.out b/schemas/udmi/state.tests/errors.out
deleted file mode 100644
index f58e247fc5..0000000000
--- a/schemas/udmi/state.tests/errors.out
+++ /dev/null
@@ -1,32 +0,0 @@
-Validating 1 schemas
- Validating 1 files against state.json
- Against input state.tests/errors.json
- #: 24 schema violations found
- #/pointset/points/return_air_temperature_sensor: 9 schema violations found
- #/pointset/points/return_air_temperature_sensor/status: expected type: JSONObject, found: String
- #/pointset/points/return_air_temperature_sensor: extraneous key [cov_increment] is not permitted
- #/pointset/points/return_air_temperature_sensor: extraneous key [deadband] is not permitted
- #/pointset/points/return_air_temperature_sensor: extraneous key [high_limit] is not permitted
- #/pointset/points/return_air_temperature_sensor: extraneous key [instance_number] is not permitted
- #/pointset/points/return_air_temperature_sensor: extraneous key [low_limit] is not permitted
- #/pointset/points/return_air_temperature_sensor: extraneous key [object_type] is not permitted
- #/pointset/points/return_air_temperature_sensor: extraneous key [rapt] is not permitted
- #/pointset/points/return_air_temperature_sensor: extraneous key [resolution] is not permitted
- #/system: 13 schema violations found
- #/system/firmware: 2 schema violations found
- #/system/firmware: extraneous key [revision] is not permitted
- #/system/firmware: required key [version] not found
- #/system/statuses/default: 3 schema violations found
- #/system/statuses/default/level: 30 is not greater or equal to 100
- #/system/statuses/default: required key [category] not found
- #/system/statuses/default: required key [message] not found
- #/system: extraneous key [device_id] is not permitted
- #/system: extraneous key [device_status] is not permitted
- #/system: extraneous key [fling] is not permitted
- #/system: extraneous key [object_name] is not permitted
- #/system: extraneous key [status] is not permitted
- #/system: extraneous key [system_status] is not permitted
- #/system: required key [make_model] not found
- #/system: required key [operational] not found
- #: extraneous key [id] is not permitted
- #: extraneous key [status] is not permitted
diff --git a/schemas/udmi/state.tests/example.json b/schemas/udmi/state.tests/example.json
deleted file mode 100644
index 92044b86a6..0000000000
--- a/schemas/udmi/state.tests/example.json
+++ /dev/null
@@ -1,37 +0,0 @@
-{
- "version": 1,
- "timestamp": "2018-08-26T21:39:29.364Z",
- "system": {
- "make_model": "ACME Bird Trap",
- "firmware": {
- "version": "3.2a"
- },
- "last_config": "2018-08-26T21:49:29.364Z",
- "operational": true,
- "statuses": {
- "base_system": {
- "message": "Tickity Boo",
- "category": "device.state.com",
- "timestamp": "2018-08-26T21:39:30.364Z",
- "level": 600
- }
- }
- },
- "pointset": {
- "points": {
- "return_air_temperature_sensor": {
- "units": "Celsius",
- "status": {
- "message": "Invalid sample time",
- "category": "device.config.validate",
- "timestamp": "2018-08-26T21:39:28.364Z",
- "level": 800
- }
- },
- "nexus_sensor": {
- "units": "Celsius",
- "source": "fix"
- }
- }
- }
-}
diff --git a/schemas/udmi/state.tests/example.out b/schemas/udmi/state.tests/example.out
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/schemas/udmi/state.tests/fcu.json b/schemas/udmi/state.tests/fcu.json
deleted file mode 100644
index b2a55220e8..0000000000
--- a/schemas/udmi/state.tests/fcu.json
+++ /dev/null
@@ -1,52 +0,0 @@
-{
- "version": 1,
- "timestamp": "2019-01-17T14:02:29.364Z",
- "system": {
- "make_model": "EasyIO FW-14",
- "firmware": {
- "version": "3.2a"
- },
- "last_config": "2019-01-14T21:49:29.364Z",
- "operational": true,
- "statuses": {
- "base_system": {
- "message": "Time on the device is not synchronized",
- "category": "com.acme.sync",
- "timestamp": "2019-01-17T13:29:47.364Z",
- "level": 600
- }
- }
- },
- "pointset": {
- "points": {
- "space_temperature_sensor": {
- "units": "Degrees-Celsius",
- "status": {
- "message": "Present value out of limits",
- "category": "com.acme.device.regulator",
- "timestamp": "2019-01-17T11:39:28.364Z",
- "level": 400
- }
- },
- "fan_run_status": {
- "status": {
- "message": "Value overridden by fix_value",
- "category": "com.acme.device.monitor",
- "timestamp": "2019-01-17T10:59:11.364Z",
- "level": 300
- }
- },
- "fan_run_enable": {
- "status": {
- "message": "Value overridden by fix_value",
- "category": "com.acme.device.manager",
- "timestamp": "2019-01-17T13:14:55.364Z",
- "level": 300
- }
- },
- "chilled_water_valve_percentage_command": {
- "units": "Percent"
- }
- }
- }
-}
diff --git a/schemas/udmi/state.tests/fcu.out b/schemas/udmi/state.tests/fcu.out
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/schemas/udmi/state.tests/gateway.json b/schemas/udmi/state.tests/gateway.json
deleted file mode 100644
index 72bbdbc4bb..0000000000
--- a/schemas/udmi/state.tests/gateway.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
- "version": 1,
- "timestamp": "2018-08-26T21:39:29.364Z",
- "system": {
- "make_model": "ACME Gateway v2",
- "firmware": {
- "version": "3.2a"
- },
- "last_config": "2018-08-26T21:49:29.364Z",
- "operational": true,
- "statuses": {
- "base_system": {
- "message": "Tickity Boo",
- "category": "device.state.com",
- "timestamp": "2018-08-26T21:39:30.364Z",
- "level": 600
- }
- }
- },
- "gateway": {
- "error_ids": [ "991", "SMS-91" ]
- }
-}
diff --git a/schemas/udmi/state.tests/gateway.out b/schemas/udmi/state.tests/gateway.out
deleted file mode 100644
index ae5507e2c2..0000000000
--- a/schemas/udmi/state.tests/gateway.out
+++ /dev/null
@@ -1,4 +0,0 @@
-Validating 1 schemas
- Validating 1 files against state.json
- Against input state.tests/gateway.json
- #/gateway/error_ids/0: string [991] does not match pattern ^[A-Z]{3}-[1-9][0-9]{0,2}$
diff --git a/schemas/udmi/state.tests/rotate.json b/schemas/udmi/state.tests/rotate.json
deleted file mode 100644
index 9c77e543ea..0000000000
--- a/schemas/udmi/state.tests/rotate.json
+++ /dev/null
@@ -1,37 +0,0 @@
-{
- "version": 1,
- "timestamp": "2018-08-26T21:39:29.364Z",
- "system": {
- "make_model": "ACME Bird Trap",
- "firmware": {
- "version": "3.2a"
- },
- "auth_key": {
- "private_hash": "sha512:4e61746f21abe6708ca81a45a1851b82efd1f3ad7f9e6f6fc2dcf431e0ff95cdbcc6f5940a4bfb77df7aeb2f057d19cf5f234a664775edc66175025a14a87c3b"
- },
- "last_config": "2018-08-26T21:49:29.364Z",
- "operational": true,
- "statuses": {
- "base_system": {
- "message": "Tickity Boo",
- "category": "device.state.com",
- "timestamp": "2018-08-26T21:39:30.364Z",
- "level": 600
- }
- }
- },
- "pointset": {
- "points": {
- "return_air_temperature_sensor": {
- "units": "Celsius",
- "status": {
- "message": "Invalid sample time",
- "category": "device.config.validate",
- "timestamp": "2018-08-26T21:39:28.364Z",
- "level": 800
- }
- }
- }
- }
-}
-
diff --git a/schemas/udmi/state.tests/rotate.out b/schemas/udmi/state.tests/rotate.out
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/schemas/udmi/state_gateway.json b/schemas/udmi/state_gateway.json
deleted file mode 100644
index 45dfd6f35c..0000000000
--- a/schemas/udmi/state_gateway.json
+++ /dev/null
@@ -1,18 +0,0 @@
-{
- "title": "Gateway Config Snippet",
- "type": "object",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "additionalProperties": false,
- "required": [
- "error_ids"
- ],
- "properties": {
- "error_ids": {
- "type": "array",
- "items": {
- "type": "string",
- "pattern": "^[A-Z]{3}-[1-9][0-9]{0,2}$"
- }
- }
- }
-}
diff --git a/schemas/udmi/state_pointset.json b/schemas/udmi/state_pointset.json
deleted file mode 100644
index b6d44061bd..0000000000
--- a/schemas/udmi/state_pointset.json
+++ /dev/null
@@ -1,35 +0,0 @@
-{
- "title": "pointset state snippet",
- "type": "object",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "additionalProperties": false,
- "properties": {
- "points": {
- "additionalProperties": false,
- "patternProperties": {
- "^[a-z][a-z0-9]*(_[a-z0-9]+)*$": {
- "additionalProperties": false,
- "properties": {
- "fault": {
- "type": "boolean"
- },
- "units": {
- "type": "string",
- },
- "source": {
- "enum": [
- "fix"
- ]
- },
- "status": {
- "$ref": "file:system.json#/definitions/entry"
- }
- }
- }
- }
- }
- },
- "required": [
- "points"
- ]
-}
diff --git a/schemas/udmi/state_system.json b/schemas/udmi/state_system.json
deleted file mode 100644
index 5fb326ac93..0000000000
--- a/schemas/udmi/state_system.json
+++ /dev/null
@@ -1,56 +0,0 @@
-{
- "title": "System state snippet",
- "type": "object",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "additionalProperties": false,
- "properties": {
- "make_model": {
- "type": "string"
- },
- "auth_key": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "private_hash": {
- "type": "string"
- }
- },
- "required": [
- "private_hash"
- ]
- },
- "firmware": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "version": {
- "type": "string"
- }
- },
- "required": [
- "version"
- ]
- },
- "last_config": {
- "type": "string",
- "format": "date-time"
- },
- "operational": {
- "type": "boolean"
- },
- "statuses": {
- "type": "object",
- "additionalProperties": false,
- "patternProperties": {
- "^[a-z][a-z0-9]*(_[a-z0-9]+)*$": {
- "$ref": "file:system.json#/definitions/entry"
- }
- }
- }
- },
- "required": [
- "make_model",
- "firmware",
- "operational"
- ]
-}
diff --git a/schemas/udmi/system.json b/schemas/udmi/system.json
deleted file mode 100644
index ca3d33d2d4..0000000000
--- a/schemas/udmi/system.json
+++ /dev/null
@@ -1,62 +0,0 @@
-{
- "title": "Log entry schema",
- "type": "object",
- "$schema": "http://json-schema.org/draft-07/schema#",
- "additionalProperties": false,
- "properties": {
- "timestamp": {
- "type": "string",
- "format": "date-time"
- },
- "version": {
- "enum": [
- 1
- ]
- },
- "logentries": {
- "type": "array",
- "items": {
- "$ref": "#/definitions/entry"
- }
- }
- },
- "required": [
- "timestamp",
- "version"
- ],
- "definitions": {
- "entry": {
- "type": "object",
- "additionalProperties": false,
- "properties": {
- "message": {
- "type": "string"
- },
- "detail": {
- "type": "string"
- },
- "category": {
- "type": "string",
- "pattern": "^[a-z][.a-zA-Z]*[a-zA-Z]$"
- },
- "timestamp": {
- "type": "string",
- "format": "date-time"
- },
- "level": {
- "$comment": "https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#logseverity",
- "type": "integer",
- "multipleOf": 1,
- "minimum": 100,
- "maximum": 800
- }
- },
- "required": [
- "message",
- "category",
- "timestamp",
- "level"
- ]
- }
- }
-}
diff --git a/schemas/udmi/system.tests/empty.json b/schemas/udmi/system.tests/empty.json
deleted file mode 100644
index 2c63c08510..0000000000
--- a/schemas/udmi/system.tests/empty.json
+++ /dev/null
@@ -1,2 +0,0 @@
-{
-}
diff --git a/schemas/udmi/system.tests/empty.out b/schemas/udmi/system.tests/empty.out
deleted file mode 100644
index ad7bdffef5..0000000000
--- a/schemas/udmi/system.tests/empty.out
+++ /dev/null
@@ -1,6 +0,0 @@
-Validating 1 schemas
- Validating 1 files against system.json
- Against input system.tests/empty.json
- #: 2 schema violations found
- #: required key [timestamp] not found
- #: required key [version] not found
diff --git a/schemas/udmi/system.tests/errors.json b/schemas/udmi/system.tests/errors.json
deleted file mode 100644
index ddf4c70815..0000000000
--- a/schemas/udmi/system.tests/errors.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
- "version": 1,
- "timestamp": "2018-08-26T21:39:29.364Z",
- "logentries": [
- {
- "detail": "someplace, sometime",
- "category": "com.testCategory$",
- "level": 60
- },
- "nope"
- ]
-}
diff --git a/schemas/udmi/system.tests/errors.out b/schemas/udmi/system.tests/errors.out
deleted file mode 100644
index 3479a7a113..0000000000
--- a/schemas/udmi/system.tests/errors.out
+++ /dev/null
@@ -1,10 +0,0 @@
-Validating 1 schemas
- Validating 1 files against system.json
- Against input system.tests/errors.json
- #/logentries: 5 schema violations found
- #/logentries/0: 4 schema violations found
- #/logentries/0/category: string [com.testCategory$] does not match pattern ^[a-z][.a-zA-Z]*[a-zA-Z]$
- #/logentries/0/level: 60 is not greater or equal to 100
- #/logentries/0: required key [message] not found
- #/logentries/0: required key [timestamp] not found
- #/logentries/1: expected type: JSONObject, found: String
diff --git a/schemas/udmi/system.tests/example.json b/schemas/udmi/system.tests/example.json
deleted file mode 100644
index 139253f721..0000000000
--- a/schemas/udmi/system.tests/example.json
+++ /dev/null
@@ -1,20 +0,0 @@
-{
- "version": 1,
- "timestamp": "2018-08-26T21:39:29.364Z",
- "logentries": [
- {
- "message": "things are happening",
- "detail": "someplace, sometime",
- "timestamp": "2018-08-26T21:39:19.364Z",
- "category": "com.testCategory",
- "level": 600
- },
- {
- "message": "something else happened",
- "timestamp": "2018-08-26T21:39:39.364Z",
- "detail": "someplace, sometime",
- "category": "com.testCategory",
- "level": 700
- }
- ]
-}
diff --git a/schemas/udmi/system.tests/example.out b/schemas/udmi/system.tests/example.out
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/schemas/udmi/system.tests/fcu.json b/schemas/udmi/system.tests/fcu.json
deleted file mode 100644
index ed4a8c7f1b..0000000000
--- a/schemas/udmi/system.tests/fcu.json
+++ /dev/null
@@ -1,19 +0,0 @@
-{
- "version": 1,
- "timestamp": "2018-08-26T21:39:29.364Z",
- "logentries": [
- {
- "message": "System Booted",
- "timestamp": "2018-08-26T20:39:19.364Z",
- "category": "com.acme.system",
- "level": 300
- },
- {
- "message": "Device communication failed",
- "detail": "Connection attempt to device 3564 failed",
- "timestamp": "2018-08-26T21:39:19.364Z",
- "category": "com.acme.comms",
- "level": 700
- }
- ]
-}
diff --git a/schemas/udmi/system.tests/fcu.out b/schemas/udmi/system.tests/fcu.out
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/schemas/udmi/units.json b/schemas/udmi/units.json
deleted file mode 100644
index 3e375667d1..0000000000
--- a/schemas/udmi/units.json
+++ /dev/null
@@ -1,194 +0,0 @@
-{
- "description": "Taken from standard BACnet engineering units",
- "enum": [
- "Square-meters",
- "Square-feet",
- "Milliamperes",
- "Amperes",
- "Ohms",
- "Volts",
- "Kilo-volts",
- "Mega-volts",
- "Volt-amperes",
- "Kilo-volt-amperes",
- "Mega-volt-amperes",
- "Volt-amperes-reactive",
- "Kilo-volt-amperes-reactive",
- "Mega-volt-amperes-reactive",
- "Degrees-phase",
- "Power-factor",
- "Joules",
- "Kilojoules",
- "Watt-hours",
- "Kilowatt-hours",
- "BTUs",
- "Therms",
- "Ton-hours",
- "Joules-per-kilogram-dry-air",
- "BTUs-per-pound-dry-air",
- "Cycles-per-hour",
- "Cycles-per-minute",
- "Hertz",
- "Grams-of-water-per-kilogram-dry-air",
- "Percent-relative-humidity",
- "Millimeters",
- "Meters",
- "Inches",
- "Feet",
- "Watts-per-square-foot",
- "Watts-per-square-meter",
- "Lumens",
- "Luxes",
- "Foot-candles",
- "Kilograms",
- "Pounds-mass",
- "Tons",
- "Kilograms-per-second",
- "Kilograms-per-minute",
- "Kilograms-per-hour",
- "Pounds-mass-per-minute",
- "Pounds-mass-per-hour",
- "Watts",
- "Kilowatts",
- "Megawatts",
- "BTUs-per-hour",
- "Horsepower",
- "Tons-refrigeration",
- "Pascals",
- "Kilopascals",
- "Bars",
- "Pounds-force-per-square-inch",
- "Centimeters-of-water",
- "Inches-of-water",
- "Millimeters-of-mercury",
- "Centimeters-of-mercury",
- "Inches-of-mercury",
- "Degrees-Celsius",
- "Degrees-Kelvin",
- "Degrees-Fahrenheit",
- "Degree-days-Celsius",
- "Degree-days-Fahrenheit",
- "Years",
- "Months",
- "Weeks",
- "Days",
- "Hours",
- "Minutes",
- "Seconds",
- "Meters-per-second",
- "Kilometers-per-hour",
- "Feet-per-second",
- "Feet-per-minute",
- "Miles-per-hour",
- "Cubic-feet",
- "Cubic-meters",
- "Imperial-gallons",
- "Liters",
- "Us-gallons",
- "Cubic-feet-per-minute",
- "Cubic-meters-per-second",
- "Imperial-gallons-per-minute",
- "Liters-per-second",
- "Liters-per-minute",
- "Us-gallons-per-minute",
- "Degrees-angular",
- "Degrees-Celsius-per-hour",
- "Degrees-Celsius-per-minute",
- "Degrees-Fahrenheit-per-hour",
- "Degrees-Fahrenheit-per-minute",
- "No-units",
- "Parts-per-million",
- "Parts-per-billion",
- "Percent",
- "Percent-per-second",
- "Per-minute",
- "Per-second",
- "Psi-per-Degree-Fahrenheit",
- "Radians",
- "Revolutions-per-minute",
- "Currency1",
- "Currency2",
- "Currency3",
- "Currency4",
- "Currency5",
- "Currency6",
- "Currency7",
- "Currency8",
- "Currency9",
- "Currency10",
- "Square-inches",
- "Square-centimeters",
- "BTUs-per-pound",
- "Centimeters",
- "Pounds-mass-per-second",
- "Delta-Degrees-Fahrenheit",
- "Delta-Degrees-Kelvin",
- "Kilohms",
- "Megohms",
- "Millivolts",
- "Kilojoules-per-kilogram",
- "Megajoules",
- "Joules-per-degree-Kelvin",
- "Joules-per-kilogram-degree-Kelvin",
- "Kilohertz",
- "Megahertz",
- "Per-hour",
- "Milliwatts",
- "Hectopascals",
- "Millibars",
- "Cubic-meters-per-hour",
- "Liters-per-hour",
- "Kilowatt-hours-per-square-meter",
- "Kilowatt-hours-per-square-foot",
- "Megajoules-per-square-meter",
- "Megajoules-per-square-foot",
- "Watts-per-square-meter-Degree-Kelvin",
- "Cubic-feet-per-second",
- "Percent-obscuration-per-foot",
- "Percent-obscuration-per-meter",
- "Milliohms",
- "Megawatt-hours",
- "Kilo-BTUs",
- "Mega-BTUs",
- "Kilojoules-per-kilogram-dry-air",
- "Megajoules-per-kilogram-dry-air",
- "Kilojoules-per-degree-Kelvin",
- "Megajoules-per-degree-Kelvin",
- "Newton",
- "Grams-per-second",
- "Grams-per-minute",
- "Tons-per-hour",
- "Kilo-BTUs-per-hour",
- "Hundredths-seconds",
- "Milliseconds",
- "Newton-meters",
- "Millimeters-per-second",
- "Millimeters-per-minute",
- "Meters-per-minute",
- "Meters-per-hour",
- "Cubic-meters-per-minute",
- "Meters-per-second-per-second",
- "Amperes-per-meter",
- "Amperes-per-square-meter",
- "Ampere-square-meters",
- "Farads",
- "Henrys",
- "Ohm-meters",
- "Siemens",
- "Siemens-per-meter",
- "Teslas",
- "Volts-per-degree-Kelvin",
- "Volts-per-meter",
- "Webers",
- "Candelas",
- "Candelas-per-square-meter",
- "Kelvins-per-hour",
- "Kelvins-per-minute",
- "Joule-seconds",
- "Square-meters-per-Newton",
- "Kilogram-per-cubic-meter",
- "Newton-seconds",
- "Newtons-per-meter",
- "Watts-per-meter-per-degree-Kelvin"
- ]
-}
diff --git a/subset/bacnet/bacnetTests/gradle/wrapper/gradle-wrapper.properties b/subset/bacnet/bacnetTests/gradle/wrapper/gradle-wrapper.properties
index 622ab64a3c..12d38de6a4 100644
--- a/subset/bacnet/bacnetTests/gradle/wrapper/gradle-wrapper.properties
+++ b/subset/bacnet/bacnetTests/gradle/wrapper/gradle-wrapper.properties
@@ -1,5 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-6.6.1-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
diff --git a/subset/bacnet/bacnetTests/src/main/java/helper/FileManager.java b/subset/bacnet/bacnetTests/src/main/java/helper/FileManager.java
index 20ac76297b..35ef1bc27c 100644
--- a/subset/bacnet/bacnetTests/src/main/java/helper/FileManager.java
+++ b/subset/bacnet/bacnetTests/src/main/java/helper/FileManager.java
@@ -4,61 +4,71 @@
public class FileManager {
- private String filePath = "";
- private String csvName = "pics";
- private String csvExtension = ".csv";
- private boolean debug = false;
+ private String filePath = "";
+ private String csvName = "pics";
+ private String csvExtension = ".csv";
+ private boolean debug = false;
- public boolean checkDevicePicCSV() {
- String csvFolder = getCSVPath();
- try{
- File[] listFiles = new File(csvFolder).listFiles();
- for (int i = 0; i < listFiles.length; i++) {
- if (listFiles[i].isFile()) {
- String fileName = listFiles[i].getName();
- if (fileName.contains(csvName)
- && fileName.endsWith(csvExtension)) {
- System.out.println("pics.csv file found in " + csvFolder);
- setFilePath(fileName);
- return true;
- }
- }
- }
- String errorMessage = "pics.csv not found.\n";
- System.err.println(errorMessage);
- } catch(Exception e) {
- System.out.println("Error in reading " + csvName + csvExtension + " in " + csvFolder);
+ /**
+ * Checks if pics.csv exists.
+ * @return true if pics.csv exists, false otherwise
+ */
+ public boolean checkDevicePicCSV() {
+ String csvFolder = getCSVPath();
+ try {
+ File[] listFiles = new File(csvFolder).listFiles();
+ for (int i = 0; i < listFiles.length; i++) {
+ if (listFiles[i].isFile()) {
+ String fileName = listFiles[i].getName();
+ if (fileName.contains(csvName)
+ && fileName.endsWith(csvExtension)) {
+ System.out.println("pics.csv file found in " + csvFolder);
+ setFilePath(fileName);
+ return true;
+ }
}
- return false;
+ }
+ String errorMessage = "pics.csv not found.\n";
+ System.err.println(errorMessage);
+ } catch (Exception e) {
+ System.out.println("Error in reading " + csvName + csvExtension + " in " + csvFolder);
}
+ return false;
+ }
- private void setFilePath(String fileName) {
- String absolute_path = getCSVPath();
- this.filePath = absolute_path + "/" + fileName;
- }
+ private void setFilePath(String fileName) {
+ String absolutePath = getCSVPath();
+ this.filePath = absolutePath + "/" + fileName;
+ }
- public String getFilePath() {
- return this.filePath;
- }
+ public String getFilePath() {
+ return this.filePath;
+ }
- public String getAbsolutePath() {
- String absolute_path = "";
- String system_path = System.getProperty("user.dir");
- System.out.println("system_path: " + system_path);
- String[] path_arr = system_path.split("/");
- for (int count = 0; count < path_arr.length; count++) {
- if (path_arr[count].equals("bacnetTests")) {
- break;
- }
- absolute_path += path_arr[count] + "/";
- }
- return absolute_path;
+ /**
+ * Returns absolute path to the working directory.
+ */
+ public String getAbsolutePath() {
+ String absolutePath = "";
+ String systemPath = System.getProperty("user.dir");
+ System.out.println("system_path: " + systemPath);
+ String[] pathArr = systemPath.split("/");
+ for (int count = 0; count < pathArr.length; count++) {
+ if (pathArr[count].equals("bacnetTests")) {
+ break;
+ }
+ absolutePath += pathArr[count] + "/";
}
+ return absolutePath;
+ }
- public String getCSVPath() {
- if (debug) {
- return "src/main/resources";
- }
- return "/config/type";
+ /**
+ * Returns the directory where pics.csv is located within the test container.
+ */
+ public String getCSVPath() {
+ if (debug) {
+ return "src/main/resources";
}
+ return "/config/type";
+ }
}
diff --git a/subset/bacnet/bacnetTests/src/main/resources/pics.csv b/subset/bacnet/bacnetTests/src/main/resources/pics.csv
index bedf7117dd..aab3985dec 100644
--- a/subset/bacnet/bacnetTests/src/main/resources/pics.csv
+++ b/subset/bacnet/bacnetTests/src/main/resources/pics.csv
@@ -1,486 +1,486 @@
-Bacnet_Object_Type,Bacnet_Object_Property,Property_Datatype,Conformance_Code,Supported,
-Bacnet_Analogue_Input,Object_Identifier,BACnetObjectIdentifier,R,TRUE,
-Bacnet_Analogue_Input,Object_Name,CharacterString,W,TRUE,
-Bacnet_Analogue_Input,Object_Type,BACnetObjectType,R,TRUE,
-Bacnet_Analogue_Input,Present_Value,REAL,R,TRUE,
- ,Description,CharacterString,O,TRUE,
-Bacnet_Analogue_Input,Device_Type,,O,TRUE,
-Bacnet_Analogue_Input,Status_Flags,BACnetStatusFlags,R,TRUE,
-Bacnet_Analogue_Input,Event_State,BACnetEventState,R,TRUE,
-Bacnet_Analogue_Input,Reliability,BACnetReliability,O,TRUE,
-Bacnet_Analogue_Input,Out_Of_Service,BOOLEAN,W,TRUE,
-Bacnet_Analogue_Input,Update_Interval,,O,TRUE,
-Bacnet_Analogue_Input,Units,,R,TRUE,
-Bacnet_Analogue_Input,Min_Pres_Value,REAL,O,TRUE,
-Bacnet_Analogue_Input,Max_Pres_Value,REAL,O,TRUE,
-Bacnet_Analogue_Input,Resolution,,O,TRUE,
-Bacnet_Analogue_Input,COV_Increment,REAL,O,TRUE,
-Bacnet_Analogue_Input,COV_Period,,O,TRUE,
-Bacnet_Analogue_Input,COV_Min_Send_Time,,O,TRUE,
-Bacnet_Analogue_Input,Time_Delay,,O,TRUE,
-Bacnet_Analogue_Input,Notification_Class,Unsigned,O,TRUE,
-Bacnet_Analogue_Input,High_Limit,,O,TRUE,
-Bacnet_Analogue_Input,Low_Limit,,O,TRUE,
-Bacnet_Analogue_Input,Deadband,,O,TRUE,
-Bacnet_Analogue_Input,Limit_Enable,,O,TRUE,
-Bacnet_Analogue_Input,Event_Enable,BACnetEventTransitionBits,O,TRUE,
-Bacnet_Analogue_Input,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
-Bacnet_Analogue_Input,Notify_Type,BACnetNotifyType,O,TRUE,
-Bacnet_Analogue_Input,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
-Bacnet_Analogue_Input,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
-Bacnet_Analogue_Input,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
-Bacnet_Analogue_Input,Event_Detection_Enable,BOOLEAN,O,TRUE,
-Bacnet_Analogue_Input,Time_Delay_Normal,,O,TRUE,
-Bacnet_Analogue_Input,Event_Algorithm_Inhibit,,O,TRUE,
-Bacnet_Analogue_Input,Event_Algorithm_Inhibit_Ref,,O,TRUE,
-Bacnet_Analogue_Input,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE,
-Bacnet_Analogue_Output,Object_Identifier,BACnetObjectIdentifier,R,TRUE,
-Bacnet_Analogue_Output,Object_Name,CharacterString,W,TRUE,
-Bacnet_Analogue_Output,Object_Type,BACnetObjectType,R,TRUE,
-Bacnet_Analogue_Output,Present_Value,REAL,W,TRUE,
-Bacnet_Analogue_Output,Description,CharacterString,O,TRUE,
-Bacnet_Analogue_Output,Device_Type,,O,TRUE,
-Bacnet_Analogue_Output,Status_Flags,BACnetStatusFlags,R,TRUE,
-Bacnet_Analogue_Output,Event_State,BACnetEventState,R,TRUE,
-Bacnet_Analogue_Output,Reliability,BACnetReliability,O,TRUE,
-Bacnet_Analogue_Output,Out_Of_Service,BOOLEAN,W,TRUE,
-Bacnet_Analogue_Output,Units,,R,TRUE,
-Bacnet_Analogue_Output,Min_Present_Value,,O,TRUE,
-Bacnet_Analogue_Output,Max_Present_Value,,O,TRUE,
-Bacnet_Analogue_Output,Resolution,,O,TRUE,
-Bacnet_Analogue_Output,Priority_Array,,R,TRUE,
-Bacnet_Analogue_Output,Relinquish_Default,,W,TRUE,
-Bacnet_Analogue_Output,COV_Increment,REAL,O,TRUE,
-Bacnet_Analogue_Output,COV_Period,,O,TRUE,
-Bacnet_Analogue_Output,COV_Min_Send_Time,,O,TRUE,
-Bacnet_Analogue_Output,Time_Delay,,O,TRUE,
-Bacnet_Analogue_Output,Notification_Class,Unsigned,O,TRUE,
-Bacnet_Analogue_Output,High_Limit,,O,TRUE,
-Bacnet_Analogue_Output,Low_Limit,,O,TRUE,
-Bacnet_Analogue_Output,Deadband,,O,TRUE,
-Bacnet_Analogue_Output,Limit_Enable,,O,TRUE,
-Bacnet_Analogue_Output,Event_Enable,BACnetEventTransitionBits,O,TRUE,
-Bacnet_Analogue_Output,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
-Bacnet_Analogue_Output,Notify_Type,BACnetNotifyType,O,TRUE,
-Bacnet_Analogue_Output,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
-Bacnet_Analogue_Output,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
-Bacnet_Analogue_Output,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
-Bacnet_Analogue_Output,Event_Detection_Enable,BOOLEAN,O,TRUE,
-Bacnet_Analogue_Output,Time_Delay_Normal,,O,TRUE,
-Bacnet_Analogue_Output,Event_Algorithm_Inhibit,,O,TRUE,
-Bacnet_Analogue_Output,Event_Algorithm_Inhibit_Ref,,O,TRUE,
-Bacnet_Analogue_Output,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE,
-Bacnet_Analogue_Value,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
-Bacnet_Analogue_Value,Object_Name,CharacterString,O,TRUE,
-Bacnet_Analogue_Value,Object_Type,BACnetObjectType,O,TRUE,
-Bacnet_Analogue_Value,Present_Value,REAL,O,TRUE,
-Bacnet_Analogue_Value,Description,CharacterString,O,TRUE,
-Bacnet_Analogue_Value,Status_Flags,BACnetStatusFlags,O,TRUE,
-Bacnet_Analogue_Value,Event_State,BACnetEventState,O,TRUE,
-Bacnet_Analogue_Value,Reliability,BACnetReliability,O,TRUE,
-Bacnet_Analogue_Value,Out_Of_Service,BOOLEAN,O,TRUE,
-Bacnet_Analogue_Value,Units,,O,TRUE,
-Bacnet_Analogue_Value,Priority_Array,,O,TRUE,
-Bacnet_Analogue_Value,Relinquish_Default,,O,TRUE,
-Bacnet_Analogue_Value,Min_Present_Value,,O,TRUE,
-Bacnet_Analogue_Value,Max_Present_Value,,O,TRUE,
-Bacnet_Analogue_Value,COV_Increment,REAL,O,TRUE,
-Bacnet_Analogue_Value,COV_Period,,O,TRUE,
-Bacnet_Analogue_Value,COV_Min_Send_Time,,O,TRUE,
-Bacnet_Analogue_Value,Time_Delay,,O,TRUE,
-Bacnet_Analogue_Value,Notification_Class,Unsigned,O,TRUE,
-Bacnet_Analogue_Value,High_Limit,,O,TRUE,
-Bacnet_Analogue_Value,Low_Limit,,O,TRUE,
-Bacnet_Analogue_Value,Deadband,,O,TRUE,
-Bacnet_Analogue_Value,Limit_Enable,,O,TRUE,
-Bacnet_Analogue_Value,Event_Enable,BACnetEventTransitionBits,O,TRUE,
-Bacnet_Analogue_Value,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
-Bacnet_Analogue_Value,Notify_Type,BACnetNotifyType,O,TRUE,
-Bacnet_Analogue_Value,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
-Bacnet_Analogue_Value,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
-Bacnet_Analogue_Value,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
-Bacnet_Analogue_Value,Event_Detection_Enable,BOOLEAN,O,TRUE,
-Bacnet_Analogue_Value,Time_Delay_Normal,,O,TRUE,
-Bacnet_Analogue_Value,Event_Algorithm_Inhibit,,O,TRUE,
-Bacnet_Analogue_Value,Event_Algorithm_Inhibit_Ref,,O,TRUE,
-Bacnet_Analogue_Value,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE,
-Bacnet_Binary_Input,Object_Identifier,BACnetObjectIdentifier,R,TRUE,
-Bacnet_Binary_Input,Object_Name,CharacterString,W,TRUE,
-Bacnet_Binary_Input,Object_Type,BACnetObjectType,R,TRUE,
-Bacnet_Binary_Input,Present_Value,REAL,R,TRUE,
-Bacnet_Binary_Input,Description,CharacterString,O,TRUE,
-Bacnet_Binary_Input,Device_Type,,O,TRUE,
-Bacnet_Binary_Input,Status_Flags,BACnetStatusFlags,R,TRUE,
-Bacnet_Binary_Input,Event_State,BACnetEventState,R,TRUE,
-Bacnet_Binary_Input,Reliability,BACnetReliability,O,TRUE,
-Bacnet_Binary_Input,Out_Of_Service,BOOLEAN,W,TRUE,
-Bacnet_Binary_Input,Polarity,,R,TRUE,
-Bacnet_Binary_Input,Inactive_Text,,O,TRUE,
-Bacnet_Binary_Input,Active_Text,,O,TRUE,
-Bacnet_Binary_Input,Change_Of_State_Time,,O,TRUE,
-Bacnet_Binary_Input,Change_Of_State_Count,,O,TRUE,
-Bacnet_Binary_Input,Time_Of_State_Count_Reset,,O,TRUE,
-Bacnet_Binary_Input,Elapsed_Active_Time,,O,TRUE,
-Bacnet_Binary_Input,Time_Of_Active_Time_Reset,,O,TRUE,
-Bacnet_Binary_Input,COV_Period,,O,TRUE,
-Bacnet_Binary_Input,COV_Min_Send_Time,,O,TRUE,
-Bacnet_Binary_Input,Time_Delay,,O,TRUE,
-Bacnet_Binary_Input,Notification_Class,Unsigned,O,TRUE,
-Bacnet_Binary_Input,Alarm_Value,,O,TRUE,
-Bacnet_Binary_Input,Event_Enable,BACnetEventTransitionBits,O,TRUE,
-Bacnet_Binary_Input,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
-Bacnet_Binary_Input,Notify_Type,BACnetNotifyType,O,TRUE,
-Bacnet_Binary_Input,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
-Bacnet_Binary_Input,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
-Bacnet_Binary_Input,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
-Bacnet_Binary_Input,Event_Detection_Enable,BOOLEAN,O,TRUE,
-Bacnet_Binary_Input,Time_Delay_Normal,,O,TRUE,
-Bacnet_Binary_Input,Event_Algorithm_Inhibit,,O,TRUE,
-Bacnet_Binary_Input,Event_Algorithm_Inhibit_Ref,,O,TRUE,
-Bacnet_Binary_Input,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE,
-Bacnet_Binary_Output,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
-Bacnet_Binary_Output,Object_Name,CharacterString,O,TRUE,
-Bacnet_Binary_Output,Object_Type,BACnetObjectType,O,TRUE,
-Bacnet_Binary_Output,Present_Value,REAL,O,TRUE,
-Bacnet_Binary_Output,Description,CharacterString,O,TRUE,
-Bacnet_Binary_Output,Device_Type,,O,TRUE,
-Bacnet_Binary_Output,Status_Flags,BACnetStatusFlags,O,TRUE,
-Bacnet_Binary_Output,Event_State,BACnetEventState,O,TRUE,
-Bacnet_Binary_Output,Reliability,BACnetReliability,O,TRUE,
-Bacnet_Binary_Output,Out_Of_Service,BOOLEAN,O,TRUE,
-Bacnet_Binary_Output,Polarity,,O,TRUE,
-Bacnet_Binary_Output,Inactive_Text,,O,TRUE,
-Bacnet_Binary_Output,Active_Text,,O,TRUE,
-Bacnet_Binary_Output,Change_Of_State_Time,,O,TRUE,
-Bacnet_Binary_Output,Change_Of_State_Count,,O,TRUE,
-Bacnet_Binary_Output,Time_Of_State_Count_Reset,,O,TRUE,
-Bacnet_Binary_Output,Elapsed_Active_Time,,O,TRUE,
-Bacnet_Binary_Output,Time_Of_Active_Time_Reset,,O,TRUE,
-Bacnet_Binary_Output,Minimum_Off_Time,,O,TRUE,
-Bacnet_Binary_Output,Minimum_On_Time,,O,TRUE,
-Bacnet_Binary_Output,Priority_Array,,O,TRUE,
-Bacnet_Binary_Output,Relinquish_Default,,O,TRUE,
-Bacnet_Binary_Output,COV_Period,,O,TRUE,
-Bacnet_Binary_Output,COV_Min_Send_Time,,O,TRUE,
-Bacnet_Binary_Output,Time_Delay,,O,TRUE,
-Bacnet_Binary_Output,Notification_Class,Unsigned,O,TRUE,
-Bacnet_Binary_Output,Feedback_Value,,O,TRUE,
-Bacnet_Binary_Output,Event_Enable,BACnetEventTransitionBits,O,TRUE,
-Bacnet_Binary_Output,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
-Bacnet_Binary_Output,Notify_Type,BACnetNotifyType,O,TRUE,
-Bacnet_Binary_Output,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
-Bacnet_Binary_Output,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
-Bacnet_Binary_Output,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
-Bacnet_Binary_Output,Event_Detection_Enable,BOOLEAN,O,TRUE,
-Bacnet_Binary_Output,Time_Delay_Normal,,O,TRUE,
-Bacnet_Binary_Output,Event_Algorithm_Inhibit,,O,TRUE,
-Bacnet_Binary_Output,Event_Algorithm_Inhibit_Ref,,O,TRUE,
-Bacnet_Binary_Output,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
-Bacnet_Binary_Value,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
-Bacnet_Binary_Value,Object_Name,CharacterString,O,TRUE,
-Bacnet_Binary_Value,Object_Type,BACnetObjectType,O,TRUE,
-Bacnet_Binary_Value,Present_Value,REAL,O,TRUE,
-Bacnet_Binary_Value,Description,CharacterString,O,TRUE,
-Bacnet_Binary_Value,Status_Flags,BACnetStatusFlags,O,TRUE,
-Bacnet_Binary_Value,Event_State,BACnetEventState,O,TRUE,
-Bacnet_Binary_Value,Reliability,BACnetReliability,O,TRUE,
-Bacnet_Binary_Value,Out_Of_Service,BOOLEAN,O,TRUE,
-Bacnet_Binary_Value,Inactive_Text,,O,TRUE,
-Bacnet_Binary_Value,Active_Text,,O,TRUE,
-Bacnet_Binary_Value,Change_Of_State_Time,,O,TRUE,
-Bacnet_Binary_Value,Change_Of_State_Count,,O,TRUE,
-Bacnet_Binary_Value,Time_Of_State_Count_Reset,,O,TRUE,
-Bacnet_Binary_Value,Elapsed_Active_Time,,O,TRUE,
-Bacnet_Binary_Value,Time_Of_Active_Time_Reset,,O,TRUE,
-Bacnet_Binary_Value,Minimum_Off_Time,,O,TRUE,
-Bacnet_Binary_Value,Minimum_On_Time,,O,TRUE,
-Bacnet_Binary_Value,Priority_Array,,O,TRUE,
-Bacnet_Binary_Value,Relinquish_Default,,O,TRUE,
-Bacnet_Binary_Value,COV_Period,,O,TRUE,
-Bacnet_Binary_Value,COV_Min_Send_Time,,O,TRUE,
-Bacnet_Binary_Value,Time_Delay,,O,TRUE,
-Bacnet_Binary_Value,Notification_Class,Unsigned,O,TRUE,
-Bacnet_Binary_Value,Alarm_Value,,O,TRUE,
-Bacnet_Binary_Value,Event_Enable,BACnetEventTransitionBits,O,TRUE,
-Bacnet_Binary_Value,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
-Bacnet_Binary_Value,Notify_Type,BACnetNotifyType,O,TRUE,
-Bacnet_Binary_Value,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
-Bacnet_Binary_Value,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
-Bacnet_Binary_Value,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
-Bacnet_Binary_Value,Event_Detection_Enable,BOOLEAN,O,TRUE,
-Bacnet_Binary_Value,Time_Delay_Normal,,O,TRUE,
-Bacnet_Binary_Value,Event_Algorithm_Inhibit,,O,TRUE,
-Bacnet_Binary_Value,Event_Algorithm_Inhibit_Ref,,O,TRUE,
-Bacnet_Binary_Value,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
-Bacnet_Calendar,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
-Bacnet_Calendar,Object_Name,CharacterString,O,TRUE,
-Bacnet_Calendar,Object_Type,BACnetObjectType,O,TRUE,
-Bacnet_Calendar,Present_Value,REAL,O,TRUE,
-Bacnet_Calendar,Description,CharacterString,O,TRUE,
-Bacnet_Calendar,Date_List,,O,TRUE,
-Bacnet_Calendar,Time_To_Next_State,,O,TRUE,
-Bacnet_Calendar,Next_State,,O,TRUE,
-Bacnet_Calendar,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
-Device,Object_Identifier,BACnetObjectIdentifier,W,TRUE,
-Device,Object_Name,CharacterString,W,TRUE,
-Device,Object_Type,BACnetObjectType,R,TRUE,
-Device,System_Status,,R,TRUE,
-Device,Vendor_Name,,R,TRUE,
-Device,Vendor_Identifier,,R,TRUE,
-Device,Model_Name,,R,TRUE,
-Device,Firmware_Revision,,R,TRUE,
-Device,Application_Software_Version,,R,TRUE,
-Device,Location,,O,TRUE,
-Device,Description,CharacterString,O,TRUE,
-Device,Protocol_Version,,R,TRUE,
-Device,Protocol_Revision,,R,TRUE,
-Device,Protocol_Services_Supported,,R,TRUE,
-Device,Protocol_Object_Types_Supported,,R,TRUE,
-Device,Object_List,,R,TRUE,
-Device,Max_APDU_Length_Accepted,,R,TRUE,
-Device,Segmentation_Supported,,R,TRUE,
-Device,Max_Segments_Accepted,,O,TRUE,
-Device,Local_Date,,O,TRUE,
-Device,Local_Time,,O,TRUE,
-Device,UTC_Offset,,O,TRUE,
-Device,Daylight_Savings_Status,,O,TRUE,
-Device,Apdu_Segment_Timeout,,O,TRUE,
-Device,APDU_Timeout,,W,TRUE,
-Device,Number_Of_APDU_Retries,,W,TRUE,
-Device,Time_Synchronization_Recipients,,O,TRUE,
-Device,Device_Address_Binding,,R,TRUE,
-Device,Database_Revision,,R,TRUE,
-Device,Configuration_Files,,O,TRUE,
-Device,Last_Restore_Time,,O,TRUE,
-Device,Backup_Failure_Timeout,,O,TRUE,
-Device,Backup_Preparation_Time,,O,TRUE,
-Device,Restore_Preparation_Time,,O,TRUE,
-Device,Restore_Completion_Time,,O,TRUE,
-Device,Backup_And_Restore_State,,O,TRUE,
-Device,Active_COV_Subscriptions,,O,TRUE,
-Device,Last_Restart_Reason,,O,TRUE,
-Device,Time_Of_Device_Restart,,O,TRUE,
-Device,Restart_Notification_Recipients,,O,TRUE,
-Device,Utc_Time_Synchronization_Recipients,,O,TRUE,
-Device,Max_Master,,O,TRUE,
-Device,Max_Info_Frames,,O,TRUE,
-Device,Time_Synchronization_Interval,,O,TRUE,
-Device,Align_Intervals,,O,TRUE,
-Device,Interval_Offset,,O,TRUE,
-Device,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE,
-Event_Enrollment,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
-Event_Enrollment,Object_Name,CharacterString,O,TRUE,
-Event_Enrollment,Object_Type,BACnetObjectType,O,TRUE,
-Event_Enrollment,Description,CharacterString,O,TRUE,
-Event_Enrollment,Event_Type,,O,TRUE,
-Event_Enrollment,Notify_Type,BACnetNotifyType,O,TRUE,
-Event_Enrollment,Event_Parameters,,O,TRUE,
-Event_Enrollment,Object_Property_Reference,,O,TRUE,
-Event_Enrollment,Event_State,BACnetEventState,O,TRUE,
-Event_Enrollment,Event_Enable,BACnetEventTransitionBits,O,TRUE,
-Event_Enrollment,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
-Event_Enrollment,Notification_Class,Unsigned,O,TRUE,
-Event_Enrollment,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
-Event_Enrollment,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
-Event_Enrollment,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
-Event_Enrollment,Event_Detection_Enable,BOOLEAN,O,TRUE,
-Event_Enrollment,Time_Delay_Normal,,O,TRUE,
-Event_Enrollment,Status_Flags,BACnetStatusFlags,O,TRUE,
-Event_Enrollment,Reliability,BACnetReliability,O,TRUE,
-Event_Enrollment,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
-Bacnet_File,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
-Bacnet_File,Object_Name,CharacterString,O,TRUE,
-Bacnet_File,Object_Type,BACnetObjectType,O,TRUE,
-Bacnet_File,Description,CharacterString,O,TRUE,
-Bacnet_File,File_Type,,O,TRUE,
-Bacnet_File,File_Size,,O,TRUE,
-Bacnet_File,Modification_Date,,O,TRUE,
-Bacnet_File,Archive,,O,TRUE,
-Bacnet_File,Read_Only,,O,TRUE,
-Bacnet_File,File_Access_Method,,O,TRUE,
-Bacnet_File,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
-Bacnet_Loop,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
-Bacnet_Loop,Object_Name,CharacterString,O,TRUE,
-Bacnet_Loop,Object_Type,BACnetObjectType,O,TRUE,
-Bacnet_Loop,Present_Value,REAL,O,TRUE,
-Bacnet_Loop,Description,CharacterString,O,TRUE,
-Bacnet_Loop,Status_Flags,BACnetStatusFlags,O,TRUE,
-Bacnet_Loop,Event_State,BACnetEventState,O,TRUE,
-Bacnet_Loop,Reliability,BACnetReliability,O,TRUE,
-Bacnet_Loop,Out_Of_Service,BOOLEAN,O,TRUE,
-Bacnet_Loop,Update_Interval,,O,TRUE,
-Bacnet_Loop,Output_Units,,O,TRUE,
-Bacnet_Loop,Manipulated_Variable_Reference,,O,TRUE,
-Bacnet_Loop,Controlled_Variable_Reference,,O,TRUE,
-Bacnet_Loop,Controlled_Variable_Value,,O,TRUE,
-Bacnet_Loop,Controlled_Variable_Units,,O,TRUE,
-Bacnet_Loop,Setpoint_Reference,,O,TRUE,
-Bacnet_Loop,Setpoint,,O,TRUE,
-Bacnet_Loop,Action,,O,TRUE,
-Bacnet_Loop,Proportional_Constant,,O,TRUE,
-Bacnet_Loop,Proportional_Constant_Units,,O,TRUE,
-Bacnet_Loop,Integral_Constant,,O,TRUE,
-Bacnet_Loop,Integral_Constant_Units,,O,TRUE,
-Bacnet_Loop,Derivative_Constant,,O,TRUE,
-Bacnet_Loop,Derivative_Constant_Units,,O,TRUE,
-Bacnet_Loop,Bias,,O,TRUE,
-Bacnet_Loop,Maximum_Output,,O,TRUE,
-Bacnet_Loop,Minimum_Output,,O,TRUE,
-Bacnet_Loop,Priority_For_Writing,Unsigned(1..16),O,TRUE,
-Bacnet_Loop,LoopDeadband,,O,TRUE,
-Bacnet_Loop,Saturation_Time,,O,TRUE,
-Bacnet_Loop,COV_Increment,REAL,O,TRUE,
-Bacnet_Loop,COV_Period,,O,TRUE,
-Bacnet_Loop,COV_Min_Send_Time,,O,TRUE,
-Bacnet_Loop,Ramp_Time,,O,TRUE,
-Bacnet_Loop,Saturation_Time_Low_Limit_Enable,,O,TRUE,
-Bacnet_Loop,Saturation_Time_High_Limit_Enable,,O,TRUE,
-Bacnet_Loop,Time_Delay,,O,TRUE,
-Bacnet_Loop,Notification_Class,Unsigned,O,TRUE,
-Bacnet_Loop,Error_Limit,,O,TRUE,
-Bacnet_Loop,Deadband,,O,TRUE,
-Bacnet_Loop,Event_Enable,BACnetEventTransitionBits,O,TRUE,
-Bacnet_Loop,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
-Bacnet_Loop,Notify_Type,BACnetNotifyType,O,TRUE,
-Bacnet_Loop,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
-Bacnet_Loop,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
-Bacnet_Loop,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
-Bacnet_Loop,Event_Detection_Enable,BOOLEAN,O,TRUE,
-Bacnet_Loop,Time_Delay_Normal,,O,TRUE,
-Bacnet_Loop,Event_Algorithm_Inhibit,,O,TRUE,
-Bacnet_Loop,Event_Algorithm_Inhibit_Ref,,O,TRUE,
-Bacnet_Loop,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
-Bacnet_Multi-state_Input,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
-Bacnet_Multi-state_Input,Object_Name,CharacterString,O,TRUE,
-Bacnet_Multi-state_Input,Object_Type,BACnetObjectType,O,TRUE,
-Bacnet_Multi-state_Input,Present_Value,REAL,O,TRUE,
-Bacnet_Multi-state_Input,Description,CharacterString,O,TRUE,
-Bacnet_Multi-state_Input,Device_Type,,O,TRUE,
-Bacnet_Multi-state_Input,Status_Flags,BACnetStatusFlags,O,TRUE,
-Bacnet_Multi-state_Input,Event_State,BACnetEventState,O,TRUE,
-Bacnet_Multi-state_Input,Reliability,BACnetReliability,O,TRUE,
-Bacnet_Multi-state_Input,Out_Of_Service,BOOLEAN,O,TRUE,
-Bacnet_Multi-state_Input,Number_of_States,,O,TRUE,
-Bacnet_Multi-state_Input,State_Text,,O,TRUE,
-Bacnet_Multi-state_Input,COV_Period,,O,TRUE,
-Bacnet_Multi-state_Input,COV_Min_Send_Time,,O,TRUE,
-Bacnet_Multi-state_Input,Time_Delay,,O,TRUE,
-Bacnet_Multi-state_Input,Notification_Class,Unsigned,O,TRUE,
-Bacnet_Multi-state_Input,Alarm_Values,,O,TRUE,
-Bacnet_Multi-state_Input,Fault_Values,,O,TRUE,
-Bacnet_Multi-state_Input,Event_Enable,BACnetEventTransitionBits,O,TRUE,
-Bacnet_Multi-state_Input,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
-Bacnet_Multi-state_Input,Notify_Type,BACnetNotifyType,O,TRUE,
-Bacnet_Multi-state_Input,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
-Bacnet_Multi-state_Input,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
-Bacnet_Multi-state_Input,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
-Bacnet_Multi-state_Input,Event_Detection_Enable,BOOLEAN,O,TRUE,
-Bacnet_Multi-state_Input,Time_Delay_Normal,,O,TRUE,
-Bacnet_Multi-state_Input,Event_Algorithm_Inhibit,,O,TRUE,
-Bacnet_Multi-state_Input,Event_Algorithm_Inhibit_Ref,,O,TRUE,
-Bacnet_Multi-state_Input,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
-Bacnet_Multi-state_Value,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
-Bacnet_Multi-state_Value,Object_Name,CharacterString,O,TRUE,
-Bacnet_Multi-state_Value,Object_Type,BACnetObjectType,O,TRUE,
-Bacnet_Multi-state_Value,Present_Value,REAL,O,TRUE,
-Bacnet_Multi-state_Value,Description,CharacterString,O,TRUE,
-Bacnet_Multi-state_Value,Status_Flags,BACnetStatusFlags,O,TRUE,
-Bacnet_Multi-state_Value,Event_State,BACnetEventState,O,TRUE,
-Bacnet_Multi-state_Value,Reliability,BACnetReliability,O,TRUE,
-Bacnet_Multi-state_Value,Out_Of_Service,BOOLEAN,O,TRUE,
-Bacnet_Multi-state_Value,Number_of_States,,O,TRUE,
-Bacnet_Multi-state_Value,State_Text,,O,TRUE,
-Bacnet_Multi-state_Value,Priority_Array,,O,TRUE,
-Bacnet_Multi-state_Value,Relinquish_Default,,O,TRUE,
-Bacnet_Multi-state_Value,COV_Period,,O,TRUE,
-Bacnet_Multi-state_Value,COV_Min_Send_Time,,O,TRUE,
-Bacnet_Multi-state_Value,Time_Delay,,O,TRUE,
-Bacnet_Multi-state_Value,Notification_Class,Unsigned,O,TRUE,
-Bacnet_Multi-state_Value,Alarm_Values,,O,TRUE,
-Bacnet_Multi-state_Value,Fault_Values,,O,TRUE,
-Bacnet_Multi-state_Value,Event_Enable,BACnetEventTransitionBits,O,TRUE,
-Bacnet_Multi-state_Value,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
-Bacnet_Multi-state_Value,Notify_Type,BACnetNotifyType,O,TRUE,
-Bacnet_Multi-state_Value,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
-Bacnet_Multi-state_Value,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
-Bacnet_Multi-state_Value,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
-Bacnet_Multi-state_Value,Event_Detection_Enable,BOOLEAN,O,TRUE,
-Bacnet_Multi-state_Value,Time_Delay_Normal,,O,TRUE,
-Bacnet_Multi-state_Value,Event_Algorithm_Inhibit,,O,TRUE,
-Bacnet_Multi-state_Value,Event_Algorithm_Inhibit_Ref,,O,TRUE,
-Bacnet_Multi-state_Value,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
-Bacnet_Program,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
-Bacnet_Program,Object_Name,CharacterString,O,TRUE,
-Bacnet_Program,Object_Type,BACnetObjectType,O,TRUE,
-Bacnet_Program,Description,CharacterString,O,TRUE,
-Bacnet_Program,Program_State,,O,TRUE,
-Bacnet_Program,Program_Change,,O,TRUE,
-Bacnet_Program,Description_Of_Halt,,O,TRUE,
-Bacnet_Program,Reason_For_Halt,,O,TRUE,
-Bacnet_Program,Status_Flags,BACnetStatusFlags,O,TRUE,
-Bacnet_Program,Reliability,BACnetReliability,O,TRUE,
-Bacnet_Program,Out_Of_Service,BOOLEAN,O,TRUE,
-Bacnet_Program,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
-Bacnet_Notification,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
-Bacnet_Notification,Object_Name,CharacterString,O,TRUE,
-Bacnet_Notification,Object_Type,BACnetObjectType,O,TRUE,
-Bacnet_Notification,Description,CharacterString,O,TRUE,
-Bacnet_Notification,Notification_Class,Unsigned,O,TRUE,
-Bacnet_Notification,Priority,,O,TRUE,
-Bacnet_Notification,Ack_Required,,O,TRUE,
-Bacnet_Notification,Recipient_List,,O,TRUE,
-Bacnet_Notification,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
-Bacnet_Schedule,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
-Bacnet_Schedule,Object_Name,CharacterString,O,TRUE,
-Bacnet_Schedule,Object_Type,BACnetObjectType,O,TRUE,
-Bacnet_Schedule,Description,CharacterString,O,TRUE,
-Bacnet_Schedule,Present_Value,REAL,O,TRUE,
-Bacnet_Schedule,Effective_Period,,O,TRUE,
-Bacnet_Schedule,Weekly_Schedule,,O,TRUE,
-Bacnet_Schedule,Exception_Schedule,,O,TRUE,
-Bacnet_Schedule,Schedule_Default,,O,TRUE,
-Bacnet_Schedule,List_Of_Object_Property_References,,O,TRUE,
-Bacnet_Schedule,Priority_For_Writing,Unsigned(1..16),O,TRUE,
-Bacnet_Schedule,Status_Flags,BACnetStatusFlags,O,TRUE,
-Bacnet_Schedule,Reliability,BACnetReliability,O,TRUE,
-Bacnet_Schedule,Out_Of_Service,BOOLEAN,O,TRUE,
-Bacnet_Schedule,Time_To_Next_State,,O,TRUE,
-Bacnet_Schedule,Next_State,,O,TRUE,
-Bacnet_Schedule,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
-Bacnet_Trend_Log,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
-Bacnet_Trend_Log,Object_Name,CharacterString,O,TRUE,
-Bacnet_Trend_Log,Object_Type,BACnetObjectType,O,TRUE,
-Bacnet_Trend_Log,Description,CharacterString,O,TRUE,
-Bacnet_Trend_Log,Enable,,O,TRUE,
-Bacnet_Trend_Log,Start_Time,,O,TRUE,
-Bacnet_Trend_Log,Stop_Time,,O,TRUE,
-Bacnet_Trend_Log,Log_Device_Object_Property,,O,TRUE,
-Bacnet_Trend_Log,Log_Interval,,O,TRUE,
-Bacnet_Trend_Log,Cov_Resubscription_Interval,,O,TRUE,
-Bacnet_Trend_Log,Client_Cov_Increment,,O,TRUE,
-Bacnet_Trend_Log,Stop_When_Full,,O,TRUE,
-Bacnet_Trend_Log,Buffer_Size,,O,TRUE,
-Bacnet_Trend_Log,Log_Buffer,,O,TRUE,
-Bacnet_Trend_Log,Record_Count,,O,TRUE,
-Bacnet_Trend_Log,Total_Record_Count,,O,TRUE,
-Bacnet_Trend_Log,Logging_Type,,O,TRUE,
-Bacnet_Trend_Log,Align_Intervals,,O,TRUE,
-Bacnet_Trend_Log,Interval_Offset,,O,TRUE,
-Bacnet_Trend_Log,Trigger,,O,TRUE,
-Bacnet_Trend_Log,Status_Flags,BACnetStatusFlags,O,TRUE,
-Bacnet_Trend_Log,Reliability,BACnetReliability,O,TRUE,
-Bacnet_Trend_Log,Notification_Threshold,,O,TRUE,
-Bacnet_Trend_Log,Records_Since_Notification,,O,TRUE,
-Bacnet_Trend_Log,Last_Notify_Record,,O,TRUE,
-Bacnet_Trend_Log,Event_State,BACnetEventState,O,TRUE,
-Bacnet_Trend_Log,Notification_Class,Unsigned,O,TRUE,
-Bacnet_Trend_Log,Event_Enable,BACnetEventTransitionBits,O,TRUE,
-Bacnet_Trend_Log,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
-Bacnet_Trend_Log,Notify_Type,BACnetNotifyType,O,TRUE,
-Bacnet_Trend_Log,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
-Bacnet_Trend_Log,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
-Bacnet_Trend_Log,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
-Bacnet_Trend_Log,Event_Detection_Enable,BOOLEAN,O,TRUE,
-Bacnet_Trend_Log,Event_Algorithm_Inhibit,,O,TRUE,
-Bacnet_Trend_Log,Event_Algorithm_Inhibit_Ref,,O,TRUE,
+Bacnet_Object_Type,Bacnet_Object_Property,Property_Datatype,Conformance_Code,Supported,
+Bacnet_Analogue_Input,Object_Identifier,BACnetObjectIdentifier,R,TRUE,
+Bacnet_Analogue_Input,Object_Name,CharacterString,W,TRUE,
+Bacnet_Analogue_Input,Object_Type,BACnetObjectType,R,TRUE,
+Bacnet_Analogue_Input,Present_Value,REAL,R,TRUE,
+ ,Description,CharacterString,O,TRUE,
+Bacnet_Analogue_Input,Device_Type,,O,TRUE,
+Bacnet_Analogue_Input,Status_Flags,BACnetStatusFlags,R,TRUE,
+Bacnet_Analogue_Input,Event_State,BACnetEventState,R,TRUE,
+Bacnet_Analogue_Input,Reliability,BACnetReliability,O,TRUE,
+Bacnet_Analogue_Input,Out_Of_Service,BOOLEAN,W,TRUE,
+Bacnet_Analogue_Input,Update_Interval,,O,TRUE,
+Bacnet_Analogue_Input,Units,,R,TRUE,
+Bacnet_Analogue_Input,Min_Pres_Value,REAL,O,TRUE,
+Bacnet_Analogue_Input,Max_Pres_Value,REAL,O,TRUE,
+Bacnet_Analogue_Input,Resolution,,O,TRUE,
+Bacnet_Analogue_Input,COV_Increment,REAL,O,TRUE,
+Bacnet_Analogue_Input,COV_Period,,O,TRUE,
+Bacnet_Analogue_Input,COV_Min_Send_Time,,O,TRUE,
+Bacnet_Analogue_Input,Time_Delay,,O,TRUE,
+Bacnet_Analogue_Input,Notification_Class,Unsigned,O,TRUE,
+Bacnet_Analogue_Input,High_Limit,,O,TRUE,
+Bacnet_Analogue_Input,Low_Limit,,O,TRUE,
+Bacnet_Analogue_Input,Deadband,,O,TRUE,
+Bacnet_Analogue_Input,Limit_Enable,,O,TRUE,
+Bacnet_Analogue_Input,Event_Enable,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Analogue_Input,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Analogue_Input,Notify_Type,BACnetNotifyType,O,TRUE,
+Bacnet_Analogue_Input,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
+Bacnet_Analogue_Input,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Analogue_Input,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Analogue_Input,Event_Detection_Enable,BOOLEAN,O,TRUE,
+Bacnet_Analogue_Input,Time_Delay_Normal,,O,TRUE,
+Bacnet_Analogue_Input,Event_Algorithm_Inhibit,,O,TRUE,
+Bacnet_Analogue_Input,Event_Algorithm_Inhibit_Ref,,O,TRUE,
+Bacnet_Analogue_Input,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE,
+Bacnet_Analogue_Output,Object_Identifier,BACnetObjectIdentifier,R,TRUE,
+Bacnet_Analogue_Output,Object_Name,CharacterString,W,TRUE,
+Bacnet_Analogue_Output,Object_Type,BACnetObjectType,R,TRUE,
+Bacnet_Analogue_Output,Present_Value,REAL,W,TRUE,
+Bacnet_Analogue_Output,Description,CharacterString,O,TRUE,
+Bacnet_Analogue_Output,Device_Type,,O,TRUE,
+Bacnet_Analogue_Output,Status_Flags,BACnetStatusFlags,R,TRUE,
+Bacnet_Analogue_Output,Event_State,BACnetEventState,R,TRUE,
+Bacnet_Analogue_Output,Reliability,BACnetReliability,O,TRUE,
+Bacnet_Analogue_Output,Out_Of_Service,BOOLEAN,W,TRUE,
+Bacnet_Analogue_Output,Units,,R,TRUE,
+Bacnet_Analogue_Output,Min_Present_Value,,O,TRUE,
+Bacnet_Analogue_Output,Max_Present_Value,,O,TRUE,
+Bacnet_Analogue_Output,Resolution,,O,TRUE,
+Bacnet_Analogue_Output,Priority_Array,,R,TRUE,
+Bacnet_Analogue_Output,Relinquish_Default,,W,TRUE,
+Bacnet_Analogue_Output,COV_Increment,REAL,O,TRUE,
+Bacnet_Analogue_Output,COV_Period,,O,TRUE,
+Bacnet_Analogue_Output,COV_Min_Send_Time,,O,TRUE,
+Bacnet_Analogue_Output,Time_Delay,,O,TRUE,
+Bacnet_Analogue_Output,Notification_Class,Unsigned,O,TRUE,
+Bacnet_Analogue_Output,High_Limit,,O,TRUE,
+Bacnet_Analogue_Output,Low_Limit,,O,TRUE,
+Bacnet_Analogue_Output,Deadband,,O,TRUE,
+Bacnet_Analogue_Output,Limit_Enable,,O,TRUE,
+Bacnet_Analogue_Output,Event_Enable,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Analogue_Output,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Analogue_Output,Notify_Type,BACnetNotifyType,O,TRUE,
+Bacnet_Analogue_Output,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
+Bacnet_Analogue_Output,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Analogue_Output,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Analogue_Output,Event_Detection_Enable,BOOLEAN,O,TRUE,
+Bacnet_Analogue_Output,Time_Delay_Normal,,O,TRUE,
+Bacnet_Analogue_Output,Event_Algorithm_Inhibit,,O,TRUE,
+Bacnet_Analogue_Output,Event_Algorithm_Inhibit_Ref,,O,TRUE,
+Bacnet_Analogue_Output,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE,
+Bacnet_Analogue_Value,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Bacnet_Analogue_Value,Object_Name,CharacterString,O,TRUE,
+Bacnet_Analogue_Value,Object_Type,BACnetObjectType,O,TRUE,
+Bacnet_Analogue_Value,Present_Value,REAL,O,TRUE,
+Bacnet_Analogue_Value,Description,CharacterString,O,TRUE,
+Bacnet_Analogue_Value,Status_Flags,BACnetStatusFlags,O,TRUE,
+Bacnet_Analogue_Value,Event_State,BACnetEventState,O,TRUE,
+Bacnet_Analogue_Value,Reliability,BACnetReliability,O,TRUE,
+Bacnet_Analogue_Value,Out_Of_Service,BOOLEAN,O,TRUE,
+Bacnet_Analogue_Value,Units,,O,TRUE,
+Bacnet_Analogue_Value,Priority_Array,,O,TRUE,
+Bacnet_Analogue_Value,Relinquish_Default,,O,TRUE,
+Bacnet_Analogue_Value,Min_Present_Value,,O,TRUE,
+Bacnet_Analogue_Value,Max_Present_Value,,O,TRUE,
+Bacnet_Analogue_Value,COV_Increment,REAL,O,TRUE,
+Bacnet_Analogue_Value,COV_Period,,O,TRUE,
+Bacnet_Analogue_Value,COV_Min_Send_Time,,O,TRUE,
+Bacnet_Analogue_Value,Time_Delay,,O,TRUE,
+Bacnet_Analogue_Value,Notification_Class,Unsigned,O,TRUE,
+Bacnet_Analogue_Value,High_Limit,,O,TRUE,
+Bacnet_Analogue_Value,Low_Limit,,O,TRUE,
+Bacnet_Analogue_Value,Deadband,,O,TRUE,
+Bacnet_Analogue_Value,Limit_Enable,,O,TRUE,
+Bacnet_Analogue_Value,Event_Enable,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Analogue_Value,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Analogue_Value,Notify_Type,BACnetNotifyType,O,TRUE,
+Bacnet_Analogue_Value,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
+Bacnet_Analogue_Value,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Analogue_Value,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Analogue_Value,Event_Detection_Enable,BOOLEAN,O,TRUE,
+Bacnet_Analogue_Value,Time_Delay_Normal,,O,TRUE,
+Bacnet_Analogue_Value,Event_Algorithm_Inhibit,,O,TRUE,
+Bacnet_Analogue_Value,Event_Algorithm_Inhibit_Ref,,O,TRUE,
+Bacnet_Analogue_Value,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE,
+Bacnet_Binary_Input,Object_Identifier,BACnetObjectIdentifier,R,TRUE,
+Bacnet_Binary_Input,Object_Name,CharacterString,W,TRUE,
+Bacnet_Binary_Input,Object_Type,BACnetObjectType,R,TRUE,
+Bacnet_Binary_Input,Present_Value,REAL,R,TRUE,
+Bacnet_Binary_Input,Description,CharacterString,O,TRUE,
+Bacnet_Binary_Input,Device_Type,,O,TRUE,
+Bacnet_Binary_Input,Status_Flags,BACnetStatusFlags,R,TRUE,
+Bacnet_Binary_Input,Event_State,BACnetEventState,R,TRUE,
+Bacnet_Binary_Input,Reliability,BACnetReliability,O,TRUE,
+Bacnet_Binary_Input,Out_Of_Service,BOOLEAN,W,TRUE,
+Bacnet_Binary_Input,Polarity,,R,TRUE,
+Bacnet_Binary_Input,Inactive_Text,,O,TRUE,
+Bacnet_Binary_Input,Active_Text,,O,TRUE,
+Bacnet_Binary_Input,Change_Of_State_Time,,O,TRUE,
+Bacnet_Binary_Input,Change_Of_State_Count,,O,TRUE,
+Bacnet_Binary_Input,Time_Of_State_Count_Reset,,O,TRUE,
+Bacnet_Binary_Input,Elapsed_Active_Time,,O,TRUE,
+Bacnet_Binary_Input,Time_Of_Active_Time_Reset,,O,TRUE,
+Bacnet_Binary_Input,COV_Period,,O,TRUE,
+Bacnet_Binary_Input,COV_Min_Send_Time,,O,TRUE,
+Bacnet_Binary_Input,Time_Delay,,O,TRUE,
+Bacnet_Binary_Input,Notification_Class,Unsigned,O,TRUE,
+Bacnet_Binary_Input,Alarm_Value,,O,TRUE,
+Bacnet_Binary_Input,Event_Enable,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Binary_Input,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Binary_Input,Notify_Type,BACnetNotifyType,O,TRUE,
+Bacnet_Binary_Input,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
+Bacnet_Binary_Input,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Binary_Input,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Binary_Input,Event_Detection_Enable,BOOLEAN,O,TRUE,
+Bacnet_Binary_Input,Time_Delay_Normal,,O,TRUE,
+Bacnet_Binary_Input,Event_Algorithm_Inhibit,,O,TRUE,
+Bacnet_Binary_Input,Event_Algorithm_Inhibit_Ref,,O,TRUE,
+Bacnet_Binary_Input,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE,
+Bacnet_Binary_Output,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Bacnet_Binary_Output,Object_Name,CharacterString,O,TRUE,
+Bacnet_Binary_Output,Object_Type,BACnetObjectType,O,TRUE,
+Bacnet_Binary_Output,Present_Value,REAL,O,TRUE,
+Bacnet_Binary_Output,Description,CharacterString,O,TRUE,
+Bacnet_Binary_Output,Device_Type,,O,TRUE,
+Bacnet_Binary_Output,Status_Flags,BACnetStatusFlags,O,TRUE,
+Bacnet_Binary_Output,Event_State,BACnetEventState,O,TRUE,
+Bacnet_Binary_Output,Reliability,BACnetReliability,O,TRUE,
+Bacnet_Binary_Output,Out_Of_Service,BOOLEAN,O,TRUE,
+Bacnet_Binary_Output,Polarity,,O,TRUE,
+Bacnet_Binary_Output,Inactive_Text,,O,TRUE,
+Bacnet_Binary_Output,Active_Text,,O,TRUE,
+Bacnet_Binary_Output,Change_Of_State_Time,,O,TRUE,
+Bacnet_Binary_Output,Change_Of_State_Count,,O,TRUE,
+Bacnet_Binary_Output,Time_Of_State_Count_Reset,,O,TRUE,
+Bacnet_Binary_Output,Elapsed_Active_Time,,O,TRUE,
+Bacnet_Binary_Output,Time_Of_Active_Time_Reset,,O,TRUE,
+Bacnet_Binary_Output,Minimum_Off_Time,,O,TRUE,
+Bacnet_Binary_Output,Minimum_On_Time,,O,TRUE,
+Bacnet_Binary_Output,Priority_Array,,O,TRUE,
+Bacnet_Binary_Output,Relinquish_Default,,O,TRUE,
+Bacnet_Binary_Output,COV_Period,,O,TRUE,
+Bacnet_Binary_Output,COV_Min_Send_Time,,O,TRUE,
+Bacnet_Binary_Output,Time_Delay,,O,TRUE,
+Bacnet_Binary_Output,Notification_Class,Unsigned,O,TRUE,
+Bacnet_Binary_Output,Feedback_Value,,O,TRUE,
+Bacnet_Binary_Output,Event_Enable,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Binary_Output,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Binary_Output,Notify_Type,BACnetNotifyType,O,TRUE,
+Bacnet_Binary_Output,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
+Bacnet_Binary_Output,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Binary_Output,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Binary_Output,Event_Detection_Enable,BOOLEAN,O,TRUE,
+Bacnet_Binary_Output,Time_Delay_Normal,,O,TRUE,
+Bacnet_Binary_Output,Event_Algorithm_Inhibit,,O,TRUE,
+Bacnet_Binary_Output,Event_Algorithm_Inhibit_Ref,,O,TRUE,
+Bacnet_Binary_Output,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
+Bacnet_Binary_Value,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Bacnet_Binary_Value,Object_Name,CharacterString,O,TRUE,
+Bacnet_Binary_Value,Object_Type,BACnetObjectType,O,TRUE,
+Bacnet_Binary_Value,Present_Value,REAL,O,TRUE,
+Bacnet_Binary_Value,Description,CharacterString,O,TRUE,
+Bacnet_Binary_Value,Status_Flags,BACnetStatusFlags,O,TRUE,
+Bacnet_Binary_Value,Event_State,BACnetEventState,O,TRUE,
+Bacnet_Binary_Value,Reliability,BACnetReliability,O,TRUE,
+Bacnet_Binary_Value,Out_Of_Service,BOOLEAN,O,TRUE,
+Bacnet_Binary_Value,Inactive_Text,,O,TRUE,
+Bacnet_Binary_Value,Active_Text,,O,TRUE,
+Bacnet_Binary_Value,Change_Of_State_Time,,O,TRUE,
+Bacnet_Binary_Value,Change_Of_State_Count,,O,TRUE,
+Bacnet_Binary_Value,Time_Of_State_Count_Reset,,O,TRUE,
+Bacnet_Binary_Value,Elapsed_Active_Time,,O,TRUE,
+Bacnet_Binary_Value,Time_Of_Active_Time_Reset,,O,TRUE,
+Bacnet_Binary_Value,Minimum_Off_Time,,O,TRUE,
+Bacnet_Binary_Value,Minimum_On_Time,,O,TRUE,
+Bacnet_Binary_Value,Priority_Array,,O,TRUE,
+Bacnet_Binary_Value,Relinquish_Default,,O,TRUE,
+Bacnet_Binary_Value,COV_Period,,O,TRUE,
+Bacnet_Binary_Value,COV_Min_Send_Time,,O,TRUE,
+Bacnet_Binary_Value,Time_Delay,,O,TRUE,
+Bacnet_Binary_Value,Notification_Class,Unsigned,O,TRUE,
+Bacnet_Binary_Value,Alarm_Value,,O,TRUE,
+Bacnet_Binary_Value,Event_Enable,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Binary_Value,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Binary_Value,Notify_Type,BACnetNotifyType,O,TRUE,
+Bacnet_Binary_Value,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
+Bacnet_Binary_Value,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Binary_Value,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Binary_Value,Event_Detection_Enable,BOOLEAN,O,TRUE,
+Bacnet_Binary_Value,Time_Delay_Normal,,O,TRUE,
+Bacnet_Binary_Value,Event_Algorithm_Inhibit,,O,TRUE,
+Bacnet_Binary_Value,Event_Algorithm_Inhibit_Ref,,O,TRUE,
+Bacnet_Binary_Value,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
+Bacnet_Calendar,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Bacnet_Calendar,Object_Name,CharacterString,O,TRUE,
+Bacnet_Calendar,Object_Type,BACnetObjectType,O,TRUE,
+Bacnet_Calendar,Present_Value,REAL,O,TRUE,
+Bacnet_Calendar,Description,CharacterString,O,TRUE,
+Bacnet_Calendar,Date_List,,O,TRUE,
+Bacnet_Calendar,Time_To_Next_State,,O,TRUE,
+Bacnet_Calendar,Next_State,,O,TRUE,
+Bacnet_Calendar,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
+Device,Object_Identifier,BACnetObjectIdentifier,W,TRUE,
+Device,Object_Name,CharacterString,W,TRUE,
+Device,Object_Type,BACnetObjectType,R,TRUE,
+Device,System_Status,,R,TRUE,
+Device,Vendor_Name,,R,TRUE,
+Device,Vendor_Identifier,,R,TRUE,
+Device,Model_Name,,R,TRUE,
+Device,Firmware_Revision,,R,TRUE,
+Device,Application_Software_Version,,R,TRUE,
+Device,Location,,O,TRUE,
+Device,Description,CharacterString,O,TRUE,
+Device,Protocol_Version,,R,TRUE,
+Device,Protocol_Revision,,R,TRUE,
+Device,Protocol_Services_Supported,,R,TRUE,
+Device,Protocol_Object_Types_Supported,,R,TRUE,
+Device,Object_List,,R,TRUE,
+Device,Max_APDU_Length_Accepted,,R,TRUE,
+Device,Segmentation_Supported,,R,TRUE,
+Device,Max_Segments_Accepted,,O,TRUE,
+Device,Local_Date,,O,TRUE,
+Device,Local_Time,,O,TRUE,
+Device,UTC_Offset,,O,TRUE,
+Device,Daylight_Savings_Status,,O,TRUE,
+Device,Apdu_Segment_Timeout,,O,TRUE,
+Device,APDU_Timeout,,W,TRUE,
+Device,Number_Of_APDU_Retries,,W,TRUE,
+Device,Time_Synchronization_Recipients,,O,TRUE,
+Device,Device_Address_Binding,,R,TRUE,
+Device,Database_Revision,,R,TRUE,
+Device,Configuration_Files,,O,TRUE,
+Device,Last_Restore_Time,,O,TRUE,
+Device,Backup_Failure_Timeout,,O,TRUE,
+Device,Backup_Preparation_Time,,O,TRUE,
+Device,Restore_Preparation_Time,,O,TRUE,
+Device,Restore_Completion_Time,,O,TRUE,
+Device,Backup_And_Restore_State,,O,TRUE,
+Device,Active_COV_Subscriptions,,O,TRUE,
+Device,Last_Restart_Reason,,O,TRUE,
+Device,Time_Of_Device_Restart,,O,TRUE,
+Device,Restart_Notification_Recipients,,O,TRUE,
+Device,Utc_Time_Synchronization_Recipients,,O,TRUE,
+Device,Max_Master,,O,TRUE,
+Device,Max_Info_Frames,,O,TRUE,
+Device,Time_Synchronization_Interval,,O,TRUE,
+Device,Align_Intervals,,O,TRUE,
+Device,Interval_Offset,,O,TRUE,
+Device,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE,
+Event_Enrollment,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Event_Enrollment,Object_Name,CharacterString,O,TRUE,
+Event_Enrollment,Object_Type,BACnetObjectType,O,TRUE,
+Event_Enrollment,Description,CharacterString,O,TRUE,
+Event_Enrollment,Event_Type,,O,TRUE,
+Event_Enrollment,Notify_Type,BACnetNotifyType,O,TRUE,
+Event_Enrollment,Event_Parameters,,O,TRUE,
+Event_Enrollment,Object_Property_Reference,,O,TRUE,
+Event_Enrollment,Event_State,BACnetEventState,O,TRUE,
+Event_Enrollment,Event_Enable,BACnetEventTransitionBits,O,TRUE,
+Event_Enrollment,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
+Event_Enrollment,Notification_Class,Unsigned,O,TRUE,
+Event_Enrollment,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
+Event_Enrollment,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
+Event_Enrollment,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
+Event_Enrollment,Event_Detection_Enable,BOOLEAN,O,TRUE,
+Event_Enrollment,Time_Delay_Normal,,O,TRUE,
+Event_Enrollment,Status_Flags,BACnetStatusFlags,O,TRUE,
+Event_Enrollment,Reliability,BACnetReliability,O,TRUE,
+Event_Enrollment,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
+Bacnet_File,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Bacnet_File,Object_Name,CharacterString,O,TRUE,
+Bacnet_File,Object_Type,BACnetObjectType,O,TRUE,
+Bacnet_File,Description,CharacterString,O,TRUE,
+Bacnet_File,File_Type,,O,TRUE,
+Bacnet_File,File_Size,,O,TRUE,
+Bacnet_File,Modification_Date,,O,TRUE,
+Bacnet_File,Archive,,O,TRUE,
+Bacnet_File,Read_Only,,O,TRUE,
+Bacnet_File,File_Access_Method,,O,TRUE,
+Bacnet_File,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
+Bacnet_Loop,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Bacnet_Loop,Object_Name,CharacterString,O,TRUE,
+Bacnet_Loop,Object_Type,BACnetObjectType,O,TRUE,
+Bacnet_Loop,Present_Value,REAL,O,TRUE,
+Bacnet_Loop,Description,CharacterString,O,TRUE,
+Bacnet_Loop,Status_Flags,BACnetStatusFlags,O,TRUE,
+Bacnet_Loop,Event_State,BACnetEventState,O,TRUE,
+Bacnet_Loop,Reliability,BACnetReliability,O,TRUE,
+Bacnet_Loop,Out_Of_Service,BOOLEAN,O,TRUE,
+Bacnet_Loop,Update_Interval,,O,TRUE,
+Bacnet_Loop,Output_Units,,O,TRUE,
+Bacnet_Loop,Manipulated_Variable_Reference,,O,TRUE,
+Bacnet_Loop,Controlled_Variable_Reference,,O,TRUE,
+Bacnet_Loop,Controlled_Variable_Value,,O,TRUE,
+Bacnet_Loop,Controlled_Variable_Units,,O,TRUE,
+Bacnet_Loop,Setpoint_Reference,,O,TRUE,
+Bacnet_Loop,Setpoint,,O,TRUE,
+Bacnet_Loop,Action,,O,TRUE,
+Bacnet_Loop,Proportional_Constant,,O,TRUE,
+Bacnet_Loop,Proportional_Constant_Units,,O,TRUE,
+Bacnet_Loop,Integral_Constant,,O,TRUE,
+Bacnet_Loop,Integral_Constant_Units,,O,TRUE,
+Bacnet_Loop,Derivative_Constant,,O,TRUE,
+Bacnet_Loop,Derivative_Constant_Units,,O,TRUE,
+Bacnet_Loop,Bias,,O,TRUE,
+Bacnet_Loop,Maximum_Output,,O,TRUE,
+Bacnet_Loop,Minimum_Output,,O,TRUE,
+Bacnet_Loop,Priority_For_Writing,Unsigned(1..16),O,TRUE,
+Bacnet_Loop,LoopDeadband,,O,TRUE,
+Bacnet_Loop,Saturation_Time,,O,TRUE,
+Bacnet_Loop,COV_Increment,REAL,O,TRUE,
+Bacnet_Loop,COV_Period,,O,TRUE,
+Bacnet_Loop,COV_Min_Send_Time,,O,TRUE,
+Bacnet_Loop,Ramp_Time,,O,TRUE,
+Bacnet_Loop,Saturation_Time_Low_Limit_Enable,,O,TRUE,
+Bacnet_Loop,Saturation_Time_High_Limit_Enable,,O,TRUE,
+Bacnet_Loop,Time_Delay,,O,TRUE,
+Bacnet_Loop,Notification_Class,Unsigned,O,TRUE,
+Bacnet_Loop,Error_Limit,,O,TRUE,
+Bacnet_Loop,Deadband,,O,TRUE,
+Bacnet_Loop,Event_Enable,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Loop,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Loop,Notify_Type,BACnetNotifyType,O,TRUE,
+Bacnet_Loop,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
+Bacnet_Loop,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Loop,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Loop,Event_Detection_Enable,BOOLEAN,O,TRUE,
+Bacnet_Loop,Time_Delay_Normal,,O,TRUE,
+Bacnet_Loop,Event_Algorithm_Inhibit,,O,TRUE,
+Bacnet_Loop,Event_Algorithm_Inhibit_Ref,,O,TRUE,
+Bacnet_Loop,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
+Bacnet_Multi-state_Input,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Bacnet_Multi-state_Input,Object_Name,CharacterString,O,TRUE,
+Bacnet_Multi-state_Input,Object_Type,BACnetObjectType,O,TRUE,
+Bacnet_Multi-state_Input,Present_Value,REAL,O,TRUE,
+Bacnet_Multi-state_Input,Description,CharacterString,O,TRUE,
+Bacnet_Multi-state_Input,Device_Type,,O,TRUE,
+Bacnet_Multi-state_Input,Status_Flags,BACnetStatusFlags,O,TRUE,
+Bacnet_Multi-state_Input,Event_State,BACnetEventState,O,TRUE,
+Bacnet_Multi-state_Input,Reliability,BACnetReliability,O,TRUE,
+Bacnet_Multi-state_Input,Out_Of_Service,BOOLEAN,O,TRUE,
+Bacnet_Multi-state_Input,Number_of_States,,O,TRUE,
+Bacnet_Multi-state_Input,State_Text,,O,TRUE,
+Bacnet_Multi-state_Input,COV_Period,,O,TRUE,
+Bacnet_Multi-state_Input,COV_Min_Send_Time,,O,TRUE,
+Bacnet_Multi-state_Input,Time_Delay,,O,TRUE,
+Bacnet_Multi-state_Input,Notification_Class,Unsigned,O,TRUE,
+Bacnet_Multi-state_Input,Alarm_Values,,O,TRUE,
+Bacnet_Multi-state_Input,Fault_Values,,O,TRUE,
+Bacnet_Multi-state_Input,Event_Enable,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Multi-state_Input,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Multi-state_Input,Notify_Type,BACnetNotifyType,O,TRUE,
+Bacnet_Multi-state_Input,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
+Bacnet_Multi-state_Input,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Multi-state_Input,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Multi-state_Input,Event_Detection_Enable,BOOLEAN,O,TRUE,
+Bacnet_Multi-state_Input,Time_Delay_Normal,,O,TRUE,
+Bacnet_Multi-state_Input,Event_Algorithm_Inhibit,,O,TRUE,
+Bacnet_Multi-state_Input,Event_Algorithm_Inhibit_Ref,,O,TRUE,
+Bacnet_Multi-state_Input,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
+Bacnet_Multi-state_Value,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Bacnet_Multi-state_Value,Object_Name,CharacterString,O,TRUE,
+Bacnet_Multi-state_Value,Object_Type,BACnetObjectType,O,TRUE,
+Bacnet_Multi-state_Value,Present_Value,REAL,O,TRUE,
+Bacnet_Multi-state_Value,Description,CharacterString,O,TRUE,
+Bacnet_Multi-state_Value,Status_Flags,BACnetStatusFlags,O,TRUE,
+Bacnet_Multi-state_Value,Event_State,BACnetEventState,O,TRUE,
+Bacnet_Multi-state_Value,Reliability,BACnetReliability,O,TRUE,
+Bacnet_Multi-state_Value,Out_Of_Service,BOOLEAN,O,TRUE,
+Bacnet_Multi-state_Value,Number_of_States,,O,TRUE,
+Bacnet_Multi-state_Value,State_Text,,O,TRUE,
+Bacnet_Multi-state_Value,Priority_Array,,O,TRUE,
+Bacnet_Multi-state_Value,Relinquish_Default,,O,TRUE,
+Bacnet_Multi-state_Value,COV_Period,,O,TRUE,
+Bacnet_Multi-state_Value,COV_Min_Send_Time,,O,TRUE,
+Bacnet_Multi-state_Value,Time_Delay,,O,TRUE,
+Bacnet_Multi-state_Value,Notification_Class,Unsigned,O,TRUE,
+Bacnet_Multi-state_Value,Alarm_Values,,O,TRUE,
+Bacnet_Multi-state_Value,Fault_Values,,O,TRUE,
+Bacnet_Multi-state_Value,Event_Enable,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Multi-state_Value,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Multi-state_Value,Notify_Type,BACnetNotifyType,O,TRUE,
+Bacnet_Multi-state_Value,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
+Bacnet_Multi-state_Value,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Multi-state_Value,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Multi-state_Value,Event_Detection_Enable,BOOLEAN,O,TRUE,
+Bacnet_Multi-state_Value,Time_Delay_Normal,,O,TRUE,
+Bacnet_Multi-state_Value,Event_Algorithm_Inhibit,,O,TRUE,
+Bacnet_Multi-state_Value,Event_Algorithm_Inhibit_Ref,,O,TRUE,
+Bacnet_Multi-state_Value,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
+Bacnet_Program,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Bacnet_Program,Object_Name,CharacterString,O,TRUE,
+Bacnet_Program,Object_Type,BACnetObjectType,O,TRUE,
+Bacnet_Program,Description,CharacterString,O,TRUE,
+Bacnet_Program,Program_State,,O,TRUE,
+Bacnet_Program,Program_Change,,O,TRUE,
+Bacnet_Program,Description_Of_Halt,,O,TRUE,
+Bacnet_Program,Reason_For_Halt,,O,TRUE,
+Bacnet_Program,Status_Flags,BACnetStatusFlags,O,TRUE,
+Bacnet_Program,Reliability,BACnetReliability,O,TRUE,
+Bacnet_Program,Out_Of_Service,BOOLEAN,O,TRUE,
+Bacnet_Program,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
+Bacnet_Notification,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Bacnet_Notification,Object_Name,CharacterString,O,TRUE,
+Bacnet_Notification,Object_Type,BACnetObjectType,O,TRUE,
+Bacnet_Notification,Description,CharacterString,O,TRUE,
+Bacnet_Notification,Notification_Class,Unsigned,O,TRUE,
+Bacnet_Notification,Priority,,O,TRUE,
+Bacnet_Notification,Ack_Required,,O,TRUE,
+Bacnet_Notification,Recipient_List,,O,TRUE,
+Bacnet_Notification,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
+Bacnet_Schedule,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Bacnet_Schedule,Object_Name,CharacterString,O,TRUE,
+Bacnet_Schedule,Object_Type,BACnetObjectType,O,TRUE,
+Bacnet_Schedule,Description,CharacterString,O,TRUE,
+Bacnet_Schedule,Present_Value,REAL,O,TRUE,
+Bacnet_Schedule,Effective_Period,,O,TRUE,
+Bacnet_Schedule,Weekly_Schedule,,O,TRUE,
+Bacnet_Schedule,Exception_Schedule,,O,TRUE,
+Bacnet_Schedule,Schedule_Default,,O,TRUE,
+Bacnet_Schedule,List_Of_Object_Property_References,,O,TRUE,
+Bacnet_Schedule,Priority_For_Writing,Unsigned(1..16),O,TRUE,
+Bacnet_Schedule,Status_Flags,BACnetStatusFlags,O,TRUE,
+Bacnet_Schedule,Reliability,BACnetReliability,O,TRUE,
+Bacnet_Schedule,Out_Of_Service,BOOLEAN,O,TRUE,
+Bacnet_Schedule,Time_To_Next_State,,O,TRUE,
+Bacnet_Schedule,Next_State,,O,TRUE,
+Bacnet_Schedule,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,O,TRUE,
+Bacnet_Trend_Log,Object_Identifier,BACnetObjectIdentifier,O,TRUE,
+Bacnet_Trend_Log,Object_Name,CharacterString,O,TRUE,
+Bacnet_Trend_Log,Object_Type,BACnetObjectType,O,TRUE,
+Bacnet_Trend_Log,Description,CharacterString,O,TRUE,
+Bacnet_Trend_Log,Enable,,O,TRUE,
+Bacnet_Trend_Log,Start_Time,,O,TRUE,
+Bacnet_Trend_Log,Stop_Time,,O,TRUE,
+Bacnet_Trend_Log,Log_Device_Object_Property,,O,TRUE,
+Bacnet_Trend_Log,Log_Interval,,O,TRUE,
+Bacnet_Trend_Log,Cov_Resubscription_Interval,,O,TRUE,
+Bacnet_Trend_Log,Client_Cov_Increment,,O,TRUE,
+Bacnet_Trend_Log,Stop_When_Full,,O,TRUE,
+Bacnet_Trend_Log,Buffer_Size,,O,TRUE,
+Bacnet_Trend_Log,Log_Buffer,,O,TRUE,
+Bacnet_Trend_Log,Record_Count,,O,TRUE,
+Bacnet_Trend_Log,Total_Record_Count,,O,TRUE,
+Bacnet_Trend_Log,Logging_Type,,O,TRUE,
+Bacnet_Trend_Log,Align_Intervals,,O,TRUE,
+Bacnet_Trend_Log,Interval_Offset,,O,TRUE,
+Bacnet_Trend_Log,Trigger,,O,TRUE,
+Bacnet_Trend_Log,Status_Flags,BACnetStatusFlags,O,TRUE,
+Bacnet_Trend_Log,Reliability,BACnetReliability,O,TRUE,
+Bacnet_Trend_Log,Notification_Threshold,,O,TRUE,
+Bacnet_Trend_Log,Records_Since_Notification,,O,TRUE,
+Bacnet_Trend_Log,Last_Notify_Record,,O,TRUE,
+Bacnet_Trend_Log,Event_State,BACnetEventState,O,TRUE,
+Bacnet_Trend_Log,Notification_Class,Unsigned,O,TRUE,
+Bacnet_Trend_Log,Event_Enable,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Trend_Log,Acked_Transitions,BACnetEventTransitionBits,O,TRUE,
+Bacnet_Trend_Log,Notify_Type,BACnetNotifyType,O,TRUE,
+Bacnet_Trend_Log,Event_Time_Stamps,BACnetARRAY[3] of BACnetTimeStamp,O,TRUE,
+Bacnet_Trend_Log,Event_Message_Texts,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Trend_Log,Event_Message_Texts_Config,BACnetARRAY[3] of CharacterString,O,TRUE,
+Bacnet_Trend_Log,Event_Detection_Enable,BOOLEAN,O,TRUE,
+Bacnet_Trend_Log,Event_Algorithm_Inhibit,,O,TRUE,
+Bacnet_Trend_Log,Event_Algorithm_Inhibit_Ref,,O,TRUE,
Bacnet_Trend_Log,Property_List,BACnetARRAY[N] of BACnetPropertyIdentifier,R,TRUE,
\ No newline at end of file
diff --git a/subset/cloud/Dockerfile.test_udmi b/subset/cloud/Dockerfile.test_udmi
index 220bd01ba0..ee5fa4c721 100644
--- a/subset/cloud/Dockerfile.test_udmi
+++ b/subset/cloud/Dockerfile.test_udmi
@@ -9,11 +9,15 @@ FROM daqf/aardvark:latest
RUN $AG update && $AG install openjdk-11-jre
RUN $AG update && $AG install openjdk-11-jdk git
-COPY validator/ validator/
+RUN $AG update && $AG install curl
+
+COPY udmi/validator/ validator/
RUN validator/bin/build
-COPY schemas/udmi/ schemas/udmi/
+COPY udmi/schema/ schema/
COPY subset/cloud/test_udmi .
+COPY resources/test_site/ local/
+
CMD ./test_udmi
diff --git a/subset/cloud/test_udmi b/subset/cloud/test_udmi
index 4b0cfb2a32..0f016e18e6 100755
--- a/subset/cloud/test_udmi
+++ b/subset/cloud/test_udmi
@@ -1,4 +1,5 @@
#!/bin/bash -e
+
source reporting.sh
REPORT=/tmp/report.txt
@@ -7,23 +8,27 @@ LOG=/tmp/udmi.log
# Necessary to reach gcp. Should be done by framework but this works for now.
route add default gw $GATEWAY_IP
+ip addr
route -n
-ping -c 2 172.217.164.106 || true
+ping -c 2 $GATEWAY_IP
arp -n
-ip addr
gcp_cred=/config/inst/gcp_service_account.json
gcp_topic=target
-schema_path=schemas/udmi
+schema_path=schema
+subscription=daq-validator-dev
message_types="state pointset system"
device_id=`jq -r .device_id /config/device/module_config.json`
# Do basic network connectivity check
+echo "nameserver 8.8.8.8" >> /etc/resolv.conf
cat /etc/resolv.conf
-ping -c 2 172.217.164.106
-ping -c 2 pubsub.googleapis.com
+ping -c 2 google.com || true # blocked on github actions
+echo "********GOOGLE PAGE********"
+curl google.com
+echo "***************************"
if [ "$device_id" == null ]; then
skip="No device id"
elif [ ! -f $gcp_cred ]; then
@@ -55,15 +60,14 @@ echo Using credentials from $GOOGLE_APPLICATION_CREDENTIALS
echo Extracted project $project_id
echo Extracted service $service_id
echo Configured topic is $gcp_topic
-echo Configured schema is $schema_path
echo Target device is $device_id
echo
-timeout 60 validator/bin/validate $PWD/$schema_path pubsub:$gcp_topic $service_id-$HOSTNAME || true
+timeout 90 validator/bin/validate $project_id $schema_path pubsub $subscription local/ || true
function message_report {
message_type=$1
- base=validations/devices/$device_id/$message_type
+ base=out/devices/$device_id/$message_type
ls -l $base* || true
if [ -f "$base.out" ]; then
@@ -87,3 +91,7 @@ function message_report {
for message_type in $message_types; do
message_report $message_type
done
+
+fgrep RESULT $REPORT
+
+echo Done with test_udmi
diff --git a/subset/connection/Dockerfile.test_macoui b/subset/connection/Dockerfile.test_macoui
deleted file mode 100644
index 6624495d00..0000000000
--- a/subset/connection/Dockerfile.test_macoui
+++ /dev/null
@@ -1,17 +0,0 @@
-FROM daqf/aardvark:latest
-
-RUN $AG update && $AG install openjdk-8-jre
-
-RUN $AG update && $AG install openjdk-8-jdk git
-
-RUN $AG update && $AG install curl
-
-COPY subset/connection/ .
-
-RUN mkdir -p mac_oui/src/main/resources
-
-RUN curl https://svn.nmap.org/nmap/nmap-mac-prefixes > mac_oui/src/main/resources/macList.txt
-
-RUN cd mac_oui && ./gradlew shadowJar
-
-CMD ["./test_macoui"]
diff --git a/subset/connection/build.conf b/subset/connection/build.conf
deleted file mode 100644
index 5c585856af..0000000000
--- a/subset/connection/build.conf
+++ /dev/null
@@ -1,2 +0,0 @@
-build subset/connection
-add macoui
diff --git a/subset/connection/mac_oui/.project b/subset/connection/mac_oui/.project
deleted file mode 100644
index cd2d52f077..0000000000
--- a/subset/connection/mac_oui/.project
+++ /dev/null
@@ -1,23 +0,0 @@
-
-
- mac_oui
- Project mac_oui created by Buildship.
-
-
-
-
- org.eclipse.jdt.core.javabuilder
-
-
-
-
- org.eclipse.buildship.core.gradleprojectbuilder
-
-
-
-
-
- org.eclipse.jdt.core.javanature
- org.eclipse.buildship.core.gradleprojectnature
-
-
diff --git a/subset/connection/readme.md b/subset/connection/readme.md
deleted file mode 100644
index f1166e933c..0000000000
--- a/subset/connection/readme.md
+++ /dev/null
@@ -1,20 +0,0 @@
-# Connection testing
-
-## test_macoui
-The MAC OUI test looks up the manufacturer information for the mac address of the device under test.
-
-### Note for test developers
-The functional test code is included in the `mac_oui/src/main/java` folder.
-
-The `macList.txt` file containing the MAC OUI database is downloaded at build time by the container specified in
-the `Dockerfile.test_macoui` file.
-
-If java code requires debugging in an IDE, then it will require the `macList.txt` to be placed under the
-`mac_oui/src/main/resources/` folder. Use the curl command from the `Dockerfile.test_macoui` file to download and
-place the file locally into your project. This `.txt` file is git ignored to avoid being included as a
-static resource on the source code repo.
-
-### Conditions for mac_oui
- - pass -> if the MAC OUI matches the mac prefix IEEE registration.
- - fail -> if the MAC OUI does not match with any of the mac prefixes.
-
diff --git a/subset/manual/readme.md b/subset/manual/readme.md
index ea2056eff6..a5b300a370 100644
--- a/subset/manual/readme.md
+++ b/subset/manual/readme.md
@@ -2,10 +2,14 @@
## Manual Tests
-Some tests cannot be automated with DAQ although these may be required. To facilitate a single test report which incorporates all tests undertaken on a device, the `manual` test can be used to input the results into reports produced by DAQ.
+Some tests cannot be automated with DAQ although these may be required.
+To facilitate a single test report which incorporates all tests
+undertaken on a device, the `manual` test can be used to input
+the results into reports produced by DAQ.
## Configuration
-Manual tests including results are inserted into the device's `module_config.json` and marked by `"type": "manual"`.
+Manual tests including results are inserted into the device's
+`module_config.json` and marked by `"type": "manual"`.
```
"tests": {
@@ -14,7 +18,7 @@ Manual tests including results are inserted into the device's `module_config.jso
"enabled": true,
"type": "manual",
"result": "required",
- "outcome": "pass"
+ "outcome": "pass",
"summary" : "summary note in results table",
"test_log" : "additional information in report appendix"
}
@@ -38,4 +42,4 @@ Test description
Test description
--------------------
RESULT pass manual.test.name Manual test - Test summary
-```
\ No newline at end of file
+```
diff --git a/subset/network/Dockerfile.test_network b/subset/network/Dockerfile.test_network
index fbbb2fdec7..ef4b204207 100644
--- a/subset/network/Dockerfile.test_network
+++ b/subset/network/Dockerfile.test_network
@@ -1,8 +1,21 @@
FROM daqf/aardvark:latest
-RUN $AG update && $AG install python netcat
+RUN $AG update && $AG install openjdk-8-jre
-COPY subset/network/network_tests.py .
-COPY subset/network/test_network .
+RUN $AG update && $AG install openjdk-8-jdk git
+
+RUN $AG update && $AG install python python-setuptools python-pip netcat
+
+RUN $AG update && $AG install curl
+
+RUN pip install scapy
+
+COPY subset/network/ .
+
+RUN mkdir -p mac_oui/src/main/resources
+
+RUN curl https://svn.nmap.org/nmap/nmap-mac-prefixes > mac_oui/src/main/resources/macList.txt
+
+RUN cd mac_oui && ./gradlew shadowJar
CMD ["./test_network"]
diff --git a/subset/network/NTPClient/src/main/java/Main.java b/subset/network/NTPClient/src/main/java/Main.java
deleted file mode 100644
index b9e53b2833..0000000000
--- a/subset/network/NTPClient/src/main/java/Main.java
+++ /dev/null
@@ -1,81 +0,0 @@
-import java.io.IOException;
-import java.net.*;
-import java.text.DecimalFormat;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
-
-
-public class Main {
- static final double SECONDS_FROM_01_01_1900_TO_01_01_1970 = 2208988800.0;
- static String serverName = "time.google.com";
- static int PORT = 123;
- static int timerPeriod = 10;
-
- public static void main(String[] args) {
- if (args.length < 2) {
- throw new IllegalArgumentException("Usage: server_name port timerPeriod");
- }
- serverName = args[0];
- PORT = Integer.parseInt(args[1]);
- timerPeriod = Integer.parseInt(args[2]);
-
- Runnable senderRunnable = new Runnable() {
- @Override
- public void run() {
- try {
- sendRequest();
- } catch (IOException e) {
- System.out.println(e.getMessage());
- }
- }
- };
- ScheduledExecutorService executor = Executors.newScheduledThreadPool(1);
- executor.scheduleAtFixedRate(senderRunnable, 0, timerPeriod, TimeUnit.SECONDS);
- }
-
- private static void sendRequest() throws IOException {
- // Send request
- DatagramSocket socket = new DatagramSocket();
- InetAddress address = InetAddress.getByName(serverName);
- byte[] buf = new NtpMessage(SECONDS_FROM_01_01_1900_TO_01_01_1970).toByteArray();
- DatagramPacket packet =
- new DatagramPacket(buf, buf.length, address, PORT);
-
- // Set the transmit timestamp *just* before sending the packet
- NtpMessage.encodeTimestamp(packet.getData(), 40,
- (System.currentTimeMillis() / 1000.0) + SECONDS_FROM_01_01_1900_TO_01_01_1970);
- sendPacket(socket, packet, buf);
- }
-
- private static void sendPacket(DatagramSocket socket, DatagramPacket packet, byte[] buf) throws IOException {
- socket.send(packet);
-
- // Get response
- System.out.println("NTP request sent, waiting for response...\n");
- packet = new DatagramPacket(buf, buf.length);
- socket.receive(packet);
-
- // Immediately record the incoming timestamp
- double destinationTimestamp =
- (System.currentTimeMillis() / 1000.0) + SECONDS_FROM_01_01_1900_TO_01_01_1970;
-
- // Process response
- NtpMessage msg = new NtpMessage(packet.getData());
- double roundTripDelay = (destinationTimestamp-msg.originateTimestamp) -
- (msg.transmitTimestamp-msg.receiveTimestamp);
- double localClockOffset =
- ((msg.receiveTimestamp - msg.originateTimestamp) +
- (msg.transmitTimestamp - destinationTimestamp)) / 2;
-
- // Display response
- System.out.println("NTP server: " + serverName);
- System.out.println(msg.toString());
- System.out.println("Dest. timestamp: " +
- NtpMessage.timestampToString(destinationTimestamp));
- System.out.println("Round-trip delay: " +
- new DecimalFormat("0.00").format(roundTripDelay * 1000) + " ms");
- System.out.println("Local clock offset: " +
- new DecimalFormat("0.00").format(localClockOffset * 1000) + " ms");
- }
-}
diff --git a/subset/network/NTPClient/src/main/java/NtpMessage.java b/subset/network/NTPClient/src/main/java/NtpMessage.java
deleted file mode 100644
index cfea458b1e..0000000000
--- a/subset/network/NTPClient/src/main/java/NtpMessage.java
+++ /dev/null
@@ -1,206 +0,0 @@
-import java.text.DecimalFormat;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-
-public class NtpMessage {
- public byte leapIndicator = 0;
- public byte version = 3;
- public byte mode = 0;
- public short stratum = 0;
- public byte pollInterval = 0;
- public byte precision = 0;
- public double rootDelay = 0;
- public double rootDispersion = 0;
- public byte[] referenceIdentifier = {0, 0, 0, 0};
- public double referenceTimestamp = 0;
- public double originateTimestamp = 0;
- public double receiveTimestamp = 0;
- public double transmitTimestamp = 0;
-
- /**
- * Constructs a new NtpMessage from an array of bytes.
- */
- public NtpMessage(byte[] array) {
- // See the packet format diagram in RFC 2030 for details
- leapIndicator = (byte)((array[0] >> 6) & 0x3);
- version = (byte)((array[0] >> 3) & 0x7);
- mode = (byte)(array[0] & 0x7);
- stratum = unsignedByteToShort(array[1]);
- pollInterval = array[2];
- precision = array[3];
-
- rootDelay = (array[4] * 256.0) +
- unsignedByteToShort(array[5]) +
- (unsignedByteToShort(array[6]) / 256.0) +
- (unsignedByteToShort(array[7]) / 65536.0);
-
- rootDispersion = (unsignedByteToShort(array[8]) * 256.0) +
- unsignedByteToShort(array[9]) +
- (unsignedByteToShort(array[10]) / 256.0) +
- (unsignedByteToShort(array[11]) / 65536.0);
-
- referenceIdentifier[0] = array[12];
- referenceIdentifier[1] = array[13];
- referenceIdentifier[2] = array[14];
- referenceIdentifier[3] = array[15];
-
- referenceTimestamp = decodeTimestamp(array, 16);
- originateTimestamp = decodeTimestamp(array, 24);
- receiveTimestamp = decodeTimestamp(array, 32);
- transmitTimestamp = decodeTimestamp(array, 40);
- }
-
- /**
- * Constructs a new NtpMessage in client -> server mode, and sets the
- * transmit timestamp to the current time.
- */
- public NtpMessage(double SECONDS_FROM_01_01_1900_TO_01_01_1970) {
- this.mode = 3;
- this.transmitTimestamp = (System.currentTimeMillis() / 1000.0) + SECONDS_FROM_01_01_1900_TO_01_01_1970;
- }
-
- /**
- * This method constructs the data bytes of a raw NTP packet.
- */
- public byte[] toByteArray() {
- // All bytes are automatically set to 0
- byte[] p = new byte[48];
-
- p[0] = (byte)(leapIndicator << 6 | version << 3 | mode);
- p[1] = (byte)stratum;
- p[2] = (byte)pollInterval;
- p[3] = (byte)precision;
-
- // root delay is a signed 16.16-bit FP, in Java an int is 32-bits
- int l = (int)(rootDelay * 65536.0);
- p[4] = (byte)((l >> 24) & 0xFF);
- p[5] = (byte)((l >> 16) & 0xFF);
- p[6] = (byte)((l >> 8) & 0xFF);
- p[7] = (byte)(l & 0xFF);
-
- // root dispersion is an unsigned 16.16-bit FP, in Java there are no
- // unsigned primitive types, so we use a long which is 64-bits
- long ul = (long)(rootDispersion * 65536.0);
- p[8] = (byte)((ul >> 24) & 0xFF);
- p[9] = (byte)((ul >> 16) & 0xFF);
- p[10] = (byte)((ul >> 8) & 0xFF);
- p[11] = (byte)(ul & 0xFF);
-
- p[12] = referenceIdentifier[0];
- p[13] = referenceIdentifier[1];
- p[14] = referenceIdentifier[2];
- p[15] = referenceIdentifier[3];
-
- encodeTimestamp(p, 16, referenceTimestamp);
- encodeTimestamp(p, 24, originateTimestamp);
- encodeTimestamp(p, 32, receiveTimestamp);
- encodeTimestamp(p, 40, transmitTimestamp);
-
- return p;
- }
-
- /**
- * Returns a string representation of a NtpMessage
- */
- public String toString() {
- String precisionStr =
- new DecimalFormat("0.#E0").format(Math.pow(2, precision));
-
- return "Leap indicator: " + leapIndicator + "\n" +
- "Version: " + version + "\n" +
- "Mode: " + mode + "\n" +
- "Stratum: " + stratum + "\n" +
- "Poll: " + pollInterval + "\n" +
- "Precision: " + precision + " (" + precisionStr + " seconds)\n" +
- "Root delay: " + new DecimalFormat("0.00").format(rootDelay * 1000) + " ms\n" +
- "Root dispersion: " + new DecimalFormat("0.00").format(rootDispersion * 1000) + " ms\n" +
- "Reference identifier: " + referenceIdentifierToString(referenceIdentifier, stratum, version) + "\n" +
- "Reference timestamp: " + timestampToString(referenceTimestamp) + "\n" +
- "Originate timestamp: " + timestampToString(originateTimestamp) + "\n" +
- "Receive timestamp: " + timestampToString(receiveTimestamp) + "\n" +
- "Transmit timestamp: " + timestampToString(transmitTimestamp);
- }
-
- /**
- * Converts an unsigned byte to a short. By default, Java assumes that
- * a byte is signed.
- */
- public static short unsignedByteToShort(byte b) {
- if((b & 0x80) == 0x80) return (short)(128 + (b & 0x7f));
- else return (short)b;
- }
-
- /**
- * Will read 8 bytes of a message beginning at pointer
- * and return it as a double, according to the NTP 64-bit timestamp
- * format.
- */
- public static double decodeTimestamp(byte[] array, int pointer) {
- double r = 0.0;
-
- for(int i = 0; i < 8; i++)
- {
- r += unsignedByteToShort(array[pointer + i]) * Math.pow(2, (3 - i) * 8);
- }
-
- return r;
- }
-
- /**
- * Encodes a timestamp in the specified position in the message
- */
- public static void encodeTimestamp(byte[] array, int pointer, double timestamp) {
- // Converts a double into a 64-bit fixed point
- for(int i = 0; i < 8; i++) {
- // 2^24, 2^16, 2^8, .. 2^-32
- double base = Math.pow(2, (3 - i) * 8);
- // Capture byte value
- array[pointer + i] = (byte)(timestamp / base);
- // Subtract captured value from remaining total
- timestamp = timestamp - (double)(unsignedByteToShort(array[pointer + i]) * base);
- }
- array[7] = (byte)(Math.random() * 255.0);
- }
-
- /**
- * Returns a timestamp (number of seconds since 00:00 1-Jan-1900) as a
- * formatted date/time string.
- */
- public static String timestampToString(double timestamp) {
- if(timestamp == 0) return "0";
- double utc = timestamp - (2208988800.0);
- long ms = (long)(utc * 1000.0);
- String date = new SimpleDateFormat("dd-MMM-yyyy HH:mm:ss").format(new Date(ms));
- double fraction = timestamp - ((long)timestamp);
- String fractionSting = new DecimalFormat(".000000").format(fraction);
- return date + fractionSting;
- }
-
- /**
- * Returns a string representation of a reference identifier according
- * to the rules set out in RFC 2030.
- */
- public static String referenceIdentifierToString(byte[] ref, short stratum, byte version) {
- if(stratum == 0 || stratum == 1)
- {
- return new String(ref);
- }
- else if(version == 3)
- {
- return unsignedByteToShort(ref[0]) + "." +
- unsignedByteToShort(ref[1]) + "." +
- unsignedByteToShort(ref[2]) + "." +
- unsignedByteToShort(ref[3]);
- }
- // In NTP Version 4 secondary servers, this is the low order 32 bits
- // of the latest transmit timestamp of the reference source.
- else if(version == 4)
- {
- return "" + ((unsignedByteToShort(ref[0]) / 256.0) +
- (unsignedByteToShort(ref[1]) / 65536.0) +
- (unsignedByteToShort(ref[2]) / 16777216.0) +
- (unsignedByteToShort(ref[3]) / 4294967296.0));
- }
- return "";
- }
-}
diff --git a/subset/network/README.md b/subset/network/README.md
new file mode 100644
index 0000000000..677a519931
--- /dev/null
+++ b/subset/network/README.md
@@ -0,0 +1,76 @@
+# Network Tests
+
+## General Network Tests
+
+### connection.min_send
+- Located in network_tests.py, started up in test_network.
+- Check if a device sends any data packet at a frequency of less than five minutes.
+
+#### Result cases:
+- PASS: The time between packets is measured - pass if time between any two packets is less than five minutes (deals with case where a monitor scan is long)
+- FAIL: If data packets are sent, but no two packets have a time interval of less than five minutes (and the monitor scan is long enough), then fail.
+- SKIP: If no data packets are sent and the monitor scan period is short, the test will skip instead of failing.
+
+### communication.type.broadcast
+- Located in network_tests.py, started up in test_network.
+- This test counts the number of unicast, broadcast and multicast packets sent out by reading from the .pcap file that DAQ has created during runtime.
+
+#### Result cases:
+This is an 'info' test, it does not have a pass/fail/skip case.
+
+
+## NTP Tests
+The NTP tests inspect the client NTP version and the device's ability to update its clock precisely.
+
+### Note for test developers
+The functional test code is included in the `ntp_tests.py` file.
+
+The test reads packets from startup.pcap and monitor.pcap.
+
+If the python code needs debugging, the pip module `scapy` is required (`pip install scapy`).
+
+### NTP Test conditions
+| Test ID | Info | Pass | Fail | Skip |
+|---|---|---|---|---|
+| connection.network.ntp_support | Are the received NTP packets using NTP v4? | NTP version is 4 | NTP version is not 4 | No NTP packets are received |
+| connection.network.ntp_update | Does the device demonstrate updating its clock using NTP? | Device clock is synchronized | Device clock is not synchronized | Not enough NTP packets are received |
+
+#### NTP Support ####
+The version of NTP used by the client is extracted from the first client (outbound) NTP packets discovered in startup.pcap.
+
+#### NTP Update ####
+The following criteria are used to determine whether a DUT has synced its clock with the NTP server provided by DAQ:
+ - A minimum of 2 NTP packets are present in startup.pcap and monitor.pcap (one potential poll).
+ - A minimum of 2 NTP packets have been exchanged between the DUT and the DAQ-provided NTP server.
+ - A valid NTP poll is present. Consisting of a client-server exchange.
+ - The calculated offset is less than 0.128 seconds and the final poll does not have a leap indicator of 3 (unsynchronized).
+
+When calculating the offset, the latest valid poll is inspected. A value of 0.128s is the maximum offset used to determine whether a device is considered in-sync with the NTP server because NTPv4 is capable of accuracy of tens of milliseconds.
+
+
+## MAC OUI
+The MAC OUI test looks up the manufacturer information for the mac address of the device under test.
+
+### Note for test developers
+The functional test code is included in the `mac_oui/src/main/java` folder.
+
+The `macList.txt` file containing the MAC OUI database is downloaded at build time by the container specified in
+the `Dockerfile.test_macoui` file.
+
+If java code requires debugging in an IDE, then it will require the `macList.txt` to be placed under the
+`mac_oui/src/main/resources/` folder. Use the curl command from the `Dockerfile.test_macoui` file to download and
+place the file locally into your project. This `.txt` file is git ignored to avoid being included as a
+static resource on the source code repo.
+
+### Conditions for mac_oui
+ - pass -> if the MAC OUI matches the mac prefix IEEE registration.
+ - fail -> if the MAC OUI does not match with any of the mac prefixes.
+
+
+## DNS Tests
+Check the device uses the DNS server from DHCP and resolves hostnames
+
+### Conditions for connection.dns.hostname_connect
+ - pass -> if the device uses the DNS server from DHCP, and resolves a hostname
+ - fail -> device uses a DNS server other than the server from DHCP
+ - skip -> device did not send any DNS requests
\ No newline at end of file
diff --git a/subset/network/debug_generate_capture.py b/subset/network/debug_generate_capture.py
deleted file mode 100644
index 410837145f..0000000000
--- a/subset/network/debug_generate_capture.py
+++ /dev/null
@@ -1,20 +0,0 @@
-import subprocess
-import time
-import sys
-
-arguments = sys.argv
-
-capture_time = int(arguments[1])
-eth_interface = arguments[2]
-
-cap_pcap_file = 'capture.pcap'
-
-tcpdump_capture_unlimited_byte_packets = 'tcpdump -i {e} -s0 -w {c}'.format(e=eth_interface, c=cap_pcap_file)
-
-def shell_command_without_result(command, wait_time, terminate_flag):
- process = subprocess.Popen(command, universal_newlines=True, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- time.sleep(wait_time)
- if terminate_flag:
- process.terminate()
-
-shell_command_without_result(tcpdump_capture_unlimited_byte_packets, capture_time, True)
diff --git a/subset/network/dns_tests.py b/subset/network/dns_tests.py
new file mode 100644
index 0000000000..f8ebfde258
--- /dev/null
+++ b/subset/network/dns_tests.py
@@ -0,0 +1,183 @@
+"""
+ This script can be called to run DNS related test.
+
+"""
+from __future__ import absolute_import
+import subprocess
+import sys
+
+import re
+import datetime
+
+arguments = sys.argv
+
+test_request = str(arguments[1])
+cap_pcap_file = str(arguments[2])
+device_address = str(arguments[3])
+
+report_filename = 'dns_tests.txt'
+min_packet_length_bytes = 20
+max_packets_in_report = 10
+port_list = []
+ignore = '%%'
+summary_text = ''
+result = 'fail'
+dash_break_line = '--------------------\n'
+
+DESCRIPTION_HOSTNAME_CONNECT = 'Check device uses the DNS server from DHCP and resolves hostnames'
+
+TCPDUMP_DATE_FORMAT = "%Y-%m-%d %H:%M:%S.%f"
+
+IP_REGEX = r'(([0-9]{1,3}\.){3}[0-9]{1,3})'
+RDATA_REGEX = r''
+
+DNS_SERVER_HOST = '.2'
+
+
+def write_report(string_to_append):
+ print(string_to_append.strip())
+ with open(report_filename, 'a+') as file_open:
+ file_open.write(string_to_append)
+
+
+def exec_tcpdump(tcpdump_filter, capture_file=None):
+ """
+ Args
+ tcpdump_filter: Filter to pass onto tcpdump file
+ capture_file: Optional capture file to look
+
+ Returns
+ List of packets matching the filter
+ """
+
+ capture_file = cap_pcap_file if capture_file is None else capture_file
+ command = 'tcpdump -tttt -n -r {} {}'.format(capture_file, tcpdump_filter)
+
+ process = subprocess.Popen(command,
+ universal_newlines=True,
+ shell=True,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ text = str(process.stdout.read()).rstrip()
+
+ if text:
+ return text.split("\n")
+
+ return []
+
+
+def add_summary(text):
+ global summary_text
+ summary_text = summary_text + " " + text if summary_text else text
+
+
+def get_dns_server_from_ip(ip_address):
+ """
+ Returns the IP address of the DNS server provided by DAQ
+
+ Args
+ ip_address: IP address of the device under test
+
+ Returns
+ IP address of DNS server
+ """
+
+ return re.sub(r'\.\d+$', DNS_SERVER_HOST, ip_address)
+
+
+def check_communication_for_response(response_line):
+ """
+ Given a line from the TCPdump output for DNS responses
+ Look through the packet capture to see if any communitication to the
+ IP addresses from the DNS
+
+ Args
+ tcpdump_line: Line from tcpdump filtered to DNS resposnes
+
+ Returns
+ True/False if the device has communicated with an IP from the
+ DNS response after it has recieved it
+ """
+
+ response_time = datetime.datetime.strptime(response_line[:26], TCPDUMP_DATE_FORMAT)
+
+ # Use regex to extract all IP addresses in the response
+ matches = re.findall(IP_REGEX, response_line)
+
+ # The first two IP addresses are the source/destination
+ ip_addresses = matches[2:]
+
+ for address in ip_addresses:
+ packets = exec_tcpdump('dst host {}'.format(address[0]))
+ for packet in packets:
+ packet_time = datetime.datetime.strptime(packet[:26], TCPDUMP_DATE_FORMAT)
+ if packet_time > response_time:
+ return True
+
+ return False
+
+
+def test_dns(target_ip):
+ """ Runs the connection.dns.hostname_connect test
+
+ Checks that:
+ i) the device sends DNS requests
+ ii) the device uses the DNS server from DHCP
+ iii) the device uses an IP address recieved from the DNS server
+
+ Args
+ target_ip: IP address of the device
+ """
+
+ # Get server IP of the DHCP server
+ dhcp_dns_ip = get_dns_server_from_ip(target_ip)
+
+ # Check if the device has sent any DNS requests
+ filter_to_dns = 'dst port 53 and src host {}'.format(target_ip)
+ to_dns = exec_tcpdump(filter_to_dns)
+ num_query_dns = len(to_dns)
+
+ if num_query_dns == 0:
+ add_summary('Device did not send any DNS requests')
+ return 'skip'
+
+ # Check if the device only sent DNS requests to the DHCP Server
+ filter_to_dhcp_dns = 'dst port 53 and src host {} and dst host {}' \
+ .format(target_ip, dhcp_dns_ip)
+
+ to_dhcp_dns = exec_tcpdump(filter_to_dhcp_dns)
+ num_query_dhcp_dns = len(to_dhcp_dns)
+
+ if num_query_dns > num_query_dhcp_dns:
+ add_summary('Device sent DNS requests to servers other than the DHCP provided server')
+ return 'fail'
+
+ # Retrieve responses from DNS
+ filter_dns_response = 'src port 53 and src host {}'.format(dhcp_dns_ip)
+ dns_responses = exec_tcpdump(filter_dns_response)
+
+ num_dns_responses = len(dns_responses)
+
+ if num_dns_responses == 0:
+ add_summary('No results recieved from DNS server')
+ return 'fail'
+
+ # Check that the device has sent data packets to any of the IP addresses it has recieved
+ # it has recieved from the DNS requests
+
+ for response in dns_responses:
+ if check_communication_for_response(response):
+ add_summary('Device sends DNS requests and resolves host names')
+ return 'pass'
+
+ add_summary('Device did not send data to IP addresses retrieved from the DNS server')
+ return 'fail'
+
+
+write_report("{b}{t}\n{b}".format(b=dash_break_line, t=test_request))
+
+if test_request == 'connection.dns.hostname_connect':
+ write_report("{d}\n{b}".format(b=dash_break_line, d=DESCRIPTION_HOSTNAME_CONNECT))
+ result = test_dns(device_address)
+
+write_report("RESULT {r} {t} {s}\n".format(r=result, t=test_request, s=summary_text.strip()))
diff --git a/subset/connection/mac_oui/.classpath b/subset/network/mac_oui/.classpath
similarity index 100%
rename from subset/connection/mac_oui/.classpath
rename to subset/network/mac_oui/.classpath
diff --git a/subset/connection/mac_oui/.gitignore b/subset/network/mac_oui/.gitignore
similarity index 100%
rename from subset/connection/mac_oui/.gitignore
rename to subset/network/mac_oui/.gitignore
diff --git a/subset/connection/mac_oui/.settings/org.eclipse.buildship.core.prefs b/subset/network/mac_oui/.settings/org.eclipse.buildship.core.prefs
similarity index 100%
rename from subset/connection/mac_oui/.settings/org.eclipse.buildship.core.prefs
rename to subset/network/mac_oui/.settings/org.eclipse.buildship.core.prefs
diff --git a/subset/connection/mac_oui/build.gradle b/subset/network/mac_oui/build.gradle
similarity index 90%
rename from subset/connection/mac_oui/build.gradle
rename to subset/network/mac_oui/build.gradle
index 476315bb29..0f0dc95fe6 100644
--- a/subset/connection/mac_oui/build.gradle
+++ b/subset/network/mac_oui/build.gradle
@@ -3,7 +3,7 @@ buildscript {
jcenter()
}
dependencies {
- classpath "com.github.jengelman.gradle.plugins:shadow:5.2.0"
+ classpath "com.github.jengelman.gradle.plugins:shadow:6.0.0"
}
}
diff --git a/subset/connection/mac_oui/gradle/wrapper/gradle-wrapper.jar b/subset/network/mac_oui/gradle/wrapper/gradle-wrapper.jar
similarity index 100%
rename from subset/connection/mac_oui/gradle/wrapper/gradle-wrapper.jar
rename to subset/network/mac_oui/gradle/wrapper/gradle-wrapper.jar
diff --git a/subset/network/NTPClient/gradle/wrapper/gradle-wrapper.properties b/subset/network/mac_oui/gradle/wrapper/gradle-wrapper.properties
similarity index 92%
rename from subset/network/NTPClient/gradle/wrapper/gradle-wrapper.properties
rename to subset/network/mac_oui/gradle/wrapper/gradle-wrapper.properties
index 622ab64a3c..8d8e8abe86 100644
--- a/subset/network/NTPClient/gradle/wrapper/gradle-wrapper.properties
+++ b/subset/network/mac_oui/gradle/wrapper/gradle-wrapper.properties
@@ -1,5 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
+distributionUrl=https\://services.gradle.org/distributions/gradle-6.6.1-bin.zip
diff --git a/subset/security/security_passwords/gradlew b/subset/network/mac_oui/gradlew
similarity index 100%
rename from subset/security/security_passwords/gradlew
rename to subset/network/mac_oui/gradlew
diff --git a/subset/connection/mac_oui/gradlew.bat b/subset/network/mac_oui/gradlew.bat
similarity index 100%
rename from subset/connection/mac_oui/gradlew.bat
rename to subset/network/mac_oui/gradlew.bat
diff --git a/subset/connection/mac_oui/mac_oui.iml b/subset/network/mac_oui/mac_oui.iml
similarity index 100%
rename from subset/connection/mac_oui/mac_oui.iml
rename to subset/network/mac_oui/mac_oui.iml
diff --git a/subset/connection/mac_oui/settings.gradle b/subset/network/mac_oui/settings.gradle
similarity index 100%
rename from subset/connection/mac_oui/settings.gradle
rename to subset/network/mac_oui/settings.gradle
diff --git a/subset/connection/mac_oui/src/main/java/MacLookup.java b/subset/network/mac_oui/src/main/java/MacLookup.java
similarity index 100%
rename from subset/connection/mac_oui/src/main/java/MacLookup.java
rename to subset/network/mac_oui/src/main/java/MacLookup.java
diff --git a/subset/connection/mac_oui/src/main/java/Main.java b/subset/network/mac_oui/src/main/java/Main.java
similarity index 100%
rename from subset/connection/mac_oui/src/main/java/Main.java
rename to subset/network/mac_oui/src/main/java/Main.java
diff --git a/subset/connection/mac_oui/src/main/java/ReportHandler.java b/subset/network/mac_oui/src/main/java/ReportHandler.java
similarity index 91%
rename from subset/connection/mac_oui/src/main/java/ReportHandler.java
rename to subset/network/mac_oui/src/main/java/ReportHandler.java
index 3f85070b5c..6b691cbfd4 100644
--- a/subset/connection/mac_oui/src/main/java/ReportHandler.java
+++ b/subset/network/mac_oui/src/main/java/ReportHandler.java
@@ -5,7 +5,7 @@
public class ReportHandler {
String report = "Mac OUI Test\n";
- File reportFile = new File("report/report.txt");
+ File reportFile = new File("/report/macoui.txt");
public void addText(String text) {
report += text + '\n';
diff --git a/subset/connection/mac_oui/src/main/java/RetrieveList.java b/subset/network/mac_oui/src/main/java/RetrieveList.java
similarity index 100%
rename from subset/connection/mac_oui/src/main/java/RetrieveList.java
rename to subset/network/mac_oui/src/main/java/RetrieveList.java
diff --git a/subset/network/network_tests.py b/subset/network/network_tests.py
index 6dbaca3f85..8fafee8856 100644
--- a/subset/network/network_tests.py
+++ b/subset/network/network_tests.py
@@ -1,16 +1,26 @@
+"""
+ This script can be called to run a specific network module test.
+ Currently supports:
+ - connection.min_send
+ - connection.dhcp_long
+ - protocol.app_min_send
+ - communication.type.broadcast
+ - network.ntp.support
+ Usage: python network_tests.py
+ E.g. python network_tests.py connection.min_send $MONITOR $TARGET_IP
+"""
import subprocess, time, sys, json
+import re
+import datetime
+
arguments = sys.argv
test_request = str(arguments[1])
cap_pcap_file = str(arguments[2])
device_address = str(arguments[3])
-if test_request == 'protocol.app_min_send':
- module_config = str(arguments[4])
- infastructure_excludes = str(arguments[5])
-
-report_filename = 'report.txt'
+report_filename = 'network_tests.txt'
min_packet_length_bytes = 20
max_packets_in_report = 10
port_list = []
@@ -18,24 +28,28 @@
summary_text = ''
result = 'fail'
dash_break_line = '--------------------\n'
+
description_min_send = 'Device sends data at a frequency of less than 5 minutes.'
-description_dhcp_long = 'Device sends ARP request on DHCP lease expiry.'
-description_app_min_send = 'Device sends application packets at a frequency of less than 5 minutes.'
description_communication_type = 'Device sends unicast or broadcast packets.'
-description_ntp_support = 'Device sends NTP request packets.'
-tcpdump_display_all_packets = 'tcpdump -n src host ' + device_address + ' -r ' + cap_pcap_file
+tcpdump_display_all_packets = 'tcpdump -tttt -n src host ' + device_address + ' -r ' + cap_pcap_file
tcpdump_display_udp_bacnet_packets = 'tcpdump -n udp dst portrange 47808-47809 -r ' + cap_pcap_file
-tcpdump_display_arp_packets = 'tcpdump arp -r ' + cap_pcap_file
-tcpdump_display_ntp_packets = 'tcpdump dst port 123 -r ' + cap_pcap_file
-tcpdump_display_eapol_packets = 'tcpdump port 1812 or port 1813 or port 3799 -r ' + cap_pcap_file
+tcpdump_display_arp_packets = 'tcpdump arp -n src host ' + device_address + ' -r ' + cap_pcap_file
+
tcpdump_display_broadcast_packets = 'tcpdump broadcast and src host ' + device_address + ' -r ' + cap_pcap_file
+tcpdump_display_multicast_packets = 'tcpdump -n \'ip[16] & 240 = 224\' -r ' + cap_pcap_file
+
+system_conf_file = "/config/inst/system.conf"
+tcpdump_date_format = "%Y-%m-%d %H:%M:%S.%f"
+min_send_seconds = 300
+min_send_duration = "5 minutes"
def write_report(string_to_append):
print(string_to_append.strip())
with open(report_filename, 'a+') as file_open:
file_open.write(string_to_append)
+
def shell_command_with_result(command, wait_time, terminate_flag):
process = subprocess.Popen(command, universal_newlines=True, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
text = process.stdout.read()
@@ -45,16 +59,19 @@ def shell_command_with_result(command, wait_time, terminate_flag):
process.terminate()
return str(text)
+
def add_packet_count_to_report(packet_type, packet_count):
- write_report("{i} {t} Packets recieved={p}\n".format(i=ignore, t=packet_type, p=packet_count))
+ write_report("{i} {t} packets received={p}\n".format(i=ignore, t=packet_type, p=packet_count))
+
def add_packet_info_to_report(packets_received):
- packet_list = packets_received.rstrip().split("\n")
+ packet_list = packets_received.strip().split("\n")
outnum = min(len(packet_list), max_packets_in_report)
for x in range(0, outnum):
write_report("{i} {p}\n".format(i=ignore, p=packet_list[x]))
write_report("{i} packets_count={p}\n".format(i=ignore, p=len(packet_list)))
+
def decode_shell_result(shell_result):
if len(shell_result) > min_packet_length_bytes:
packet_request_list = shell_result.rstrip().split("\n")
@@ -62,135 +79,143 @@ def decode_shell_result(shell_result):
return packets_received
return 0
+
def packets_received_count(shell_result):
if shell_result is None:
return 0
else:
return decode_shell_result(shell_result)
-def load_json_config(json_filename):
- with open(json_filename, 'r') as json_file:
- return json.load(json_file)
-
-def add_to_port_list(port_map):
- global port_list
- for port, port_info in port_map.items():
- for key, value in port_info.items():
- if key == 'allowed':
- if value == True:
- port_list.append(port)
-
-def remove_from_port_list(port_map):
- global port_list
- for exclude in port_map:
- for port in port_list:
- if port == exclude:
- port_list.remove(exclude)
-
-def decode_json_config(config_file, map_name, action):
- dictionary = load_json_config(config_file)
- for key, value in dictionary.items():
- if key == map_name:
- for protocol, info in value.items():
- if protocol == 'udp' or protocol == 'tcp':
- for ports, port_map in info.items():
- if action == 'add':
- add_to_port_list(port_map)
- elif action == 'remove':
- remove_from_port_list(port_map)
+
+def get_scan_length(config_file):
+ """ Gets length of the monitor.pcap scan
+
+ Reads the system.conf file to and returns the length of the monitor_scan
+
+ Args:
+ config_file: Location of system.conf file within test container
+
+ Returns:
+ Length of monitor scan in seconds
+
+ If not defined, or system.conf could not be found
+ returns false
+ """
+
+ scan_length = False
+ try:
+ with open(config_file) as file:
+ for line in file:
+ match = re.search(r'^monitor_scan_sec=(\d+)', line)
+ if match:
+ matched_length = int(match.group(1))
+ # If scan length = 0 or not found, then monitor scan does not exist
+ scan_length = matched_length if matched_length > 0 else False
+ return scan_length
+ except Exception as e:
+ write_report("Error encountered reading system.conf {}".format(e))
+ return False
+
+
+def add_summary(text):
+ global summary_text
+ summary_text = summary_text + " " + text if summary_text else text
+
def test_connection_min_send():
+ """ Runs the connection.min_send test
+
+ Tests if the device sends data packets of any type (inc data, NTP, etc)
+ within a period of 5 minutes by looking through the monitor.pcap file
+
+ The length of test can be configured using the min_send_seconds variable
+ at the start of the file
+ """
+
+ # Get scan length
+ scan_length = get_scan_length(system_conf_file)
+ min_send_delta = datetime.timedelta(seconds=min_send_seconds)
+ min_send_pass = False
+
+ # The test scans the monitor.pcap, so if it's not found skip
+ if not scan_length:
+ add_summary("DAQ monitor scan not running, test skipped")
+ return 'skip'
+
arp_shell_result = shell_command_with_result(tcpdump_display_arp_packets, 0, False)
arp_packets_received = packets_received_count(arp_shell_result)
if arp_packets_received > 0:
add_summary("ARP packets received.")
+
shell_result = shell_command_with_result(tcpdump_display_all_packets, 0, False)
- all_packets_received = packets_received_count(shell_result)
- app_packets_received = all_packets_received - arp_packets_received
- if app_packets_received > 0:
- add_summary("Other packets received.")
- print('min_send_packets', arp_packets_received, all_packets_received)
+ all_packets = shell_result.splitlines()
+
+ # Loop through tcpdump result and measure the time between succesive packets
+ for i, packet in enumerate(all_packets):
+ # datetime is the first 26 characters of the line
+ packet_time = datetime.datetime.strptime(packet[:26], tcpdump_date_format)
+
+ if i == 0:
+ previous_packet_time = packet_time
+ continue
+
+ delta = packet_time - previous_packet_time
+ if delta < min_send_delta:
+ min_send_pass = True
+ break
+
+ previous_packet_time = packet_time
+
add_packet_info_to_report(shell_result)
- return 'pass' if app_packets_received > 0 else 'fail'
-def test_connection_dhcp_long():
- shell_result = shell_command_with_result(tcpdump_display_arp_packets, 0, False)
- arp_packets_received = packets_received_count(shell_result)
- if arp_packets_received > 0:
- add_summary("ARP packets received.")
- add_packet_info_to_report(shell_result)
- return 'pass'
- else:
- return 'fail'
+ if not min_send_pass:
+ if scan_length > min_send_seconds:
+ add_summary('Data packets were not sent at a frequency less than ' +
+ min_send_duration)
+ return 'fail'
+ else:
+ add_summary('Please set DAQ monitor scan to be greater than ' +
+ min_send_duration)
+ return 'skip'
+
+ add_summary('Data packets were sent at a frequency of less than ' +
+ min_send_duration)
+ return 'pass'
-def test_protocol_app_min_send():
- """
- reads module_config json file and adds ports to port_list
- read infastructure_excludes json file and removes ports from port_list (temporarily commented)
- """
- decode_json_config(module_config, 'servers', 'add')
- print('port_list:')
- app_packets_received = 0
- for port in port_list:
- try:
- tcpdump_command = 'tcpdump port {p} -r {c}'.format(p=port, c=cap_pcap_file)
- shell_result = shell_command_with_result(tcpdump_command, 2, False)
- for_port = packets_received_count(shell_result)
- app_packets_received += for_port
- print('app_packets_received', port, for_port)
- add_packet_info_to_report(shell_result)
- except Exception as e:
- print(e)
- print('app_packets_received', app_packets_received)
- if app_packets_received > 0:
- add_summary("Application packets received.")
- return 'pass'
- else:
- return 'fail'
def test_communication_type_broadcast():
- shell_result = shell_command_with_result(tcpdump_display_broadcast_packets, 0, False)
- broadcast_packets_received = packets_received_count(shell_result)
- if broadcast_packets_received > 0:
+ """ Runs the communication.type.broadcast DAQ test.
+ Counts the number of unicast, broadcast and multicast packets sent.
+ """
+
+ broadcast_result = shell_command_with_result(tcpdump_display_broadcast_packets, 0, False)
+ broadcast_packets = packets_received_count(broadcast_result)
+ if broadcast_packets > 0:
add_summary("Broadcast packets received.")
- add_packet_count_to_report("Broadcast", broadcast_packets_received)
- shell_result = shell_command_with_result(tcpdump_display_all_packets, 0, False)
- all_packets_received = packets_received_count(shell_result)
- if (all_packets_received - broadcast_packets_received) > 0:
+ add_packet_count_to_report("Broadcast", broadcast_packets)
+
+ multicast_result = shell_command_with_result(tcpdump_display_multicast_packets, 0, False)
+ multicast_packets = packets_received_count(multicast_result)
+ if multicast_packets > 0:
+ add_summary("Multicast packets received.")
+ add_packet_count_to_report("Multicast", multicast_packets)
+
+ unicast_result = shell_command_with_result(tcpdump_display_all_packets, 0, False)
+ unicast_packets = packets_received_count(unicast_result) - broadcast_packets - multicast_packets
+ if unicast_packets > 0:
add_summary("Unicast packets received.")
- add_packet_count_to_report("Unicast", all_packets_received - broadcast_packets_received)
- return 'info'
+ add_packet_count_to_report("Unicast", unicast_packets)
-def test_ntp_support():
- shell_result = shell_command_with_result(tcpdump_display_ntp_packets, 0, False)
- ntp_packets_received = packets_received_count(shell_result)
- if ntp_packets_received > 0:
- add_summary("NTP packets received.")
- add_packet_info_to_report(shell_result)
- return 'pass'
- else:
- return 'fail'
+ return 'info'
-def add_summary(text):
- global summary_text
- summary_text = summary_text + " " + text if summary_text else text
write_report("{b}{t}\n{b}".format(b=dash_break_line, t=test_request))
if test_request == 'connection.min_send':
write_report("{d}\n{b}".format(b=dash_break_line, d=description_min_send))
result = test_connection_min_send()
-elif test_request == 'connection.dhcp_long':
- write_report("{d}\n{b}".format(b=dash_break_line, d=description_dhcp_long))
- result = test_connection_dhcp_long()
-elif test_request == 'protocol.app_min_send':
- write_report("{d}\n{b}".format(b=dash_break_line, d=description_app_min_send))
- result = test_protocol_app_min_send()
elif test_request == 'communication.type.broadcast':
write_report("{d}\n{b}".format(b=dash_break_line, d=description_communication_type))
result = test_communication_type_broadcast()
-elif test_request == 'network.ntp.support':
- write_report("{d}\n{b}".format(b=dash_break_line, d=description_ntp_support))
- result = test_ntp_support()
write_report("RESULT {r} {t} {s}\n".format(r=result, t=test_request, s=summary_text.strip()))
diff --git a/subset/network/ntp_tests.py b/subset/network/ntp_tests.py
new file mode 100644
index 0000000000..6d0eb6b13b
--- /dev/null
+++ b/subset/network/ntp_tests.py
@@ -0,0 +1,170 @@
+from __future__ import absolute_import, division
+from scapy.all import NTP, rdpcap
+import sys
+import os
+
+arguments = sys.argv
+
+test_request = str(arguments[1])
+startup_pcap_file = str(arguments[2])
+monitor_pcap_file = str(arguments[3])
+
+report_filename = 'ntp_tests.txt'
+ignore = '%%'
+summary_text = ''
+result = 'fail'
+dash_break_line = '--------------------\n'
+description_ntp_support = 'Device supports NTP version 4.'
+description_ntp_update = 'Device synchronizes its time to the NTP server.'
+
+NTP_VERSION_PASS = 4
+LOCAL_PREFIX = '10.20.'
+NTP_SERVER_SUFFIX = '.2'
+MODE_CLIENT = 3
+MODE_SERVER = 4
+YEAR_2500 = 16725225600
+SECONDS_BETWEEN_1900_1970 = 2208988800
+OFFSET_ALLOWANCE = 0.128
+LEAP_ALARM = 3
+
+
+def write_report(string_to_append):
+ with open(report_filename, 'a+') as file_open:
+ file_open.write(string_to_append)
+
+
+# Extracts the NTP version from the first client NTP packet
+def ntp_client_version(capture):
+ client_packets = ntp_packets(capture, MODE_CLIENT)
+ if len(client_packets) == 0:
+ return None
+ return ntp_payload(client_packets[0]).version
+
+
+# Filters the packets by type (NTP)
+def ntp_packets(capture, mode=None):
+ packets = []
+ for packet in capture:
+ if NTP in packet:
+ ip = packet.payload
+ udp = ip.payload
+ ntp = udp.payload
+ if mode is None or mode == ntp.mode:
+ packets.append(packet)
+ return packets
+
+
+# Extracts the NTP payload from a packet of type NTP
+def ntp_payload(packet):
+ ip = packet.payload
+ udp = ip.payload
+ ntp = udp.payload
+ return ntp
+
+
+def test_ntp_support():
+ capture = rdpcap(startup_pcap_file)
+ if len(capture) > 0:
+ version = ntp_client_version(capture)
+ if version is None:
+ add_summary("No NTP packets received.")
+ return 'skip'
+ if version == NTP_VERSION_PASS:
+ add_summary("Using NTPv" + str(NTP_VERSION_PASS) + ".")
+ return 'pass'
+ else:
+ add_summary("Not using NTPv" + str(NTP_VERSION_PASS) + ".")
+ return 'fail'
+ else:
+ add_summary("No NTP packets received.")
+ return 'skip'
+
+
+def test_ntp_update():
+ startup_capture = rdpcap(startup_pcap_file)
+ packets = ntp_packets(startup_capture)
+ if os.path.isfile(monitor_pcap_file):
+ monitor_capture = rdpcap(monitor_pcap_file)
+ packets += ntp_packets(monitor_capture)
+ if len(packets) < 2:
+ add_summary("Not enough NTP packets received.")
+ return 'skip'
+ # Check that DAQ NTP server has been used
+ using_local_server = False
+ local_ntp_packets = []
+ for packet in packets:
+ # Packet is to or from local NTP server
+ if ((packet.payload.dst.startswith(LOCAL_PREFIX) and
+ packet.payload.dst.endswith(NTP_SERVER_SUFFIX)) or
+ (packet.payload.src.startswith(LOCAL_PREFIX) and
+ packet.payload.src.endswith(NTP_SERVER_SUFFIX))):
+ using_local_server = True
+ local_ntp_packets.append(packet)
+ if not using_local_server or len(local_ntp_packets) < 2:
+ add_summary("Device clock not synchronized with local NTP server.")
+ return 'fail'
+ # Obtain the latest NTP poll
+ p1 = p2 = p3 = p4 = None
+ for i in range(len(local_ntp_packets)):
+ if p1 is None:
+ if ntp_payload(local_ntp_packets[i]).mode == MODE_CLIENT:
+ p1 = local_ntp_packets[i]
+ elif p2 is None:
+ if ntp_payload(local_ntp_packets[i]).mode == MODE_SERVER:
+ p2 = local_ntp_packets[i]
+ else:
+ p1 = local_ntp_packets[i]
+ elif p3 is None:
+ if ntp_payload(local_ntp_packets[i]).mode == MODE_CLIENT:
+ p3 = local_ntp_packets[i]
+ elif p4 is None:
+ if ntp_payload(local_ntp_packets[i]).mode == MODE_SERVER:
+ p4 = local_ntp_packets[i]
+ p1 = p3
+ p2 = p4
+ p3 = p4 = None
+ else:
+ p3 = local_ntp_packets[i]
+ if p1 is None or p2 is None:
+ add_summary("Device clock not synchronized with local NTP server.")
+ return 'fail'
+ t1 = ntp_payload(p1).sent
+ t2 = ntp_payload(p1).time
+ t3 = ntp_payload(p2).sent
+ t4 = ntp_payload(p2).time
+
+ # Timestamps are inconsistently either from 1900 or 1970
+ if t1 > YEAR_2500:
+ t1 = t1 - SECONDS_BETWEEN_1900_1970
+ if t2 > YEAR_2500:
+ t2 = t2 - SECONDS_BETWEEN_1900_1970
+ if t3 > YEAR_2500:
+ t3 = t3 - SECONDS_BETWEEN_1900_1970
+ if t4 > YEAR_2500:
+ t4 = t4 - SECONDS_BETWEEN_1900_1970
+
+ offset = abs((t2 - t1) + (t3 - t4))/2
+ if offset < OFFSET_ALLOWANCE and not ntp_payload(p1).leap == LEAP_ALARM:
+ add_summary("Device clock synchronized.")
+ return 'pass'
+ else:
+ add_summary("Device clock not synchronized with local NTP server.")
+ return 'fail'
+
+
+def add_summary(text):
+ global summary_text
+ summary_text = summary_text + " " + text if summary_text else text
+
+
+write_report("{b}{t}\n{b}".format(b=dash_break_line, t=test_request))
+
+
+if test_request == 'connection.network.ntp_support':
+ write_report("{d}\n{b}".format(b=dash_break_line, d=description_ntp_support))
+ result = test_ntp_support()
+elif test_request == 'connection.network.ntp_update':
+ write_report("{d}\n{b}".format(b=dash_break_line, d=description_ntp_update))
+ result = test_ntp_update()
+
+write_report("RESULT {r} {t} {s}\n".format(r=result, t=test_request, s=summary_text.strip()))
diff --git a/subset/connection/test_macoui b/subset/network/run_macoui_test
similarity index 89%
rename from subset/connection/test_macoui
rename to subset/network/run_macoui_test
index 8abab5c5ce..fba57c38c6 100755
--- a/subset/connection/test_macoui
+++ b/subset/network/run_macoui_test
@@ -1,8 +1,10 @@
#!/bin/bash -e
source reporting.sh
-REPORT=/tmp/report.txt
-LOCAL_REPORT=report/report.txt
+TARGET_MAC=$1
+REPORT=$2
+
+LOCAL_REPORT=/report/macoui.txt
CONFIG=/config/device/module_config.json
LOG=/tmp/nmap.log
RESULT_LINES=/tmp/result_lines.txt
@@ -18,8 +20,6 @@ java -jar mac_oui/build/libs/mac_oui-all.jar $TARGET_MAC
RESULT_AND_SUMMARY="$(grep "RESULT" $LOCAL_REPORT)"
grep -v "RESULT" $LOCAL_REPORT >> $REDACTED_LOG
-# For testing module timeout.
-sleep 10
TEST_RESULT=$(cat $REDACTED_LOG)
diff --git a/subset/network/test_network b/subset/network/test_network
index 9e25ef0add..15b642f456 100755
--- a/subset/network/test_network
+++ b/subset/network/test_network
@@ -2,14 +2,25 @@
REPORT=/tmp/report.txt
+STARTUP=/scans/startup.pcap
MONITOR=/scans/monitor.pcap
-MODULE_CONFIG=/config/device/module_config.json
-EXCLUDES=infastructure_excludes.json
-python network_tests.py connection.dhcp_long $MONITOR $TARGET_IP
+# General Network Tests
python network_tests.py connection.min_send $MONITOR $TARGET_IP
python network_tests.py communication.type.broadcast $MONITOR $TARGET_IP
-python network_tests.py protocol.app_min_send $MONITOR $TARGET_IP $MODULE_CONFIG $EXCLUDES
-python network_tests.py network.ntp.support $MONITOR $TARGET_IP
-cat report.txt >> $REPORT
+cat network_tests.txt >> $REPORT
+
+# NTP Tests
+python ntp_tests.py connection.network.ntp_support $STARTUP $MONITOR
+python ntp_tests.py connection.network.ntp_update $STARTUP $MONITOR
+
+cat ntp_tests.txt >> $REPORT
+
+# MACOUI Test
+./run_macoui_test $TARGET_MAC $REPORT
+
+# DNS Tests
+python dns_tests.py connection.dns.hostname_connect $MONITOR $TARGET_IP
+
+cat dns_tests.txt >> $REPORT
diff --git a/subset/pentests/readme.md b/subset/pentests/readme.md
index 83b4c8ae32..845555fe11 100644
--- a/subset/pentests/readme.md
+++ b/subset/pentests/readme.md
@@ -33,12 +33,20 @@ Tests included in this module:
## test_nmap
The nmap module uses the nmap tool to check open ports and validates them in relation to the policy that is set in the module_config.json file or files.
+It also checks that there isn't any HTTP server running on any open port.
Tests included in this module:
- security.nmap.ports
+- security.nmap.http
### Conditions for security.nmap.ports
- pass -> all the ports configured in module_config.json agree with the allow/deny policy
- fail -> one or more of the ports configured in module_config.json do not agree with the allow/deny policy
+
+### Conditions for security.nmap.http
+
+- pass -> there is no HTTP server running on any of the open ports
+- fail -> one or more of the ports is running an HTTP server
+
diff --git a/subset/pentests/test_nmap b/subset/pentests/test_nmap
index 5fec9ac7ac..06159d0b21 100755
--- a/subset/pentests/test_nmap
+++ b/subset/pentests/test_nmap
@@ -2,18 +2,31 @@
source reporting.sh
CONFIG=/config/device/module_config.json
-
REPORT=/tmp/report.txt
-LOG=/tmp/nmap.log
-OPENPORTSLIST_LOG=/tmp/nmap.ports.log
-REDACTED_LOG=/tmp/nmap.report.log
-TEST_NAME="security.ports.nmap"
-TEST_DESCRIPTION="Automatic TCP/UDP port scan using nmap"
-SUMMARY=""
+# security.nmap.ports test variables
+REPORT_NMAP=/tmp/report_nmap.txt
+LOG_NMAP=/tmp/nmap.log
+OPENPORTSLIST_LOG_NMAP=/tmp/nmap.ports.log
+REDACTED_LOG_NMAP=/tmp/nmap.report.log
+TEST_NAME_NMAP="security.nmap.ports"
+TEST_DESCRIPTION_NMAP="Automatic TCP/UDP port scan using nmap"
+SUMMARY_NMAP=""
+
+# security.nmap.http test variables
+REPORT_HTTP=/tmp/report_http.txt
+LOG_HTTP=/tmp/http.log
+OPENPORTSLIST_LOG_HTTP=/tmp/http.ports.log
+REDACTED_LOG_HTTP=/tmp/http.report.log
+TEST_NAME_HTTP="security.nmap.http"
+TEST_DESCRIPTION_HTTP="Check that the device does not have open ports exposing an unencrypted web interface using HTTP"
+SUMMARY_HTTP=""
-rm -f $LOG $REDACTED_LOG $OPENPORTSLIST_LOG $REPORT
+# remove temporary files
+rm -f $LOG_NMAP $REDACTED_LOG_NMAP $OPENPORTSLIST_LOG_NMAP $REPORT_NMAP
+rm -f $LOG_HTTP $REDACTED_LOG_HTTP $OPENPORTSLIST_LOG_HTTP $REPORT_HTTP
+# configure and run the security.nmap.ports test
if [ -f $CONFIG ]; then
echo Extracting servers config from $CONFIG
else
@@ -31,7 +44,7 @@ nc -nzv $TARGET_IP -w 5 23 || true
sleep 1
option="-sT"
-portslist=-p1-1024
+portslist=-p1-65535
if [ -f $CONFIG ]; then
# get list of ports to be scanned from module_config.json
ports="U:"
@@ -50,21 +63,20 @@ if [ -f $CONFIG ]; then
if [ $portslist != "U:T:" ]; then
portslist="-p$portslist"
else
- portslist=-p1-1024
+ portslist=-p1-65535
fi
fi
echo -e "\nTesting target $TARGET_IP to check open ports $portslist"
-nmap -v -n -T5 $option --host-timeout=4m --open $portslist -oG $LOG $TARGET_IP > /dev/null
-cat $LOG
+nmap -v -n -T5 $option --host-timeout=4m --open $portslist -oG $LOG_NMAP $TARGET_IP > /dev/null
+cat $LOG_NMAP
-touch $REDACTED_LOG
-cat $LOG | tee -a $REDACTED_LOG
-touch $REPORT
-touch $OPENPORTSLIST_LOG
+touch $REDACTED_LOG_NMAP
+cat $LOG_NMAP | tee -a $REDACTED_LOG_NMAP
+touch $REPORT_NMAP
+touch $OPENPORTSLIST_LOG_NMAP
rm -f .fail
-openportslist=" - "
-grep -oh '[0-9]*/open[^[:space:]]*' $LOG | while IFS=/ read -ra parts; do
+grep -oh '[0-9]*/open[^[:space:]]*' $LOG_NMAP | while IFS=/ read -ra parts; do
state=${parts[1]}
if [ "$state" == open ]; then
if [ -f $CONFIG ]; then
@@ -73,14 +85,14 @@ grep -oh '[0-9]*/open[^[:space:]]*' $LOG | while IFS=/ read -ra parts; do
allowed=$(jq ".servers.$proto.ports.\"$port\".allowed" $CONFIG)
if [ "$allowed" != true ]; then
touch .fail
- echo Failing ${parts[*]} | sed 's/,$//' | tee -a $REDACTED_LOG
- echo ${parts[0]}"," | tee -a $OPENPORTSLIST_LOG
+ echo Failing ${parts[*]} | sed 's/,$//' | tee -a $REDACTED_LOG_NMAP
+ echo -n ${parts[0]}"," | tee -a $OPENPORTSLIST_LOG_NMAP
else
- echo Allowing ${parts[*]} | sed 's/,$//' | tee -a $REDACTED_LOG
+ echo Allowing ${parts[*]} | sed 's/,$//' | tee -a $REDACTED_LOG_NMAP
fi
else
- echo Open port ${parts[*]} | sed 's/,$//' | tee -a $REDACTED_LOG
- echo ${parts[0]}"," | tee -a $OPENPORTSLIST_LOG
+ echo Open port ${parts[*]} | sed 's/,$//' | tee -a $REDACTED_LOG_NMAP
+ echo -n ${parts[0]}"," | tee -a $OPENPORTSLIST_LOG_NMAP
touch .fail
fi
fi
@@ -88,19 +100,67 @@ done
if [ -f .fail ]; then
echo Open ports:
- cat $REDACTED_LOG
+ cat $REDACTED_LOG_NMAP
+ result=fail
+ SUMMARY_NMAP="Some disallowed ports are open: `cat $OPENPORTSLIST_LOG_NMAP | sed 's/,$//'`."
+else
+ echo No invalid ports found. | tee -a $REDACTED_LOG_NMAP
+ result=pass
+ SUMMARY_NMAP="Only allowed ports found open."
+fi
+
+RESULT_AND_SUMMARY_NMAP="RESULT $result $TEST_NAME_NMAP $SUMMARY_NMAP"
+
+# configure and run the security.nmap.http test
+option="-A --script http-methods" # Full NSE scan with the http-methods script
+portslist=-p- # scan all ports
+echo -e "\nTesting target $TARGET_IP to check open ports exposing HTTP servers"
+nmap -v -n -T5 $option --host-timeout=4m --open $portslist -oG $LOG_HTTP $TARGET_IP > /dev/null
+cat $LOG_HTTP
+
+touch $REDACTED_LOG_HTTP
+cat $LOG_HTTP | tee -a $REDACTED_LOG_HTTP
+touch $REPORT_HTTP
+touch $OPENPORTSLIST_LOG_HTTP
+rm -f .fail
+grep -oh '[0-9]*/open[^[:space:]]*' $LOG_HTTP | while IFS=/ read -ra parts; do
+ state=${parts[1]}
+ if [ "$state" == open ]; then
+ port=${parts[0]}
+ proto=${parts[4]}
+ echo $proto
+ if [ "$proto" == http ]; then
+ touch .fail
+ echo Failing ${parts[*]} | sed 's/,$//' | tee -a $REDACTED_LOG_HTTP
+ echo -n ${parts[0]}"," | tee -a $OPENPORTSLIST_LOG_HTTP
+ fi
+ fi
+done
+
+if [ -f .fail ]; then
+ echo Open http ports:
+ cat $REDACTED_LOG_HTTP
result=fail
- SUMMARY="Some disallowed ports are open: `cat $OPENPORTSLIST_LOG | sed 's/,$//'`"
+ SUMMARY_HTTP="Some ports are running http servers: `cat $OPENPORTSLIST_LOG_HTTP | sed 's/,$//'`."
else
- echo No invalid ports found. | tee -a $REDACTED_LOG
+ echo No running http servers have been found. | tee -a $REDACTED_LOG_HTTP
result=pass
- SUMMARY="Only allowed ports found open."
+ SUMMARY_HTTP="No running http servers have been found."
fi
-RESULT_AND_SUMMARY="RESULT $result $TEST_NAME $SUMMARY"
+RESULT_AND_SUMMARY_HTTP="RESULT $result $TEST_NAME_HTTP $SUMMARY_HTTP"
+
+# output test results to the report file
+write_out_result $REPORT_NMAP \
+ "$TEST_NAME_NMAP" \
+ "$TEST_DESCRIPTION_NMAP" \
+ "$(cat $REDACTED_LOG_NMAP)" \
+ "$RESULT_AND_SUMMARY_NMAP"
+
+write_out_result $REPORT_HTTP \
+ "$TEST_NAME_HTTP" \
+ "$TEST_DESCRIPTION_HTTP" \
+ "$(cat $REDACTED_LOG_HTTP)" \
+ "$RESULT_AND_SUMMARY_HTTP"
-write_out_result $REPORT \
- "$TEST_NAME" \
- "$TEST_DESCRIPTION" \
- "$(cat $REDACTED_LOG)" \
- "$RESULT_AND_SUMMARY"
+cat $REPORT_NMAP $REPORT_HTTP > $REPORT
\ No newline at end of file
diff --git a/subset/security/Dockerfile.test_password b/subset/security/Dockerfile.test_password
index cbe38728b0..fb4f7481f1 100644
--- a/subset/security/Dockerfile.test_password
+++ b/subset/security/Dockerfile.test_password
@@ -1,11 +1,9 @@
FROM daqf/aardvark:latest
-RUN $AG update && $AG install openjdk-8-jre
-RUN $AG update && $AG install openjdk-8-jdk git
-RUN $AG update && $AG install curl
-RUN $AG update && $AG install ncrack hydra nmap
+# Get dependencies
+RUN $AG update && $AG install curl ncrack medusa nmap git
-COPY subset/security .
-RUN cd security_passwords && ./gradlew shadowJar
-RUN ls -l security_passwords/build/libs/security_passwords-1.0-SNAPSHOT-all.jar
+COPY subset/security/password .
+
+# Run the test
CMD ["./test_password"]
diff --git a/subset/security/Dockerfile.test_ssh b/subset/security/Dockerfile.test_ssh
new file mode 100644
index 0000000000..aa701b5550
--- /dev/null
+++ b/subset/security/Dockerfile.test_ssh
@@ -0,0 +1,7 @@
+FROM daqf/aardvark:latest
+
+RUN $AG update && $AG install nmap
+
+COPY subset/security/test_ssh .
+
+CMD ./test_ssh
diff --git a/subset/security/build.conf b/subset/security/build.conf
index 763d155e46..26876f4343 100644
--- a/subset/security/build.conf
+++ b/subset/security/build.conf
@@ -1,3 +1,4 @@
build subset/security
add tls
add password
+add ssh
diff --git a/subset/security/password/create_brute_force_dictionaries b/subset/security/password/create_brute_force_dictionaries
new file mode 100755
index 0000000000..14345dc17e
--- /dev/null
+++ b/subset/security/password/create_brute_force_dictionaries
@@ -0,0 +1,148 @@
+#!/bin/bash
+
+# A script to retrieve and collate several raw brute-force dictionaries into a colon-separated file with the format :username:password, and also into two separate username and password lists.
+#
+# Make sure this script is run in the password directory.
+#
+# Usage ./create_brute_force_dictionaries
+
+RAW_DICTIONARY_DIR="resources/raw"
+DEFAULT_DICTIONARY_DIR="resources/default"
+
+MANUFACTURER_DEFAULTS_DICTIONARY="$RAW_DICTIONARY_DIR/manufacturer.csv"
+SSH_DICTIONARY="$RAW_DICTIONARY_DIR/ssh.txt"
+TELNET_DICTIONARY="$RAW_DICTIONARY_DIR/telnet.txt"
+
+OUTPUT_DICTIONARY="$DEFAULT_DICTIONARY_DIR/dictionary.txt"
+TMP_DICTIONARY="$DEFAULT_DICTIONARY_DIR/tmp_dictionary.txt"
+USERNAMES_LIST="$DEFAULT_DICTIONARY_DIR/usernames.txt"
+PASSWORDS_LIST="$DEFAULT_DICTIONARY_DIR/passwords.txt"
+
+# Retrieve the raw dictionary files from the gitlab source.
+# $1 Manufacturer dictionary
+# $2 SSH dictionary
+# $3 Telnet dictionary
+# $4 Raw directory
+function retrieve_raw_dictionaries() {
+ mkdir -p $4
+
+ curl "https://gitlab.com/kalilinux/packages/seclists/-/raw/094459e5d757faccfcb44375a2e4c9602d5984d4/Passwords/Default-Credentials/default-passwords.csv" \
+ --create-dirs --output $1
+ curl "https://gitlab.com/kalilinux/packages/seclists/-/raw/094459e5d757faccfcb44375a2e4c9602d5984d4/Passwords/Default-Credentials/ssh-betterdefaultpasslist.txt" \
+ --create-dirs --output $2
+ curl "https://gitlab.com/kalilinux/packages/seclists/-/raw/094459e5d757faccfcb44375a2e4c9602d5984d4/Passwords/Default-Credentials/telnet-betterdefaultpasslist.txt" \
+ --create-dirs --output $3
+}
+
+# Create the temporary and output dictionaries, and remove existing ones.
+# $1 Temporary dictionary file
+# $2 Output dictionary file
+# $3 Default dictionary directory
+function create_dictionary() {
+ mkdir -p $3
+
+ if [ -f $1 ]; then
+ rm $1
+ fi
+
+ if [ -f $2 ]; then
+ rm $2
+ fi
+
+ touch $1
+ touch $2
+}
+
+# Helps avoid certain special characters which cause grep to fail
+# $1 Line string in file
+function line_is_invalid_csv() {
+ echo $1 | grep -sqE "^\".*\"|\s|,,"
+}
+
+# Helps avoid certain special usernames/passwords which cause grep to fail.
+# $1 Username or password string
+function username_or_password_is_invalid() {
+ echo $1 | grep -sqE "|"
+}
+
+# Append the username and password pair into the output dictionary with colon separation.
+# $1 Username
+# $2 Password
+# $3 Output dictionary
+function add_colon_pair_to_output_dictionary() {
+ if ! username_or_password_is_invalid $1; then
+ if ! username_or_password_is_invalid $2; then
+ echo "$1:$2" >> $3
+ fi
+ fi
+}
+
+# Convert lines in the manufacturer csv file into colon separated username:password pairs, then append them to file.
+# $1 Raw dictionary
+# $2 Output dictionary
+function append_manufacturer_csv_to_output_dictionary() {
+ while read LINE
+ do
+ if ! line_is_invalid_csv "$LINE"; then
+ IFS=',' read -ra CREDENTIAL_ARRAY <<< "$LINE"
+ USERNAME="${CREDENTIAL_ARRAY[1]}"
+ PASSWORD="${CREDENTIAL_ARRAY[2]}"
+ add_colon_pair_to_output_dictionary $USERNAME $PASSWORD $2
+ fi
+ done < $1
+}
+
+# Add colon separated txt file into the output dictionary.
+# $1 Output dictionary file
+# $2 Colon separated file
+function append_colon_separated_file_to_output_dictionary() {
+ cat $2 >> $1
+}
+
+# Runs a few cleanup commands which do the following:
+# - Remove trailing whitespace
+# - Sort and remove duplicates
+# - Finally, add a colon at the start of each line to make it suitable for use in medusa
+#
+# $1 Temporary dictionary file
+# $2 Output dictionary file
+function clean_output_dictionary() {
+ cat $1 | sed -E 's/\s+$//' | sort -u | sed -E 's/^/:/' > $2
+}
+
+# Removes the necessary bits from the full dictionary to create separate files for usernames and passwords.
+# $1 Output dictionary
+# $2 Usernames file
+# $3 Passwords file
+function create_username_and_password_list_from_dictionary() {
+ cat $1 | sed -E 's/^://' | sed -E 's/:.*$//' > $2
+ cat $1 | sed -E 's/^.*://' > $3
+}
+
+# Main function:
+
+echo Creating credential files...
+create_dictionary $TMP_DICTIONARY $OUTPUT_DICTIONARY $DEFAULT_DICTIONARY_DIR
+
+echo Retrieving raw dictionaries from sources
+retrieve_raw_dictionaries $MANUFACTURER_DEFAULTS_DICTIONARY $SSH_DICTIONARY $TELNET_DICTIONARY $RAW_DICTIONARY_DIR
+
+echo Parsing CSV file...
+append_manufacturer_csv_to_output_dictionary $MANUFACTURER_DEFAULTS_DICTIONARY $TMP_DICTIONARY
+
+echo Parsing SSH passwords file...
+append_colon_separated_file_to_output_dictionary $TMP_DICTIONARY $SSH_DICTIONARY
+
+echo Parsing telnet passwords file...
+append_colon_separated_file_to_output_dictionary $TMP_DICTIONARY $TELNET_DICTIONARY
+
+echo Cleaning up output dictionary...
+clean_output_dictionary $TMP_DICTIONARY $OUTPUT_DICTIONARY
+
+echo Creating extra dictionaries...
+create_username_and_password_list_from_dictionary $OUTPUT_DICTIONARY $USERNAMES_LIST $PASSWORDS_LIST
+
+echo Removing temporary dictionary...
+rm $TMP_DICTIONARY
+
+echo Done!
diff --git a/subset/security/password/resources/default/dictionary.txt b/subset/security/password/resources/default/dictionary.txt
new file mode 100644
index 0000000000..681fba73a6
--- /dev/null
+++ b/subset/security/password/resources/default/dictionary.txt
@@ -0,0 +1,1271 @@
+:11111111:11111111
+:11111:x-admin
+:123:234
+:1234:1234
+:22222222:22222222
+:**23646:23646
+:**266344:266344
+:266344:266344
+:2800:2800
+:31994:31994
+:666666:666666
+:7654321:7654321
+:880175445:11223344
+:888888:888888
+:acer:acer
+:acitoolkit:acitoolkit
+:Adam:29111991
+:ADAMS:WOOD
+:ADLDEMO:ADLDEMO
+:adm:adm
+:admin:0
+:admin:000000
+:admin:1111
+:admin:1111111
+:admin:123
+:admin:123123
+:admin:1234
+:admin:12345
+:admin:123456
+:Admin:123456
+:admin:1234admin
+:Admin:123qwe
+:admin:1988
+:Admin:1988
+:Admin1:Admin1
+:admin1:password
+:admin:2222
+:admin:22222
+:admin2:changeme
+:admin:4321
+:Admin:5001
+:admin:abc123
+:admin:access
+:admin:admin
+:admin:!admin
+:Admin:admin
+:Admin:Admin
+:ADMIN:admin
+:ADMIN:ADMIN
+:admin:admin000
+:Admin:admin1
+:admin:admin123
+:admin:admin1234
+:admin:adminadmin
+:admin:adslolitec
+:admin:adslroot
+:admin:AitbISP4eCiG
+:admin:allot
+:admin:alphaadmin
+:ADMIN:alphacom
+:admin:AlpheusDigital1010
+:admin:amigosw1
+:admin:asante
+:admin:Ascend
+:admin:asd
+:Admin:atc456
+:admin:atlantis
+:admin:avocent
+:admin:axis2
+:admin:barney
+:admin:barricade
+:Admin:Barricade
+:admin:bintec
+:admin:broadband
+:admin:brocade1
+:admin:cat1029
+:admin:changeit
+:admin:changeme
+:admin:cisco
+:admin:comcomcom
+:admin:conexant
+:admin:default
+:admin:demo
+:admin:detmond
+:admin:diamond
+:admin:dmr99
+:admin:draadloos
+:Admin:Emerson1
+:admin:epicrouter
+:Admin:epicrouter
+:admin@example.com:admin
+:admin:extendnet
+:admin:funkwerk
+:admin:gvt12345
+:admin:hagpolm1
+:admin:hello
+:admin:hipchat
+:admin:hp.com
+:Admin:ImageFolio
+:admin:imss7.0
+:admin:infrant1
+:admin:insecure
+:admin:ip20
+:admin:ip21
+:admin:ip3000
+:admin:ip305Beheer
+:admin:ip400
+:admin:ironport
+:admin:isee
+:Administrator:0000
+:administrator:1234
+:Administrator:3ware
+:Administrator:adaptec
+:Administrator:admin
+:ADMINISTRATOR:admin
+:administrator:administrator
+:Administrator:Administrator
+:ADMINISTRATOR:ADMINISTRATOR
+:administrator:Amx1234!
+:administrator:asecret
+:Administrator:changeme
+:Administrator:Fiery.1
+:Administrator:Gateway
+:Administrator:ggdaseuaimhrke
+:Administrator:letmein
+:Administrator:manage
+:administrator:password
+:Administrator:password
+:administrator:PlsChgMe!
+:Administrator:p@ssw0rd
+:Administrator:public
+:administrator:root
+:administrator:RSAAppliance
+:Administrator:smcadmin
+:Administrator:storageserver
+:Administrator:Unidesk1
+:Administrator:vision2
+:Administrator:Vision2
+:admin:j5Brn9
+:admin:Janitza
+:admin:jboss4
+:ADMIN:JETSPEED
+:admin:jvc
+:admin:leviton
+:admin:linga
+:admin:ManagementConsole2015
+:admin:meinsm
+:admin:michelangelo
+:admin:microbusiness
+:admin:mono
+:admin:motorola
+:admin:mp3mystic
+:admin:mu
+:admin:muze
+:admin:my_DEMARC
+:admin:netadmin
+:admin:NetCache
+:admin:netscreen
+:admin:NetSeq
+:admin:NetSurvibox
+:Admin:No
+:admin:none
+:admin:novell
+:admin:noway
+:admin:OCS
+:admin:OkiLAN
+:admin:pass
+:Admin:Pass
+:admin:password
+:Admin:password
+:ADMIN:PASSWORD
+:admin:passwort
+:admin:peribit
+:admin:pfsense
+:admin:phplist
+:admin:private
+:admin:public
+:admin:pwp
+:admin:rainbow
+:admin:raritan
+:admin:readwrite
+:admin:rmnetlm
+:admin:root
+:Admin:SECRET123
+:admin:secure
+:admin:security
+:admin:setup
+:admin:Sharp
+:admin:smallbusiness
+:admin:smcadmin
+:adminstat:OCS
+:adminstrator:changeme
+:Admin:Su
+:admin:superuser
+:admin:su@psir
+:admin:surecom
+:admin:switch
+:admin:symantec
+:admin:symbol
+:admin:Symbol
+:admin:synnet
+:admin:sysAdmin
+:admin:system
+:admin:TANDBERG
+:admin:tegile
+:admin:tlJwpbo6
+:admin:tomcat
+:admin:tsunami
+:adminttd:adminttd
+:admin:urchin
+:adminuser:OCS
+:admin:utstar
+:adminview:OCS
+:admin:waav
+:Admin:wago
+:admin:welcome
+:ADMIN:WELCOME
+:admin:x-admin
+:admin:year2000
+:admin:ZmqVfoSIP
+:admin:zoomadsl
+:adsl:adsl1234
+:adtec:none
+:ADVMAIL:HP
+:Alphanetworks:wapnd03cm_dkbs_dap2555
+:Alphanetworks:wapnd04cm_dkbs_dap3525
+:Alphanetworks:wapnd15_dlob_dap1522b
+:Alphanetworks:wrgac01_dlob.hans_dir865
+:Alphanetworks:wrgg15_di524
+:Alphanetworks:wrgg19_c_dlwbr_dir300
+:Alphanetworks:wrgn22_dlwbr_dir615
+:Alphanetworks:wrgn23_dlwbr_dir300b
+:Alphanetworks:wrgn23_dlwbr_dir600b
+:Alphanetworks:wrgn28_dlob_dir412
+:Alphanetworks:wrgn39_dlob.hans_dir645
+:Alphanetworks:wrgn39_dlob.hans_dir645_V1
+:Alphanetworks:wrgn49_dlob_dir600b
+:Alphanetworks:wrgnd08_dlob_dir815
+:amx:Amx1234!
+:amx:password
+:ANDY:SWORDFISH
+:anon:anon
+:anonymous:anonymous
+:anonymous:any
+:anonymous:any@
+:anonymous:Exabyte
+:anonymous:password
+:Any:12345
+:Any:Any
+:(any):TENmanUFactOryPOWER
+:AP:AP
+:aparker@geometrixx.info:aparker
+:apc:apc
+:APPLSYS:APPLSYS
+:APPLSYS:FND
+:APPLSYSPUB:FNDPUB
+:APPS:APPS
+:APPUSER:APPUSER
+:AQ:AQ
+:AQDEMO:AQDEMO
+:AQJAVA:AQJAVA
+:AQUSER:AQUSER
+:ARAdmin:AR#Admin#
+:ARCHIVIST:ARCHIVIST
+:AUDIOUSER:AUDIOUSER
+:AURORA@ORB@UNAUTHENTICATED:INVALID
+:AURORA$ORB$UNAUTHENTICATED:INVALID
+:author:author
+:autocad:autocad
+:BACKUP:BACKUP
+:backuponly:backuponly1
+:backuprestore:backuprestore1
+:basisk:basisk
+:Basisk:Basisk
+:bbs:bbs
+:bbsd-client:changeme2
+:bbsd-client:NULL
+:BC4J:BC4J
+:bciim:bciimpw
+:bcim:bcimpw
+:bcms:bcmspw
+:bcnas:bcnaspw
+:bewan:bewan
+:bin:sys
+:Blaeri:22332323
+:BLAKE:PAPER
+:blue:bluepw
+:Bobo:hello
+:both:tomcat
+:bpel:bpel
+:BRIO_ADMIN:BRIO_ADMIN
+:browse:browsepw
+:browse:looker
+:bubba:(unknown)
+:cablecom:router
+:cac_admin:cacadmin
+:CATALOG:CATALOG
+:c-comatic:xrtwk318
+:ccrusr:ccrusr
+:CDEMO82:CDEMO82
+:CDEMOCOR:CDEMOCOR
+:CDEMORID:CDEMORID
+:CDEMOUCB:CDEMOUCB
+:cellit:cellit
+:CENTRA:CENTRA
+:cgadmin:cgadmin
+:checkfs:checkfs
+:checkfsys:checkfsys
+:checksys:checksys
+:CHEY_ARCHSVR:CHEY_ARCHSVR
+:CICSUSER:CISSUS
+:CIDS:CIDS
+:cirros:cubswin:)
+:CIS:CIS
+:CISCO15:otbu+1
+:cisco:cisco
+:Cisco:Cisco
+:CISINFO:CISINFO
+:citel:password
+:CLARK:CLOTH
+:client:client
+:cloudera:cloudera
+:cmaker:cmaker
+:CMSBATCH:CMSBATCH
+:cn=orcladmin:welcome
+:Coco:hello
+:comcast:1234
+:COMPANY:COMPANY
+:COMPIERE:COMPIERE
+:computer:repair
+:conferencing:admin
+:config:biodata
+:corecess:corecess
+:core:phpreactor
+:CQSCHEMAUSER:PASSWORD
+:craft:craft
+:craft:craftpw
+:craft:crftpw
+:Craft:crftpw
+:(created):telus00
+:(created):telus99
+:crowd-openid-server:password
+:Crowd:password
+:CSG:SESAME
+:CSMIG:CSMIG
+:ctb_admin:sap123
+:CTXDEMO:CTXDEMO
+:CTXSYS:CTXSYS
+:cusadmin:highspeed
+:cust:custpw
+:customer:none
+:dadmin:dadmin
+:dadmin:dadmin01
+:daemon:daemon
+:davox:davox
+:db2fenc1:db2fenc1
+:db2inst1:db2inst1
+:dbase:dbase
+:DBA:SQL
+:DBDCCICS:DBDCCIC
+:DBI:MUMBLEFRATZ
+:DBSNMP:DBSNMP
+:DDIC:19920706
+:debian:debian
+:debian:sixaola
+:debian:temppwd
+:debug:d.e.b.u.g
+:debug:gubed
+:debug:synnet
+:d.e.b.u.g:User
+:default:
+:default:antslq
+:default:OxhlwSG8
+:default:S2fGqNFs
+:default:video
+:default:WLAN_AP
+:defug:synnet
+:DEMO8:DEMO8
+:DEMO9:DEMO9
+:demo:demo
+:DEMO:DEMO
+:demo:fai
+:Demo:password
+:demos:demos
+:DES:DES
+:deskalt:password
+:deskman:changeme
+:desknorm:password
+:deskres:password
+:DEV2000_DEMOS:DEV2000_DEMOS
+:dev:dev
+:Developer:isdev
+:device:apc
+:device:device
+:diag:danger
+:diag:switch
+:DIP:DIP
+:DISCOVERER_ADMIN:DISCOVERER_ADMIN
+:distrib:distrib0
+:disttech:4tas
+:disttech:disttech
+:disttech:etas
+:D-Link:D-Link
+:dm:telnet
+:dni:dni
+:dos:dos
+:dpn:changeme
+:draytek:1234
+:Draytek:1234
+:DSGATEWAY:DSGATEWAY
+:DSL:DSL
+:DSSYS:DSSYS
+:DTA:TJM
+:dvstation:dvst10n
+:eagle:eagle
+:EARLYWATCH:SUPPORT
+:echo:echo
+:echo:User
+:egcr:ergc
+:EJSADMIN:EJSADMIN
+:elk_user:forensics
+:emaq:4133
+:EMP:EMP
+:enable:cisco
+:eng:engineer
+:engmode:hawk201
+:enisa:enisa
+:enquiry:enquirypw
+:ESTOREUSER:ESTORE
+:eurek:eurek
+:EVENT:EVENT
+:EXFSYS:EXFSYS
+:expert:expert
+:Factory:56789
+:factory:Fact4EMC
+:fal:fal
+:fam:fam
+:fastwire:fw
+:fax:fax
+:FAX:FAX
+:FAXUSER:FAXUSER
+:FAXWORKS:FAXWORKS
+:fg_sysadmin:password
+:field:field
+:FIELD:HPONLY
+:FIELD:LOTUS
+:FIELD:MANAGER
+:FIELD:MGR
+:FIELD:SERVICE
+:field:support
+:FIELD:SUPPORT
+:FINANCE:FINANCE
+:firstsite:firstsite
+:Flo:hello
+:FND:FND
+:FORSE:FORSE
+:FROSTY:SNOWMAN
+:ftp_admi:kilo1987
+:ftp:ftp
+:ftp_inst:pbxk1064
+:ftp_nmc:tuxalize
+:ftp_oper:help1954
+:ftpuser:password
+:ftp:video
+:fwadmin:xceladmin
+:fwupgrade:fwupgrade
+:games:games
+:GATEWAY:GATEWAY
+:Gearguy:Geardog
+:GE:GE
+:geosolutions:Geos
+:glftpd:glftpd
+:GL:GL
+:god1:12345
+:god2:12345
+:gopher:gopher
+:GPFD:GPFD
+:GPLD:GPLD
+:guest:1234
+:guest:12345
+:guest1:guest
+:guest1:guest1
+:guest:guest
+:Guest:guest
+:Guest:Guest
+:GUEST:GUEST
+:guest:guestgue
+:GUEST:GUESTGUE
+:GUEST:GUESTGUEST
+:guest:Janitza
+:guest:truetime
+:GUEST:TSEUG
+:guest:User
+:guru:*3noguru
+:halt:halt
+:HCPARK:HCPARK
+:HELLO:FIELD.SUPPORT
+:hello:hello
+:HELLO:MANAGER.SYS
+:HELLO:MGR.SYS
+:HELLO:OP.OPERATOR
+:helpdesk:OCS
+:HLW:HLW
+:(hostname/ipaddress):sysadmin
+:HPLASER:HPLASER
+:HPSupport:badg3r5
+:HR:HR
+:hsa:hsadb
+:hscroot:abc123
+:HTTP:HTTP
+:hunter:hunter
+:hxeadm:HXEHana1
+:ibm:2222
+:ibm:password
+:ibm:service
+:IBMUSER:SYS1
+:iclock:timely
+:ilom-admin:ilom-admin
+:ilom-operator:ilom-operator
+:images:images
+:IMAGEUSER:IMAGEUSER
+:IMEDIA:IMEDIA
+:inads:inads
+:inads:indspw
+:informix:informix
+:init:initpw
+:installer:1000
+:installer:installer
+:install:install
+:install:secret
+:intel:intel
+:intermec:intermec
+:internal:oracle
+:IntraStack:Asante
+:IntraSwitch:Asante
+:ioFTPD:ioFTPD
+:IS_$hostname:IS_$hostname
+:itsadmin:init
+:james:james
+:jdoe@geometrixx.info:jdoe
+:JMUSER:JMUSER
+:Joe:hello
+:joe:password
+:JONES:STEEL
+:JWARD:AIROPLANE
+:keyscan:KEYSCAN
+:khan:kahn
+:kodi:kodi
+:l2:l2
+:L2LDEMO:L2LDEMO
+:l3:l3
+:LASER:LASER
+:LASERWRITER:LASERWRITER
+:LBACSYS:LBACSYS
+:LDAP_Anonymous:LdapPassword_1
+:leo:leo
+:LIBRARIAN:SHELVES
+:Liebert:Liebert
+:live:live
+:LocalAdministrator:#l@$ak#.lk;0@P
+:localadmin:localadmin
+:locate:locatepw
+:login:0000
+:login:access
+:login:admin
+:login:password
+:lpadmin:lpadmin
+:lpadm:lpadm
+:lp:bin
+:lp:lineprin
+:lp:lp
+:LR-ISDN:LR-ISDN
+:lynx:lynx
+:m1122:m1122
+:m202:m202
+:MAIL:HPOFFICE
+:mail:mail
+:MAIL:MAIL
+:MAIL:MPE
+:MAIL:REMOTE
+:MAIL:TELESUP
+:maintainer:admin
+:maintainer:pbcpbn(add-serial-number)
+:maint:maint
+:MAINT:MAINT
+:maint:maintpw
+:maint:ntacdmax
+:maint:password
+:maint:rwmaint
+:Manager:Admin
+:MANAGER:COGNOS
+:manager:friend
+:MANAGER:HPOFFICE
+:MANAGER:ITF3000
+:manager:manager
+:Manager:Manager
+:MANAGER:SECURITY
+:managers:managers
+:MANAGER:SYS
+:MANAGER:TCH
+:MANAGER:TELESUP
+:man:man
+:manuf:xxyyzz
+:mary:password
+:master:master
+:MASTER:PASSWORD
+:master:themaster01
+:MayGion:maygion.com
+:McdataSE:redips
+:MCUser:MCUser1
+:MD110:help
+:MDDEMO_CLERK:CLERK
+:MDDEMO:MDDEMO
+:MDDEMO_MGR:MGR
+:MDSYS:MDSYS
+:mediator:mediator
+:me:me
+:memotec:supervisor
+:Menara:Menara
+:mfd:mfd
+:MFG:MFG
+:mg3500:merlin
+:MGE:VESOFT
+:MGR:CAROLIAN
+:MGR:CCC
+:MGR:CNAS
+:MGR:COGNOS
+:MGR:CONV
+:MGR:HPDESK
+:MGR:HPOFFICE
+:MGR:HPONLY
+:MGR:HPP187
+:MGR:HPP189
+:MGR:HPP196
+:MGR:INTX3
+:MGR:ITF3000
+:MGR:NETBASE
+:MGR:REGO
+:MGR:RJE
+:MGR:ROBELLE
+:MGR:SECURITY
+:MGR:SYS
+:MGR:TELESUP
+:MGR:VESOFT
+:MGR:WORD
+:MGR:XLSERVER
+:MGWUSER:MGWUSER
+:MICRO:RSX
+:MIGRATE:MIGRATE
+:MILLER:MILLER
+:misp:Password1234
+:mlusr:mlusr
+:MMO2:MMO2
+:mobile:dottie
+:MODTEST:YES
+:Moe:hello
+:monitor:monitor
+:MOREAU:MOREAU
+:mountfs:mountfs
+:mountfsys:mountfsys
+:mountsys:mountsys
+:MSHOME:MSHOME
+:mso:w0rkplac3rul3s
+:MTSSYS:MTSSYS
+:MTS_USER:MTS_PASSWORD
+:MTYSYS:MTYSYS
+:museadmin:Muse!Admin
+:musi1921:Musi%1921
+:musi1921:Musii%1921
+:MXAGENT:MXAGENT
+:myshake:shakeme
+:naadmin:naadmin
+:n.a:guardone
+:NAMES:NAMES
+:nao:nao
+:NAU:NAU
+:ncrm:ncrm
+:netbotz:netbotz
+:netlink:netlink
+:NetLinx:password
+:netman:netman
+:netopia:netopia
+:netrangr:attack
+:netscreen:netscreen
+:NETWORK:NETWORK
+:news:news
+:newuser:wampp
+:nexthink:123456
+:NICONEX:NICONEX
+:nm2user:nm2user
+:nms:nmspw
+:nobody:nobody
+:none:0
+:none:4321
+:none:admin
+:none:blank
+:none:none
+:none:private
+:none:sysadm
+:nop:12345
+:nop:123454
+:NSA:nsa
+:OAS_PUBLIC:OAS_PUBLIC
+:OCITEST:OCITEST
+:ODM_MTR:MTRPW
+:ODM:ODM
+:ODSCOMMON:ODSCOMMON
+:ods:ods
+:ODS:ODS
+:OEMADM:OEMADM
+:OEMREP:OEMREP
+:OE:OE
+:OLAPDBA:OLAPDBA
+:OLAPSVR:INSTANCE
+:OLAPSYS:MANAGER
+:OMWB_EMULATION:ORACLE
+:onlime_r:12345
+:OO:OO
+:openhabian:openhabian
+:OPENSPIRIT:OPENSPIRIT
+:OPERATIONS:OPERATIONS
+:OPERATNS:OPERATNS
+:operator:admin
+:operator:$chwarzepumpe
+:OPERATOR:COGNOS
+:OPERATOR:DISC
+:operator:mercury
+:operator:operator
+:Operator:Operator
+:OPERATOR:SUPPORT
+:OPERATOR:SYS
+:OPERATOR:SYSTEM
+:Oper:Oper
+:OPER:OPER
+:op:op
+:op:operator
+:oracle:oracle
+:ORAREGSYS:ORAREGSYS
+:ORASSO:ORASSO
+:ORDPLUGINS:ORDPLUGINS
+:ORDSYS:ORDSYS
+:osbash:osbash
+:osboxes:osboxes.org
+:osmc:osmc
+:OSP22:OSP22
+:OUTLN:OUTLN
+:overseer:overseer
+:OWA:OWA
+:OWA_PUBLIC:OWA_PUBLIC
+:OWNER:OWNER
+:PACSLinkIP:NetServer
+:PANAMA:PANAMA
+:patrol:patrol
+:PATROL:PATROL
+:PBX:PBX
+:PCUSER:SYS
+:pepino:pepino
+:PERFSTAT:PERFSTAT
+:PFCUser:240653C9467E45
+:piranha:piranha
+:piranha:q
+:pi:raspberry
+:PLEX:PLEX
+:plexuser:rasplex
+:PLMIMService:NetServer
+:PLSQL:SUPERSECRET
+:PM:PM
+:pnadmin:pnadmin
+:PO7:PO7
+:PO8:PO8
+:politically:correct
+:poll:tech
+:Polycom:SpIp
+:PO:PO
+:PORTAL30_DEMO:PORTAL30_DEMO
+:PORTAL30:PORTAL30
+:PORTAL30:PORTAL31
+:PORTAL30_PUBLIC:PORTAL30_PUBLIC
+:PORTAL30_SSO:PORTAL30_SSO
+:PORTAL30_SSO_PS:PORTAL30_SSO_PS
+:PORTAL30_SSO_PUBLIC:PORTAL30_SSO_PUBLIC
+:POST:BASE
+:postmaster:postmast
+:POST:POST
+:POWERCARTUSER:POWERCARTUSER
+:POWERCHUTE:APC
+:powerdown:powerdown
+:praisenetwork:perfectpraise
+:PRIMARY:PRIMARY
+:primenet:primenet
+:primenet:primeos
+:primeos:prime
+:primeos:primeos
+:prime:prime
+:prime:primeos
+:primos_cs:prime
+:primos_cs:primos
+:PRINTER:PRINTER
+:PRINT:PRINT
+:PRODCICS:PRODCICS
+:PRODDTA:PRODDTA
+:PROG:PROG
+:prtgadmin:prtgadmin
+:PSEAdmin:$secure$
+:public:publicpass
+:PUBSUB1:PUBSUB1
+:PUBSUB:PUBSUB
+:pw:pwpw
+:pwrchute:pwrchute
+:pyimagesearch:deeplearning
+:qbf77101:hexakisoctahedron
+:QDBA:QDBA
+:qpgmr:qpgmr
+:QS_ADM:QS_ADM
+:QS_CBADM:QS_CBADM
+:QS_CB:QS_CB
+:QS_CS:QS_CS
+:qsecofr:11111111
+:qsecofr:22222222
+:qsecofr:qsecofr
+:qserv:qserv
+:QS_ES:QS_ES
+:QS_OS:QS_OS
+:QS:QS
+:QSRV:11111111
+:QSRV:22222222
+:qsrvbas:qsrvbas
+:qsrv:qsrv
+:QSRV:QSRV
+:qsvr:ibmcel
+:qsvr:qsvr
+:QS_WS:QS_WS
+:qsysopr:qsysopr
+:quser:quser
+:radware:radware
+:RAID:hpt
+:rapport:r@p8p0r+
+:rcust:rcustpw
+:rdc123:rdc123
+:readonly:apc
+:readonly:lucenttech2
+:read:synnet
+:readwrite:lucenttech1
+:recover:recover
+:redline:redline
+:remnux:malware
+:REPADMIN:REPADMIN
+:replication-receiver:replication-receiver
+:Replicator:iscopy
+:replicator:replicator
+:REP_MANAGER:DEMO
+:REPORTS_USER:OEM_TEMP
+:REP_OWNER:DEMO
+:REP_OWNER:REP_OWNER
+:RE:RE
+:restoreonly:restoreonly1
+:rje:rje
+:RMAIL:RMAIL
+:RMAN:RMAN
+:RMUser1:password
+:RNIServiceManager:NetServer
+:Rodopi:Rodopi
+:role1:role1
+:role1:tomcat
+:role:changethis
+:root:00000000
+:root:1001chin
+:root:1111
+:root:1234
+:root:12345
+:root:123456
+:root:20080826
+:root:3ep5w2u
+:root:54321
+:root:5up
+:root:666666
+:root:7ujMko0admin
+:root:7ujMko0vizxv
+:root:888888
+:root:8RttoTriz
+:root:admin
+:root:ahetzip8
+:root:alpine
+:root:anko
+:root:anni2013
+:root:arcsight
+:root:ascend
+:root:attack
+:root:ax400
+:root:bagabu
+:root:blablabla
+:root:blackarch
+:root:blender
+:root:brightmail
+:root:calvin
+:root:cat1029
+:root:ceadmin
+:root:changeme
+:root:changeonfirstlogin
+:root:changethis
+:root:china123
+:root:Cisco
+:root:ciwuxe
+:root:cms500
+:root:cubox-i
+:root:cxlinux
+:root:D13HH[
+:root:dasdec1
+:root:davox
+:root:debian
+:root:default
+:root:dottie
+:root:dreambox
+:root:fai
+:root:fibranne
+:root:fidel123
+:root:freenas
+:root:ggdaseuaimhrke
+:root:GM8182
+:root:hi3518
+:root:hp
+:root:ikwb
+:root:indigo
+:root:juantech
+:root:jvbzd
+:root:klv123
+:root:klv1234
+:root:kn1TG7psLu
+:root:leostream
+:root:libreelec
+:root:linux
+:root:logapp
+:root:manager
+:root:max2play
+:root:mozart
+:root:mpegvideo
+:root:Mua'dib
+:root:MuZhlo9n%8!G
+:root:nas4free
+:root:NeXT
+:root:NM1$88
+:root:nokia
+:root:nosoup4u
+:root:nsi
+:root:oelinux123
+:root:openelec
+:root:openmediavault
+:root:orion99
+:root:osboxes.org
+:root:palosanto
+:root:par0t
+:root:pass
+:root:passw0rd
+:root:password
+:root:p@ck3tf3nc3
+:root:pixmet2003
+:root:plex
+:root:qwasyx21
+:root:rasplex
+:root:realtek
+:root:resumix
+:root:root
+:root:!root
+:ROOT:ROOT
+:root:root01
+:root:ROOT500
+:root:rootme
+:root:rootpasswd
+:root:screencast
+:root:secur4u
+:root:Serv4EMC
+:root:sipwise
+:root:sixaola
+:root:stxadmin
+:root:sun123
+:root:system
+:root:t00lk1t
+:root:t0talc0ntr0l4!
+:root:TANDBERG
+:root:timeserver
+:root:toor
+:root:tslinux
+:root:ubnt
+:root:ubuntu1404
+:root:uClinux
+:root:unitrends1
+:root:user
+:root:vagrant
+:root:vertex25
+:root:video
+:root:vizxv
+:Root:wago
+:root:wyse
+:root:xc3511
+:root:xmhdipc
+:root:xoa
+:root:ys123456
+:root:zlxx
+:root:zlxx.
+:root:Zte521
+:ro:ro
+:RSBCMON:SYS
+:rwa:rwa
+:rw:rw
+:sa:changeonfirstlogin
+:SAMPLE:SAMPLE
+:sansforensics:forensics
+:sans:training
+:SAP*:06071992
+:SAP*:7061992
+:SA:PASSWORD
+:SAPCPIC:admin
+:SAPCPIC:ADMIN
+:SAP*:PASS
+:SAPR3:SAP
+:SAP:SAPR3
+:sa:sasasa
+:savelogs:crash
+:scmadmin:scmchangeme
+:sconsole:12345
+:SCOTT:TIGER
+:SDOS_ICSAP:SDOS_ICSAP
+:SECDEMO:SECDEMO
+:secofr:secofr
+:security:security
+:sedacm:secacm
+:self:system
+:SERVICECONSUMER1:SERVICECONSUMER1
+:service:service
+:Service:Service
+:service:smile
+:servlet:manager
+:setpriv:system
+:setup:changeme
+:setup:changeme!
+:setup:setup
+:SH:SH
+:shutdown:shutdown
+:signa:signa
+:siteadmin:siteadmin
+:siteadmin:toplayer
+:SITEMINDER:SITEMINDER
+:SLIDE:SLIDEPW
+:smc:smcadmin
+:SMDR:SECONDARY
+:snmp:nopasswd
+:snmp:snmp
+:spcl:0000
+:SPOOLMAN:HPOFFICE
+:$SRV:$SRV
+:ssladmin:ssladmin
+:ssp:ssp
+:stackato:stackato
+:STARTER:STARTER
+:status:readonly
+:stratacom:stratauser
+:STRAT_USER:STRAT_PASSWD
+:stuccoboy:100198
+:super:5777364
+:__super:(caclulated)
+:superdba:admin
+:super:juniper123
+:superman:21241036
+:superman:talent
+:super:super
+:super.super:master
+:super:superpass
+:super:surt
+:superuser:123456
+:superuser:admin
+:SUPERUSER:ANS#150
+:superuser:asante
+:SuperUser:kronites
+:superuser:superuser
+:SUPERVISOR:HARRIS
+:SUPERVISOR:NETFRAME
+:SUPERVISOR:NF
+:SUPERVISOR:NFI
+:supervisor:PlsChgMe!
+:supervisor:supervisor
+:SUPERVISOR:SUPERVISOR
+:SUPERVISOR:SYSTEM
+:supervisor:visor
+:support:h179350
+:support:support
+:support:supportpw
+:support:symantec
+:su:super
+:sweex:mysweex
+:SWPRO:SWPRO
+:SWUSER:SWUSER
+:Symbol:Symbol
+:SYMPA:SYMPA
+:sync:sync
+:sysadm:admin
+:sysadm:Admin
+:sysadm:admpw
+:sysadmin:master
+:sysadmin:nortel
+:sysadmin:password
+:sysadmin:sysadmin
+:sysadm:sysadm
+:SYSADM:SYSADM
+:sysadm:sysadmpw
+:sysadm:syspw
+:SYSA:SYSA
+:sys:bin
+:sysbin:sysbin
+:sys:change_on_install
+:SYS:CHANGE_ON_INSTALL
+:SYSDBA:masterkey
+:SYS:D_SYSPW
+:SYSMAN:oem_temp
+:SYSMAN:OEM_TEMP
+:sysopr:sysopr
+:Sysop:Sysop
+:sys:sys
+:sys:system
+:system:adminpwd
+:system_admin:system_admin
+:SYSTEM:D_SYSTPW
+:system:isp
+:system:manager
+:SYSTEM:MANAGER
+:system/manager:sys/change_on_install
+:system:mnet
+:system:password
+:system:prime
+:system:security
+:system:sys
+:system:system
+:system:weblogic
+:sys:uplink
+:t3admin:Trintech
+:TAHITI:TAHITI
+:target:password
+:tasman:tasmannet
+:Tasman:Tasmannet
+:TDOS_ICSAP:TDOS_ICSAP
+:teacher:password
+:tech:field
+:tech:nician
+:technician:yZgO8Bvj
+:tech:tech
+:telecom:telecom
+:Telecom:Telecom
+:tele:tele
+:tellabs:tellabs#1
+:temp1:password
+:TESTPILOT:TESTPILOT
+:test:test
+:TEST:TEST
+:tiger:tiger123
+:tomcat:changethis
+:tomcat:tomcat
+:toor:logapp
+:topicalt:password
+:topicnorm:password
+:topicres:password
+:TRACESRV:TRACE
+:TRACESVR:TRACE
+:TRAVEL:TRAVEL
+:trmcnfg:trmcnfg
+:trouble:trouble
+:TSDEV:TSDEV
+:TSUSER:TSUSER
+:TURBINE:TURBINE
+:ubnt:ubnt
+:ucenik23:ucenik
+:ULTIMATE:ULTIMATE
+:umountfs:umountfs
+:umountfsys:umountfsys
+:umountsys:umountsys
+:unix:unix
+:USER0:USER0
+:User:1001
+:User:1234
+:User:19750407
+:USER1:USER1
+:USER2:USER2
+:USER3:USER3
+:USER4:USER4
+:USER5:USER5
+:USER6:USER6
+:USER7:USER7
+:USER8:USER8
+:USER9:USER9
+:user_analyst:demo
+:user_approver:demo
+:user_author:demo
+:user_checker:demo
+:user_designer:demo
+:user_editor:demo
+:user_expert:demo
+:USERID:PASSW0RD
+:USERID:PASSWORD
+:user:Janitza
+:user_marketer:demo
+:username:password
+:Username:password
+:Username:Password
+:user:none
+:userNotUsed:userNotU
+:user:password
+:User:Password
+:user_pricer:demo
+:user:public
+:user_publisher:demo
+:USER_TEMPLATE:USER_TEMPLATE
+:user:tivonpw
+:user:user
+:User:user
+:User:User
+:USER:USER
+:user:user0000
+:user:USERP
+:UTLBSTATU:UTLESTAT
+:uucpadm:uucpadm
+:uucp:uucp
+:uwmadmin:password
+:vagrant:vagrant
+:VCSRV:VCSRV
+:veda:12871
+:vgnadmin:vgnadmin
+:viewuser:viewuser1
+:VIF_DEVELOPER:VIF_DEV_PWD
+:vikram:singh
+:VIRUSER:VIRUSER
+:VNC:winterm
+:volition:volition
+:vpasp:vpasp
+:VRR1:VRR1
+:VTAM:VTAM
+:WANGTEK:WANGTEK
+:webadmin:1234
+:WebAdmin:Admin
+:webadmin:webadmin
+:WebAdmin:WebBoard
+:webadmin:webibm
+:WEBADM:password
+:WEBCAL01:WEBCAL01
+:webdb:webdb
+:WEBDB:WEBDB
+:webguest:1
+:weblogic:weblogic
+:webmaster:webmaster
+:WEBREAD:WEBREAD
+:webshield:webshieldchangeme
+:web:web
+:whd:whd
+:WINDOWS_PASSTHRU:WINDOWS_PASSTHRU
+:WINSABRE:SABRE
+:WINSABRE:WINSABRE
+:WKSYS:WKSYS
+:wlcsystem:wlcsystem
+:wlpisystem:wlpisystem
+:wlseuser:wlsepassword
+:wlse:wlsedb
+:WP:HPOFFICE
+:wpsadmin:wpsadmin
+:wradmin:trancell
+:write:private
+:write:synnet
+:wVQxyQec:eomjbOBLLwbZeiKV
+:WWWUSER:WWWUSER
+:www:www
+:WWW:WWW
+:xd:xd
+:xmi_demo:sap123
+:XPRT:XPRT
+:XXSESS_MGRYY:X#1833
diff --git a/subset/security/password/resources/default/passwords.txt b/subset/security/password/resources/default/passwords.txt
new file mode 100644
index 0000000000..d9f319e79c
--- /dev/null
+++ b/subset/security/password/resources/default/passwords.txt
@@ -0,0 +1,1271 @@
+11111111
+x-admin
+234
+1234
+22222222
+23646
+266344
+266344
+2800
+31994
+666666
+7654321
+11223344
+888888
+acer
+acitoolkit
+29111991
+WOOD
+ADLDEMO
+adm
+0
+000000
+1111
+1111111
+123
+123123
+1234
+12345
+123456
+123456
+1234admin
+123qwe
+1988
+1988
+Admin1
+password
+2222
+22222
+changeme
+4321
+5001
+abc123
+access
+admin
+!admin
+admin
+Admin
+admin
+ADMIN
+admin000
+admin1
+admin123
+admin1234
+adminadmin
+adslolitec
+adslroot
+AitbISP4eCiG
+allot
+alphaadmin
+alphacom
+AlpheusDigital1010
+amigosw1
+asante
+Ascend
+asd
+atc456
+atlantis
+avocent
+axis2
+barney
+barricade
+Barricade
+bintec
+broadband
+brocade1
+cat1029
+changeit
+changeme
+cisco
+comcomcom
+conexant
+default
+demo
+detmond
+diamond
+dmr99
+draadloos
+Emerson1
+epicrouter
+epicrouter
+admin
+extendnet
+funkwerk
+gvt12345
+hagpolm1
+hello
+hipchat
+hp.com
+ImageFolio
+imss7.0
+infrant1
+insecure
+ip20
+ip21
+ip3000
+ip305Beheer
+ip400
+ironport
+isee
+0000
+1234
+3ware
+adaptec
+admin
+admin
+administrator
+Administrator
+ADMINISTRATOR
+Amx1234!
+asecret
+changeme
+Fiery.1
+Gateway
+ggdaseuaimhrke
+letmein
+manage
+password
+password
+PlsChgMe!
+p@ssw0rd
+public
+root
+RSAAppliance
+smcadmin
+storageserver
+Unidesk1
+vision2
+Vision2
+j5Brn9
+Janitza
+jboss4
+JETSPEED
+jvc
+leviton
+linga
+ManagementConsole2015
+meinsm
+michelangelo
+microbusiness
+mono
+motorola
+mp3mystic
+mu
+muze
+my_DEMARC
+netadmin
+NetCache
+netscreen
+NetSeq
+NetSurvibox
+No
+none
+novell
+noway
+OCS
+OkiLAN
+pass
+Pass
+password
+password
+PASSWORD
+passwort
+peribit
+pfsense
+phplist
+private
+public
+pwp
+rainbow
+raritan
+readwrite
+rmnetlm
+root
+SECRET123
+secure
+security
+setup
+Sharp
+smallbusiness
+smcadmin
+OCS
+changeme
+Su
+superuser
+su@psir
+surecom
+switch
+symantec
+symbol
+Symbol
+synnet
+sysAdmin
+system
+TANDBERG
+tegile
+tlJwpbo6
+tomcat
+tsunami
+adminttd
+urchin
+OCS
+utstar
+OCS
+waav
+wago
+welcome
+WELCOME
+x-admin
+year2000
+ZmqVfoSIP
+zoomadsl
+adsl1234
+none
+HP
+wapnd03cm_dkbs_dap2555
+wapnd04cm_dkbs_dap3525
+wapnd15_dlob_dap1522b
+wrgac01_dlob.hans_dir865
+wrgg15_di524
+wrgg19_c_dlwbr_dir300
+wrgn22_dlwbr_dir615
+wrgn23_dlwbr_dir300b
+wrgn23_dlwbr_dir600b
+wrgn28_dlob_dir412
+wrgn39_dlob.hans_dir645
+wrgn39_dlob.hans_dir645_V1
+wrgn49_dlob_dir600b
+wrgnd08_dlob_dir815
+Amx1234!
+password
+SWORDFISH
+anon
+anonymous
+any
+any@
+Exabyte
+password
+12345
+Any
+TENmanUFactOryPOWER
+AP
+aparker
+apc
+APPLSYS
+FND
+FNDPUB
+APPS
+APPUSER
+AQ
+AQDEMO
+AQJAVA
+AQUSER
+AR#Admin#
+ARCHIVIST
+AUDIOUSER
+INVALID
+INVALID
+author
+autocad
+BACKUP
+backuponly1
+backuprestore1
+basisk
+Basisk
+bbs
+changeme2
+NULL
+BC4J
+bciimpw
+bcimpw
+bcmspw
+bcnaspw
+bewan
+sys
+22332323
+PAPER
+bluepw
+hello
+tomcat
+bpel
+BRIO_ADMIN
+browsepw
+looker
+(unknown)
+router
+cacadmin
+CATALOG
+xrtwk318
+ccrusr
+CDEMO82
+CDEMOCOR
+CDEMORID
+CDEMOUCB
+cellit
+CENTRA
+cgadmin
+checkfs
+checkfsys
+checksys
+CHEY_ARCHSVR
+CISSUS
+CIDS
+)
+CIS
+otbu+1
+cisco
+Cisco
+CISINFO
+password
+CLOTH
+client
+cloudera
+cmaker
+CMSBATCH
+welcome
+hello
+1234
+COMPANY
+COMPIERE
+repair
+admin
+biodata
+corecess
+phpreactor
+PASSWORD
+craft
+craftpw
+crftpw
+crftpw
+telus00
+telus99
+password
+password
+SESAME
+CSMIG
+sap123
+CTXDEMO
+CTXSYS
+highspeed
+custpw
+none
+dadmin
+dadmin01
+daemon
+davox
+db2fenc1
+db2inst1
+dbase
+SQL
+DBDCCIC
+MUMBLEFRATZ
+DBSNMP
+19920706
+debian
+sixaola
+temppwd
+d.e.b.u.g
+gubed
+synnet
+User
+
+antslq
+OxhlwSG8
+S2fGqNFs
+video
+WLAN_AP
+synnet
+DEMO8
+DEMO9
+demo
+DEMO
+fai
+password
+demos
+DES
+password
+changeme
+password
+password
+DEV2000_DEMOS
+dev
+isdev
+apc
+device
+danger
+switch
+DIP
+DISCOVERER_ADMIN
+distrib0
+4tas
+disttech
+etas
+D-Link
+telnet
+dni
+dos
+changeme
+1234
+1234
+DSGATEWAY
+DSL
+DSSYS
+TJM
+dvst10n
+eagle
+SUPPORT
+echo
+User
+ergc
+EJSADMIN
+forensics
+4133
+EMP
+cisco
+engineer
+hawk201
+enisa
+enquirypw
+ESTORE
+eurek
+EVENT
+EXFSYS
+expert
+56789
+Fact4EMC
+fal
+fam
+fw
+fax
+FAX
+FAXUSER
+FAXWORKS
+password
+field
+HPONLY
+LOTUS
+MANAGER
+MGR
+SERVICE
+support
+SUPPORT
+FINANCE
+firstsite
+hello
+FND
+FORSE
+SNOWMAN
+kilo1987
+ftp
+pbxk1064
+tuxalize
+help1954
+password
+video
+xceladmin
+fwupgrade
+games
+GATEWAY
+Geardog
+GE
+Geos
+glftpd
+GL
+12345
+12345
+gopher
+GPFD
+GPLD
+1234
+12345
+guest
+guest1
+guest
+guest
+Guest
+GUEST
+guestgue
+GUESTGUE
+GUESTGUEST
+Janitza
+truetime
+TSEUG
+User
+*3noguru
+halt
+HCPARK
+FIELD.SUPPORT
+hello
+MANAGER.SYS
+MGR.SYS
+OP.OPERATOR
+OCS
+HLW
+sysadmin
+HPLASER
+badg3r5
+HR
+hsadb
+abc123
+HTTP
+hunter
+HXEHana1
+2222
+password
+service
+SYS1
+timely
+ilom-admin
+ilom-operator
+images
+IMAGEUSER
+IMEDIA
+inads
+indspw
+informix
+initpw
+1000
+installer
+install
+secret
+intel
+intermec
+oracle
+Asante
+Asante
+ioFTPD
+IS_$hostname
+init
+james
+jdoe
+JMUSER
+hello
+password
+STEEL
+AIROPLANE
+KEYSCAN
+kahn
+kodi
+l2
+L2LDEMO
+l3
+LASER
+LASERWRITER
+LBACSYS
+LdapPassword_1
+leo
+SHELVES
+Liebert
+live
+#l@$ak#.lk;0@P
+localadmin
+locatepw
+0000
+access
+admin
+password
+lpadmin
+lpadm
+bin
+lineprin
+lp
+LR-ISDN
+lynx
+m1122
+m202
+HPOFFICE
+mail
+MAIL
+MPE
+REMOTE
+TELESUP
+admin
+pbcpbn(add-serial-number)
+maint
+MAINT
+maintpw
+ntacdmax
+password
+rwmaint
+Admin
+COGNOS
+friend
+HPOFFICE
+ITF3000
+manager
+Manager
+SECURITY
+managers
+SYS
+TCH
+TELESUP
+man
+xxyyzz
+password
+master
+PASSWORD
+themaster01
+maygion.com
+redips
+MCUser1
+help
+CLERK
+MDDEMO
+MGR
+MDSYS
+mediator
+me
+supervisor
+Menara
+mfd
+MFG
+merlin
+VESOFT
+CAROLIAN
+CCC
+CNAS
+COGNOS
+CONV
+HPDESK
+HPOFFICE
+HPONLY
+HPP187
+HPP189
+HPP196
+INTX3
+ITF3000
+NETBASE
+REGO
+RJE
+ROBELLE
+SECURITY
+SYS
+TELESUP
+VESOFT
+WORD
+XLSERVER
+MGWUSER
+RSX
+MIGRATE
+MILLER
+Password1234
+mlusr
+MMO2
+dottie
+YES
+hello
+monitor
+MOREAU
+mountfs
+mountfsys
+mountsys
+MSHOME
+w0rkplac3rul3s
+MTSSYS
+MTS_PASSWORD
+MTYSYS
+Muse!Admin
+Musi%1921
+Musii%1921
+MXAGENT
+shakeme
+naadmin
+guardone
+NAMES
+nao
+NAU
+ncrm
+netbotz
+netlink
+password
+netman
+netopia
+attack
+netscreen
+NETWORK
+news
+wampp
+123456
+NICONEX
+nm2user
+nmspw
+nobody
+0
+4321
+admin
+blank
+none
+private
+sysadm
+12345
+123454
+nsa
+OAS_PUBLIC
+OCITEST
+MTRPW
+ODM
+ODSCOMMON
+ods
+ODS
+OEMADM
+OEMREP
+OE
+OLAPDBA
+INSTANCE
+MANAGER
+ORACLE
+12345
+OO
+openhabian
+OPENSPIRIT
+OPERATIONS
+OPERATNS
+admin
+$chwarzepumpe
+COGNOS
+DISC
+mercury
+operator
+Operator
+SUPPORT
+SYS
+SYSTEM
+Oper
+OPER
+op
+operator
+oracle
+ORAREGSYS
+ORASSO
+ORDPLUGINS
+ORDSYS
+osbash
+osboxes.org
+osmc
+OSP22
+OUTLN
+overseer
+OWA
+OWA_PUBLIC
+OWNER
+NetServer
+PANAMA
+patrol
+PATROL
+PBX
+SYS
+pepino
+PERFSTAT
+240653C9467E45
+piranha
+q
+raspberry
+PLEX
+rasplex
+NetServer
+SUPERSECRET
+PM
+pnadmin
+PO7
+PO8
+correct
+tech
+SpIp
+PO
+PORTAL30_DEMO
+PORTAL30
+PORTAL31
+PORTAL30_PUBLIC
+PORTAL30_SSO
+PORTAL30_SSO_PS
+PORTAL30_SSO_PUBLIC
+BASE
+postmast
+POST
+POWERCARTUSER
+APC
+powerdown
+perfectpraise
+PRIMARY
+primenet
+primeos
+prime
+primeos
+prime
+primeos
+prime
+primos
+PRINTER
+PRINT
+PRODCICS
+PRODDTA
+PROG
+prtgadmin
+$secure$
+publicpass
+PUBSUB1
+PUBSUB
+pwpw
+pwrchute
+deeplearning
+hexakisoctahedron
+QDBA
+qpgmr
+QS_ADM
+QS_CBADM
+QS_CB
+QS_CS
+11111111
+22222222
+qsecofr
+qserv
+QS_ES
+QS_OS
+QS
+11111111
+22222222
+qsrvbas
+qsrv
+QSRV
+ibmcel
+qsvr
+QS_WS
+qsysopr
+quser
+radware
+hpt
+r@p8p0r+
+rcustpw
+rdc123
+apc
+lucenttech2
+synnet
+lucenttech1
+recover
+redline
+malware
+REPADMIN
+replication-receiver
+iscopy
+replicator
+DEMO
+OEM_TEMP
+DEMO
+REP_OWNER
+RE
+restoreonly1
+rje
+RMAIL
+RMAN
+password
+NetServer
+Rodopi
+role1
+tomcat
+changethis
+00000000
+1001chin
+1111
+1234
+12345
+123456
+20080826
+3ep5w2u
+54321
+5up
+666666
+7ujMko0admin
+7ujMko0vizxv
+888888
+8RttoTriz
+admin
+ahetzip8
+alpine
+anko
+anni2013
+arcsight
+ascend
+attack
+ax400
+bagabu
+blablabla
+blackarch
+blender
+brightmail
+calvin
+cat1029
+ceadmin
+changeme
+changeonfirstlogin
+changethis
+china123
+Cisco
+ciwuxe
+cms500
+cubox-i
+cxlinux
+D13HH[
+dasdec1
+davox
+debian
+default
+dottie
+dreambox
+fai
+fibranne
+fidel123
+freenas
+ggdaseuaimhrke
+GM8182
+hi3518
+hp
+ikwb
+indigo
+juantech
+jvbzd
+klv123
+klv1234
+kn1TG7psLu
+leostream
+libreelec
+linux
+logapp
+manager
+max2play
+mozart
+mpegvideo
+Mua'dib
+MuZhlo9n%8!G
+nas4free
+NeXT
+NM1$88
+nokia
+nosoup4u
+nsi
+oelinux123
+openelec
+openmediavault
+orion99
+osboxes.org
+palosanto
+par0t
+pass
+passw0rd
+password
+p@ck3tf3nc3
+pixmet2003
+plex
+qwasyx21
+rasplex
+realtek
+resumix
+root
+!root
+ROOT
+root01
+ROOT500
+rootme
+rootpasswd
+screencast
+secur4u
+Serv4EMC
+sipwise
+sixaola
+stxadmin
+sun123
+system
+t00lk1t
+t0talc0ntr0l4!
+TANDBERG
+timeserver
+toor
+tslinux
+ubnt
+ubuntu1404
+uClinux
+unitrends1
+user
+vagrant
+vertex25
+video
+vizxv
+wago
+wyse
+xc3511
+xmhdipc
+xoa
+ys123456
+zlxx
+zlxx.
+Zte521
+ro
+SYS
+rwa
+rw
+changeonfirstlogin
+SAMPLE
+forensics
+training
+06071992
+7061992
+PASSWORD
+admin
+ADMIN
+PASS
+SAP
+SAPR3
+sasasa
+crash
+scmchangeme
+12345
+TIGER
+SDOS_ICSAP
+SECDEMO
+secofr
+security
+secacm
+system
+SERVICECONSUMER1
+service
+Service
+smile
+manager
+system
+changeme
+changeme!
+setup
+SH
+shutdown
+signa
+siteadmin
+toplayer
+SITEMINDER
+SLIDEPW
+smcadmin
+SECONDARY
+nopasswd
+snmp
+0000
+HPOFFICE
+$SRV
+ssladmin
+ssp
+stackato
+STARTER
+readonly
+stratauser
+STRAT_PASSWD
+100198
+5777364
+(caclulated)
+admin
+juniper123
+21241036
+talent
+super
+master
+superpass
+surt
+123456
+admin
+ANS#150
+asante
+kronites
+superuser
+HARRIS
+NETFRAME
+NF
+NFI
+PlsChgMe!
+supervisor
+SUPERVISOR
+SYSTEM
+visor
+h179350
+support
+supportpw
+symantec
+super
+mysweex
+SWPRO
+SWUSER
+Symbol
+SYMPA
+sync
+admin
+Admin
+admpw
+master
+nortel
+password
+sysadmin
+sysadm
+SYSADM
+sysadmpw
+syspw
+SYSA
+bin
+sysbin
+change_on_install
+CHANGE_ON_INSTALL
+masterkey
+D_SYSPW
+oem_temp
+OEM_TEMP
+sysopr
+Sysop
+sys
+system
+adminpwd
+system_admin
+D_SYSTPW
+isp
+manager
+MANAGER
+sys/change_on_install
+mnet
+password
+prime
+security
+sys
+system
+weblogic
+uplink
+Trintech
+TAHITI
+password
+tasmannet
+Tasmannet
+TDOS_ICSAP
+password
+field
+nician
+yZgO8Bvj
+tech
+telecom
+Telecom
+tele
+tellabs#1
+password
+TESTPILOT
+test
+TEST
+tiger123
+changethis
+tomcat
+logapp
+password
+password
+password
+TRACE
+TRACE
+TRAVEL
+trmcnfg
+trouble
+TSDEV
+TSUSER
+TURBINE
+ubnt
+ucenik
+ULTIMATE
+umountfs
+umountfsys
+umountsys
+unix
+USER0
+1001
+1234
+19750407
+USER1
+USER2
+USER3
+USER4
+USER5
+USER6
+USER7
+USER8
+USER9
+demo
+demo
+demo
+demo
+demo
+demo
+demo
+PASSW0RD
+PASSWORD
+Janitza
+demo
+password
+password
+Password
+none
+userNotU
+password
+Password
+demo
+public
+demo
+USER_TEMPLATE
+tivonpw
+user
+user
+User
+USER
+user0000
+USERP
+UTLESTAT
+uucpadm
+uucp
+password
+vagrant
+VCSRV
+12871
+vgnadmin
+viewuser1
+VIF_DEV_PWD
+singh
+VIRUSER
+winterm
+volition
+vpasp
+VRR1
+VTAM
+WANGTEK
+1234
+Admin
+webadmin
+WebBoard
+webibm
+password
+WEBCAL01
+webdb
+WEBDB
+1
+weblogic
+webmaster
+WEBREAD
+webshieldchangeme
+web
+whd
+WINDOWS_PASSTHRU
+SABRE
+WINSABRE
+WKSYS
+wlcsystem
+wlpisystem
+wlsepassword
+wlsedb
+HPOFFICE
+wpsadmin
+trancell
+private
+synnet
+eomjbOBLLwbZeiKV
+WWWUSER
+www
+WWW
+xd
+sap123
+XPRT
+X#1833
diff --git a/subset/security/password/resources/default/usernames.txt b/subset/security/password/resources/default/usernames.txt
new file mode 100644
index 0000000000..cd9867b8cb
--- /dev/null
+++ b/subset/security/password/resources/default/usernames.txt
@@ -0,0 +1,1271 @@
+11111111
+11111
+123
+1234
+22222222
+**23646
+**266344
+266344
+2800
+31994
+666666
+7654321
+880175445
+888888
+acer
+acitoolkit
+Adam
+ADAMS
+ADLDEMO
+adm
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+Admin
+admin
+Admin
+admin
+Admin
+Admin1
+admin1
+admin
+admin
+admin2
+admin
+Admin
+admin
+admin
+admin
+admin
+Admin
+Admin
+ADMIN
+ADMIN
+admin
+Admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+ADMIN
+admin
+admin
+admin
+admin
+admin
+Admin
+admin
+admin
+admin
+admin
+admin
+Admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+Admin
+admin
+Admin
+admin@example.com
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+Admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+Administrator
+administrator
+Administrator
+Administrator
+Administrator
+ADMINISTRATOR
+administrator
+Administrator
+ADMINISTRATOR
+administrator
+administrator
+Administrator
+Administrator
+Administrator
+Administrator
+Administrator
+Administrator
+administrator
+Administrator
+administrator
+Administrator
+Administrator
+administrator
+administrator
+Administrator
+Administrator
+Administrator
+Administrator
+Administrator
+admin
+admin
+admin
+ADMIN
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+Admin
+admin
+admin
+admin
+admin
+admin
+admin
+Admin
+admin
+Admin
+ADMIN
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+Admin
+admin
+admin
+admin
+admin
+admin
+admin
+adminstat
+adminstrator
+Admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+admin
+adminttd
+admin
+adminuser
+admin
+adminview
+admin
+Admin
+admin
+ADMIN
+admin
+admin
+admin
+admin
+adsl
+adtec
+ADVMAIL
+Alphanetworks
+Alphanetworks
+Alphanetworks
+Alphanetworks
+Alphanetworks
+Alphanetworks
+Alphanetworks
+Alphanetworks
+Alphanetworks
+Alphanetworks
+Alphanetworks
+Alphanetworks
+Alphanetworks
+Alphanetworks
+amx
+amx
+ANDY
+anon
+anonymous
+anonymous
+anonymous
+anonymous
+anonymous
+Any
+Any
+(any)
+AP
+aparker@geometrixx.info
+apc
+APPLSYS
+APPLSYS
+APPLSYSPUB
+APPS
+APPUSER
+AQ
+AQDEMO
+AQJAVA
+AQUSER
+ARAdmin
+ARCHIVIST
+AUDIOUSER
+AURORA@ORB@UNAUTHENTICATED
+AURORA$ORB$UNAUTHENTICATED
+author
+autocad
+BACKUP
+backuponly
+backuprestore
+basisk
+Basisk
+bbs
+bbsd-client
+bbsd-client
+BC4J
+bciim
+bcim
+bcms
+bcnas
+bewan
+bin
+Blaeri
+BLAKE
+blue
+Bobo
+both
+bpel
+BRIO_ADMIN
+browse
+browse
+bubba
+cablecom
+cac_admin
+CATALOG
+c-comatic
+ccrusr
+CDEMO82
+CDEMOCOR
+CDEMORID
+CDEMOUCB
+cellit
+CENTRA
+cgadmin
+checkfs
+checkfsys
+checksys
+CHEY_ARCHSVR
+CICSUSER
+CIDS
+cirros
+CIS
+CISCO15
+cisco
+Cisco
+CISINFO
+citel
+CLARK
+client
+cloudera
+cmaker
+CMSBATCH
+cn=orcladmin
+Coco
+comcast
+COMPANY
+COMPIERE
+computer
+conferencing
+config
+corecess
+core
+CQSCHEMAUSER
+craft
+craft
+craft
+Craft
+(created)
+(created)
+crowdÂ-openid-Âserver
+Crowd
+CSG
+CSMIG
+ctb_admin
+CTXDEMO
+CTXSYS
+cusadmin
+cust
+customer
+dadmin
+dadmin
+daemon
+davox
+db2fenc1
+db2inst1
+dbase
+DBA
+DBDCCICS
+DBI
+DBSNMP
+DDIC
+debian
+debian
+debian
+debug
+debug
+debug
+d.e.b.u.g
+default
+default
+default
+default
+default
+default
+defug
+DEMO8
+DEMO9
+demo
+DEMO
+demo
+Demo
+demos
+DES
+deskalt
+deskman
+desknorm
+deskres
+DEV2000_DEMOS
+dev
+Developer
+device
+device
+diag
+diag
+DIP
+DISCOVERER_ADMIN
+distrib
+disttech
+disttech
+disttech
+D-Link
+dm
+dni
+dos
+dpn
+draytek
+Draytek
+DSGATEWAY
+DSL
+DSSYS
+DTA
+dvstation
+eagle
+EARLYWATCH
+echo
+echo
+egcr
+EJSADMIN
+elk_user
+emaq
+EMP
+enable
+eng
+engmode
+enisa
+enquiry
+ESTOREUSER
+eurek
+EVENT
+EXFSYS
+expert
+Factory
+factory
+fal
+fam
+fastwire
+fax
+FAX
+FAXUSER
+FAXWORKS
+fg_sysadmin
+field
+FIELD
+FIELD
+FIELD
+FIELD
+FIELD
+field
+FIELD
+FINANCE
+firstsite
+Flo
+FND
+FORSE
+FROSTY
+ftp_admi
+ftp
+ftp_inst
+ftp_nmc
+ftp_oper
+ftpuser
+ftp
+fwadmin
+fwupgrade
+games
+GATEWAY
+Gearguy
+GE
+geosolutions
+glftpd
+GL
+god1
+god2
+gopher
+GPFD
+GPLD
+guest
+guest
+guest1
+guest1
+guest
+Guest
+Guest
+GUEST
+guest
+GUEST
+GUEST
+guest
+guest
+GUEST
+guest
+guru
+halt
+HCPARK
+HELLO
+hello
+HELLO
+HELLO
+HELLO
+helpdesk
+HLW
+(hostname/ipaddress)
+HPLASER
+HPSupport
+HR
+hsa
+hscroot
+HTTP
+hunter
+hxeadm
+ibm
+ibm
+ibm
+IBMUSER
+iclock
+ilom-admin
+ilom-operator
+images
+IMAGEUSER
+IMEDIA
+inads
+inads
+informix
+init
+installer
+installer
+install
+install
+intel
+intermec
+internal
+IntraStack
+IntraSwitch
+ioFTPD
+IS_$hostname
+itsadmin
+james
+jdoe@geometrixx.info
+JMUSER
+Joe
+joe
+JONES
+JWARD
+keyscan
+khan
+kodi
+l2
+L2LDEMO
+l3
+LASER
+LASERWRITER
+LBACSYS
+LDAP_Anonymous
+leo
+LIBRARIAN
+Liebert
+live
+LocalAdministrator
+localadmin
+locate
+login
+login
+login
+login
+lpadmin
+lpadm
+lp
+lp
+lp
+LR-ISDN
+lynx
+m1122
+m202
+MAIL
+mail
+MAIL
+MAIL
+MAIL
+MAIL
+maintainer
+maintainer
+maint
+MAINT
+maint
+maint
+maint
+maint
+Manager
+MANAGER
+manager
+MANAGER
+MANAGER
+manager
+Manager
+MANAGER
+managers
+MANAGER
+MANAGER
+MANAGER
+man
+manuf
+mary
+master
+MASTER
+master
+MayGion
+McdataSE
+MCUser
+MD110
+MDDEMO_CLERK
+MDDEMO
+MDDEMO_MGR
+MDSYS
+mediator
+me
+memotec
+Menara
+mfd
+MFG
+mg3500
+MGE
+MGR
+MGR
+MGR
+MGR
+MGR
+MGR
+MGR
+MGR
+MGR
+MGR
+MGR
+MGR
+MGR
+MGR
+MGR
+MGR
+MGR
+MGR
+MGR
+MGR
+MGR
+MGR
+MGR
+MGWUSER
+MICRO
+MIGRATE
+MILLER
+misp
+mlusr
+MMO2
+mobile
+MODTEST
+Moe
+monitor
+MOREAU
+mountfs
+mountfsys
+mountsys
+MSHOME
+mso
+MTSSYS
+MTS_USER
+MTYSYS
+museadmin
+musi1921
+musi1921
+MXAGENT
+myshake
+naadmin
+n.a
+NAMES
+nao
+NAU
+ncrm
+netbotz
+netlink
+NetLinx
+netman
+netopia
+netrangr
+netscreen
+NETWORK
+news
+newuser
+nexthink
+NICONEX
+nm2user
+nms
+nobody
+none
+none
+none
+none
+none
+none
+none
+nop
+nop
+NSA
+OAS_PUBLIC
+OCITEST
+ODM_MTR
+ODM
+ODSCOMMON
+ods
+ODS
+OEMADM
+OEMREP
+OE
+OLAPDBA
+OLAPSVR
+OLAPSYS
+OMWB_EMULATION
+onlime_r
+OO
+openhabian
+OPENSPIRIT
+OPERATIONS
+OPERATNS
+operator
+operator
+OPERATOR
+OPERATOR
+operator
+operator
+Operator
+OPERATOR
+OPERATOR
+OPERATOR
+Oper
+OPER
+op
+op
+oracle
+ORAREGSYS
+ORASSO
+ORDPLUGINS
+ORDSYS
+osbash
+osboxes
+osmc
+OSP22
+OUTLN
+overseer
+OWA
+OWA_PUBLIC
+OWNER
+PACSLinkIP
+PANAMA
+patrol
+PATROL
+PBX
+PCUSER
+pepino
+PERFSTAT
+PFCUser
+piranha
+piranha
+pi
+PLEX
+plexuser
+PLMIMService
+PLSQL
+PM
+pnadmin
+PO7
+PO8
+politically
+poll
+Polycom
+PO
+PORTAL30_DEMO
+PORTAL30
+PORTAL30
+PORTAL30_PUBLIC
+PORTAL30_SSO
+PORTAL30_SSO_PS
+PORTAL30_SSO_PUBLIC
+POST
+postmaster
+POST
+POWERCARTUSER
+POWERCHUTE
+powerdown
+praisenetwork
+PRIMARY
+primenet
+primenet
+primeos
+primeos
+prime
+prime
+primos_cs
+primos_cs
+PRINTER
+PRINT
+PRODCICS
+PRODDTA
+PROG
+prtgadmin
+PSEAdmin
+public
+PUBSUB1
+PUBSUB
+pw
+pwrchute
+pyimagesearch
+qbf77101
+QDBA
+qpgmr
+QS_ADM
+QS_CBADM
+QS_CB
+QS_CS
+qsecofr
+qsecofr
+qsecofr
+qserv
+QS_ES
+QS_OS
+QS
+QSRV
+QSRV
+qsrvbas
+qsrv
+QSRV
+qsvr
+qsvr
+QS_WS
+qsysopr
+quser
+radware
+RAID
+rapport
+rcust
+rdc123
+readonly
+readonly
+read
+readwrite
+recover
+redline
+remnux
+REPADMIN
+replication-receiver
+Replicator
+replicator
+REP_MANAGER
+REPORTS_USER
+REP_OWNER
+REP_OWNER
+RE
+restoreonly
+rje
+RMAIL
+RMAN
+RMUser1
+RNIServiceManager
+Rodopi
+role1
+role1
+role
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+ROOT
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+root
+Root
+root
+root
+root
+root
+root
+root
+root
+root
+ro
+RSBCMON
+rwa
+rw
+sa
+SAMPLE
+sansforensics
+sans
+SAP*
+SAP*
+SA
+SAPCPIC
+SAPCPIC
+SAP*
+SAPR3
+SAP
+sa
+savelogs
+scmadmin
+sconsole
+SCOTT
+SDOS_ICSAP
+SECDEMO
+secofr
+security
+sedacm
+self
+SERVICECONSUMER1
+service
+Service
+service
+servlet
+setpriv
+setup
+setup
+setup
+SH
+shutdown
+signa
+siteadmin
+siteadmin
+SITEMINDER
+SLIDE
+smc
+SMDR
+snmp
+snmp
+spcl
+SPOOLMAN
+$SRV
+ssladmin
+ssp
+stackato
+STARTER
+status
+stratacom
+STRAT_USER
+stuccoboy
+super
+__super
+superdba
+super
+superman
+superman
+super
+super.super
+super
+super
+superuser
+superuser
+SUPERUSER
+superuser
+SuperUser
+superuser
+SUPERVISOR
+SUPERVISOR
+SUPERVISOR
+SUPERVISOR
+supervisor
+supervisor
+SUPERVISOR
+SUPERVISOR
+supervisor
+support
+support
+support
+support
+su
+sweex
+SWPRO
+SWUSER
+Symbol
+SYMPA
+sync
+sysadm
+sysadm
+sysadm
+sysadmin
+sysadmin
+sysadmin
+sysadmin
+sysadm
+SYSADM
+sysadm
+sysadm
+SYSA
+sys
+sysbin
+sys
+SYS
+SYSDBA
+SYS
+SYSMAN
+SYSMAN
+sysopr
+Sysop
+sys
+sys
+system
+system_admin
+SYSTEM
+system
+system
+SYSTEM
+system/manager
+system
+system
+system
+system
+system
+system
+system
+sys
+t3admin
+TAHITI
+target
+tasman
+Tasman
+TDOS_ICSAP
+teacher
+tech
+tech
+technician
+tech
+telecom
+Telecom
+tele
+tellabs
+temp1
+TESTPILOT
+test
+TEST
+tiger
+tomcat
+tomcat
+toor
+topicalt
+topicnorm
+topicres
+TRACESRV
+TRACESVR
+TRAVEL
+trmcnfg
+trouble
+TSDEV
+TSUSER
+TURBINE
+ubnt
+ucenik23
+ULTIMATE
+umountfs
+umountfsys
+umountsys
+unix
+USER0
+User
+User
+User
+USER1
+USER2
+USER3
+USER4
+USER5
+USER6
+USER7
+USER8
+USER9
+user_analyst
+user_approver
+user_author
+user_checker
+user_designer
+user_editor
+user_expert
+USERID
+USERID
+user
+user_marketer
+username
+Username
+Username
+user
+userNotUsed
+user
+User
+user_pricer
+user
+user_publisher
+USER_TEMPLATE
+user
+user
+User
+User
+USER
+user
+user
+UTLBSTATU
+uucpadm
+uucp
+uwmadmin
+vagrant
+VCSRV
+veda
+vgnadmin
+viewuser
+VIF_DEVELOPER
+vikram
+VIRUSER
+VNC
+volition
+vpasp
+VRR1
+VTAM
+WANGTEK
+webadmin
+WebAdmin
+webadmin
+WebAdmin
+webadmin
+WEBADM
+WEBCAL01
+webdb
+WEBDB
+webguest
+weblogic
+webmaster
+WEBREAD
+webshield
+web
+whd
+WINDOWS_PASSTHRU
+WINSABRE
+WINSABRE
+WKSYS
+wlcsystem
+wlpisystem
+wlseuser
+wlse
+WP
+wpsadmin
+wradmin
+write
+write
+wVQxyQec
+WWWUSER
+www
+WWW
+xd
+xmi_demo
+XPRT
+XXSESS_MGRYY
diff --git a/subset/security/password/resources/faux/dictionary.txt b/subset/security/password/resources/faux/dictionary.txt
new file mode 100644
index 0000000000..8caf47c4a5
--- /dev/null
+++ b/subset/security/password/resources/faux/dictionary.txt
@@ -0,0 +1,4 @@
+:user:pass
+:admin:default
+:root:user
+:default:pass
\ No newline at end of file
diff --git a/subset/security/password/resources/faux/passwords.txt b/subset/security/password/resources/faux/passwords.txt
new file mode 100644
index 0000000000..d2cc81dc7d
--- /dev/null
+++ b/subset/security/password/resources/faux/passwords.txt
@@ -0,0 +1,4 @@
+pass
+default
+user
+pass
\ No newline at end of file
diff --git a/subset/security/password/resources/faux/usernames.txt b/subset/security/password/resources/faux/usernames.txt
new file mode 100644
index 0000000000..5b38a1c32c
--- /dev/null
+++ b/subset/security/password/resources/faux/usernames.txt
@@ -0,0 +1,4 @@
+user
+admin
+root
+default
\ No newline at end of file
diff --git a/subset/security/password/resources/raw/manufacturer.csv b/subset/security/password/resources/raw/manufacturer.csv
new file mode 100644
index 0000000000..5f1ac893c3
--- /dev/null
+++ b/subset/security/password/resources/raw/manufacturer.csv
@@ -0,0 +1,2850 @@
+Vendor,Username,Password,Comments
+"2Wire, Inc.",http,,
+360 Systems,factory,factory,
+3COM,3comcso,RIP000,Resets all passwords to defaults
+3COM,,12345,
+3COM,,1234admin,
+3COM,,,
+3COM,,ANYCOM,
+3COM,,ILMI,
+3COM,,PASSWORD,
+3COM,,admin,
+3COM,,comcomcom,
+3COM,,,
+3COM,,PASSWORD,
+3COM,,admin,
+3COM,Admin,Admin,
+3COM,Administrator,,
+3COM,Administrator,admin,
+3COM,Type User: FORCE,,
+3COM,User,Password,
+3COM,adm,,
+3COM,admin,1234admin,
+3COM,admin,,
+3COM,admin,admin,
+3COM,admin,comcomcom,
+3COM,admin,password,
+3COM,admin,synnet,
+3COM,adminttd,adminttd,
+3COM,debug,synnet,
+3COM,defug,synnet,
+3COM,manager,manager,
+3COM,monitor,monitor,
+3COM,none,admin,
+3COM,read,synnet,
+3COM,recover,recover,http://support.3com.com/infodeli/tools/switches/ss3/4900/dha1770-0aaa04/htm/support/problemsolving/cliproblems.htm
+3COM,recovery,recovery,Unit must be powered off
+3COM,root,!root,http://support.3com.com/infodeli/tools/remote/ocradsl/20/812_cli20.pdfhttp://support.3com.com/infodeli/tools/remote/ocremote/brouters/840/2sysadmin.htm
+3COM,security,security,
+3COM,tech,,
+3COM,tech,tech,
+3COM,write,synnet,
+3M,VOL-0215,,http://multimedia.3m.com/mws/mediawebserver?6666660Zjcf6lVs6EVs666xa9COrrrrQ-
+3M,volition,,http://multimedia.3m.com/mws/mediawebserver?6666660Zjcf6lVs6EVs666xa9COrrrrQ-
+3M,volition,volition,
+3ware,Administrator,3ware,
+ACCTON,,0000,
+ACCTON,__super,(caclulated),http://www.vettebak.nl/hak/
+ACCTON,admin,,
+ACCTON,manager,manager,
+ACCTON,monitor,monitor,
+ACCTON,none,0,
+ADC Kentrox,,secret,
+ADC Kentrox,,secret,
+ADIC,admin,password,
+ADIC,admin,secure,
+ADP,sysadmin,master,
+ADT,,2580,http://krebsonsecurity.com/2013/01/does-your-alarm-have-a-default-duress-code/
+ADTRAN,admin,password,
+AIRAYA Corp,Airaya,Airaya,http://www.airaya.com/support/guides/WirelessGRID-Manual_O.pdf
+ALLNET,admin,admin,
+ALLNET,admin,password,
+ALLNET,none,admin,
+AMI,,A.M.I,
+AMI,,AM,
+AMI,,AMI,
+AMI,,AMI!SW,
+AMI,,AMI.KEY,
+AMI,,AMI.KEZ,
+AMI,,AMI?SW,
+AMI,,AMIPSWD,
+AMI,,AMISETUP,
+AMI,,AMI_SW,
+AMI,,AMI~,
+AMI,,BIOSPASS,
+AMI,,CMOSPWD,
+AMI,,HEWITT RAND,
+AMI,,aammii,
+AMI,,A.M.I,
+AMI,,AM,
+AMI,,AMI,
+AMI,,AMI!SW,
+AMI,,AMI.KEY,
+AMI,,AMI.KEZ,
+AMI,,AMI?SW,
+AMI,,AMIAMI,
+AMI,,AMIDECOD,
+AMI,,AMIPSWD,
+AMI,,AMISETUP,
+AMI,,AMI_SW,
+AMI,,AMI~,
+AMI,,BIOSPASS,
+AMI,,HEWITT RAND,
+AMI,,aammii,
+AMX,,1988,http://www.amx.com/techsupport/PDFs/981.pdf
+AMX,,,http://www.amx.com/techsupport/PDFs/981.pdf
+AMX,,admin,http://www.amx.com/techsupport/PDFs/981.pdf
+AMX,Admin,1988,http://www.amx.com/techsupport/PDFs/981.pdf
+AMX,Administrator,vision2,http://www.amx.com/techsupport/PDFs/981.pdf
+AMX,NetLinx,password,http://www.amx.com/techsupport/PDFs/981.pdf
+AMX,admin,1988,http://www.amx.com/techsupport/PDFs/981.pdf
+AMX,admin,admin,http://www.amx.com/techsupport/PDFs/981.pdf
+AMX,administrator,password,http://www.amx.com/techsupport/PDFs/981.pdf
+AMX,guest,guest,http://www.amx.com/techsupport/PDFs/981.pdf
+AMX,root,mozart,http://www.amx.com/techsupport/PDFs/981.pdf
+AOC,,admin,
+APACHE,admin,jboss4,
+APC,(any),TENmanUFactOryPOWER,
+APC,,serial number of the Call-UPS,
+APC,,serial number of the Share-UPS,
+APC,,TENmanUFactOryPOWER,
+APC,,backdoor,
+APC,POWERCHUTE,APC,
+APC,apc,apc,
+APC,device,apc,https://www.jlab.org/Hall-D/Documents/manuals/APC%20stuff/AP9630%209631%20UPS%20Network%20Management%20Card%202%20User's%20Guide%20firmware%20V5.1.1.pdf
+APC,device,device,
+APC,readonly,apc,https://www.jlab.org/Hall-D/Documents/manuals/APC%20stuff/AP9630%209631%20UPS%20Network%20Management%20Card%202%20User's%20Guide%20firmware%20V5.1.1.pdf
+ARtem,,admin,
+ASMAX,admin,epicrouter,
+AST,,SnuFG5,
+AST,,SnuFG5,
+AT&T,,mcp,
+ATL,Service,5678,Tape Library Service Access
+ATL,operator,1234,Tape Library Operator Access
+AVM,,0,
+AVM,,,
+AWARD,,1322222,
+AWARD,,256256,
+AWARD,,589589,
+AWARD,,589721,
+AWARD,,,
+AWARD,,?award,
+AWARD,,AWARD SW,
+AWARD,,AWARD?SW,
+AWARD,,AWARD_PW,
+AWARD,,AWARD_SW,
+AWARD,,Award,
+AWARD,,BIOS,
+AWARD,,CONCAT,
+AWARD,,HELGA-S,
+AWARD,,HEWITT RAND,
+AWARD,,HLT,
+AWARD,,PASSWORD,
+AWARD,,SER,
+AWARD,,SKY_FOX,
+AWARD,,SWITCHES_SW,
+AWARD,,SW_AWARD,
+AWARD,,SZYX,
+AWARD,,Sxyz,
+AWARD,,TTPTHA,
+AWARD,,TzqF,
+AWARD,,ZAAADA,
+AWARD,,aLLy,
+AWARD,,aPAf,
+AWARD,,admin,
+AWARD,,alfarome,
+AWARD,,award.sw,
+AWARD,,award_?,
+AWARD,,award_ps,
+AWARD,,awkward,
+AWARD,,biosstar,
+AWARD,,biostar,
+AWARD,,condo,
+AWARD,,djonet,
+AWARD,,efmukl,
+AWARD,,g6PJ,
+AWARD,,h6BB,
+AWARD,,j09F,
+AWARD,,j256,
+AWARD,,j262,
+AWARD,,j322,
+AWARD,,j64,
+AWARD,,lkw peter,
+AWARD,,lkwpeter,
+AWARD,,setup,
+AWARD,,t0ch20x,
+AWARD,,t0ch88,
+AWARD,,wodj,
+AWARD,,zbaaaca,
+AWARD,,zjaaadc,
+AXUS,,0,Storage DAS SATA to SCSI/FC
+Accelerated Networks,sysadm,anicust,
+Aceex,admin,,
+Acer,,,
+Actiontec,,,
+Actiontec,admin,password,Verizon Fios Setup
+AdComplete.com,Admin1,Admin1,
+Adaptec,Administrator,adaptec,
+AddPac Technology,root,router,
+Addon,admin,admin,
+Adobe,admin,admin,https://docs.adobe.com/docs/v5_2/html-resources/cq5_guide_power_user/ch07s02.html#sect_default_users_and_groups
+Adobe,anonymous,anonymous,http://resources.infosecinstitute.com/adobe-cq-pentesting-guide-part-1/
+Adobe,aparker@geometrixx.info,aparker,http://resources.infosecinstitute.com/adobe-cq-pentesting-guide-part-1/
+Adobe,author,author,https://docs.adobe.com/docs/v5_2/html-resources/cq5_guide_power_user/ch07s02.html#sect_default_users_and_groups
+Adobe,jdoe@geometrixx.info,jdoe,http://resources.infosecinstitute.com/adobe-cq-pentesting-guide-part-1/
+Adobe,replication-receiver,replication-receiver,http://resources.infosecinstitute.com/adobe-cq-pentesting-guide-part-1/
+Adobe,vgnadmin,vgnadmin,http://dev.day.com/content/docs/en/crx/connectors/vignette/current.html
+Adtech,root,ax400,
+Adtran,,adtran,
+Adtran,admin,password,http://www.adtran.com/pub/Library/Quick_Start_Guides/Public_View/NetVanta%203430%20Quick%20Start%20Guide.pdf
+Advanced Integration,,Advance,
+Advanced Integration,,Advance,
+Advantek Networks,admin,,
+Aethra,admin,password,
+AirLink Plus,,admin,
+AirTies RT-210,admin,admin,
+Airlink,,admin,
+Aironet,,,
+Airway,,0000,
+Aladdin,root,kn1TG7psLu,
+Alcatel,,,http://www.speedtouch.com/support.htm
+Alcatel,,admin,
+Alcatel,,1064,
+Alcatel,SUPERUSER,ANS#150,
+Alcatel Thomson,admin,admin,
+Alcatel,adfexc,adfexc,thanks to Nicolas Gregoire
+Alcatel,admin,switch,
+Alcatel,at4400,at4400,thanks to Nicolas Gregoire
+Alcatel,client,client,
+Alcatel,dhs3mt,dhs3mt,thanks to Nicolas Gregoire
+Alcatel,dhs3pms,dhs3pms,thanks to Nicolas Gregoire
+Alcatel,diag,switch,
+Alcatel,ftp_admi,kilo1987,
+Alcatel,ftp_inst,pbxk1064,
+Alcatel,ftp_nmc,tuxalize,
+Alcatel,ftp_oper,help1954,
+Alcatel,halt,tlah,thanks to Nicolas Gregoire
+Alcatel,install,llatsni,thanks to Nicolas Gregoire
+Alcatel,kermit,kermit,thanks to Nicolas Gregoire
+Alcatel,mtch,mtch,thanks to Nicolas Gregoire
+Alcatel,mtcl,,
+Alcatel,mtcl,mtcl,thanks to Nicolas Gregoire
+Alcatel,root,letacla,thanks to Nicolas Gregoire
+Alcatel,root,permit,Perm/Config port 38036
+Alcatel,superuser,superuser,
+Alien Technology,alien,alien,http://seclists.org/fulldisclosure/2010/May/63
+Alien Technology,root,alien,http://seclists.org/fulldisclosure/2010/May/63
+Allied Telesyn,,manager,
+Allied Telesyn,,admin,
+Allied Telesyn,admin,,
+Allied Telesyn,manager,admin,
+Allied Telesyn,manager,friend,
+Allied Telesyn,manager,manager,
+Allied Telesyn,root,,
+Allied Telesyn,secoff,secoff,
+Allnet,admin,admin,http://www.allnet.de/
+Allot,admin,allot,
+Allot,root,bagabu,
+Alteon,admin,,
+Alteon,admin,admin,
+Alteon,admin,linga,
+Ambit,root,,
+Ambit,root,root,
+Ambit,user,user,
+Amigo,admin,epicrouter,
+Amino,,leaves,http://www.vsicam.com/files/documents/AmiNet/AmiNet_and_AVN_Configuration_Manual.pdf
+Amino,,snake,http://www.vsicam.com/files/documents/AmiNet/AmiNet_and_AVN_Configuration_Manual.pdf
+Amitech,admin,admin,
+AmpJuke,admin,pass,
+Amptron,,Polrty,
+Amptron,,Polrty,
+Andover Controls,acc,acc,
+Apache Project,jj,,
+Apache,admin,,
+Apache,admin,admin,
+Apache,admin,j5Brn9,
+Apache,admin,tomcat,
+Apache,both,tomcat,
+Apache,role,changethis,
+Apache,role1,role1,
+Apache,role1,tomcat,
+Apache,root,changethis,
+Apache,root,root,
+Apache,tomcat,changethis,
+Apache,tomcat,tomcat,
+Apple,,public,See Apple article number 58613
+Apple,,xyzzy,
+Apple,,admin,see Apple article number 107518
+Apple,,password,See Apple article number 106597
+Apple Computer,,public,
+Apple Computer,,xyzzy,
+Apple,admin,public,
+Apple,mobile,dottie,
+Apple,root,admin,
+Apple,root,alpine,
+Applied Innovations,scout,scout,
+Areca,admin,0,
+Arescom,,atc123,
+Arlotto,admin,123456,
+Arris,admin,password,
+Arrowpoint,,,
+Arrowpoint,admin,system,
+Aruba,admin,admin,
+Arun,123,234,
+Asante,IntraStack,Asante,
+Asante,IntraSwitch,Asante,
+Asante,admin,asante,
+Asante,superuser,,
+Asante,superuser,asante,
+Ascend,,ascend,
+Ascend,,ascend,
+Ascend,readonly,lucenttech2,
+Ascend,readwrite,lucenttech1,
+Ascend,root,ascend,
+Ascom,,3ascotel,
+Aspect,DTA,TJM,
+Aspect,customer,none,
+Asus,,admin,
+Asus,admin,admin,
+Asus,adsl,adsl1234,
+Atlantis,admin,atlantis,
+Atlassian,Crowd,password,http://www.commandfive.com/papers/C5_TA_2013_3925_AtlassianCrowd.pdf
+Atlassian,Demo,password,http://www.commandfive.com/papers/C5_TA_2013_3925_AtlassianCrowd.pdf
+Atlassian,Username,password,http://www.commandfive.com/papers/C5_TA_2013_3925_AtlassianCrowd.pdf
+Atlassian,crowdÂ-openid-Âserver,password,http://www.commandfive.com/papers/C5_TA_2013_3925_AtlassianCrowd.pdf
+Attachmate,,PASSWORD,
+Audioactive,,telos,
+Autodesk,autocad,autocad,
+Avaya,,Craftr4,
+Avaya,,,
+Avaya,,admin,
+Avaya,Administrator,ggdaseuaimhrke,
+Avaya,Craft,crftpw,
+Avaya,admin,admin,https://downloads.avaya.com/css/P8/documents/100173462
+Avaya,admin,admin123,
+Avaya,admin,barney,
+Avaya,admin,password,https://downloads.avaya.com/css/P8/documents/100181785
+Avaya,craft,,
+Avaya,craft,crftpw,
+Avaya,dadmin,dadmin,
+Avaya,dadmin,dadmin01,
+Avaya,diag,danger,
+Avaya,manuf,xxyyzz,
+Avaya,root,ROOT500,
+Avaya,root,cms500,
+Avaya,root,ggdaseuaimhrke,
+Avaya,root,root,
+Avenger News System (ANS),,Administrative,
+Avocent,root,tslinux,
+Award,,lkwpeter,
+Award,,1322222,
+Award,,256256,
+Award,,?award,
+Award,,AWARD_SW,
+Award,,BIOS,
+Award,,CONCAT,
+Award,,CONDO,
+Award,,HELGA-S,
+Award,,HEWITT RAND,
+Award,,HLT,
+Award,,PASSWORD,
+Award,,SER,
+Award,,SKY_FOX,
+Award,,SWITCHES_SW,
+Award,,SY_MB,
+Award,,SZYX,
+Award,,Sxyz,
+Award,,TTPTHA,
+Award,,TzqF,
+Award,,aLLy,
+Award,,aPAf,
+Award,,admin,
+Award,,alfarome,
+Award,,award,
+Award,,awkward,
+Award,,biosstar,
+Award,,biostar,
+Award,,g6PJ,
+Award,,h6BB,
+Award,,j09F,
+Award,,j256,
+Award,,j262,
+Award,,j322,
+Award,,j64,
+Award,,lkw peter,
+Award,,lkwpeter,
+Award,,setup,
+Award,,t0ch20x,
+Award,,t0ch88,
+Award,,wodj,
+Award,,zbaaaca,
+Axis,,,
+Axis Communications,root,pass,
+Axis,root,pass,
+Axway,setup,setup,https://cdn.axway.com/u/documentation/secure_transport/5.3.0/SecureTransport_GettingStartedGuide_allOS_en.pdf
+Aztech,admin,admin,
+Aztech,isp,isp,backdoor � not in all f/w versions
+Aztech,root,admin,
+BBR-4MG and,root,