1
0
Fork 0

Adding upstream version 2.2.1.

Signed-off-by: Daniel Baumann <daniel@debian.org>
This commit is contained in:
Benjamin Drung 2025-02-16 12:41:59 +01:00 committed by Daniel Baumann
parent 1d36de0179
commit 757b718eff
Signed by: daniel
GPG key ID: FBB4F0E80A80222F
129 changed files with 16110 additions and 0 deletions

0
test/__init__.py Normal file
View file

152
test/meson.build Normal file
View file

@ -0,0 +1,152 @@
# Copyright (c) 2022, Dell Inc. or its subsidiaries. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
# See the LICENSE file for details.
#
# This file is part of NVMe STorage Appliance Services (nvme-stas).
#
# Authors: Martin Belanger <Martin.Belanger@dell.com>
#
test_env = environment({'MALLOC_PERTURB_': '0'})

libnvme_location = '?'

# We require libnvme in order to run the tests. We have two choices, either
# run the tests using a pre-installed version of libnvme (i.e. from /usr) or
# build libnvme as a meson subproject and run the tests using that version
# of libnvme. The decision to use one method over the other is controlled
# by the option "libnvme-sel". Note that if a pre-installed libnvme is selected
# but one cannot be found, then we fall back to using the subproject libnvme.
if get_option('libnvme-sel') == 'pre-installed'
    # Check if a pre-installed libnvme can be found
    rr = run_command(python3, '-c', 'import libnvme; print(f"{libnvme.__path__[0]}")', check: false, env: test_env)
    if rr.returncode() == 0
        libnvme_location = rr.stdout().strip()
    endif
endif

if libnvme_location == '?'
    # Second, if libnvme is not already installed or "libnvme-sel" is not
    # set to "pre-installed", let's fallback to using the subproject.
    libnvme_dep = dependency('python3-libnvme', fallback: ['libnvme', 'libnvme_dep'], required: false)
    test_env.prepend('PYTHONPATH', PYTHONPATH) # This sets the path to look in the build directory
    rr = run_command(python3, '-c', 'import libnvme; print(f"{libnvme.__path__[0]}")', check: false, env: test_env)
    if rr.returncode() == 0
        libnvme_location = rr.stdout().strip()
    endif
endif

if libnvme_location == '?'
    warning('Missing runtime package needed to run the tests: python3-libnvme.')
else
    message('\n\n\u001b[32m\u001b[1mNOTE: Tests will be using @0@\u001b[0m\n'.format(libnvme_location))

    #---------------------------------------------------------------------------
    # pylint and pyflakes
    if modules_to_lint.length() != 0
        pylint = find_program('pylint', required: false)
        pyflakes = find_program('pyflakes3', required: false)
        if not pyflakes.found()
            # Some distros ship the Python 3 flavor simply as "pyflakes"
            temp = find_program('pyflakes', required: false)
            if temp.found() and run_command(temp, '--version', check: false).stdout().contains('Python 3')
                pyflakes = temp
            endif
        endif

        rcfile = meson.current_source_dir() / 'pylint.rc'

        if pylint.found()
            test('pylint', pylint, args: ['--rcfile=' + rcfile] + modules_to_lint, env: test_env)
        else
            warning('Skipping some of the tests because "pylint" is missing.')
        endif
        if pyflakes.found()
            test('pyflakes', pyflakes, args: modules_to_lint, env: test_env)
        else
            warning('Skipping some of the tests because "pyflakes" is missing.')
        endif
    endif

    #---------------------------------------------------------------------------
    # Check dependencies
    dbus_is_active = false
    avahi_is_active = false
    systemctl = find_program('systemctl', required: false)
    if systemctl.found()
        rr = run_command(systemctl, 'is-active', 'dbus.service', check: false)
        dbus_is_active = rr.returncode() == 0 and rr.stdout().strip() == 'active'
        if not dbus_is_active
            warning('Dbus daemon is not running')
        endif

        rr = run_command(systemctl, 'is-active', 'avahi-daemon.service', check: false)
        avahi_is_active = rr.returncode() == 0 and rr.stdout().strip() == 'active'
        if not avahi_is_active
            warning('Avahi daemon is not running')
        endif
    endif

    want_avahi_test = dbus_is_active and avahi_is_active

    #---------------------------------------------------------------------------
    # Unit tests
    things_to_test = [
        ['Test Configuration', 'test-config.py', []],
        ['Test Controller', 'test-controller.py', ['pyfakefs']],
        ['Test GTimer', 'test-gtimer.py', []],
        ['Test iputil', 'test-iputil.py', []],
        ['Test KernelVersion', 'test-version.py', []],
        ['Test log', 'test-log.py', ['pyfakefs']],
        ['Test NvmeOptions', 'test-nvme_options.py', ['pyfakefs']],
        ['Test Service', 'test-service.py', ['pyfakefs']],
        ['Test TID', 'test-transport_id.py', []],
        ['Test Udev', 'test-udev.py', []],
        ['Test timeparse', 'test-timeparse.py', []],
    ]

    # The Avahi test requires the Avahi and the Dbus daemons to be running.
    if want_avahi_test
        things_to_test += [['Test Avahi', 'test-avahi.py', []]]
    else
        warning('Skip Avahi Test due to missing dependencies')
    endif

    foreach thing: things_to_test
        msg = thing[0]

        # Check whether all dependencies can be found
        missing_deps = []
        deps = thing[2]
        foreach dep : deps
            rr = run_command(python3, '-c', 'import @0@'.format(dep), check: false)
            if rr.returncode() != 0
                missing_deps += [dep]
            endif
        endforeach

        if missing_deps.length() == 0
            # Allow the test to run if all dependencies are available
            script = meson.current_source_dir() / thing[1]
            test(msg, python3, args: script, env: test_env)
        else
            warning('"@0@" requires python module "@1@"'.format(msg, missing_deps))
        endif
    endforeach
endif

#-------------------------------------------------------------------------------
# Make sure code complies with minimum Python version requirement.
tools = [
    meson.current_source_dir() / '../doc',
    meson.current_source_dir() / '../utils',
]
vermin = find_program('vermin', required: false)
if vermin.found()
    if modules_to_lint.length() != 0
        test('vermin code', vermin, args: ['--config-file', meson.current_source_dir() / 'vermin.conf'] + modules_to_lint, env: test_env)
    endif
    test('vermin tools', vermin, args: ['--config-file', meson.current_source_dir() / 'vermin-tools.conf'] + tools, env: test_env)
else
    warning('Skipping some of the tests because "vermin" is missing.')
endif

500
test/pylint.rc Normal file
View file

@ -0,0 +1,500 @@
# Copyright (c) 2021, Dell Inc. or its subsidiaries. All rights reserved.
# SPDX-License-Identifier: Apache-2.0
# See the LICENSE file for details.
#
# This file is part of NVMe STorage Appliance Services (nvme-stas).
#
# Authors: Martin Belanger <Martin.Belanger@dell.com>
[MASTER]
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code
extension-pkg-whitelist=
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS
# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
ignore-patterns=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Use multiple processes to speed up Pylint.
jobs=1
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=
# Pickle collected data for later comparisons.
persistent=yes
# Specify a configuration file.
#rcfile=
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
confidence=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once).You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
disable=print-statement,
parameter-unpacking,
unpacking-in-except,
old-raise-syntax,
backtick,
long-suffix,
old-ne-operator,
old-octal-literal,
import-star-module-level,
raw-checker-failed,
bad-inline-option,
locally-disabled,
locally-enabled,
file-ignored,
suppressed-message,
useless-suppression,
deprecated-pragma,
apply-builtin,
basestring-builtin,
buffer-builtin,
cmp-builtin,
coerce-builtin,
execfile-builtin,
file-builtin,
long-builtin,
raw_input-builtin,
reduce-builtin,
standarderror-builtin,
unicode-builtin,
xrange-builtin,
coerce-method,
delslice-method,
getslice-method,
setslice-method,
no-absolute-import,
old-division,
dict-iter-method,
dict-view-method,
next-method-called,
metaclass-assignment,
indexing-exception,
raising-string,
reload-builtin,
oct-method,
hex-method,
nonzero-method,
cmp-method,
input-builtin,
round-builtin,
intern-builtin,
unichr-builtin,
map-builtin-not-iterating,
zip-builtin-not-iterating,
range-builtin-not-iterating,
filter-builtin-not-iterating,
using-cmp-argument,
eq-without-hash,
div-method,
idiv-method,
rdiv-method,
exception-message-attribute,
invalid-str-codec,
sys-max-int,
bad-python3-import,
deprecated-string-function,
deprecated-str-translate-call,
use-list-literal,
use-dict-literal,
bad-option-value,
R0801,
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable=
[REPORTS]
# Python expression which should return a score less than or equal to 10 (10
# is the highest score). You have access to the variables 'error', 'warning',
# 'refactor', 'convention' and 'statement', which respectively contain the
# number of messages in each category and the total number of statements
# analyzed. This is used by the global evaluation report (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details
#msg-template=
# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio).You can also give a reporter class, eg
# mypackage.mymodule.MyReporterClass.
output-format=text
# Tells whether to display a full report or only the messages
reports=no
# Activate the evaluation score.
score=yes
[REFACTORING]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
[TYPECHECK]
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference=yes
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis. It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint=yes
# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1
# The total number of similar names that should be taken in consideration when
# showing a hint for a missing member.
missing-member-max-choices=1
[SPELLING]
# Spelling dictionary name. Available dictionaries: none. To make it work,
# install the python-enchant package.
spelling-dict=
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to indicated private dictionary in
# --spelling-private-dict-file option instead of raising a message.
spelling-store-unknown-words=no
[VARIABLES]
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=
# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables=yes
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,_cb
# A regular expression matching the name of dummy variables (i.e. expectedly
# not used).
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_
# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*|^ignored_|^unused_
# Tells whether we should check for unused import in __init__ files.
init-import=no
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,future.builtins
[SIMILARITIES]
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
# Minimum lines number of a similarity.
min-similarity-lines=4
[FORMAT]
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
# Maximum number of characters on a single line.
max-line-length=200
# Maximum number of lines in a module
max-module-lines=2000
# List of optional constructs for which whitespace checking is disabled. `dict-
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
# `empty-line` allows space-only lines.
no-space-check=trailing-comma,dict-separator
# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt=no
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,XXX,TODO
[LOGGING]
# Logging modules to check that the string format arguments are in logging
# function parameter format
logging-modules=logging
[BASIC]
# Naming hint for argument names
argument-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
# Regular expression matching correct argument names
argument-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
# Naming hint for attribute names
attr-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
# Regular expression matching correct attribute names
attr-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
# Bad variable names which should always be refused, separated by a comma
bad-names=foo,bar,baz,toto,tutu,tata
# Naming hint for class attribute names
class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Regular expression matching correct class attribute names
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Naming hint for class names
class-name-hint=[A-Z_][a-zA-Z0-9]+$
# Regular expression matching correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$
# Naming hint for constant names
const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Regular expression matching correct constant names
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
# Naming hint for function names
function-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
# Regular expression matching correct function names
function-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
# Good variable names which should always be accepted, separated by a comma
good-names=i,j,k,ex,Run,_,op,ls,f,ip,id
# Include a hint for the correct naming format with invalid-name
include-naming-hint=no
# Naming hint for inline iteration names
inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
# Regular expression matching correct inline iteration names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
# Naming hint for method names
method-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
# Regular expression matching correct method names
method-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
# Naming hint for module names
module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Regular expression matching correct module names
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
property-classes=abc.abstractproperty
# Naming hint for variable names
variable-name-hint=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
# Regular expression matching correct variable names
variable-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$
[DESIGN]
# Maximum number of arguments for function / method
max-args=5
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Maximum number of boolean expressions in a if statement
max-bool-expr=5
# Maximum number of branch for function / method body
max-branches=12
# Maximum number of locals for function / method body
max-locals=15
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
# Maximum number of return / yield for function / method body
max-returns=6
# Maximum number of statements in function / method body
max-statements=50
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
[CLASSES]
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,_fields,_replace,_source,_make
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs
[IMPORTS]
# Allow wildcard imports from modules that define __all__.
allow-wildcard-with-all=no
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=regsub,TERMIOS,Bastion,rexec
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception

41
test/test-avahi.py Executable file
View file

@ -0,0 +1,41 @@
#!/usr/bin/python3
import shutil
import logging
import unittest
from staslib import avahi
import dasbus.connection
import subprocess
SYSTEMCTL = shutil.which('systemctl')
class Test(unittest.TestCase):
    '''Unit tests for class Avahi'''

    def test_new(self):
        '''Instantiate an Avahi object and exercise its main entry points.'''
        sysbus = dasbus.connection.SystemMessageBus()
        service = avahi.Avahi(sysbus, lambda: "ok")

        # A freshly created object reports an idle wake-up timer and no
        # discovered service types or services.
        self.assertEqual(service.info(), {'avahi wake up timer': '60.0s [off]', 'service types': [], 'services': {}})
        self.assertEqual(service.get_controllers(), [])

        try:
            # Check that the Avahi daemon is running
            subprocess.run([SYSTEMCTL, 'is-active', 'avahi-daemon.service'], check=True)
            self.assertFalse(service._on_kick_avahi())
        except subprocess.CalledProcessError:
            self.assertTrue(service._on_kick_avahi())

        # Availability callbacks must emit exactly one log record each.
        with self.assertLogs(logger=logging.getLogger(), level='INFO') as cm:
            service._avahi_available(None)
        self.assertEqual(len(cm.records), 1)
        self.assertEqual(cm.records[0].getMessage(), "avahi-daemon service available, zeroconf supported.")

        with self.assertLogs(logger=logging.getLogger(), level='WARN') as cm:
            service._avahi_unavailable(None)
        self.assertEqual(len(cm.records), 1)
        self.assertEqual(cm.records[0].getMessage(), "avahi-daemon not available, zeroconf not supported.")

        # After kill() the wake-up timer is gone.
        service.kill()
        self.assertEqual(service.info(), {'avahi wake up timer': 'None', 'service types': [], 'services': {}})


if __name__ == '__main__':
    unittest.main()

264
test/test-config.py Executable file
View file

@ -0,0 +1,264 @@
#!/usr/bin/python3
import os
import unittest
from staslib import conf
class StasProcessConfUnitTest(unittest.TestCase):
    '''Process config unit tests'''

    # Path of the temporary configuration file shared by all tests in this class.
    FNAME = '/tmp/stas-process-config-test'

    @classmethod
    def setUpClass(cls):
        '''Create a temporary configuration file'''
        data = [
            '[Global]\n',
            'tron=true\n',
            'kato=200\n',
            'ip-family=ipv6\n',
            '\n',
            '[I/O controller connection management]\n',
            'disconnect-scope = joe\n',
            'disconnect-trtypes = bob\n',
            'connect-attempts-on-ncc = 1\n',
            '\n',
            '[Controllers]\n',
            'controller=transport=tcp;traddr=100.100.100.100;host-iface=enp0s8\n',
            'controller=transport=tcp;traddr=100.100.100.200;host-iface=enp0s7;dhchap-ctrl-secret=super-secret;hdr-digest=true;data-digest=true;nr-io-queues=8;nr-write-queues=6;nr-poll-queues=4;queue-size=400;kato=71;reconnect-delay=13;ctrl-loss-tmo=666;disable-sqflow=true\n',
            'exclude=transport=tcp;traddr=10.10.10.10\n',
        ]
        with open(StasProcessConfUnitTest.FNAME, 'w') as f:  # pylint: disable=unspecified-encoding
            f.writelines(data)

    @classmethod
    def tearDownClass(cls):
        '''Delete the temporary configuration file'''
        if os.path.exists(StasProcessConfUnitTest.FNAME):
            os.remove(StasProcessConfUnitTest.FNAME)

    def test_config(self):
        '''Check we can read the temporary configuration file'''
        # Defaults used when an option is absent from the config file.
        default_conf = {
            ('Global', 'tron'): False,
            ('Global', 'hdr-digest'): False,
            ('Global', 'data-digest'): False,
            ('Global', 'kato'): None,  # None to let the driver decide the default
            ('Global', 'nr-io-queues'): None,  # None to let the driver decide the default
            ('Global', 'nr-write-queues'): None,  # None to let the driver decide the default
            ('Global', 'nr-poll-queues'): None,  # None to let the driver decide the default
            ('Global', 'queue-size'): None,  # None to let the driver decide the default
            ('Global', 'reconnect-delay'): None,  # None to let the driver decide the default
            ('Global', 'ctrl-loss-tmo'): None,  # None to let the driver decide the default
            ('Global', 'disable-sqflow'): None,  # None to let the driver decide the default
            ('Global', 'ignore-iface'): False,
            ('Global', 'ip-family'): (4, 6),
            ('Global', 'persistent-connections'): False,  # Deprecated
            ('Discovery controller connection management', 'persistent-connections'): True,
            ('Global', 'pleo'): True,
            ('Service Discovery', 'zeroconf'): True,
            ('Controllers', 'controller'): list(),
            ('Controllers', 'exclude'): list(),
            ('I/O controller connection management', 'disconnect-scope'): 'only-stas-connections',
            ('I/O controller connection management', 'disconnect-trtypes'): ['tcp'],
            ('I/O controller connection management', 'connect-attempts-on-ncc'): 0,
        }
        service_conf = conf.SvcConf(default_conf=default_conf)
        service_conf.set_conf_file(StasProcessConfUnitTest.FNAME)
        self.assertEqual(service_conf.conf_file, StasProcessConfUnitTest.FNAME)
        self.assertTrue(service_conf.tron)
        self.assertTrue(getattr(service_conf, 'tron'))
        self.assertFalse(service_conf.hdr_digest)
        self.assertFalse(service_conf.data_digest)
        self.assertTrue(service_conf.persistent_connections)
        self.assertTrue(service_conf.pleo_enabled)
        # The file contains invalid values for these two options ("joe"
        # and "bob"); the defaults are expected to be used instead.
        self.assertEqual(service_conf.disconnect_scope, 'only-stas-connections')
        self.assertEqual(service_conf.disconnect_trtypes, ['tcp'])
        self.assertFalse(service_conf.ignore_iface)
        self.assertIn(6, service_conf.ip_family)
        self.assertNotIn(4, service_conf.ip_family)
        self.assertEqual(service_conf.kato, 200)
        self.assertEqual(
            service_conf.get_controllers(),
            [
                {
                    'transport': 'tcp',
                    'traddr': '100.100.100.100',
                    'host-iface': 'enp0s8',
                },
                {
                    'transport': 'tcp',
                    'traddr': '100.100.100.200',
                    'host-iface': 'enp0s7',
                    'dhchap-ctrl-secret': 'super-secret',
                    'hdr-digest': True,
                    'data-digest': True,
                    'nr-io-queues': 8,
                    'nr-write-queues': 6,
                    'nr-poll-queues': 4,
                    'queue-size': 400,
                    'kato': 71,
                    'reconnect-delay': 13,
                    'ctrl-loss-tmo': 666,
                    'disable-sqflow': True,
                },
            ],
        )
        self.assertEqual(service_conf.get_excluded(), [{'transport': 'tcp', 'traddr': '10.10.10.10'}])
        stypes = service_conf.stypes
        self.assertIn('_nvme-disc._tcp', stypes)
        self.assertTrue(service_conf.zeroconf_enabled)
        # NOTE(review): the file sets connect-attempts-on-ncc=1 but 2 is
        # expected here — presumably out-of-range values get clamped by
        # conf.SvcConf; confirm against the SvcConf implementation.
        self.assertEqual(service_conf.connect_attempts_on_ncc, 2)
        # Rewrite the file and reload: a non-numeric connect-attempts-on-ncc
        # must fall back to the default (0).
        data = [
            '[I/O controller connection management]\n',
            'disconnect-trtypes = tcp+rdma+fc\n',
            'connect-attempts-on-ncc = hello\n',
        ]
        with open(StasProcessConfUnitTest.FNAME, 'w') as f:  # pylint: disable=unspecified-encoding
            f.writelines(data)
        service_conf.reload()
        self.assertEqual(service_conf.connect_attempts_on_ncc, 0)
        self.assertEqual(set(service_conf.disconnect_trtypes), set(['fc', 'tcp', 'rdma']))
        # ip-family=ipv4 -> only IPv4 enabled.
        data = [
            '[Global]\n',
            'ip-family=ipv4\n',
        ]
        with open(StasProcessConfUnitTest.FNAME, 'w') as f:  # pylint: disable=unspecified-encoding
            f.writelines(data)
        service_conf.reload()
        self.assertIn(4, service_conf.ip_family)
        self.assertNotIn(6, service_conf.ip_family)
        # ip-family=ipv4+ipv6 -> both families enabled.
        data = [
            '[Global]\n',
            'ip-family=ipv4+ipv6\n',
        ]
        with open(StasProcessConfUnitTest.FNAME, 'w') as f:  # pylint: disable=unspecified-encoding
            f.writelines(data)
        service_conf.reload()
        self.assertIn(4, service_conf.ip_family)
        self.assertIn(6, service_conf.ip_family)
        # ip-family=ipv6+ipv4 -> order of families does not matter.
        data = [
            '[Global]\n',
            'ip-family=ipv6+ipv4\n',
        ]
        with open(StasProcessConfUnitTest.FNAME, 'w') as f:  # pylint: disable=unspecified-encoding
            f.writelines(data)
        service_conf.reload()
        self.assertIn(4, service_conf.ip_family)
        self.assertIn(6, service_conf.ip_family)
        # Asking for an unknown section/option must raise KeyError.
        self.assertRaises(KeyError, service_conf.get_option, 'Babylon', 5)
class StasSysConfUnitTest(unittest.TestCase):
    '''Sys config unit tests'''

    # One temporary configuration file per test scenario.
    FNAME_1 = '/tmp/stas-sys-config-test-1'
    FNAME_2 = '/tmp/stas-sys-config-test-2'
    FNAME_3 = '/tmp/stas-sys-config-test-3'
    FNAME_4 = '/tmp/stas-sys-config-test-4'
    NQN = 'nqn.2014-08.org.nvmexpress:uuid:9aae2691-b275-4b64-8bfe-5da429a2bab9'
    ID = '56529e15-0f3e-4ede-87e2-63932a4adb99'
    KEY = 'DHHC-1:03:qwertyuioplkjhgfdsazxcvbnm0123456789QWERTYUIOPLKJHGFDSAZXCVBNM010101010101010101010101010101:'
    SYMNAME = 'Bart-Simpson'

    # Contents written to each file by setUpClass(). FNAME_1 is fully valid,
    # FNAME_2 reads the NQN from /dev/null, FNAME_3 has a malformed NQN
    # ("qnq." prefix), and FNAME_4 points file:// indirections at
    # non-existing files.
    DATA = {
        FNAME_1: [
            '[Host]\n',
            f'nqn={NQN}\n',
            f'id={ID}\n',
            f'key={KEY}\n',
            f'symname={SYMNAME}\n',
        ],
        FNAME_2: [
            '[Host]\n',
            'nqn=file:///dev/null\n',
        ],
        FNAME_3: [
            '[Host]\n',
            'nqn=qnq.2014-08.org.nvmexpress:uuid:9aae2691-b275-4b64-8bfe-5da429a2bab9\n',
            f'id={ID}\n',
        ],
        FNAME_4: [
            '[Host]\n',
            'nqn=file:///some/non/exisiting/file/!@#\n',
            'id=file:///some/non/exisiting/file/!@#\n',
            'symname=file:///some/non/exisiting/file/!@#\n',
        ],
    }

    @classmethod
    def setUpClass(cls):
        '''Create a temporary configuration file'''
        for file, data in StasSysConfUnitTest.DATA.items():
            with open(file, 'w') as f:  # pylint: disable=unspecified-encoding
                f.writelines(data)

    @classmethod
    def tearDownClass(cls):
        '''Delete the temporary configuration file'''
        for file in StasSysConfUnitTest.DATA.keys():
            if os.path.exists(file):
                os.remove(file)

    def test_config_1(self):
        '''Check we can read the temporary configuration file'''
        system_conf = conf.SysConf()
        system_conf.set_conf_file(StasSysConfUnitTest.FNAME_1)
        self.assertEqual(system_conf.conf_file, StasSysConfUnitTest.FNAME_1)
        self.assertEqual(system_conf.hostnqn, StasSysConfUnitTest.NQN)
        self.assertEqual(system_conf.hostid, StasSysConfUnitTest.ID)
        self.assertEqual(system_conf.hostsymname, StasSysConfUnitTest.SYMNAME)
        self.assertEqual(
            system_conf.as_dict(),
            {
                'hostnqn': StasSysConfUnitTest.NQN,
                'hostid': StasSysConfUnitTest.ID,
                'hostkey': StasSysConfUnitTest.KEY,
                'symname': StasSysConfUnitTest.SYMNAME,
            },
        )

    def test_config_2(self):
        '''Check we can read from /dev/null or missing 'id' definition'''
        system_conf = conf.SysConf()
        system_conf.set_conf_file(StasSysConfUnitTest.FNAME_2)
        self.assertEqual(system_conf.conf_file, StasSysConfUnitTest.FNAME_2)
        self.assertIsNone(system_conf.hostnqn)
        self.assertIsNone(system_conf.hostsymname)

    def test_config_3(self):
        '''Check we can read an invalid NQN string'''
        system_conf = conf.SysConf()
        system_conf.set_conf_file(StasSysConfUnitTest.FNAME_3)
        self.assertEqual(system_conf.conf_file, StasSysConfUnitTest.FNAME_3)
        # A malformed NQN makes the hostnqn property exit the process.
        self.assertRaises(SystemExit, lambda: system_conf.hostnqn)
        self.assertEqual(system_conf.hostid, StasSysConfUnitTest.ID)
        self.assertIsNone(system_conf.hostsymname)

    def test_config_4(self):
        '''Check we can read the temporary configuration file'''
        system_conf = conf.SysConf()
        system_conf.set_conf_file(StasSysConfUnitTest.FNAME_4)
        self.assertEqual(system_conf.conf_file, StasSysConfUnitTest.FNAME_4)
        # file:// indirections pointing at non-existing files make the
        # corresponding properties exit the process.
        self.assertRaises(SystemExit, lambda: system_conf.hostnqn)
        self.assertRaises(SystemExit, lambda: system_conf.hostid)
        self.assertIsNone(system_conf.hostsymname)

    def test_config_missing_file(self):
        '''Check what happens when conf file is missing'''
        system_conf = conf.SysConf()
        system_conf.set_conf_file('/just/some/ramdom/file/name')
        self.assertIsNone(system_conf.hostsymname)


if __name__ == '__main__':
    unittest.main()

302
test/test-controller.py Executable file
View file

@ -0,0 +1,302 @@
#!/usr/bin/python3
import logging
import unittest
from staslib import conf, ctrl, timeparse, trid
from pyfakefs.fake_filesystem_unittest import TestCase
class TestController(ctrl.Controller):
    '''Concrete ctrl.Controller subclass that stubs out the abstract hooks
    so the base class can be instantiated in unit tests.'''

    def _find_existing_connection(self):
        pass

    def _on_aen(self, aen: int):
        pass

    def _on_nvme_event(self, nvme_event):
        pass

    def reload_hdlr(self):
        pass
class TestDc(ctrl.Dc):
    '''Concrete ctrl.Dc subclass that replaces the underlying nvme controller
    object with a local fake so no real NVMe connection is required.'''

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Simulated connection state, toggled via set_connected().
        self._connected = True

        class Ctrl:
            # Fake libnvme controller object.
            # NOTE(review): these methods use "this" as the instance
            # parameter, so the "self" referenced in connected() is captured
            # by closure from the enclosing __init__ and refers to the
            # TestDc instance (i.e. it returns TestDc._connected) — confirm
            # this indirection is intended.
            def __init__(this):
                this.name = 'nvme666'

            def connected(this):
                return self._connected

            def disconnect(this):
                pass

        self._ctrl = Ctrl()

    def _find_existing_connection(self):
        pass

    def _on_aen(self, aen: int):
        pass

    def _on_nvme_event(self, nvme_event):
        pass

    def reload_hdlr(self):
        pass

    def set_connected(self, value):
        # Let tests flip the simulated connection state.
        self._connected = value

    def connected(self):
        return self._connected
class TestStaf:
    '''Bare-bones stand-in for the STAF service object.

    Provides only the members that Controller/Dc objects consult on
    their owning service during the tests.
    '''

    @property
    def tron(self):
        '''Trace flag consulted by controllers; always on for tests.'''
        return True

    def is_avahi_reported(self, tid):
        '''Pretend the transport ID was never reported by Avahi.'''
        return False

    def controller_unresponsive(self, tid):
        '''No-op: the fake service does not track unresponsive controllers.'''
        return None
stafd_conf_1 = '''
[Global]
tron=false
hdr-digest=false
data-digest=false
kato=30
queue-size=128
reconnect-delay=10
ctrl-loss-tmo=600
disable-sqflow=false
ignore-iface=false
ip-family=ipv4+ipv6
pleo=enabled
[Service Discovery]
zeroconf=enabled
[Discovery controller connection management]
persistent-connections=true
zeroconf-connections-persistence=10 seconds
'''
# Minimal stafd configuration with a negative zeroconf persistence value,
# used to exercise the "keep retrying" code path in test_dc().
stafd_conf_2 = '''
[Discovery controller connection management]
zeroconf-connections-persistence=-1
'''
class Test(TestCase):
    '''Unit tests for class Controller'''

    def setUp(self):
        # pyfakefs replaces the real filesystem so the tests can fabricate
        # /etc/nvme/* and /dev/nvme-fabrics without touching the host.
        self.setUpPyfakefs()

        self.fs.create_file(
            '/etc/nvme/hostnqn', contents='nqn.2014-08.org.nvmexpress:uuid:01234567-0123-0123-0123-0123456789ab\n'
        )
        self.fs.create_file('/etc/nvme/hostid', contents='01234567-89ab-cdef-0123-456789abcdef\n')
        # Fake /dev/nvme-fabrics content listing every kernel option so that
        # option detection (conf.NvmeOptions) sees full feature support.
        self.fs.create_file(
            '/dev/nvme-fabrics',
            contents='instance=-1,cntlid=-1,transport=%s,traddr=%s,trsvcid=%s,nqn=%s,queue_size=%d,nr_io_queues=%d,reconnect_delay=%d,ctrl_loss_tmo=%d,keep_alive_tmo=%d,hostnqn=%s,host_traddr=%s,host_iface=%s,hostid=%s,disable_sqflow,hdr_digest,data_digest,nr_write_queues=%d,nr_poll_queues=%d,tos=%d,fast_io_fail_tmo=%d,discovery,dhchap_secret=%s,dhchap_ctrl_secret=%s\n',
        )

        # Transport ID shared by all tests below.
        self.NVME_TID = trid.TID(
            {
                'transport': 'tcp',
                'traddr': '10.10.10.10',
                'subsysnqn': 'nqn.1988-11.com.dell:SFSS:2:20220208134025e8',
                'trsvcid': '8009',
                'host-traddr': '1.2.3.4',
                'host-iface': 'wlp0s20f3',
            }
        )

        default_conf = {
            ('Global', 'tron'): False,
            ('Global', 'hdr-digest'): False,
            ('Global', 'data-digest'): False,
            ('Global', 'kato'): None,  # None to let the driver decide the default
            ('Global', 'queue-size'): None,  # None to let the driver decide the default
            ('Global', 'reconnect-delay'): None,  # None to let the driver decide the default
            ('Global', 'ctrl-loss-tmo'): None,  # None to let the driver decide the default
            ('Global', 'disable-sqflow'): None,  # None to let the driver decide the default
            ('Global', 'persistent-connections'): True,
            ('Discovery controller connection management', 'persistent-connections'): True,
            ('Discovery controller connection management', 'zeroconf-connections-persistence'): timeparse.timeparse(
                '72hours'
            ),
            ('Global', 'ignore-iface'): False,
            ('Global', 'ip-family'): (4, 6),
            ('Global', 'pleo'): True,
            ('Service Discovery', 'zeroconf'): True,
            ('Controllers', 'controller'): list(),
            ('Controllers', 'exclude'): list(),
        }

        # Two on-(fake-)disk config files: the "normal" one and the
        # negative-persistence one (see stafd_conf_1/stafd_conf_2 above).
        self.stafd_conf_file1 = '/etc/stas/stafd1.conf'
        self.fs.create_file(self.stafd_conf_file1, contents=stafd_conf_1)

        self.stafd_conf_file2 = '/etc/stas/stafd2.conf'
        self.fs.create_file(self.stafd_conf_file2, contents=stafd_conf_2)

        self.svcconf = conf.SvcConf(default_conf=default_conf)
        self.svcconf.set_conf_file(self.stafd_conf_file1)

    def tearDown(self):
        # pyfakefs tears itself down automatically.
        pass

    def test_cannot_instantiate_concrete_classes_if_abstract_method_are_not_implemented(self):
        # Make sure we can't instantiate the ABC directly (Abstract Base Class).
        class Controller(ctrl.Controller):
            pass

        # NOTE(review): the local Controller subclass above is never used;
        # the lambda instantiates ctrl.Controller directly. Both raise
        # TypeError, but the sibling test-service.py instantiates its local
        # subclass -- confirm which was intended.
        self.assertRaises(TypeError, lambda: ctrl.Controller(tid=self.NVME_TID))

    def test_get_device(self):
        '''Exercise the accessors of a Controller after one connect attempt'''
        controller = TestController(tid=self.NVME_TID, service=TestStaf())
        self.assertEqual(controller._connect_attempts, 0)
        controller._try_to_connect()
        self.assertEqual(controller._connect_attempts, 1)
        self.assertEqual(
            controller.id, "(tcp, 10.10.10.10, 8009, nqn.1988-11.com.dell:SFSS:2:20220208134025e8, wlp0s20f3, 1.2.3.4)"
        )
        # raise Exception(controller._connect_op)
        self.assertEqual(
            str(controller.tid),
            "(tcp, 10.10.10.10, 8009, nqn.1988-11.com.dell:SFSS:2:20220208134025e8, wlp0s20f3, 1.2.3.4)",
        )
        # Device name is the 'nvme?' placeholder -- presumably because no
        # real kernel device exists under pyfakefs (TODO confirm).
        self.assertEqual(controller.device, 'nvme?')
        self.assertEqual(
            controller.controller_id_dict(),
            {
                'transport': 'tcp',
                'traddr': '10.10.10.10',
                'trsvcid': '8009',
                'host-traddr': '1.2.3.4',
                'host-iface': 'wlp0s20f3',
                'subsysnqn': 'nqn.1988-11.com.dell:SFSS:2:20220208134025e8',
                'device': 'nvme?',
            },
        )
        self.assertEqual(
            controller.info(),
            {
                'transport': 'tcp',
                'traddr': '10.10.10.10',
                'subsysnqn': 'nqn.1988-11.com.dell:SFSS:2:20220208134025e8',
                'trsvcid': '8009',
                'host-traddr': '1.2.3.4',
                'host-iface': 'wlp0s20f3',
                'device': 'nvme?',
                'connect attempts': '1',
                'retry connect timer': '60.0s [off]',
                'connect operation': "{'fail count': 0, 'completed': False, 'alive': True}",
            },
        )
        self.assertEqual(
            controller.details(),
            {
                'dctype': '',
                'cntrltype': '',
                'connected': 'False',
                'transport': 'tcp',
                'traddr': '10.10.10.10',
                'trsvcid': '8009',
                'host-traddr': '1.2.3.4',
                'host-iface': 'wlp0s20f3',
                'subsysnqn': 'nqn.1988-11.com.dell:SFSS:2:20220208134025e8',
                'device': 'nvme?',
                'connect attempts': '1',
                'retry connect timer': '60.0s [off]',
                'hostid': '',
                'hostnqn': '',
                'model': '',
                'serial': '',
                'connect operation': "{'fail count': 0, 'completed': False, 'alive': True}",
            },
        )

        # print(controller._connect_op)

        # cancel()/kill()/disconnect() are fire-and-forget; they return None.
        self.assertEqual(controller.cancel(), None)
        self.assertEqual(controller.kill(), None)
        self.assertIsNone(controller.disconnect(lambda *args: None, True))

    def test_connect(self):
        '''_try_to_connect() emits a DEBUG log describing the attempt'''
        controller = TestController(tid=self.NVME_TID, service=TestStaf())
        self.assertEqual(controller._connect_attempts, 0)
        controller._find_existing_connection = lambda: None
        with self.assertLogs(logger=logging.getLogger(), level='DEBUG') as captured:
            controller._try_to_connect()
        self.assertTrue(len(captured.records) > 0)
        self.assertTrue(
            captured.records[0]
            .getMessage()
            .startswith(
                "Controller._do_connect() - (tcp, 10.10.10.10, 8009, nqn.1988-11.com.dell:SFSS:2:20220208134025e8, wlp0s20f3, 1.2.3.4) Connecting to nvme control with cfg={"
            )
        )
        self.assertEqual(controller._connect_attempts, 1)

    def test_dlp_supp_opts_as_string(self):
        '''All three supported-options bits (0x7) decode to their names'''
        dlp_supp_opts = 0x7
        opts = ctrl.dlp_supp_opts_as_string(dlp_supp_opts)
        self.assertEqual(['EXTDLPES', 'PLEOS', 'ALLSUBES'], opts)

    def test_ncc(self):
        '''get_ncc() is truthy when eflags is set in the DLPE, falsy when absent'''
        dlpe = {'eflags': '4'}
        ncc = ctrl.get_ncc(ctrl.get_eflags(dlpe))
        self.assertTrue(ncc)

        dlpe = {}
        ncc = ctrl.get_ncc(ctrl.get_eflags(dlpe))
        self.assertFalse(ncc)

    def test_dc(self):
        '''Exercise Dc origin transitions and disconnect() logging'''
        self.svcconf.set_conf_file(self.stafd_conf_file1)

        controller = TestDc(TestStaf(), tid=self.NVME_TID)
        controller.set_connected(True)
        controller.origin = 'discovered'
        # Invalid origin values are rejected with a single log message.
        with self.assertLogs(logger=logging.getLogger(), level='DEBUG') as captured:
            controller.origin = 'blah'
        self.assertEqual(len(captured.records), 1)
        self.assertNotEqual(-1, captured.records[0].getMessage().find("Trying to set invalid origin to blah"))

        # Disconnected + finite persistence: controller scheduled for removal.
        controller.set_connected(False)
        with self.assertLogs(logger=logging.getLogger(), level='DEBUG') as captured:
            controller.origin = 'discovered'
        self.assertEqual(len(captured.records), 1)
        self.assertNotEqual(
            -1, captured.records[0].getMessage().find("Controller is not responding. Will be removed by")
        )

        # stafd_conf_file2 sets zeroconf-connections-persistence=-1, which
        # switches the behavior to retrying instead of removing.
        self.svcconf.set_conf_file(self.stafd_conf_file2)
        with self.assertLogs(logger=logging.getLogger(), level='DEBUG') as captured:
            controller.origin = 'discovered'
        self.assertEqual(len(captured.records), 1)
        self.assertNotEqual(-1, captured.records[0].getMessage().find("Controller not responding. Retrying..."))

        # disconnect() logs the request and the initiation (fake ctrl is
        # named 'nvme666' -- see TestDc).
        controller.set_connected(True)
        with self.assertLogs(logger=logging.getLogger(), level='DEBUG') as captured:
            controller.disconnect(lambda *args: None, keep_connection=False)
        self.assertEqual(len(captured.records), 2)
        self.assertNotEqual(-1, captured.records[0].getMessage().find("nvme666: keep_connection=False"))
        self.assertNotEqual(-1, captured.records[1].getMessage().find("nvme666 - Disconnect initiated"))

    # def test_disconnect(self):


if __name__ == '__main__':
    unittest.main()

43
test/test-gtimer.py Executable file
View file

@ -0,0 +1,43 @@
#!/usr/bin/python3
import unittest
from staslib import gutil
class Test(unittest.TestCase):
    '''Unit tests for class GTimer'''

    def test_new_timer(self):
        '''A new timer is off; interval is reported and can be changed'''
        tmr = gutil.GTimer(interval_sec=5)
        self.assertEqual(tmr.get_timeout(), 5)
        # Not started yet -> no time remaining and "[off]" in the repr.
        self.assertEqual(tmr.time_remaining(), 0)
        self.assertEqual(str(tmr), '5.0s [off]')

        tmr.set_timeout(new_interval_sec=18)
        self.assertEqual(tmr.get_timeout(), 18)
        self.assertEqual(tmr.time_remaining(), 0)

    def test_callback(self):
        '''set_callback() replaces the user callback; kill() clears it'''
        tmr = gutil.GTimer(interval_sec=1, user_cback=lambda: "ok")
        self.assertEqual(tmr._callback(), "ok")
        tmr.set_callback(user_cback=lambda: "notok")
        self.assertEqual(tmr._callback(), "notok")
        tmr.kill()
        # After kill() the callback is None, so invoking it raises TypeError.
        self.assertEqual(tmr._user_cback, None)
        self.assertRaises(TypeError, tmr._user_cback)

    def test_start_timer(self):
        '''start() arms the timer: time remaining and repr change'''
        tmr = gutil.GTimer(interval_sec=1, user_cback=lambda: "ok")
        self.assertEqual(str(tmr), '1.0s [off]')
        tmr.start()
        self.assertNotEqual(tmr.time_remaining(), 0)
        self.assertNotEqual(str(tmr), '1.0s [off]')

    def test_clear(self):
        '''clear() disarms a started timer and zeroes the time remaining'''
        tmr = gutil.GTimer(interval_sec=1, user_cback=lambda: "ok")
        tmr.start()
        tmr.clear()
        self.assertEqual(tmr.time_remaining(), 0)
        self.assertEqual(str(tmr), '1.0s [0s]')


if __name__ == '__main__':
    unittest.main()

66
test/test-iputil.py Executable file
View file

@ -0,0 +1,66 @@
#!/usr/bin/python3
import json
import shutil
import logging
import unittest
import ipaddress
import subprocess
from staslib import iputil, log, trid
IP = shutil.which('ip')
class Test(unittest.TestCase):
    '''iputil.py unit tests'''

    def setUp(self):
        log.init(syslog=False)
        self.logger = logging.getLogger()
        self.logger.setLevel(logging.INFO)

        # Retrieve the list of Interfaces and all the associated IP addresses
        # using standard bash utility (ip address). We'll use this to make sure
        # iputil.get_interface() returns the same data as "ip address".
        try:
            cmd = [IP, '-j', 'address', 'show']
            p = subprocess.run(cmd, stdout=subprocess.PIPE, check=True)
            self.ifaces = json.loads(p.stdout.decode().strip())
        except subprocess.CalledProcessError:
            # "ip" failed (e.g. restricted environment): run with no interfaces.
            self.ifaces = []

    def test_get_interface(self):
        '''Check that get_interface() returns the right info'''
        for iface in self.ifaces:
            for addr_entry in iface['addr_info']:
                addr = ipaddress.ip_address(addr_entry['local'])
                # Link local addresses may appear on more than one interface and therefore cannot be used.
                if not addr.is_link_local:
                    self.assertEqual(iface['ifname'], iputil.get_interface(str(addr)))

        # An address assigned to no interface maps to the empty string.
        self.assertEqual('', iputil.get_interface('255.255.255.255'))

    def test_remove_invalid_addresses(self):
        '''Bad TCP addresses and unknown transports are filtered out'''
        good_tcp = trid.TID({'transport': 'tcp', 'traddr': '1.1.1.1', 'subsysnqn': '', 'trsvcid': '8009'})
        bad_tcp = trid.TID({'transport': 'tcp', 'traddr': '555.555.555.555', 'subsysnqn': '', 'trsvcid': '8009'})
        any_fc = trid.TID({'transport': 'fc', 'traddr': 'blah', 'subsysnqn': ''})
        bad_trtype = trid.TID({'transport': 'whatever', 'traddr': 'blah', 'subsysnqn': ''})

        l1 = [
            good_tcp,
            bad_tcp,
            any_fc,
            bad_trtype,
        ]
        l2 = iputil.remove_invalid_addresses(l1)

        self.assertNotEqual(l1, l2)

        self.assertIn(good_tcp, l2)
        self.assertIn(any_fc, l2)  # We currently don't check for invalid FC (all FCs are allowed)
        self.assertNotIn(bad_tcp, l2)
        self.assertNotIn(bad_trtype, l2)


if __name__ == "__main__":
    unittest.main()

101
test/test-log.py Executable file
View file

@ -0,0 +1,101 @@
#!/usr/bin/python3
import logging
import unittest
from pyfakefs.fake_filesystem_unittest import TestCase
from staslib import log
class StaslibLogTest(TestCase):
    '''Test for log.py module'''

    def setUp(self):
        # Fake filesystem so log.init() cannot touch real files/sockets.
        self.setUpPyfakefs()

    def test_log_with_systemd_journal(self):
        '''Check that we can set the handler to systemd.journal.JournalHandler'''
        try:
            # We can't proceed with this test if the
            # module systemd.journal is not installed.
            import systemd.journal  # pylint: disable=import-outside-toplevel
        except ModuleNotFoundError:
            return

        log.init(syslog=True)

        logger = logging.getLogger()
        handler = logger.handlers[-1]
        self.assertIsInstance(handler, systemd.journal.JournalHandler)

        # tron toggles the log level between DEBUG (on) and INFO (off).
        self.assertEqual(log.level(), 'INFO')
        log.set_level_from_tron(tron=True)
        self.assertEqual(log.level(), 'DEBUG')
        log.set_level_from_tron(tron=False)
        self.assertEqual(log.level(), 'INFO')

        # Detach and close the handler so it does not leak into other tests.
        logger.removeHandler(handler)
        handler.close()

    def test_log_with_syslog_handler(self):
        '''Check that we can set the handler to logging.handlers.SysLogHandler'''
        try:
            # The log.py module uses systemd.journal.JournalHandler() as the
            # default logging handler (if present). Therefore, in order to force
            # log.py to use SysLogHandler as the handler, we need to mock
            # systemd.journal.JournalHandler() with an invalid class.
            import systemd.journal  # pylint: disable=import-outside-toplevel
        except ModuleNotFoundError:
            original_handler = None
        else:

            class MockJournalHandler:
                # Constructing the mock fails, forcing the SysLogHandler path.
                def __new__(cls, *args, **kwargs):
                    raise ModuleNotFoundError

            original_handler = systemd.journal.JournalHandler
            systemd.journal.JournalHandler = MockJournalHandler

        log.init(syslog=True)

        logger = logging.getLogger()
        handler = logger.handlers[-1]
        self.assertIsInstance(handler, logging.handlers.SysLogHandler)

        self.assertEqual(log.level(), 'INFO')
        log.set_level_from_tron(tron=True)
        self.assertEqual(log.level(), 'DEBUG')
        log.set_level_from_tron(tron=False)
        self.assertEqual(log.level(), 'INFO')

        logger.removeHandler(handler)
        handler.close()

        if original_handler is not None:
            # Restore original systemd.journal.JournalHandler()
            systemd.journal.JournalHandler = original_handler

    def test_log_with_stdout(self):
        '''Check that we can set the handler to logging.StreamHandler (i.e. stdout)'''
        log.init(syslog=False)

        logger = logging.getLogger()
        handler = logger.handlers[-1]
        self.assertIsInstance(handler, logging.StreamHandler)

        # With syslog=False the initial level is DEBUG (unlike syslog=True).
        self.assertEqual(log.level(), 'DEBUG')
        log.set_level_from_tron(tron=True)
        self.assertEqual(log.level(), 'DEBUG')
        log.set_level_from_tron(tron=False)
        self.assertEqual(log.level(), 'INFO')

        logger.removeHandler(handler)
        handler.close()


if __name__ == '__main__':
    unittest.main()

61
test/test-nvme_options.py Executable file
View file

@ -0,0 +1,61 @@
#!/usr/bin/python3
import os
import logging
import unittest
from staslib import conf, log
from pyfakefs.fake_filesystem_unittest import TestCase
class Test(TestCase):
    """Unit tests for class NvmeOptions"""

    def setUp(self):
        # Fake filesystem: tests fabricate /dev/nvme-fabrics themselves.
        self.setUpPyfakefs()

        log.init(syslog=False)
        self.logger = logging.getLogger()
        self.logger.setLevel(logging.INFO)

    def tearDown(self):
        # No longer need self.tearDownPyfakefs()
        pass

    def test_fabrics_empty_file(self):
        '''An empty /dev/nvme-fabrics still yields boolean capability flags'''
        self.assertFalse(os.path.exists("/dev/nvme-fabrics"))
        # TODO: this is a bug
        self.fs.create_file("/dev/nvme-fabrics")
        self.assertTrue(os.path.exists('/dev/nvme-fabrics'))
        nvme_options = conf.NvmeOptions()
        self.assertIsInstance(nvme_options.discovery_supp, bool)
        self.assertIsInstance(nvme_options.host_iface_supp, bool)
        del nvme_options

    def test_fabrics_wrong_file(self):
        '''Unparsable /dev/nvme-fabrics content still yields boolean flags'''
        self.assertFalse(os.path.exists("/dev/nvme-fabrics"))
        self.fs.create_file("/dev/nvme-fabrics", contents="blah")
        self.assertTrue(os.path.exists('/dev/nvme-fabrics'))
        nvme_options = conf.NvmeOptions()
        self.assertIsInstance(nvme_options.discovery_supp, bool)
        self.assertIsInstance(nvme_options.host_iface_supp, bool)
        del nvme_options

    def test_fabrics_correct_file(self):
        '''Options listed in /dev/nvme-fabrics are detected as supported'''
        self.assertFalse(os.path.exists("/dev/nvme-fabrics"))
        self.fs.create_file(
            '/dev/nvme-fabrics', contents='host_iface=%s,discovery,dhchap_secret=%s,dhchap_ctrl_secret=%s\n'
        )
        self.assertTrue(os.path.exists('/dev/nvme-fabrics'))
        nvme_options = conf.NvmeOptions()
        self.assertTrue(nvme_options.discovery_supp)
        self.assertTrue(nvme_options.host_iface_supp)
        self.assertTrue(nvme_options.dhchap_hostkey_supp)
        self.assertTrue(nvme_options.dhchap_ctrlkey_supp)
        self.assertEqual(
            nvme_options.get(),
            {'discovery': True, 'host_iface': True, 'dhchap_secret': True, 'dhchap_ctrl_secret': True},
        )
        self.assertTrue(str(nvme_options).startswith("supported options:"))
        del nvme_options


if __name__ == "__main__":
    unittest.main()

71
test/test-service.py Executable file
View file

@ -0,0 +1,71 @@
#!/usr/bin/python3
import os
import unittest
from staslib import service
from pyfakefs.fake_filesystem_unittest import TestCase
class Args:
    '''Stand-in for the parsed command-line arguments handed to Service.'''

    def __init__(self):
        # Tracing and syslog enabled; no real configuration file.
        self.tron, self.syslog = True, True
        self.conf_file = '/dev/null'
class TestService(service.Service):
    '''Concrete service.Service with every abstract hook stubbed out.'''

    def _config_ctrls_finish(self, configured_ctrl_list):
        '''No-op: nothing to reconcile in the tests.'''
        return None

    def _dump_last_known_config(self, controllers):
        '''No-op: do not persist anything.'''
        return None

    def _keep_connections_on_exit(self):
        '''No-op: connection retention is irrelevant here.'''
        return None

    def _load_last_known_config(self):
        '''Pretend there is no saved configuration.'''
        return {}
class Test(TestCase):
    '''Unit tests for class Service'''

    def setUp(self):
        # Fake filesystem with the identity files and fabrics device the
        # service reads at startup.
        self.setUpPyfakefs()

        # RUNTIME_DIRECTORY is read by the service at startup -- presumably
        # the systemd-provided runtime dir; confirm against service.py.
        os.environ['RUNTIME_DIRECTORY'] = "/run"

        self.fs.create_file(
            '/etc/nvme/hostnqn', contents='nqn.2014-08.org.nvmexpress:uuid:01234567-0123-0123-0123-0123456789ab\n'
        )
        self.fs.create_file('/etc/nvme/hostid', contents='01234567-89ab-cdef-0123-456789abcdef\n')
        self.fs.create_file(
            '/dev/nvme-fabrics',
            contents='instance=-1,cntlid=-1,transport=%s,traddr=%s,trsvcid=%s,nqn=%s,queue_size=%d,nr_io_queues=%d,reconnect_delay=%d,ctrl_loss_tmo=%d,keep_alive_tmo=%d,hostnqn=%s,host_traddr=%s,host_iface=%s,hostid=%s,disable_sqflow,hdr_digest,data_digest,nr_write_queues=%d,nr_poll_queues=%d,tos=%d,fast_io_fail_tmo=%d,discovery,dhchap_secret=%s,dhchap_ctrl_secret=%s\n',
        )

    def test_cannot_instantiate_concrete_classes_if_abstract_method_are_not_implemented(self):
        # Make sure we can't instantiate the ABC directly (Abstract Base Class).
        class Service(service.Service):
            pass

        self.assertRaises(TypeError, lambda: Service(Args(), reload_hdlr=lambda x: x))

    def test_get_controller(self):
        '''A fresh service has no controllers; lookups/removals are no-ops'''
        srv = TestService(Args(), default_conf={}, reload_hdlr=lambda x: x)
        self.assertEqual(list(srv.get_controllers()), list())
        self.assertEqual(
            srv.get_controller(
                transport='tcp',
                traddr='10.10.10.10',
                trsvcid='8009',
                host_traddr='1.2.3.4',
                host_iface='wlp0s20f3',
                subsysnqn='nqn.1988-11.com.dell:SFSS:2:20220208134025e8',
            ),
            None,
        )
        self.assertEqual(srv.remove_controller(controller=None, success=True), None)


if __name__ == '__main__':
    unittest.main()

32
test/test-timeparse.py Executable file
View file

@ -0,0 +1,32 @@
#!/usr/bin/python3
import unittest
from staslib import timeparse
class StasTimeparseUnitTest(unittest.TestCase):
    '''Time parse unit tests'''

    def test_timeparse(self):
        '''Check that timeparse() converts time spans properly'''
        # (text, expected-seconds) pairs covering bare numbers, unit words,
        # clock notation, fractions, signs, and combined spans.
        cases = [
            ('1', 1),
            ('1s', 1),
            ('1 sec', 1),
            ('1 second', 1),
            ('1 seconds', 1),
            ('1:01', 61),
            ('1 day', 24 * 60 * 60),
            ('1 hour', 60 * 60),
            ('1 min', 60),
            ('0.5', 0.5),
            ('-1', -1),
            (':22', 22),
            ('1 minute, 24 secs', 84),
            ('1.2 minutes', 72),
            ('1.2 seconds', 1.2),
            ('- 1 minute', -60),
            ('+ 1 minute', 60),
        ]
        for text, expected in cases:
            self.assertEqual(timeparse.timeparse(text), expected)

        # Unparsable input yields None rather than raising.
        self.assertIsNone(timeparse.timeparse('blah'))


if __name__ == '__main__':
    unittest.main()

87
test/test-transport_id.py Executable file
View file

@ -0,0 +1,87 @@
#!/usr/bin/python3
import unittest
from staslib import trid
class Test(unittest.TestCase):
    '''Unit test for class TRID'''

    # Reference values used to build the transport ID fixtures below.
    TRANSPORT = 'tcp'
    TRADDR = '10.10.10.10'
    OTHER_TRADDR = '1.1.1.1'
    SUBSYSNQN = 'nqn.1988-11.com.dell:SFSS:2:20220208134025e8'
    TRSVCID = '8009'
    HOST_TRADDR = '1.2.3.4'
    HOST_IFACE = 'wlp0s20f3'

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Two TIDs differing only in traddr, for equality/inequality tests.
        self.cid = {
            'transport': Test.TRANSPORT,
            'traddr': Test.TRADDR,
            'subsysnqn': Test.SUBSYSNQN,
            'trsvcid': Test.TRSVCID,
            'host-traddr': Test.HOST_TRADDR,
            'host-iface': Test.HOST_IFACE,
        }
        self.other_cid = {
            'transport': Test.TRANSPORT,
            'traddr': Test.OTHER_TRADDR,
            'subsysnqn': Test.SUBSYSNQN,
            'trsvcid': Test.TRSVCID,
            'host-traddr': Test.HOST_TRADDR,
            'host-iface': Test.HOST_IFACE,
        }
        self.tid = trid.TID(self.cid)
        self.other_tid = trid.TID(self.other_cid)

    def test_hash(self):
        '''Check that a hash exists'''
        self.assertIsInstance(self.tid._hash, int)

    def test_transport(self):
        '''Check that transport is set'''
        self.assertEqual(self.tid.transport, Test.TRANSPORT)

    def test_traddr(self):
        '''Check that traddr is set'''
        self.assertEqual(self.tid.traddr, Test.TRADDR)

    def test_trsvcid(self):
        '''Check that trsvcid is set'''
        self.assertEqual(self.tid.trsvcid, Test.TRSVCID)

    def test_host_traddr(self):
        '''Check that host_traddr is set'''
        self.assertEqual(self.tid.host_traddr, Test.HOST_TRADDR)

    def test_host_iface(self):
        '''Check that host_iface is set'''
        self.assertEqual(self.tid.host_iface, Test.HOST_IFACE)

    def test_subsysnqn(self):
        '''Check that subsysnqn is set'''
        self.assertEqual(self.tid.subsysnqn, Test.SUBSYSNQN)

    def test_as_dict(self):
        '''Check that a TRID can be converted back to the original Dict it was created with'''
        self.assertDictEqual(self.tid.as_dict(), self.cid)

    def test_str(self):
        '''Check that a TRID can be represented as a string'''
        self.assertTrue(str(self.tid).startswith(f'({Test.TRANSPORT},'))

    def test_eq(self):
        '''Check that two TRID objects can be tested for equality'''
        self.assertEqual(self.tid, trid.TID(self.cid))
        # Comparing to a non-TID object is False, not an error.
        self.assertFalse(self.tid == 'blah')

    def test_ne(self):
        '''Check that two TID objects can be tested for non-equality'''
        self.assertNotEqual(self.tid, self.other_tid)
        self.assertNotEqual(self.tid, 'hello')


if __name__ == '__main__':
    unittest.main()

41
test/test-udev.py Executable file
View file

@ -0,0 +1,41 @@
#!/usr/bin/python3
import unittest
from staslib import udev
class Test(unittest.TestCase):
    '''Unit tests for class Udev.

    Uses /dev/null as a stand-in device node so the tests do not require
    any real NVMe hardware.
    '''

    # NOTE: the original redundant __init__ (which only forwarded to
    # super().__init__) has been removed; it added nothing.

    @classmethod
    def tearDownClass(cls):
        '''Release resources'''
        # The udev module holds a global monitor/context; shut it down so
        # the test process exits cleanly.
        udev.shutdown()

    def test_get_device(self):
        '''get_nvme_device() resolves a device name to its device node'''
        dev = udev.UDEV.get_nvme_device('null')
        self.assertEqual(dev.device_node, '/dev/null')

    def test_get_bad_device(self):
        '''An unknown device name yields None'''
        self.assertIsNone(udev.UDEV.get_nvme_device('bozo'))

    def test_get_key_from_attr(self):
        '''get_key_from_attr() extracts "KEY=value" entries from a sysfs attribute'''
        device = udev.UDEV.get_nvme_device('null')

        # The trailing '=' on the key is optional.
        devname = udev.UDEV.get_key_from_attr(device, 'uevent', 'DEVNAME=', '\n')
        self.assertEqual(devname, 'null')

        devname = udev.UDEV.get_key_from_attr(device, 'uevent', 'DEVNAME', '\n')
        self.assertEqual(devname, 'null')

        devmode = udev.UDEV.get_key_from_attr(device, 'uevent', 'DEVMODE', '\n')
        self.assertEqual(devmode, '0666')

        # A nonexistent attribute yields the empty string, not an error.
        bogus = udev.UDEV.get_key_from_attr(device, 'bogus', 'BOGUS', '\n')
        self.assertEqual(bogus, '')


if __name__ == '__main__':
    unittest.main()

46
test/test-version.py Executable file
View file

@ -0,0 +1,46 @@
#!/usr/bin/python3
import unittest
from staslib.version import KernelVersion
class VersionUnitTests(unittest.TestCase):
    '''Unit tests for class KernelVersion'''

    # Shared fixture; comparisons below are made against plain strings.
    version = KernelVersion('5.8.0-63-generic')

    def test_str(self):
        '''str() of a KernelVersion is a string'''
        self.assertIsInstance(str(self.version), str)

    def test_repr(self):
        '''repr() of a KernelVersion is a string'''
        self.assertIsInstance(repr(self.version), str)

    def test_eq(self):
        '''Test equality'''
        # The '-generic' suffix is not significant; the '-63' part is.
        self.assertEqual(self.version, '5.8.0-63')
        self.assertNotEqual(self.version, '5.8.0')

    def test_lt(self):
        '''Test lower than'''
        self.assertTrue(self.version < '5.9')
        self.assertFalse(self.version < '5.7')

    def test_le(self):
        '''Test lower equal'''
        self.assertTrue(self.version <= '5.8.0-63')
        self.assertTrue(self.version <= '5.8.1')
        self.assertFalse(self.version <= '5.7')

    def test_gt(self):
        '''Test greater than'''
        self.assertTrue(self.version > '5.8')
        self.assertFalse(self.version > '5.9')

    def test_ge(self):
        '''Test greater equal'''
        self.assertTrue(self.version >= '5.8.0-63')
        self.assertTrue(self.version >= '5.7.0')
        self.assertFalse(self.version >= '5.9')


if __name__ == '__main__':
    unittest.main()

144
test/vermin-tools.conf Normal file
View file

@ -0,0 +1,144 @@
[vermin]
### Quiet mode ###
# It only prints the final versions verdict.
#
#quiet = no
### Verbosity ###
# Verbosity level 1 to 4. -v, -vv, -vvv, and -vvvv shows increasingly more information. Turned off
# at level 0.
#
#verbose = 0
verbose = 3
### Dump AST node visits ###
# Only for debugging.
#
#print_visits = no
### Matching target versions ###
# Target version that files must abide by. Can be specified once or twice.
# A '-' can be appended to match target version or smaller, like '3.5-'.
# If not met Vermin will exit with code 1.
# Note that the amount of target versions must match the amount of minimum required versions
# detected.
#
# Examples:
#targets = 2.6-
#targets = 2.3
# 3,4
#targets = 2,7
# 3,9-
targets = 3.8
### Concurrent processing ###
# Use N concurrent processes to detect and analyze files. Defaults to 0, meaning all cores
# available.
#
#processes = 0
### Ignore incompatible versions and warnings ###
# However, if no compatible versions are found then incompatible versions will be shown in the end
# to not have an absence of results.
#
#ignore_incomp = no
### Lax mode ###
# It ignores conditionals (if, ternary, for, async for, while, with, try, bool op) on AST traversal,
# which can be useful when minimum versions are detected in conditionals that it is known does not
# affect the results.
#
# Note: It is better to use excludes or `# novermin`/`# novm` in the source code instead.
#
#lax = no
### Hidden analysis ###
# Analyze 'hidden' files and folders starting with '.' (ignored by default when not specified
# directly).
#
#analyze_hidden = no
### Tips ###
# Possibly show helpful tips at the end, like those relating to backports or lax mode.
#
#show_tips = yes
show_tips = no
### Pessimistic mode ###
# Syntax errors are interpreted as the major Python version in use being incompatible.
#
#pessimistic = no
### Exclusions ###
# Exclude full names, like 'email.parser.FeedParser', from analysis. Useful to ignore conditional
# logic that can trigger incompatible results. It's more fine grained than lax mode.
#
# Exclude 'foo.bar.baz' module/member: foo.bar.baz
# Exclude 'foo' kwarg: somemodule.func(foo)
# Exclude 'bar' codecs error handler: ceh=bar
# Exclude 'baz' codecs encoding: ce=baz
#
# Example exclusions:
#exclusions =
# email.parser.FeedParser
# argparse.ArgumentParser(allow_abbrev)
exclusions =
importlib.resources
importlib.resources.files
importlib_resources
importlib_resources.files
### Backports ###
# Some features are sometimes backported into packages, in repositories such as PyPi, that are
# widely used but aren't in the standard language. If such a backport is specified as being used,
# the results will reflect that instead.
#
# Get full list via `--help`.
#
# Example backports:
#backports =
# typing
# argparse
### Features ###
# Some features are disabled by default due to being unstable but can be enabled explicitly.
#
# Get full list via `--help`.
#
# Example features:
#features =
# fstring-self-doc
### Format ###
# Format to show results and output in.
#
# Get full list via `--help`.
#
#format = default
### Annotations evaluation ###
# Instructs parser that annotations will be manually evaluated in code, which changes minimum
# versions in certain cases. Otherwise, function and variable annotations are not evaluated at
# definition time. Apply this argument if code uses `typing.get_type_hints` or
# `eval(obj.__annotations__)` or otherwise forces evaluation of annotations.
#
#eval_annotations = no
### Violations ###
#
#only_show_violations = no
only_show_violations = yes
### Parse comments ###
# Whether or not to parse comments, searching for "# novm" and "# novermin" to exclude analysis of
# specific lines. If these comments aren't used in a particular code base, not parsing them can
# sometimes yield a speedup of 30-40%+.
#
#parse_comments = yes
parse_comments = no
### Scan symlink folders ###
# Scan symlinks to folders to include in analysis. Symlinks to non-folders or top-level folders will
# always be scanned.
#
#scan_symlink_folders = no

144
test/vermin.conf Normal file
View file

@ -0,0 +1,144 @@
[vermin]
### Quiet mode ###
# It only prints the final versions verdict.
#
#quiet = no
### Verbosity ###
# Verbosity level 1 to 4. -v, -vv, -vvv, and -vvvv shows increasingly more information. Turned off
# at level 0.
#
#verbose = 0
verbose = 3
### Dump AST node visits ###
# Only for debugging.
#
#print_visits = no
### Matching target versions ###
# Target version that files must abide by. Can be specified once or twice.
# A '-' can be appended to match target version or smaller, like '3.5-'.
# If not met Vermin will exit with code 1.
# Note that the amount of target versions must match the amount of minimum required versions
# detected.
#
# Examples:
#targets = 2.6-
#targets = 2.3
# 3,4
#targets = 2,7
# 3,9-
targets = 3.6
### Concurrent processing ###
# Use N concurrent processes to detect and analyze files. Defaults to 0, meaning all cores
# available.
#
#processes = 0
### Ignore incompatible versions and warnings ###
# However, if no compatible versions are found then incompatible versions will be shown in the end
# to not have an absence of results.
#
#ignore_incomp = no
### Lax mode ###
# It ignores conditionals (if, ternary, for, async for, while, with, try, bool op) on AST traversal,
# which can be useful when minimum versions are detected in conditionals that it is known does not
# affect the results.
#
# Note: It is better to use excludes or `# novermin`/`# novm` in the source code instead.
#
#lax = no
### Hidden analysis ###
# Analyze 'hidden' files and folders starting with '.' (ignored by default when not specified
# directly).
#
#analyze_hidden = no
### Tips ###
# Possibly show helpful tips at the end, like those relating to backports or lax mode.
#
#show_tips = yes
show_tips = no
### Pessimistic mode ###
# Syntax errors are interpreted as the major Python version in use being incompatible.
#
#pessimistic = no
### Exclusions ###
# Exclude full names, like 'email.parser.FeedParser', from analysis. Useful to ignore conditional
# logic that can trigger incompatible results. It's more fine grained than lax mode.
#
# Exclude 'foo.bar.baz' module/member: foo.bar.baz
# Exclude 'foo' kwarg: somemodule.func(foo)
# Exclude 'bar' codecs error handler: ceh=bar
# Exclude 'baz' codecs encoding: ce=baz
#
# Example exclusions:
#exclusions =
# email.parser.FeedParser
# argparse.ArgumentParser(allow_abbrev)
exclusions =
importlib.resources
importlib.resources.files
importlib_resources
importlib_resources.files
### Backports ###
# Some features are sometimes backported into packages, in repositories such as PyPi, that are
# widely used but aren't in the standard language. If such a backport is specified as being used,
# the results will reflect that instead.
#
# Get full list via `--help`.
#
# Example backports:
#backports =
# typing
# argparse
### Features ###
# Some features are disabled by default due to being unstable but can be enabled explicitly.
#
# Get full list via `--help`.
#
# Example features:
#features =
# fstring-self-doc
### Format ###
# Format to show results and output in.
#
# Get full list via `--help`.
#
#format = default
### Annotations evaluation ###
# Instructs parser that annotations will be manually evaluated in code, which changes minimum
# versions in certain cases. Otherwise, function and variable annotations are not evaluated at
# definition time. Apply this argument if code uses `typing.get_type_hints` or
# `eval(obj.__annotations__)` or otherwise forces evaluation of annotations.
#
#eval_annotations = no
### Violations ###
#
#only_show_violations = no
only_show_violations = yes
### Parse comments ###
# Whether or not to parse comments, searching for "# novm" and "# novermin" to exclude analysis of
# specific lines. If these comments aren't used in a particular code base, not parsing them can
# sometimes yield a speedup of 30-40%+.
#
#parse_comments = yes
parse_comments = no
### Scan symlink folders ###
# Scan symlinks to folders to include in analysis. Symlinks to non-folders or top-level folders will
# always be scanned.
#
#scan_symlink_folders = no