Zuul/Tox: Replace openstack-tox-linters job
The openstack-tox-linters job uses the openstack constraints file by
default, which sometimes causes dependency conflicts with StarlingX
requirements. Furthermore, StarlingX has its own constraints file,
which must be observed.

- Created a custom job to run the 'linters' tox env, replacing
  openstack-tox-linters.
- Replaced the openstack parent jobs with the generic 'tox' parent,
  adjusting parameters accordingly.
- Removed references to constraints files, as those are already
  defined in the respective tox.ini files.
- Removed the upper bounds on test requirements and fixed the errors
  reported by the updated tools (a short sketch of the two findings
  follows this message):
  - E721: do not compare types; for instance checks use `isinstance()`
  - E275: missing whitespace after keyword

Test Plan:
pass - Run Tox tests locally
pass - Zuul passes
pass - All Zuul logs show the correct constraints file in
       'Run Tox without tests'

Story: 2011326
Task: 51848

Change-Id: Ia45ae6aecba388502e28887d7329b0e33eb6d884
Signed-off-by: Leonardo Fagundes Luz Serrano <Leonardo.FagundesLuzSerrano@windriver.com>
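A minimal sketch of the two pycodestyle findings fixed by this change (the
variable names below are illustrative, not taken from the repository; the
real occurrences are in the diff that follows):

    result = 'expected'
    obj = {}

    # E275 (missing whitespace after keyword): 'assert' is a keyword, not a
    # function, so a space is expected before an opening parenthesis.
    assert(result == 'expected')     # flagged by E275
    assert (result == 'expected')    # accepted form, as used in this change

    # E721 (do not compare types): isinstance() is preferred because it also
    # matches subclasses of the given type.
    if type(obj) == dict:            # flagged by E721
        print("dict (type comparison)")
    if isinstance(obj, dict):        # accepted form, as used in this change
        print("dict (isinstance check)")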
.zuul.yaml | 28
@@ -6,7 +6,7 @@
       - stx-release-notes-jobs
     check:
       jobs:
-        - openstack-tox-linters
+        - nfv-tox-linters
         - nfv-tox-pep8
         - nfv-tox-py39
         - nfv-tox-pylint
@@ -14,7 +14,7 @@
         - nova-api-proxy-tox-pylint
     gate:
       jobs:
-        - openstack-tox-linters
+        - nfv-tox-linters
         - nfv-tox-pep8
         - nfv-tox-py39
         - nfv-tox-pylint
@@ -24,26 +24,34 @@
       jobs:
         - stx-nfv-upload-git-mirror

+- job:
+    name: nfv-tox-linters
+    parent: tox
+    nodeset: debian-bullseye
+    description: Run linters
+    vars:
+      tox_envlist: linters
+
 - job:
     name: nfv-tox-py39
-    parent: openstack-tox-py39
+    parent: tox
     description: Run py39 for nfv
     nodeset: debian-bullseye
     required-projects:
       - starlingx/config
       - starlingx/fault
-      - starlingx/root
       - starlingx/update
     vars:
+      tox_envlist: py39
       tox_extra_args: -c nfv/tox.ini
-      tox_constraints_file: '{{ ansible_user_dir }}/src/opendev.org/starlingx/root/build-tools/requirements/debian/upper-constraints.txt'

 - job:
     name: nfv-tox-pep8
-    parent: openstack-tox-pep8
+    parent: tox
     description: Run pep8 for nfv
     nodeset: debian-bullseye
     vars:
+      tox_envlist: pep8
       tox_extra_args: -c nfv/tox.ini

 - job:
@@ -51,12 +59,9 @@
     parent: tox
     description: Run bandit for nfv
     nodeset: debian-bullseye
-    required-projects:
-      - starlingx/root
     vars:
       tox_envlist: bandit
       tox_extra_args: -c nfv/tox.ini
-      tox_constraints_file: '{{ ansible_user_dir }}/src/opendev.org/starlingx/root/build-tools/requirements/debian/upper-constraints.txt'

 - job:
     name: nfv-tox-pylint
@@ -66,12 +71,10 @@
     required-projects:
       - starlingx/config
       - starlingx/fault
-      - starlingx/root
       - starlingx/update
     vars:
       tox_envlist: pylint
       tox_extra_args: -c nfv/tox.ini
-      tox_constraints_file: '{{ ansible_user_dir }}/src/opendev.org/starlingx/root/build-tools/requirements/debian/upper-constraints.txt'

 - job:
     name: nova-api-proxy-tox-pep8
@@ -87,12 +90,9 @@
     parent: tox
     description: Run pylint for nova-api-proxy
     nodeset: debian-bullseye
-    required-projects:
-      - starlingx/root
     vars:
       tox_envlist: pylint
       tox_extra_args: -c nova-api-proxy/tox.ini
-      tox_constraints_file: '{{ ansible_user_dir }}/src/opendev.org/starlingx/root/build-tools/requirements/debian/upper-constraints.txt'

 - job:
     name: stx-nfv-upload-git-mirror
@@ -56,7 +56,7 @@ class Result(object):
         self.ancillary_data = ancillary_data

     def __str__(self):
-        return("Result: result-data: %s ancillary-data: %s"
+        return ("Result: result-data: %s ancillary-data: %s"
                % (self.result_data, self.ancillary_data))

@@ -47,13 +47,13 @@ def unit_test(title):
 def _task_non_coroutine(arg1):
     global _test_complete
     _test_complete = True
-    assert(arg1 == 'arg1')
+    assert (arg1 == 'arg1')
     return True


 def _task_work_func(arg1, arg2):
-    assert(arg1 == 'arg1')
-    assert(arg2 == 'arg2')
+    assert (arg1 == 'arg1')
+    assert (arg2 == 'arg2')
     return "FUNCTION PASSED"

@@ -61,13 +61,13 @@ def _task_work_func(arg1, arg2):
 def _task_coroutine_callback():
     global _test_complete, _test_result
     result = (yield)
-    assert(result == "FUNCTION PASSED")
+    assert (result == "FUNCTION PASSED")
     _test_complete = True
     _test_result = True


 def _task_coroutine(future, arg1, callback):
-    assert(arg1 == 'arg1')
+    assert (arg1 == 'arg1')
     future.work(_task_work_func, 'arg1', 'arg2')
     future.result = (yield)
     if future.result.is_complete():
@@ -77,7 +77,7 @@ def _task_coroutine(future, arg1, callback):


 def _task_coroutine_with_timer(future, arg1, callback):
-    assert(arg1 == 'arg1')
+    assert (arg1 == 'arg1')
     timer_id = future.timer('timer-test', 2)
     start_ms = timers.get_monotonic_timestamp_in_ms()
     future.result = (yield)
@@ -227,7 +227,7 @@ def pCommand(pList):
             isFile = True
             procList.append(pList[i])
         else:
-            while(not isFile):
+            while (not isFile):
                 print("\nFiles containing keyword: %s" % (str(procName)))
                 csvFile = str(procName) + ".csv"
                 for root, directories, filenames in os.walk(pth):
@@ -351,7 +351,7 @@ def setFilename(graphName):
         graphName = time.strftime("%m-%d-%Y")
     if os.path.exists(str(graphName + ".html")):
         n = 1
-        while(not validName):
+        while (not validName):
             if os.path.exists(str(graphName + "(" + str(n) + ").html")):
                 n += 1
             else:
@@ -468,7 +468,7 @@ if config:
     # If only one of execution time and delta hits was specified, generate one graph.
     if procs:
         if (execTime and hits):
-            if(not oneAxis):
+            if (not oneAxis):
                 fig = tools.make_subplots(rows=2, cols=1)
                 storeGraphData(procs, dateRange, execTime, False, 1)
                 storeGraphData(procs, dateRange, False, hits, 2)
@@ -285,9 +285,9 @@ class TestNeutronDHCPRebalance(testcase.NFVTestCase):
             if (old_state != DHCP_REBALANCE_STATE.DONE) and \
                     (old_state != DHCP_REBALANCE_STATE.HOLD_OFF):
                 if _DHCPRebalance.num_dhcp_agents < 2:
-                    assert(new_state == DHCP_REBALANCE_STATE.DONE)
+                    assert (new_state == DHCP_REBALANCE_STATE.DONE)
                 else:
-                    assert(new_state ==
+                    assert (new_state ==
                            DHCP_REBALANCE_STATE.GET_NETWORKS_HOSTED_ON_AGENT)

             if ((old_state ==
@@ -376,7 +376,7 @@ class TestNeutronDHCPRebalance(testcase.NFVTestCase):
             doing_abort = False
             if (old_state != DHCP_REBALANCE_STATE.DONE) and \
                     (old_state != DHCP_REBALANCE_STATE.HOLD_OFF):
-                assert(new_state ==
+                assert (new_state ==
                        DHCP_REBALANCE_STATE.HOLD_OFF)

             if ((old_state ==
@@ -327,9 +327,9 @@ class TestNeutronRebalance2(testcase.NFVTestCase):
             if (old_state != L3_REBALANCE_STATE.DONE) and \
                     (old_state != L3_REBALANCE_STATE.HOLD_OFF):
                 if _L3Rebalance.num_l3agents < 2:
-                    assert(new_state == L3_REBALANCE_STATE.DONE)
+                    assert (new_state == L3_REBALANCE_STATE.DONE)
                 else:
-                    assert(new_state ==
+                    assert (new_state ==
                            L3_REBALANCE_STATE.GET_ROUTERS_HOSTED_ON_AGENT)

             if ((old_state ==
@@ -476,7 +476,7 @@ class TestNeutronRebalance2(testcase.NFVTestCase):
             doing_abort = False
             if (old_state != L3_REBALANCE_STATE.DONE) and \
                     (old_state != L3_REBALANCE_STATE.HOLD_OFF):
-                assert(new_state ==
+                assert (new_state ==
                        L3_REBALANCE_STATE.HOLD_OFF)

             if ((old_state ==
@@ -1985,7 +1985,7 @@ class MigrateInstancesFromHostStep(strategy.StrategyStep):
        """
        from nfv_vim import directors

-        if(self._instance_names):
+        if (self._instance_names):
            DLOG.info("Step (%s) apply for instances %s running on hosts %s." % (
                self._name,
                self._instance_names,
@@ -1,7 +1,7 @@
 # The order of packages is significant, because pip processes them in the order
 # of appearance. Changing the order has an impact on the overall integration
 # process, which may cause wedges in the gate later.
-hacking>=1.1.0,<=2.0.0 # Apache-2.0
+hacking>=1.1.0 # Apache-2.0
 bandit
 coverage>=3.6
 fixtures>=3.0.0 # Apache-2.0/BSD
@@ -93,14 +93,14 @@ class APIController(Middleware):
         self._log_message(environ)

     def _print_data(self, obj):
-        if type(obj) == dict:
+        if isinstance(obj, dict):
             for k, v in obj.items():
                 if hasattr(v, '__iter__'):
                     LOG.info("%s" % k)
                     self._print_data(v)
                 else:
                     LOG.info('%s : %s' % (k, v))
-        elif type(obj) == list:
+        elif isinstance(obj, list):
             for v in obj:
                 if hasattr(v, '__iter__'):
                     self._print_data(v)
@@ -1,8 +1,8 @@
 # The order of packages is significant, because pip processes them in the order
 # of appearance. Changing the order has an impact on the overall integration
 # process, which may cause wedges in the gate later.
-hacking>=1.1.0,<=2.0.0 # Apache-2.0
-bandit<1.6.0;python_version>="3.0"
+hacking>=1.1.0 # Apache-2.0
+bandit
 coverage>=3.6
 fixtures>=3.0.0 # Apache-2.0/BSD
 pylint
@@ -1,4 +1,4 @@
-bashate < 1.0.0
+bashate
 PyYAML >= 3.1.0
-yamllint<1.26.1;python_version>="3.0" # GPLv2
+yamllint
