Merge "Fix flake8 H404 errors"
commit 8c05cd31ef
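Every hunk below applies the same mechanical fix: the hacking check H404 ("multi line docstring should start with a summary") flags multi-line docstrings whose opening line is a bare triple quote, so each docstring's summary is moved up onto the opening line (and, where H405 also applies, followed by a blank line). A minimal before/after sketch, using a hypothetical helper name:

    # Before: H404 fires because the first docstring line carries no summary.
    def example_helper():
        """
        Do one useful thing.

        Longer explanation of the helper.
        """

    # After: summary on the opening line, blank line before the details.
    def example_helper():
        """Do one useful thing.

        Longer explanation of the helper.
        """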
@@ -30,7 +30,8 @@ LOG = logging.getLogger(__name__)

 class RouterFipRateLimitMaps(qos_base.RateLimitMaps):
     def __init__(self):
-        """
+        """Initialize RouterFipRateLimitMaps
+
         The router_floating_ips will be:
             router_floating_ips = {
                 router_id_1: set(fip1, fip2),
@@ -760,7 +760,8 @@ class Dnsmasq(DhcpLocalProcess):
         return leases

     def _read_leases_file_leases(self, filename, ip_version=None):
-        """
+        """Read dnsmasq dhcp leases file
+
         Read information from leases file, which is needed to pass to
         dhcp_release6 command line utility if some of these leases are not
         needed anymore
@@ -257,8 +257,7 @@ def remove_conf_files(cfg_root, uuid):


 def get_root_helper_child_pid(pid, expected_cmd, run_as_root=False):
-    """
-    Get the first non root_helper child pid in the process hierarchy.
+    """Get the first non root_helper child pid in the process hierarchy.

     If root helper was used, two or more processes would be created:

@@ -31,8 +31,8 @@ Connection = moves.moved_class(_connection.Connection, 'Connection', __name__)


 def configure_ssl_conn():
-    """
-    Configures required settings for an SSL based OVSDB client connection
+    """Configures required settings for an SSL based OVSDB client connection
+
     :return: None
     """

@@ -48,7 +48,8 @@ def setup_conf():


 def get_bridge_deletable_ports(br):
-    """
+    """Get bridge deletable ports
+
     Return a list of OVS Bridge ports, excluding the ports who should not be
     cleaned. such ports are tagged with the 'skip_cleanup' key in external_ids.
     """
@@ -295,7 +295,8 @@ def _hex_format(port, mask=0):


 def _gen_rules_port_min(port_min, top_bit):
-    """
+    """Generate rules for port_min
+
     Encode a port range range(port_min, (port_min | (top_bit - 1)) + 1) into
     a set of bit value/masks.
     """
@@ -376,7 +377,8 @@ def _gen_rules_port_min(port_min, top_bit):


 def _gen_rules_port_max(port_max, top_bit):
-    """
+    """Generate rules for port_max
+
     Encode a port range range(port_max & ~(top_bit - 1), port_max + 1) into
     a set of bit value/masks.
     """
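Both functions above encode a port range into masked matches. The neutron helpers split the work around a top bit; the self-contained sketch below (a hypothetical port_range_to_masks, not the neutron implementation) illustrates the same value/mask idea for an arbitrary inclusive range:

    def port_range_to_masks(lo, hi):
        """Encode the inclusive port range [lo, hi] as (value, mask) pairs.

        Each pair matches every 16-bit port p with p & mask == value,
        which is the kind of masked match OpenFlow rules can express.
        """
        pairs = []
        while lo <= hi:
            # Largest power-of-two block starting at lo, limited by the
            # alignment of lo and by how much of the range remains.
            align = lo & -lo if lo else 1 << 16
            size = 1
            while size * 2 <= align and lo + size * 2 - 1 <= hi:
                size *= 2
            pairs.append((lo, 0xffff & ~(size - 1)))
            lo += size
        return pairs

    # e.g. port_range_to_masks(1000, 1007) == [(1000, 0xfff8)]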
@@ -624,8 +626,7 @@ def transaction_guard(f):


 def wait_until_true(predicate, timeout=60, sleep=1, exception=None):
-    """
-    Wait until callable predicate is evaluated as True
+    """Wait until callable predicate is evaluated as True

     :param predicate: Callable deciding whether waiting should continue.
     Best practice is to instantiate predicate with functools.partial()
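wait_until_true re-evaluates the predicate every `sleep` seconds until it returns True or `timeout` expires (raising the given exception, when one is passed, on timeout). A small usage sketch, assuming the function above is importable as neutron.common.utils.wait_until_true:

    import functools

    from neutron.common import utils

    def _port_active(statuses, port_id):
        return statuses.get(port_id) == 'ACTIVE'

    statuses = {'port-1': 'ACTIVE'}
    # Re-check the partial once a second, give up after 10 seconds.
    utils.wait_until_true(functools.partial(_port_active, statuses, 'port-1'),
                          timeout=10, sleep=1)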
@@ -662,7 +663,8 @@ class _AuthenticBase(object):


 class AuthenticEUI(_AuthenticBase, netaddr.EUI):
-    '''
+    '''AuthenticEUI class
+
     This class retains the format of the MAC address string passed during
     initialization.

@@ -672,7 +674,8 @@ class AuthenticEUI(_AuthenticBase, netaddr.EUI):


 class AuthenticIPNetwork(_AuthenticBase, netaddr.IPNetwork):
-    '''
+    '''AuthenticIPNetwork class
+
     This class retains the format of the IP network string passed during
     initialization.

|
@ -287,8 +287,7 @@ class DVRResourceOperationHandler(object):
|
||||
def _delete_dvr_internal_ports(self, event, trigger, resource,
|
||||
context, router, network_id,
|
||||
new_network_id, **kwargs):
|
||||
"""
|
||||
GW port AFTER_DELETE event handler to cleanup DVR ports.
|
||||
"""GW port AFTER_DELETE event handler to cleanup DVR ports.
|
||||
|
||||
This event is emitted when a router gateway port is being deleted,
|
||||
so go ahead and delete the csnat ports and the floatingip
|
||||
|
@@ -48,8 +48,7 @@ def get_inspector():


 def get_tables():
-    """
-    Returns hardcoded list of tables which have ``tenant_id`` column.
+    """Returns hardcoded list of tables which have ``tenant_id`` column.

     DB head can be changed. To prevent possible problems, when models will be
     updated, return hardcoded list of tables, up-to-date for this day.
@@ -46,8 +46,7 @@ def upgrade():


 def contract_creation_exceptions():
-    """
-    Return create exceptions.
+    """Return create exceptions.

     These elements depend on the networksegments table which was renamed
     in the contract branch.
@@ -87,8 +87,7 @@ def upgrade():


 def contract_creation_exceptions():
-    """
-    Return create exceptions.
+    """Return create exceptions.

     These elements depend on the networksegments table which are added
     in the contract branch.
@@ -60,7 +60,8 @@ def upgrade():


 def expand_drop_exceptions():
-    """
+    """Drop and extend the ML2 port bindings key contraint
+
     Drop the existing primary key constraint and then extend it to include
     host as the primary key to support multiple bindings for the same port.
     This is needed to use drop in expand migration to pass test_branches.
@@ -58,7 +58,8 @@ def upgrade():


 def expand_drop_exceptions():
-    """
+    """Drop and replace the QoS policy foreign key contraint
+
     Drop the existing QoS policy foreign key uniq constraint and then replace
     it with new unique constraint for pair (policy_id, direction).

@@ -494,7 +494,8 @@ def _get_head_file_path(config):


 def _get_heads_file_path(config):
-    '''
+    '''Get heads file path
+
     Return the path of the file that was once used to maintain the list of
     latest heads.
     '''
@@ -504,18 +505,14 @@ def _get_heads_file_path(config):


 def _get_contract_head_file_path(config):
-    '''
-    Return the path of the file that is used to maintain contract head
-    '''
+    '''Return the path of the file that is used to maintain contract head'''
     return os.path.join(
         _get_root_versions_dir(config),
         CONTRACT_HEAD_FILENAME)


 def _get_expand_head_file_path(config):
-    '''
-    Return the path of the file that is used to maintain expand head
-    '''
+    '''Return the path of the file that is used to maintain expand head'''
     return os.path.join(
         _get_root_versions_dir(config),
         EXPAND_HEAD_FILENAME)
@@ -127,7 +127,8 @@ class AnySubnetRequest(SubnetRequest):

     def __init__(self, tenant_id, subnet_id, version, prefixlen,
                  gateway_ip=None, allocation_pools=None):
-        """
+        """Initialize AnySubnetRequest
+
         :param version: Either constants.IPv4 or constants.IPv6
         :param prefixlen: The prefix len requested. Must be within the min and
             max allowed.
@@ -158,7 +159,8 @@ class SpecificSubnetRequest(SubnetRequest):
     """
     def __init__(self, tenant_id, subnet_id, subnet_cidr,
                  gateway_ip=None, allocation_pools=None):
-        """
+        """Initialize SpecificSubnetRequest
+
         :param subnet: The subnet requested. Can be IPv4 or IPv6. However,
             when IPAM tries to fulfill this request, the IP version must match
             the version of the address scope being used.
@@ -190,7 +192,8 @@ class AddressRequest(object):
 class SpecificAddressRequest(AddressRequest):
     """For requesting a specified address from IPAM"""
     def __init__(self, address):
-        """
+        """Initialize SpecificAddressRequest
+
         :param address: The address being requested
         :type address: A netaddr.IPAddress or convertible to one.
         """
@@ -226,7 +229,8 @@ class AutomaticAddressRequest(SpecificAddressRequest):
     _address_generators = {EUI64: _generate_eui64_address}

     def __init__(self, address_type=EUI64, **kwargs):
-        """
+        """Initialize AutomaticAddressRequest
+
         This constructor builds an automatic IP address. Parameter needed for
         generating it can be passed as optional keyword arguments.

@@ -256,7 +260,8 @@ class AddressRequestFactory(object):

     @classmethod
     def get_request(cls, context, port, ip_dict):
-        """
+        """Initialize AddressRequestFactory
+
         :param context: context (not used here, but can be used in sub-classes)
         :param port: port dict (not used here, but can be used in sub-classes)
         :param ip_dict: dict that can contain 'ip_address', 'mac' and
@@ -65,12 +65,14 @@ def register_filter_hook_on_model(model, filter_name):


 class Pager(object):
-    '''
+    '''Pager class
+
     This class represents a pager object. It is consumed by get_objects to
     specify sorting and pagination criteria.
     '''
     def __init__(self, sorts=None, limit=None, page_reverse=None, marker=None):
-        '''
+        '''Initialize
+
         :param sorts: A list of (key, direction) tuples.
                       direction: True == ASC, False == DESC
         :param limit: maximum number of items to return
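Pager only carries sorting/pagination criteria; get_objects interprets them. A short usage sketch based on the signature shown in this hunk (the field names are illustrative):

    # Sort by name ascending, then id descending, return at most 10 rows.
    pager = Pager(sorts=[('name', True), ('id', False)], limit=10)
    # A NeutronDbObject subclass would then consume it roughly like:
    #     ports = Port.get_objects(context, _pager=pager)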
@@ -447,7 +449,8 @@ class NeutronDbObject(NeutronObject):

     @classmethod
     def modify_fields_to_db(cls, fields):
-        """
+        """Modify the fields before data is inserted into DB.
+
         This method enables to modify the fields and its
         content before data is inserted into DB.

@@ -537,7 +540,8 @@ class NeutronDbObject(NeutronObject):

     @classmethod
     def get_object(cls, context, **kwargs):
-        """
+        """Fetch a single object
+
         Return the first result of given context or None if the result doesn't
         contain any row. Next, convert it to a versioned object.

@@ -561,7 +565,8 @@ class NeutronDbObject(NeutronObject):
     @classmethod
     def get_objects(cls, context, _pager=None, validate_filters=True,
                     **kwargs):
-        """
+        """Fetch a list of objects
+
         Fetch all results from DB and convert them to versioned objects.

         :param context:
@@ -581,8 +586,7 @@ class NeutronDbObject(NeutronObject):

     @classmethod
     def update_object(cls, context, values, validate_filters=True, **kwargs):
-        """
-        Update an object that match filtering criteria from DB.
+        """Update an object that match filtering criteria from DB.

         :param context:
         :param values: multiple keys to update in matching objects
@@ -610,8 +614,7 @@ class NeutronDbObject(NeutronObject):

     @classmethod
     def update_objects(cls, context, values, validate_filters=True, **kwargs):
-        """
-        Update objects that match filtering criteria from DB.
+        """Update objects that match filtering criteria from DB.

         :param context:
         :param values: multiple keys to update in matching objects
@@ -636,8 +639,7 @@ class NeutronDbObject(NeutronObject):

     @classmethod
     def delete_objects(cls, context, validate_filters=True, **kwargs):
-        """
-        Delete objects that match filtering criteria from DB.
+        """Delete objects that match filtering criteria from DB.

         :param context:
         :param validate_filters: Raises an error in case of passing an unknown
@@ -701,7 +703,8 @@ class NeutronDbObject(NeutronObject):
         return fields

     def load_synthetic_db_fields(self, db_obj=None):
-        """
+        """Load synthetic DB fields
+
         Load the synthetic fields that are stored in a different table from the
         main object.

@@ -816,8 +819,7 @@ class NeutronDbObject(NeutronObject):

     @classmethod
     def count(cls, context, validate_filters=True, **kwargs):
-        """
-        Count the number of objects matching filtering criteria.
+        """Count the number of objects matching filtering criteria.

         :param context:
         :param validate_filters: Raises an error in case of passing an unknown
@@ -833,8 +835,7 @@ class NeutronDbObject(NeutronObject):

     @classmethod
     def objects_exist(cls, context, validate_filters=True, **kwargs):
-        """
-        Check if objects are present in DB.
+        """Check if objects are present in DB.

         :param context:
         :param validate_filters: Raises an error in case of passing an unknown
@@ -110,9 +110,7 @@ class OVSPort(object):

 @profiler.trace_cls("ovs_dvr_agent")
 class OVSDVRNeutronAgent(object):
-    '''
-    Implements OVS-based DVR(Distributed Virtual Router), for overlay networks.
-    '''
+    '''Implements OVS-based DVR (Distributed Virtual Router) agent'''
     # history
     # 1.0 Initial version

@@ -1744,7 +1744,8 @@ class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2,

     def update_port_status(self, context, port_id, status, host=None,
                            network=None):
-        """
+        """Update port status
+
         Returns port_id (non-truncated uuid) if the port exists.
         Otherwise returns None.
         'network' is deprecated and has no effect
@@ -101,7 +101,8 @@ class ResourceValidateRequest(object):
         return self.validate_methods

     def validate_request(self, context, log_data):
-        """
+        """Validate request
+
         This method will get validated method according to resource_type. An
         InvalidLogResourceType exception will be raised if there is no logging
         driver that supports resource_type as logging resource. In addition,
@@ -228,7 +228,8 @@ class SubPortsValidator(object):
         return subport_mtus

     def _get_port_mtu(self, context, port_id):
-        """
+        """Get port MTU
+
         Return MTU for the network where the given port belongs to.
         If the network or port cannot be obtained, or if MTU is not defined,
         returns None.
@@ -119,9 +119,7 @@ def get_rootwrap_daemon_cmd():

 class AttributeDict(dict):

-    """
-    Provide attribute access (dict.key) to dictionary values.
-    """
+    """Provide attribute access (dict.key) to dictionary values."""

     def __getattr__(self, name):
         """Allow attribute access for all keys in the dict."""
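For reference, the whole AttributeDict helper is only a few lines; a sketch consistent with the two docstrings above (the exact error message is assumed):

    class AttributeDict(dict):

        """Provide attribute access (dict.key) to dictionary values."""

        def __getattr__(self, name):
            """Allow attribute access for all keys in the dict."""
            if name in self:
                return self[name]
            raise AttributeError("Unknown attribute '%s'." % name)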
@@ -423,7 +423,8 @@ class NetcatTester(object):
     def __init__(self, client_namespace, server_namespace, address,
                  dst_port, protocol, server_address=None, src_port=None):

-        """
+        """Initialize NetcatTester
+
         Tool for testing connectivity on transport layer using netcat
         executable.

@@ -324,7 +324,8 @@ class Environment(fixtures.Fixture):
     """

     def __init__(self, env_desc, hosts_desc):
-        """
+        """Initialize Environment
+
         :param env_desc: An EnvironmentDescription instance.
         :param hosts_desc: A list of HostDescription instances.
         """
@@ -61,9 +61,7 @@ class AdminDirFixture(fixtures.Fixture):


 class SleepyProcessFixture(fixtures.Fixture):
-    """
-    Process fixture that performs time.sleep for the given number of seconds.
-    """
+    """Process fixture to perform time.sleep for a given number of seconds."""

     def __init__(self, timeout=60):
         super(SleepyProcessFixture, self).__init__()
@@ -27,7 +27,8 @@ TEST_PATH = os.path.dirname(os.path.abspath(__file__))


 class APIPolicyTestCase(base.BaseTestCase):
-    """
+    """Base class for API policy tests
+
     Tests for REST API policy checks. Ideally this would be done against an
     environment with an instantiated plugin, but there appears to be problems
     with instantiating a plugin against an sqlite environment and as yet, there
@@ -59,7 +60,8 @@ class APIPolicyTestCase(base.BaseTestCase):
         return policy.check(context, 'get_network', self._network_definition())

     def test_premature_loading(self):
-        """
+        """Test premature policy loading
+
         Verifies that loading policies by way of admin context before
         populating extensions and extending the resource map results in
         networks with router:external is true being invisible to regular
@@ -74,7 +76,8 @@ class APIPolicyTestCase(base.BaseTestCase):
         self.assertFalse(self._check_external_router_policy(tenant_context))

     def test_proper_load_order(self):
-        """
+        """Test proper policy load order
+
         Verifies that loading policies by way of admin context after
         populating extensions and extending the resource map results in
         networks with router:external are visible to regular tenants.
@@ -36,8 +36,7 @@ class BaseLoggingTestCase(base.BaseTestCase):


 class BaseSudoTestCase(BaseLoggingTestCase):
-    """
-    Base class for tests requiring invocation of commands via a root helper.
+    """Base class for tests requiring invocation of commands via a root helper.

     This class skips (during setUp) its tests unless sudo is enabled, ie:
     OS_SUDO_TESTING is set to '1' or 'True' in the test execution environment.
@@ -67,8 +67,7 @@ OPTS = [


 class ProcessSpawn(daemon.Daemon):
-    """
-    This class is part of the functional test of the netns_cleanup module.
+    """This class is part of the functional test of the netns_cleanup module.

     It allows spawning processes that listen on random ports either on
     tcp(6), udp(6) or unix sockets. Also it allows handling or ignoring
@@ -87,8 +87,7 @@ class NetnsCleanupTest(base.BaseSudoTestCase):

     @staticmethod
     def _launch_processes(namespaces):
-        """
-        Launch processes in the specified namespaces.
+        """Launch processes in the specified namespaces.

         This function will spawn processes inside the given namespaces:
         - 6 processes listening on tcp ports (parent + 5 children)
@@ -36,9 +36,7 @@ load_tests = testlib_api.module_load_tests


 class IpamTestCase(testlib_api.SqlTestCase):
-    """
-    Base class for tests that aim to test ip allocation.
-    """
+    """Base class for tests that aim to test ip allocation."""
     def setUp(self):
         super(IpamTestCase, self).setUp()
         cfg.CONF.set_override('notify_nova_on_port_status_changes', False)
@@ -25,7 +25,8 @@ __all__ = ['FunctionalTest']


 class FunctionalTest(unittest2.TestCase):
-    """
+    """Pecan wsgi functional test base class
+
     Used for functional tests where you need to test your
     literal application and its integration with the framework.
     """
@@ -823,8 +823,7 @@ class TestBasicRouterOperations(BasicRouterOperationsFramework):

     def _test_ext_gw_updated_dvr_edge_router(self, host_match,
                                              snat_hosted_before=True):
-        """
-        Helper to test external gw update for edge router on dvr_snat agent
+        """Helper to test external gw update for edge router on dvr_snat agent

         :param host_match: True if new gw host should be the same as agent host
         :param snat_hosted_before: True if agent has already been hosting
@@ -780,9 +780,7 @@ class NeutronDbPluginV2TestCase(testlib_api.WebTestCase):
         self.assertEqual(expected_res, [n['id'] for n in item_res])

     def _compare_resource(self, observed_res, expected_res, res_name):
-        '''
-        Compare the observed and expected resources (ie compare subnets)
-        '''
+        '''Compare the observed and expected resources (ie compare subnets)'''
         for k in expected_res:
             self.assertIn(k, observed_res[res_name])
             if isinstance(expected_res[k], list):
@@ -282,8 +282,8 @@ class FakeNeutronObjectUniqueKey(base.NeutronDbObject):

 @base.NeutronObjectRegistry.register_if(False)
 class FakeNeutronObjectRenamedField(base.NeutronDbObject):
-    """
-    Testing renaming the parameter from DB to NeutronDbObject
+    """Testing renaming the parameter from DB to NeutronDbObject
+
     For tests:
         - db fields: id, field_db, field2
         - object: id, field_ovo, field2
@@ -2282,7 +2282,8 @@ class TestOvsNeutronAgent(object):
         self.assertEqual([], ofport_changed_ports)

     def test__setup_tunnel_port_while_new_mapping_is_added(self):
-        """
+        """Test setup_tunnel_port while adding a new mapping
+
         Test that _setup_tunnel_port doesn't fail if new vlan mapping is
         added in a different coroutine while iterating over existing mappings.
         See bug 1449944 for more info.
tox.ini
@@ -155,7 +155,6 @@ commands = sphinx-build -W -b linkcheck doc/source doc/build/linkcheck
 # E125 continuation line does not distinguish itself from next logical line
 # E126 continuation line over-indented for hanging indent
 # E128 continuation line under-indented for visual indent
-# H404 multi line docstring should start with a summary
 # H405 multi line docstring summary not separated with an empty line
 # N530 direct neutron imports not allowed
 # TODO(ihrachys) figure out what to do with N534
@@ -163,7 +162,7 @@ commands = sphinx-build -W -b linkcheck doc/source doc/build/linkcheck
 # TODO(amotoki) check the following new rules should be fixed or ignored
 # E731 do not assign a lambda expression, use a def
 # W504 line break after binary operator
-ignore = E125,E126,E128,E731,H404,H405,N530,N534,W504
+ignore = E125,E126,E128,E731,H405,N530,N534,W504
 # H106: Don't put vim configuration in source files
 # H203: Use assertIs(Not)None to check for None
 # H204: Use assert(Not)Equal to check for equality
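With H404 dropped from the ignore list, the pep8 job enforces the rule again. To reproduce the check locally, something like the following should work (the tox environment name follows the usual OpenStack layout; with hacking installed, flake8 can also select just this rule):

    tox -e pep8
    # or, narrowing to the single rule:
    flake8 --select H404 neutron/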