mirror of https://github.com/apple/foundationdb.git
Fix several issues found by flake8. Ignore E402, which flags import statements that are not at the top of the file, since many of our files use this pattern and are not easy to fix.
.flake8
@@ -1,5 +1,6 @@
 [flake8]
-ignore = E203, E266, E501, W503, F403, F401, E711, C901, E721, W605
+ignore = E203, E266, E402, E501, W503, F403, F401, E711, C901, E721, W605
 max-line-length = 88
 max-complexity = 18
 select = B,C,E,F,W,T4,B9
+exclude = bindings/python/fdb/six.py,contrib/Implib.so/implib-gen.py,documentation/sphinx/extensions/rubydomain.py
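For context, E402 is exactly the pattern several scripts in this diff rely on: sys.path must be patched before the bindings can be imported, so the import cannot legally sit at the top of the file. A minimal sketch (the path and the fdb import mirror the hunks below and assume the bindings exist at that location):

```python
import os
import sys

# The path fix has to run before the binding import, which is what
# E402 (module level import not at top of file) complains about.
sys.path[:0] = [
    os.path.join(os.path.dirname(__file__), "..", "..", "bindings", "python")
]

import fdb  # noqa: E402  (redundant once E402 is ignored repo-wide)
```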
@@ -21,12 +21,12 @@
 import math
 import sys
 import os
-import util

 sys.path[:0] = [
     os.path.join(os.path.dirname(__file__), "..", "..", "bindings", "python")
 ]

+import util
 from fdb import LATEST_API_VERSION

 FDB_API_VERSION = LATEST_API_VERSION
@@ -155,7 +155,6 @@ class ClientConfigTest:
         self.test_cluster_file = self.test_dir.joinpath(
             "{}.cluster".format(random_alphanum_string(16))
         )
-        port = self.cluster.port_provider.get_free_port()
         with open(self.test_cluster_file, "w") as file:
             file.write("abcde:fghijk@")

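The deleted line is the commit's most common fix, F841 (local variable is assigned to but never used): a port is obtained but never written into the cluster file. A minimal sketch of the pattern, with hypothetical names:

```python
def write_test_cluster_file(path, port_provider):
    port = port_provider.get_free_port()  # F841: never read after this line
    with open(path, "w") as file:
        file.write("abcde:fghijk@")  # the port is not part of the output
```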
@@ -610,9 +609,9 @@ class ClientTracingTests(unittest.TestCase):
             with_ip=True, version=CURRENT_VERSION, thread_idx=0
         )
         self.find_and_check_event(cur_ver_trace, "ClientStart", ["Machine"], [])
-        prev_ver_trace = self.find_trace_file(
-            with_ip=True, version=PREV_RELEASE_VERSION, thread_idx=0
-        )
+        # prev_ver_trace = self.find_trace_file(
+        #     with_ip=True, version=PREV_RELEASE_VERSION, thread_idx=0
+        # )
         # there have been sporadic check failures in the trace check below, so we comment this out for the time being
         # previous release version was likely not flushing trace correctly when network::stop() is called
         # TODO: re-enable this check when we bump up PREV_RELEASE_VERSION to one where there is such a guarantee
@@ -771,7 +770,7 @@ if __name__ == "__main__":
     parser = argparse.ArgumentParser(
         formatter_class=argparse.RawDescriptionHelpFormatter,
         description="""
-Unit tests for running FDB client with different configurations. 
+Unit tests for running FDB client with different configurations.
 Also accepts python unit tests command line arguments.
 """,
     )
@@ -60,7 +60,9 @@ def api_version(ver):

     if "_version" in globals():
         if globals()["_version"] != ver:
-            raise RuntimeError("FDB API already loaded at version %d" % _version)
+            raise RuntimeError(
+                "FDB API already loaded at version %d" % globals()["_version"]
+            )
         return

     if ver < 13:
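This rewrite also silences F821 (undefined name): `_version` is only ever created dynamically through globals(), so flake8 never sees a static assignment to it. A simplified sketch of the mechanism:

```python
def api_version(ver):
    if "_version" in globals():
        if globals()["_version"] != ver:
            # A bare `_version` reference here trips F821; the explicit
            # globals() lookup matches how the name is actually created.
            raise RuntimeError(
                "FDB API already loaded at version %d" % globals()["_version"]
            )
        return
    globals()["_version"] = ver  # dynamic definition, invisible to the linter
```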
@@ -195,6 +195,15 @@ def _error_predicate(predicate, error_code):
     return bool(_capi.fdb_error_predicate(predicate, error_code))


+# define generated types for linters
+class StreamingMode:
+    pass
+
+
+class ConflictRangeType:
+    pass
+
+
 def make_enum(scope):
     _dict = getattr(_opts, scope)

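The stub classes exist only so static tools see a definition for names that make_enum rebinds at import time. Roughly how that works, simplified with a hypothetical option table (the real code reads the generated _opts module, as the `getattr(_opts, scope)` line above shows):

```python
class StreamingMode:
    pass  # placeholder; rebound below


def make_enum(scope, options):
    cls = type(scope, (), {})  # fresh namespace class
    for name, value in options.items():
        setattr(cls, name, value)
    globals()[scope] = cls  # replaces the placeholder


make_enum("StreamingMode", {"want_all": -2, "iterator": -1, "exact": 0})
assert StreamingMode.iterator == -1
```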
@@ -28,19 +28,18 @@ import unicodedata
 import math
 import uuid

-_range = range
-from fdb.tuple import pack, unpack, range, compare, SingleFloat
+import fdb.tuple
+from fdb.tuple import pack, unpack, compare, SingleFloat
 from fdb import six
-
 from fdb.six import u


 def randomUnicode():
     while True:
         c = random.randint(0, 0xFFFF)
-        if unicodedata.category(unichr(c))[0] in "LMNPSZ":
-            return unichr(c)
+        if unicodedata.category(six.unichr(c))[0] in "LMNPSZ":
+            return six.unichr(c)


 def randomElement():
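Two separate issues drive this hunk. `unichr` is a Python 2 builtin, so flake8 reports it as F821 under Python 3, while `from fdb.tuple import range` shadowed the builtin `range`, which the old `_range = range` alias (captured before the import) worked around. Sketched with the real six library; the fdb import assumes the bindings are installed:

```python
import six

# six.unichr is unichr on Python 2 and chr on Python 3.
assert six.unichr(0x61) == u"a"

# Importing the module keeps both the builtin and fdb's helper usable,
# with no _range alias needed:
import fdb.tuple

sizes = [i for i in range(3)]           # builtin range
# key_range = fdb.tuple.range((1, 2))  # fdb's tuple-range helper
```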
@@ -48,14 +47,12 @@ def randomElement():
     if r == 0:
         if random.random() < 0.5:
             chars = [b"\x00", b"\x01", b"a", b"7", b"\xfe", b"\ff"]
-            return b"".join(
-                [random.choice(chars) for c in _range(random.randint(0, 5))]
-            )
+            return b"".join([random.choice(chars) for c in range(random.randint(0, 5))])
         else:
             return b"".join(
                 [
                     six.int2byte(random.randint(0, 255))
-                    for _ in _range(random.randint(0, 10))
+                    for _ in range(random.randint(0, 10))
                 ]
             )
     elif r == 1:
@@ -74,10 +71,10 @@ def randomElement():
                 u("\U0001f4a9"),
             ]
             return u("").join(
-                [random.choice(chars) for c in _range(random.randint(0, 10))]
+                [random.choice(chars) for c in range(random.randint(0, 10))]
             )
         else:
-            return u("").join([randomUnicode() for _ in _range(random.randint(0, 10))])
+            return u("").join([randomUnicode() for _ in range(random.randint(0, 10))])
     elif r == 2:
         return random.choice([-1, 1]) * min(
             2 ** random.randint(0, 2040) + random.randint(-10, 10), 2**2040 - 1
@@ -99,10 +96,7 @@ def randomElement():
     elif r == 6:
         is_double = random.random() < 0.5
         byte_str = b"".join(
-            [
-                six.int2byte(random.randint(0, 255))
-                for _ in _range(8 if is_double else 4)
-            ]
+            [six.int2byte(random.randint(0, 255)) for _ in range(8 if is_double else 4)]
         )
         if is_double:
             return struct.unpack(">d", byte_str)[0]
@@ -113,11 +107,11 @@ def randomElement():
     elif r == 8:
         return uuid.uuid4()
     elif r == 9:
-        return [randomElement() for _ in _range(random.randint(0, 5))]
+        return [randomElement() for _ in range(random.randint(0, 5))]


 def randomTuple():
-    return tuple(randomElement() for x in _range(random.randint(0, 4)))
+    return tuple(randomElement() for x in range(random.randint(0, 4)))


 def isprefix(a, b):
@@ -136,7 +130,7 @@ def equalEnough(t1, t2):
     if len(t1) != len(t2):
         return False

-    for i in _range(len(t1)):
+    for i in range(len(t1)):
         e1 = t1[i]
         e2 = t2[i]

@@ -157,7 +151,7 @@ def equalEnough(t1, t2):


 def tupleTest(N=10000):
-    someTuples = [randomTuple() for i in _range(N)]
+    someTuples = [randomTuple() for i in range(N)]
     a = sorted(someTuples, cmp=compare)
     b = sorted(someTuples, key=pack)

@@ -177,7 +171,7 @@ def tupleTest(N=10000):

     print("Sort %d OK" % N)

-    for i in _range(N):
+    for i in range(N):
         t = randomTuple()
         t2 = t + (randomElement(),)
         t3 = randomTuple()
@@ -189,7 +183,7 @@ def tupleTest(N=10000):
             )
             return False

-        r = range(t)
+        r = fdb.tuple.range(t)
         if r.start <= pack(t) < r.stop:
             print(
                 "element within own range:\n  Tuple: %s\n  Bytes: %s\n  Start: %s\n  Stop: %s"
@@ -166,20 +166,14 @@ def print_errors(ensemble_id: str):
         ensemble_id, errors_only=(not config.success), compressed=compressed
     ):
         if len(rec) == 5:
-            version_stamp, result_code, host, seed, output = rec
+            versionstamp, result_code, host, seed, output = rec
         elif len(rec) == 4:
-            version_stamp, result_code, host, output = rec
-            seed = None
+            versionstamp, result_code, host, output = rec
         elif len(rec) == 3:
-            version_stamp, result_code, output = rec
-            host = None
-            seed = None
+            versionstamp, result_code, output = rec
         elif len(rec) == 2:
-            version_stamp, seed = rec
+            versionstamp, seed = rec
             output = str(joshua_model.fdb.tuple.unpack(seed)[0]) + "\n"
-            result_code = None
-            host = None
-            seed = None
         else:
             raise Exception("Unknown result format")
         lines = output.splitlines()
@@ -277,7 +277,7 @@ def address_file(
             fout.write("\n".join(new_lines))

         return True
-    except (OSError, UnicodeDecodeError) as e:
+    except (OSError, UnicodeDecodeError):
         logging.exception(
             "Unable to read file %s due to OSError", os.path.join(base_path, file_path)
         )
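Dropping `as e` fixes F841 here because logging.exception already appends the active exception and traceback to the log record; nothing from the handler needs to be interpolated by hand. A minimal sketch:

```python
import logging
import os


def read_file(base_path, file_path):
    try:
        with open(os.path.join(base_path, file_path)) as fin:
            return fin.read()
    except (OSError, UnicodeDecodeError):
        # The traceback is attached automatically; no `as e` required.
        logging.exception("Unable to read file %s", os.path.join(base_path, file_path))
        return None
```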
@@ -336,7 +336,7 @@ def address_path(
         else:
             logging.error("Path %s does not exist", path)
             return False
-    except OSError as e:
+    except OSError:
         logging.exception("Unable to find all API versions due to OSError")
         return False

@@ -117,7 +117,7 @@ def _main():
             pr_info["merge_commit"]
         )
     else:
-        print(f"Using pull request branch")
+        print("Using pull request branch")
         # Might be a fast-forward merge without merge hash, need to cherry-pick from the original owner's repository
         _prepare_pull_request(pr)
     commits_to_be_cherry_picked = pr_info["commits"]
@@ -68,7 +68,6 @@ class CommitDebugHandler(xml.sax.ContentHandler, object):
         if self._starttime is None:
             self._starttime = float(attrs["Time"])

-        attr_id = attrs["ID"]
         # Trace viewer doesn't seem to care about types, so use host as pid and port as tid
         (pid, tid) = attrs["Machine"].split(":")
         traces = locationToPhase[attrs["Location"]]
@@ -205,7 +205,7 @@ class _ScopeLinter:
             and self._scope_stack[-1].spelling == SCOPING_BEGIN_PUNCTUATION
         ):
             return LinterIssue(
-                token.file_name, token.line, f"Found wait inside when clause"
+                token.file_name, token.line, "Found wait inside when clause"
             )
         return None
     else:
@@ -362,7 +362,7 @@ async def run_binding_tests(

     try:
         await run_test_cycles()
-    except:
+    except Exception:
         logger.exception("Error found during the binding test")
         raise
     finally:
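The trigger here is E722 (bare except): a bare `except:` also catches BaseException subclasses such as KeyboardInterrupt and SystemExit, whereas `except Exception:` lets those propagate. A sketch with a hypothetical task:

```python
import logging

logger = logging.getLogger(__name__)


def run_guarded(task):
    try:
        task()
    except Exception:  # ordinary errors only; Ctrl-C still interrupts
        logger.exception("Error found during the task")
        raise
```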
@@ -125,11 +125,11 @@ class SerializableObjectLibrary:
         classes = sorted(list(self._library[path].keys()))
         for class_ in classes:
             stream.write(f"### `{class_}`\n")
-            stream.write(f"#### Member variables\n")
+            stream.write("#### Member variables\n")
             for index, item in enumerate(self._library[path][class_]["fields"]):
                 # NOTE index starts with 0
                 stream.write(f"{index + 1}. `{item['name']}`: `{item['type']}`\n")
-            stream.write(f"#### Serialize code\n")
+            stream.write("#### Serialize code\n")
             stream.write("```c++\n")
             # This additional `\t` formats the source better.
             stream.write("\t" + self._library[path][class_]["raw_serialize_code"])
@@ -201,11 +201,11 @@ html_permalinks = False

 latex_elements = {
     # The paper size ('letterpaper' or 'a4paper').
-    #'papersize': 'letterpaper',
+    # 'papersize': 'letterpaper',
     # The font size ('10pt', '11pt' or '12pt').
-    #'pointsize': '10pt',
+    # 'pointsize': '10pt',
     # Additional stuff for the LaTeX preamble.
-    #'preamble': '',
+    # 'preamble': '',
 }

 # Grouping the document tree into LaTeX files. List of tuples
@@ -18,8 +18,6 @@
 # limitations under the License.
 #

-from sphinx.addnodes import toctree
-
 # This extension cruelly monkey patches sphinx.environment.BuildEnvironment so
 # that toctree entries can contain relative internal links, using the syntax
 # Name <relative://relative/path>
@@ -42,7 +42,7 @@ def getOrValidateAddress(address):
             s.connect(("www.foundationdb.org", 80))
             return s.getsockname()[0]
         except Exception as e:
-            print("ERROR: Could not determine an address")
+            print("ERROR: Could not determine an address: %s" % e)
             exit(1)
     else:
         try:
@@ -280,7 +280,6 @@ def process_traces(
     cmake_seed,
 ):
     res = True
-    backtraces = []
     parser = None
     if log_format == "json":
         parser = JSONParser(
@@ -1,8 +1,17 @@
 from authlib.jose import JsonWebKey, KeySet, jwt
-from typing import List
+from typing import List, Union
 import base64
 import json
 import time

+from test_util import random_alphanum_str
+
+
+def to_str(s: Union[str, bytes]):
+    if isinstance(s, bytes):
+        s = s.decode("utf8")
+    return s
+
+
 def private_key_gen(kty: str, kid: str):
     assert kty == "EC" or kty == "RSA"
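The new helper normalizes values that may arrive as either str or bytes, e.g.:

```python
assert to_str(b"tenant") == "tenant"
assert to_str("tenant") == "tenant"
```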
@@ -29,13 +29,13 @@ from tmp_cluster import TempCluster

 if __name__ == "__main__":
     script_desc = """
-    This script automatically configures N temporary local clusters on the machine and then
-    calls a command while these clusters are running. As soon as the command returns, all
-    configured clusters are killed and all generated data is deleted.
+This script automatically configures N temporary local clusters on the machine and then
+calls a command while these clusters are running. As soon as the command returns, all
+configured clusters are killed and all generated data is deleted.

-    The purpose of this is to support testing a set of integration tests using multiple clusters
-    (i.e. using the Multi-threaded client).
-    """
+The purpose of this is to support testing a set of integration tests using multiple clusters
+(i.e. using the Multi-threaded client).
+"""

     parser = CreateTmpFdbClusterArgParser(description=script_desc)
@@ -49,7 +49,6 @@ def main_loop(main_pipe, pipe):
             pipe.send(TypeError("unexpected type {}".format(type(req))))
             continue
         op = req.op
-        args = req.args
         resp = True
         try:
             if op == "configure_client":
@@ -71,7 +70,6 @@ def main_loop(main_pipe, pipe):
                 resp = Exception("db not open")
             else:
                 for tenant in req.args:
-                    tenant_str = to_str(tenant)
                     tenant_bytes = to_bytes(tenant)
                     fdb.tenant_management.create_tenant(db, tenant_bytes)
         elif op == "delete_tenant":
@@ -79,7 +77,6 @@ def main_loop(main_pipe, pipe):
                 resp = Exception("db not open")
             else:
                 for tenant in req.args:
-                    tenant_str = to_str(tenant)
                     tenant_bytes = to_bytes(tenant)
                     cleanup_tenant(db, tenant_bytes)
         elif op == "cleanup_database":
@@ -149,7 +146,7 @@ class Server(object):
         try:
             self._main_pipe.send(req)
             resp = self._main_pipe.recv()
-            if resp != True:
+            if not resp:
                 print("{} failed: {}".format(req, resp))
                 raise resp
         else:
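The lint rule here is E712 (comparison to True). Note that pycodestyle's two suggested rewrites are not equivalent when the value can be a non-bool, as this quick check shows:

```python
resp = ValueError("boom")          # truthy, non-bool "response" object
assert (resp != True) is True      # the old test fires for any non-True value
assert (not resp) is False         # `not resp` does not fire for truthy objects
assert (resp is not True) is True  # `is not True` keeps the old behavior
```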
@@ -94,6 +94,7 @@ special_key_ranges = [
     ),
 ]


+# handler for when looping is assumed with usage
 # e.g. GRV cache enablement removes the guarantee that transaction always gets the latest read version before it starts,
 # which could introduce arbitrary conflicts even on idle test clusters, and those need to be resolved via retrying.
@@ -332,7 +333,7 @@ def test_system_and_special_key_range_disallowed(db, tenant_tr_gen):
         tr.options.set_access_system_keys()
         del tr[b"\xff":b"\xff\xff"]
         tr.commit().wait()
-        assert False, f"disallowed system keyspace write has succeeded"
+        assert False, "disallowed system keyspace write has succeeded"
     except fdb.FDBError as e:
         assert e.code == 6000, f"expected permission_denied, got {e} instead"

@@ -383,11 +384,9 @@ def test_public_key_set_rollover(
     new_kid = random_alphanum_str(12)
     new_kty = "EC" if kty == "RSA" else "RSA"
     new_key = private_key_gen(kty=new_kty, kid=new_kid)
-    token_default = token_gen(cluster.private_key, token_claim_1h(default_tenant))

     second_tenant = random_alphanum_bytes(12)
     tenant_gen(second_tenant)
-    token_second = token_gen(new_key, token_claim_1h(second_tenant))

     interim_set = public_keyset_from_keys([new_key, cluster.private_key])
     max_repeat = 10
@@ -604,7 +603,7 @@ def test_bad_token(cluster, default_tenant, tenant_tr_gen, token_claim_1h):
         )
         print(f"Trace check begin for '{case_name}': {checker.begin}")
         try:
-            value = tr[b"abc"].value
+            tr[b"abc"].wait()
             assert (
                 False
             ), f"expected permission_denied for case '{case_name}', but read transaction went through"
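The pattern in these test hunks: a read like tr[b"abc"] returns a future, and the old `value = ....value` left an F841 unused binding; calling .wait() blocks on the same future and still raises fdb.FDBError on failure. A sketch assuming an open database `db` and the test's permission setup:

```python
import fdb  # assumes fdb.api_version(...) was already called and `db` is open

tr = db.create_transaction()
try:
    tr[b"abc"].wait()  # force the read future; no unused variable
    assert False, "expected permission_denied, but the read went through"
except fdb.FDBError as e:
    assert e.code == 6000  # permission_denied
```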
@@ -654,10 +653,10 @@ def test_bad_token(cluster, default_tenant, tenant_tr_gen, token_claim_1h):
     tr = tenant_tr_gen(default_tenant)
     tr.options.set_authorization_token(unknown_key_token)
     try:
-        value = tr[b"abc"].value
+        tr[b"abc"].wait()
         assert (
             False
-        ), f"expected permission_denied for 'unknown key' case, but read transaction went through"
+        ), "expected permission_denied for 'unknown key' case, but read transaction went through"
     except fdb.FDBError as e:
         assert (
             e.code == 6000
@@ -669,7 +668,7 @@ def test_bad_token(cluster, default_tenant, tenant_tr_gen, token_claim_1h):
         tr.commit().wait()
         assert (
             False
-        ), f"expected permission_denied for 'unknown key' case, but write transaction went through"
+        ), "expected permission_denied for 'unknown key' case, but write transaction went through"
     except fdb.FDBError as e:
         assert (
             e.code == 6000
@@ -189,7 +189,7 @@ def cluster(
         ):
             apply_trace_time = float(entry.attrib["Time"])
         if bad_trace_time is None and ev_type == bad_ev_type:
-            bad_trace_found = float(entry.attrib["Time"])
+            bad_trace_time = float(entry.attrib["Time"])
     if apply_trace_time is None:
         pytest.fail(
             f"failed to find '{keyset_apply_ev_type}' event with >0 public keys"
@@ -223,7 +223,7 @@ def cluster(
             f"{ev_target} trace entry's FromAddr does not have a valid ':tls' suffix: found '{tls_suffix}'"
         )
     try:
-        ip = ipaddress.ip_address(client_ip)
+        ipaddress.ip_address(client_ip)
     except ValueError as e:
         pytest.fail(
             f"{ev_target} trace entry's FromAddr '{client_ip}' has an invalid IP format: {e}"
@@ -75,7 +75,7 @@ def wait_until_tenant_tr_fails(
         )
         try:
             if not read_blocked:
-                value = tr[b"abc"].value
+                tr[b"abc"].wait()
         except fdb.FDBError as e:
             assert e.code == 6000, f"expected permission_denied, got {e} instead"
             read_blocked = True
@@ -109,7 +109,7 @@ def wait_until_tenant_tr_succeeds(
             time.sleep(delay)
             tr = tenant_tr_gen(tenant)
             tr.options.set_authorization_token(token)
-            value = tr[b"abc"].value
+            tr[b"abc"].wait()
             tr[b"abc"] = b"qwe"
             tr.commit().wait()
             break
@@ -1,6 +1,9 @@
 #!/bin/env python2

 import argparse
+import sys
+import os
+import random

 parser = argparse.ArgumentParser("Run multithreaded client tests")

@@ -29,9 +32,11 @@ parser.add_argument(
     default="client-logs",
     help="Path to write client logs to. The directory will be created if it does not exist.",
 )

 args = parser.parse_args()

-import sys
+sys.path.append(args.build_dir + "/bindings/python")
+
+import fdb

 ### sample usage (from inside your FDB build output directory):
@@ -45,12 +50,6 @@ import sys
 ## This fails (unsupported configuration):
 # ../tests/loopback_cluster/run_cluster.sh . 3 '../tests/python_tests/multithreaded_client.py loopback-cluster-*/fdb.cluster --threads 2 --skip-so-files'

-sys.path.append(args.build_dir + "/bindings/python")
-
-import fdb
-import os
-import random
-
 fdb.api_version(630)

 if not os.path.exists(args.client_log_dir):