Mirror of https://github.com/apple/foundationdb.git, synced 2026-01-24 20:08:38 +00:00

Remove Python 2.7 support and six.py (#11418)
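Aside (not part of the commit): the hunks below are a mechanical six-to-Python-3 migration. As a rough guide to the substitutions they apply, under the assumption that only standard Python 3 behavior is relied on (the sample value v is made up):

import struct

int2byte = struct.Struct(">B").pack       # the helper fdb.tuple now defines and fdb.impl imports

v = b"\x01\x02"
assert int2byte(0xFF) == b"\xff"          # replaces six.int2byte(0xFF)
assert v[0] == 0x01                       # replaces six.indexbytes(v, 0); indexing bytes yields an int
assert list(iter(v)) == [1, 2]            # replaces six.iterbytes(v)
assert isinstance("abc", str)             # str replaces six.text_type / six.string_types
assert isinstance(7, int)                 # int replaces six.integer_types (no separate long type)
assert "DIRECTORY_CREATE" == u"DIRECTORY_CREATE"   # six.u("...") becomes a plain literal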
.flake8
@@ -3,4 +3,4 @@ ignore = E203, E266, E402, E501, W503, F403, F401, E711, C901, E721, W605
 max-line-length = 88
 max-complexity = 18
 select = B,C,E,F,W,T4,B9
-exclude = bindings/python/fdb/six.py,contrib/Implib.so/implib-gen.py,documentation/sphinx/extensions/rubydomain.py
+exclude = contrib/Implib.so/implib-gen.py,documentation/sphinx/extensions/rubydomain.py
@@ -5,7 +5,6 @@ repos:
   - id: black
     exclude: |
       (?x)^(
-        bindings/python/fdb/six.py|
         contrib/Implib.so/implib-gen.py|
         documentation/sphinx/extensions/rubydomain.py
       )$
@@ -86,15 +86,6 @@ _java_cmd = "java -ea -cp %s:%s com.apple.foundationdb.test." % (
 testers = {
-    "python": Tester(
-        "python",
-        "python " + _absolute_path("python/tests/tester.py"),
-        2040,
-        23,
-        MAX_API_VERSION,
-        types=ALL_TYPES,
-        tenants_enabled=True,
-    ),
     "python3": Tester(
         "python3",
         "python3 " + _absolute_path("python/tests/tester.py"),
         2040,
         23,
@@ -20,7 +20,6 @@ function scripted() {
 
 function run_scripted() {
-    scripted python
     scripted python3
     scripted ruby
     scripted java
     scripted java_async
@@ -35,7 +34,6 @@ while `true`; do
     echo "Pass $i"
     i=$((i+1))
-    run python
     run python3
     run ruby
     run java
     run java_async
@@ -4,7 +4,6 @@ set(SRCS
   fdb/directory_impl.py
   fdb/impl.py
   fdb/locality.py
-  fdb/six.py
   fdb/subspace_impl.py
   fdb/tenant_management.py
   fdb/tuple.py
@@ -25,7 +25,6 @@ import struct
 import threading
 
 from fdb import impl as _impl
-from fdb import six
 import fdb.tuple
 from .subspace_impl import Subspace
@@ -35,7 +34,7 @@ class AllocatorTransactionState:
         self.lock = threading.Lock()
 
 
-class HighContentionAllocator(object):
+class HighContentionAllocator:
     def __init__(self, subspace):
         self.counters = subspace[0]
         self.recent = subspace[1]
@@ -131,7 +130,7 @@ class HighContentionAllocator(object):
         return 8192
 
 
-class Directory(object):
+class Directory:
     def __init__(self, directory_layer, path=(), layer=b""):
         self._directory_layer = directory_layer
         self._path = path
@@ -613,17 +612,17 @@ class DirectoryLayer(Directory):
 
 def _to_unicode_path(path):
     if isinstance(path, bytes):
-        path = six.text_type(path)
+        path = path.decode()
 
-    if isinstance(path, six.text_type):
+    if isinstance(path, str):
         return (path,)
 
     if isinstance(path, tuple):
         path = list(path)
         for i, name in enumerate(path):
             if isinstance(name, bytes):
-                path[i] = six.text_type(path[i])
-            elif not isinstance(name, six.text_type):
+                path[i] = path[i].decode()
+            elif not isinstance(name, str):
                 raise ValueError(
                     "Invalid path: must be a unicode string or a tuple of unicode strings"
                 )
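Aside (not part of the diff): a minimal check of the replacement used above. bytes.decode() with no arguments decodes as UTF-8 in Python 3, and isinstance(..., str) is the Python 3 spelling of the old six.text_type test. The helper name below is illustrative, not FoundationDB API:

def to_unicode_element(name):
    # mirrors the conversion above, for illustration only
    if isinstance(name, bytes):
        return name.decode()   # UTF-8 by default in Python 3
    if isinstance(name, str):
        return name
    raise ValueError("must be str or bytes")

assert to_unicode_element(b"layer") == "layer"
assert to_unicode_element("layer") == "layer"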
@@ -35,8 +35,7 @@ import traceback
 import weakref
 
 import fdb
-from fdb import six
-from fdb.tuple import pack
+from fdb.tuple import pack, int2byte
 
 from fdb import fdboptions as _opts
@@ -137,7 +136,6 @@ def fill_options(scope, predicates=False):
         elif paramType == type(""):
             f = option_wrap_string(code)
         elif paramType == type(b""):
-            # This won't happen in Python 2 because type("") == type(b""), but it will happen in Python 3
             f = option_wrap_bytes(code)
         elif paramType == type(0):
             f = option_wrap_int(code)
@@ -2169,4 +2167,4 @@ def strinc(key):
     if len(key) == 0:
         raise ValueError("Key must contain at least one byte not equal to 0xFF.")
 
-    return key[:-1] + six.int2byte(ord(key[-1:]) + 1)
+    return key[:-1] + int2byte(ord(key[-1:]) + 1)
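Aside (not part of the diff): int2byte here is the struct-based packer now imported from fdb.tuple (see the changed import above); for a single byte value it behaves like six.int2byte did, and bytes([x]) is an equivalent spelling. A quick check of the expression used in strinc, with a made-up key:

import struct

int2byte = struct.Struct(">B").pack   # same definition fdb.tuple exports

key = b"plan\x03"
assert int2byte(ord(key[-1:]) + 1) == b"\x04"
assert key[:-1] + int2byte(ord(key[-1:]) + 1) == b"plan\x04"
assert int2byte(7) == bytes([7])      # equivalent modern spelling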
@@ -1,891 +0,0 @@
|
||||
# Copyright (c) 2010-2017 Benjamin Peterson
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in all
|
||||
# copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
"""Utilities for writing code that runs on Python 2 and 3"""
|
||||
|
||||
from __future__ import absolute_import
|
||||
|
||||
import functools
|
||||
import itertools
|
||||
import operator
|
||||
import sys
|
||||
import types
|
||||
|
||||
__author__ = "Benjamin Peterson <benjamin@python.org>"
|
||||
__version__ = "1.11.0"
|
||||
|
||||
|
||||
# Useful for very coarse version differentiation.
|
||||
PY2 = sys.version_info[0] == 2
|
||||
PY3 = sys.version_info[0] == 3
|
||||
PY34 = sys.version_info[0:2] >= (3, 4)
|
||||
|
||||
if PY3:
|
||||
string_types = str,
|
||||
integer_types = int,
|
||||
class_types = type,
|
||||
text_type = str
|
||||
binary_type = bytes
|
||||
|
||||
MAXSIZE = sys.maxsize
|
||||
else:
|
||||
string_types = basestring,
|
||||
integer_types = (int, long)
|
||||
class_types = (type, types.ClassType)
|
||||
text_type = unicode
|
||||
binary_type = str
|
||||
|
||||
if sys.platform.startswith("java"):
|
||||
# Jython always uses 32 bits.
|
||||
MAXSIZE = int((1 << 31) - 1)
|
||||
else:
|
||||
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
|
||||
class X(object):
|
||||
|
||||
def __len__(self):
|
||||
return 1 << 31
|
||||
try:
|
||||
len(X())
|
||||
except OverflowError:
|
||||
# 32-bit
|
||||
MAXSIZE = int((1 << 31) - 1)
|
||||
else:
|
||||
# 64-bit
|
||||
MAXSIZE = int((1 << 63) - 1)
|
||||
del X
|
||||
|
||||
|
||||
def _add_doc(func, doc):
|
||||
"""Add documentation to a function."""
|
||||
func.__doc__ = doc
|
||||
|
||||
|
||||
def _import_module(name):
|
||||
"""Import module, returning the module after the last dot."""
|
||||
__import__(name)
|
||||
return sys.modules[name]
|
||||
|
||||
|
||||
class _LazyDescr(object):
|
||||
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
|
||||
def __get__(self, obj, tp):
|
||||
result = self._resolve()
|
||||
setattr(obj, self.name, result) # Invokes __set__.
|
||||
try:
|
||||
# This is a bit ugly, but it avoids running this again by
|
||||
# removing this descriptor.
|
||||
delattr(obj.__class__, self.name)
|
||||
except AttributeError:
|
||||
pass
|
||||
return result
|
||||
|
||||
|
||||
class MovedModule(_LazyDescr):
|
||||
|
||||
def __init__(self, name, old, new=None):
|
||||
super(MovedModule, self).__init__(name)
|
||||
if PY3:
|
||||
if new is None:
|
||||
new = name
|
||||
self.mod = new
|
||||
else:
|
||||
self.mod = old
|
||||
|
||||
def _resolve(self):
|
||||
return _import_module(self.mod)
|
||||
|
||||
def __getattr__(self, attr):
|
||||
_module = self._resolve()
|
||||
value = getattr(_module, attr)
|
||||
setattr(self, attr, value)
|
||||
return value
|
||||
|
||||
|
||||
class _LazyModule(types.ModuleType):
|
||||
|
||||
def __init__(self, name):
|
||||
super(_LazyModule, self).__init__(name)
|
||||
self.__doc__ = self.__class__.__doc__
|
||||
|
||||
def __dir__(self):
|
||||
attrs = ["__doc__", "__name__"]
|
||||
attrs += [attr.name for attr in self._moved_attributes]
|
||||
return attrs
|
||||
|
||||
# Subclasses should override this
|
||||
_moved_attributes = []
|
||||
|
||||
|
||||
class MovedAttribute(_LazyDescr):
|
||||
|
||||
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
|
||||
super(MovedAttribute, self).__init__(name)
|
||||
if PY3:
|
||||
if new_mod is None:
|
||||
new_mod = name
|
||||
self.mod = new_mod
|
||||
if new_attr is None:
|
||||
if old_attr is None:
|
||||
new_attr = name
|
||||
else:
|
||||
new_attr = old_attr
|
||||
self.attr = new_attr
|
||||
else:
|
||||
self.mod = old_mod
|
||||
if old_attr is None:
|
||||
old_attr = name
|
||||
self.attr = old_attr
|
||||
|
||||
def _resolve(self):
|
||||
module = _import_module(self.mod)
|
||||
return getattr(module, self.attr)
|
||||
|
||||
|
||||
class _SixMetaPathImporter(object):
|
||||
|
||||
"""
|
||||
A meta path importer to import six.moves and its submodules.
|
||||
|
||||
This class implements a PEP302 finder and loader. It should be compatible
|
||||
with Python 2.5 and all existing versions of Python3
|
||||
"""
|
||||
|
||||
def __init__(self, six_module_name):
|
||||
self.name = six_module_name
|
||||
self.known_modules = {}
|
||||
|
||||
def _add_module(self, mod, *fullnames):
|
||||
for fullname in fullnames:
|
||||
self.known_modules[self.name + "." + fullname] = mod
|
||||
|
||||
def _get_module(self, fullname):
|
||||
return self.known_modules[self.name + "." + fullname]
|
||||
|
||||
def find_module(self, fullname, path=None):
|
||||
if fullname in self.known_modules:
|
||||
return self
|
||||
return None
|
||||
|
||||
def __get_module(self, fullname):
|
||||
try:
|
||||
return self.known_modules[fullname]
|
||||
except KeyError:
|
||||
raise ImportError("This loader does not know module " + fullname)
|
||||
|
||||
def load_module(self, fullname):
|
||||
try:
|
||||
# in case of a reload
|
||||
return sys.modules[fullname]
|
||||
except KeyError:
|
||||
pass
|
||||
mod = self.__get_module(fullname)
|
||||
if isinstance(mod, MovedModule):
|
||||
mod = mod._resolve()
|
||||
else:
|
||||
mod.__loader__ = self
|
||||
sys.modules[fullname] = mod
|
||||
return mod
|
||||
|
||||
def is_package(self, fullname):
|
||||
"""
|
||||
Return true, if the named module is a package.
|
||||
|
||||
We need this method to get correct spec objects with
|
||||
Python 3.4 (see PEP451)
|
||||
"""
|
||||
return hasattr(self.__get_module(fullname), "__path__")
|
||||
|
||||
def get_code(self, fullname):
|
||||
"""Return None
|
||||
|
||||
Required, if is_package is implemented"""
|
||||
self.__get_module(fullname) # eventually raises ImportError
|
||||
return None
|
||||
get_source = get_code # same as get_code
|
||||
|
||||
_importer = _SixMetaPathImporter(__name__)
|
||||
|
||||
|
||||
class _MovedItems(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects"""
|
||||
__path__ = [] # mark as package
|
||||
|
||||
|
||||
_moved_attributes = [
|
||||
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
|
||||
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
|
||||
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
|
||||
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
|
||||
MovedAttribute("intern", "__builtin__", "sys"),
|
||||
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
|
||||
MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
|
||||
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
|
||||
MovedAttribute("getoutput", "commands", "subprocess"),
|
||||
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
|
||||
MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
|
||||
MovedAttribute("reduce", "__builtin__", "functools"),
|
||||
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
|
||||
MovedAttribute("StringIO", "StringIO", "io"),
|
||||
MovedAttribute("UserDict", "UserDict", "collections"),
|
||||
MovedAttribute("UserList", "UserList", "collections"),
|
||||
MovedAttribute("UserString", "UserString", "collections"),
|
||||
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
|
||||
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
|
||||
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
|
||||
MovedModule("builtins", "__builtin__"),
|
||||
MovedModule("configparser", "ConfigParser"),
|
||||
MovedModule("copyreg", "copy_reg"),
|
||||
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
|
||||
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
|
||||
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
|
||||
MovedModule("http_cookies", "Cookie", "http.cookies"),
|
||||
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
|
||||
MovedModule("html_parser", "HTMLParser", "html.parser"),
|
||||
MovedModule("http_client", "httplib", "http.client"),
|
||||
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
|
||||
MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
|
||||
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
|
||||
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
|
||||
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
|
||||
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
|
||||
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
|
||||
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
|
||||
MovedModule("cPickle", "cPickle", "pickle"),
|
||||
MovedModule("queue", "Queue"),
|
||||
MovedModule("reprlib", "repr"),
|
||||
MovedModule("socketserver", "SocketServer"),
|
||||
MovedModule("_thread", "thread", "_thread"),
|
||||
MovedModule("tkinter", "Tkinter"),
|
||||
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
|
||||
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
|
||||
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
|
||||
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
|
||||
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
|
||||
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
|
||||
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
|
||||
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
|
||||
MovedModule("tkinter_colorchooser", "tkColorChooser",
|
||||
"tkinter.colorchooser"),
|
||||
MovedModule("tkinter_commondialog", "tkCommonDialog",
|
||||
"tkinter.commondialog"),
|
||||
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
|
||||
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
|
||||
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
|
||||
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
|
||||
"tkinter.simpledialog"),
|
||||
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
|
||||
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
|
||||
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
|
||||
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
|
||||
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
|
||||
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
|
||||
]
|
||||
# Add windows specific modules.
|
||||
if sys.platform == "win32":
|
||||
_moved_attributes += [
|
||||
MovedModule("winreg", "_winreg"),
|
||||
]
|
||||
|
||||
for attr in _moved_attributes:
|
||||
setattr(_MovedItems, attr.name, attr)
|
||||
if isinstance(attr, MovedModule):
|
||||
_importer._add_module(attr, "moves." + attr.name)
|
||||
del attr
|
||||
|
||||
_MovedItems._moved_attributes = _moved_attributes
|
||||
|
||||
moves = _MovedItems(__name__ + ".moves")
|
||||
_importer._add_module(moves, "moves")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_parse(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects in six.moves.urllib_parse"""
|
||||
|
||||
|
||||
_urllib_parse_moved_attributes = [
|
||||
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("quote", "urllib", "urllib.parse"),
|
||||
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
|
||||
MovedAttribute("unquote", "urllib", "urllib.parse"),
|
||||
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
|
||||
MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"),
|
||||
MovedAttribute("urlencode", "urllib", "urllib.parse"),
|
||||
MovedAttribute("splitquery", "urllib", "urllib.parse"),
|
||||
MovedAttribute("splittag", "urllib", "urllib.parse"),
|
||||
MovedAttribute("splituser", "urllib", "urllib.parse"),
|
||||
MovedAttribute("splitvalue", "urllib", "urllib.parse"),
|
||||
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
|
||||
]
|
||||
for attr in _urllib_parse_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_parse, attr.name, attr)
|
||||
del attr
|
||||
|
||||
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
|
||||
"moves.urllib_parse", "moves.urllib.parse")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_error(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects in six.moves.urllib_error"""
|
||||
|
||||
|
||||
_urllib_error_moved_attributes = [
|
||||
MovedAttribute("URLError", "urllib2", "urllib.error"),
|
||||
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
|
||||
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
|
||||
]
|
||||
for attr in _urllib_error_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_error, attr.name, attr)
|
||||
del attr
|
||||
|
||||
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
|
||||
"moves.urllib_error", "moves.urllib.error")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_request(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects in six.moves.urllib_request"""
|
||||
|
||||
|
||||
_urllib_request_moved_attributes = [
|
||||
MovedAttribute("urlopen", "urllib2", "urllib.request"),
|
||||
MovedAttribute("install_opener", "urllib2", "urllib.request"),
|
||||
MovedAttribute("build_opener", "urllib2", "urllib.request"),
|
||||
MovedAttribute("pathname2url", "urllib", "urllib.request"),
|
||||
MovedAttribute("url2pathname", "urllib", "urllib.request"),
|
||||
MovedAttribute("getproxies", "urllib", "urllib.request"),
|
||||
MovedAttribute("Request", "urllib2", "urllib.request"),
|
||||
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
|
||||
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
|
||||
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
|
||||
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
|
||||
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
|
||||
MovedAttribute("URLopener", "urllib", "urllib.request"),
|
||||
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
|
||||
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
|
||||
MovedAttribute("parse_http_list", "urllib2", "urllib.request"),
|
||||
MovedAttribute("parse_keqv_list", "urllib2", "urllib.request"),
|
||||
]
|
||||
for attr in _urllib_request_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_request, attr.name, attr)
|
||||
del attr
|
||||
|
||||
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
|
||||
"moves.urllib_request", "moves.urllib.request")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_response(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects in six.moves.urllib_response"""
|
||||
|
||||
|
||||
_urllib_response_moved_attributes = [
|
||||
MovedAttribute("addbase", "urllib", "urllib.response"),
|
||||
MovedAttribute("addclosehook", "urllib", "urllib.response"),
|
||||
MovedAttribute("addinfo", "urllib", "urllib.response"),
|
||||
MovedAttribute("addinfourl", "urllib", "urllib.response"),
|
||||
]
|
||||
for attr in _urllib_response_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_response, attr.name, attr)
|
||||
del attr
|
||||
|
||||
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
|
||||
"moves.urllib_response", "moves.urllib.response")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_robotparser(_LazyModule):
|
||||
|
||||
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
|
||||
|
||||
|
||||
_urllib_robotparser_moved_attributes = [
|
||||
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
|
||||
]
|
||||
for attr in _urllib_robotparser_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
|
||||
del attr
|
||||
|
||||
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
|
||||
"moves.urllib_robotparser", "moves.urllib.robotparser")
|
||||
|
||||
|
||||
class Module_six_moves_urllib(types.ModuleType):
|
||||
|
||||
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
|
||||
__path__ = [] # mark as package
|
||||
parse = _importer._get_module("moves.urllib_parse")
|
||||
error = _importer._get_module("moves.urllib_error")
|
||||
request = _importer._get_module("moves.urllib_request")
|
||||
response = _importer._get_module("moves.urllib_response")
|
||||
robotparser = _importer._get_module("moves.urllib_robotparser")
|
||||
|
||||
def __dir__(self):
|
||||
return ['parse', 'error', 'request', 'response', 'robotparser']
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
|
||||
"moves.urllib")
|
||||
|
||||
|
||||
def add_move(move):
|
||||
"""Add an item to six.moves."""
|
||||
setattr(_MovedItems, move.name, move)
|
||||
|
||||
|
||||
def remove_move(name):
|
||||
"""Remove item from six.moves."""
|
||||
try:
|
||||
delattr(_MovedItems, name)
|
||||
except AttributeError:
|
||||
try:
|
||||
del moves.__dict__[name]
|
||||
except KeyError:
|
||||
raise AttributeError("no such move, %r" % (name,))
|
||||
|
||||
|
||||
if PY3:
|
||||
_meth_func = "__func__"
|
||||
_meth_self = "__self__"
|
||||
|
||||
_func_closure = "__closure__"
|
||||
_func_code = "__code__"
|
||||
_func_defaults = "__defaults__"
|
||||
_func_globals = "__globals__"
|
||||
else:
|
||||
_meth_func = "im_func"
|
||||
_meth_self = "im_self"
|
||||
|
||||
_func_closure = "func_closure"
|
||||
_func_code = "func_code"
|
||||
_func_defaults = "func_defaults"
|
||||
_func_globals = "func_globals"
|
||||
|
||||
|
||||
try:
|
||||
advance_iterator = next
|
||||
except NameError:
|
||||
def advance_iterator(it):
|
||||
return it.next()
|
||||
next = advance_iterator
|
||||
|
||||
|
||||
try:
|
||||
callable = callable
|
||||
except NameError:
|
||||
def callable(obj):
|
||||
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
|
||||
|
||||
|
||||
if PY3:
|
||||
def get_unbound_function(unbound):
|
||||
return unbound
|
||||
|
||||
create_bound_method = types.MethodType
|
||||
|
||||
def create_unbound_method(func, cls):
|
||||
return func
|
||||
|
||||
Iterator = object
|
||||
else:
|
||||
def get_unbound_function(unbound):
|
||||
return unbound.im_func
|
||||
|
||||
def create_bound_method(func, obj):
|
||||
return types.MethodType(func, obj, obj.__class__)
|
||||
|
||||
def create_unbound_method(func, cls):
|
||||
return types.MethodType(func, None, cls)
|
||||
|
||||
class Iterator(object):
|
||||
|
||||
def next(self):
|
||||
return type(self).__next__(self)
|
||||
|
||||
callable = callable
|
||||
_add_doc(get_unbound_function,
|
||||
"""Get the function out of a possibly unbound function""")
|
||||
|
||||
|
||||
get_method_function = operator.attrgetter(_meth_func)
|
||||
get_method_self = operator.attrgetter(_meth_self)
|
||||
get_function_closure = operator.attrgetter(_func_closure)
|
||||
get_function_code = operator.attrgetter(_func_code)
|
||||
get_function_defaults = operator.attrgetter(_func_defaults)
|
||||
get_function_globals = operator.attrgetter(_func_globals)
|
||||
|
||||
|
||||
if PY3:
|
||||
def iterkeys(d, **kw):
|
||||
return iter(d.keys(**kw))
|
||||
|
||||
def itervalues(d, **kw):
|
||||
return iter(d.values(**kw))
|
||||
|
||||
def iteritems(d, **kw):
|
||||
return iter(d.items(**kw))
|
||||
|
||||
def iterlists(d, **kw):
|
||||
return iter(d.lists(**kw))
|
||||
|
||||
viewkeys = operator.methodcaller("keys")
|
||||
|
||||
viewvalues = operator.methodcaller("values")
|
||||
|
||||
viewitems = operator.methodcaller("items")
|
||||
else:
|
||||
def iterkeys(d, **kw):
|
||||
return d.iterkeys(**kw)
|
||||
|
||||
def itervalues(d, **kw):
|
||||
return d.itervalues(**kw)
|
||||
|
||||
def iteritems(d, **kw):
|
||||
return d.iteritems(**kw)
|
||||
|
||||
def iterlists(d, **kw):
|
||||
return d.iterlists(**kw)
|
||||
|
||||
viewkeys = operator.methodcaller("viewkeys")
|
||||
|
||||
viewvalues = operator.methodcaller("viewvalues")
|
||||
|
||||
viewitems = operator.methodcaller("viewitems")
|
||||
|
||||
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
|
||||
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
|
||||
_add_doc(iteritems,
|
||||
"Return an iterator over the (key, value) pairs of a dictionary.")
|
||||
_add_doc(iterlists,
|
||||
"Return an iterator over the (key, [values]) pairs of a dictionary.")
|
||||
|
||||
|
||||
if PY3:
|
||||
def b(s):
|
||||
return s.encode("latin-1")
|
||||
|
||||
def u(s):
|
||||
return s
|
||||
unichr = chr
|
||||
import struct
|
||||
int2byte = struct.Struct(">B").pack
|
||||
del struct
|
||||
byte2int = operator.itemgetter(0)
|
||||
indexbytes = operator.getitem
|
||||
iterbytes = iter
|
||||
import io
|
||||
StringIO = io.StringIO
|
||||
BytesIO = io.BytesIO
|
||||
_assertCountEqual = "assertCountEqual"
|
||||
if sys.version_info[1] <= 1:
|
||||
_assertRaisesRegex = "assertRaisesRegexp"
|
||||
_assertRegex = "assertRegexpMatches"
|
||||
else:
|
||||
_assertRaisesRegex = "assertRaisesRegex"
|
||||
_assertRegex = "assertRegex"
|
||||
else:
|
||||
def b(s):
|
||||
return s
|
||||
# Workaround for standalone backslash
|
||||
|
||||
def u(s):
|
||||
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
|
||||
unichr = unichr
|
||||
int2byte = chr
|
||||
|
||||
def byte2int(bs):
|
||||
return ord(bs[0])
|
||||
|
||||
def indexbytes(buf, i):
|
||||
return ord(buf[i])
|
||||
iterbytes = functools.partial(itertools.imap, ord)
|
||||
import StringIO
|
||||
StringIO = BytesIO = StringIO.StringIO
|
||||
_assertCountEqual = "assertItemsEqual"
|
||||
_assertRaisesRegex = "assertRaisesRegexp"
|
||||
_assertRegex = "assertRegexpMatches"
|
||||
_add_doc(b, """Byte literal""")
|
||||
_add_doc(u, """Text literal""")
|
||||
|
||||
|
||||
def assertCountEqual(self, *args, **kwargs):
|
||||
return getattr(self, _assertCountEqual)(*args, **kwargs)
|
||||
|
||||
|
||||
def assertRaisesRegex(self, *args, **kwargs):
|
||||
return getattr(self, _assertRaisesRegex)(*args, **kwargs)
|
||||
|
||||
|
||||
def assertRegex(self, *args, **kwargs):
|
||||
return getattr(self, _assertRegex)(*args, **kwargs)
|
||||
|
||||
|
||||
if PY3:
|
||||
exec_ = getattr(moves.builtins, "exec")
|
||||
|
||||
def reraise(tp, value, tb=None):
|
||||
try:
|
||||
if value is None:
|
||||
value = tp()
|
||||
if value.__traceback__ is not tb:
|
||||
raise value.with_traceback(tb)
|
||||
raise value
|
||||
finally:
|
||||
value = None
|
||||
tb = None
|
||||
|
||||
else:
|
||||
def exec_(_code_, _globs_=None, _locs_=None):
|
||||
"""Execute code in a namespace."""
|
||||
if _globs_ is None:
|
||||
frame = sys._getframe(1)
|
||||
_globs_ = frame.f_globals
|
||||
if _locs_ is None:
|
||||
_locs_ = frame.f_locals
|
||||
del frame
|
||||
elif _locs_ is None:
|
||||
_locs_ = _globs_
|
||||
exec("""exec _code_ in _globs_, _locs_""")
|
||||
|
||||
exec_("""def reraise(tp, value, tb=None):
|
||||
try:
|
||||
raise tp, value, tb
|
||||
finally:
|
||||
tb = None
|
||||
""")
|
||||
|
||||
|
||||
if sys.version_info[:2] == (3, 2):
|
||||
exec_("""def raise_from(value, from_value):
|
||||
try:
|
||||
if from_value is None:
|
||||
raise value
|
||||
raise value from from_value
|
||||
finally:
|
||||
value = None
|
||||
""")
|
||||
elif sys.version_info[:2] > (3, 2):
|
||||
exec_("""def raise_from(value, from_value):
|
||||
try:
|
||||
raise value from from_value
|
||||
finally:
|
||||
value = None
|
||||
""")
|
||||
else:
|
||||
def raise_from(value, from_value):
|
||||
raise value
|
||||
|
||||
|
||||
print_ = getattr(moves.builtins, "print", None)
|
||||
if print_ is None:
|
||||
def print_(*args, **kwargs):
|
||||
"""The new-style print function for Python 2.4 and 2.5."""
|
||||
fp = kwargs.pop("file", sys.stdout)
|
||||
if fp is None:
|
||||
return
|
||||
|
||||
def write(data):
|
||||
if not isinstance(data, basestring):
|
||||
data = str(data)
|
||||
# If the file has an encoding, encode unicode with it.
|
||||
if (isinstance(fp, file) and
|
||||
isinstance(data, unicode) and
|
||||
fp.encoding is not None):
|
||||
errors = getattr(fp, "errors", None)
|
||||
if errors is None:
|
||||
errors = "strict"
|
||||
data = data.encode(fp.encoding, errors)
|
||||
fp.write(data)
|
||||
want_unicode = False
|
||||
sep = kwargs.pop("sep", None)
|
||||
if sep is not None:
|
||||
if isinstance(sep, unicode):
|
||||
want_unicode = True
|
||||
elif not isinstance(sep, str):
|
||||
raise TypeError("sep must be None or a string")
|
||||
end = kwargs.pop("end", None)
|
||||
if end is not None:
|
||||
if isinstance(end, unicode):
|
||||
want_unicode = True
|
||||
elif not isinstance(end, str):
|
||||
raise TypeError("end must be None or a string")
|
||||
if kwargs:
|
||||
raise TypeError("invalid keyword arguments to print()")
|
||||
if not want_unicode:
|
||||
for arg in args:
|
||||
if isinstance(arg, unicode):
|
||||
want_unicode = True
|
||||
break
|
||||
if want_unicode:
|
||||
newline = unicode("\n")
|
||||
space = unicode(" ")
|
||||
else:
|
||||
newline = "\n"
|
||||
space = " "
|
||||
if sep is None:
|
||||
sep = space
|
||||
if end is None:
|
||||
end = newline
|
||||
for i, arg in enumerate(args):
|
||||
if i:
|
||||
write(sep)
|
||||
write(arg)
|
||||
write(end)
|
||||
if sys.version_info[:2] < (3, 3):
|
||||
_print = print_
|
||||
|
||||
def print_(*args, **kwargs):
|
||||
fp = kwargs.get("file", sys.stdout)
|
||||
flush = kwargs.pop("flush", False)
|
||||
_print(*args, **kwargs)
|
||||
if flush and fp is not None:
|
||||
fp.flush()
|
||||
|
||||
_add_doc(reraise, """Reraise an exception.""")
|
||||
|
||||
if sys.version_info[0:2] < (3, 4):
|
||||
def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
|
||||
updated=functools.WRAPPER_UPDATES):
|
||||
def wrapper(f):
|
||||
f = functools.wraps(wrapped, assigned, updated)(f)
|
||||
f.__wrapped__ = wrapped
|
||||
return f
|
||||
return wrapper
|
||||
else:
|
||||
wraps = functools.wraps
|
||||
|
||||
|
||||
def with_metaclass(meta, *bases):
|
||||
"""Create a base class with a metaclass."""
|
||||
# This requires a bit of explanation: the basic idea is to make a dummy
|
||||
# metaclass for one level of class instantiation that replaces itself with
|
||||
# the actual metaclass.
|
||||
class metaclass(type):
|
||||
|
||||
def __new__(cls, name, this_bases, d):
|
||||
return meta(name, bases, d)
|
||||
|
||||
@classmethod
|
||||
def __prepare__(cls, name, this_bases):
|
||||
return meta.__prepare__(name, bases)
|
||||
return type.__new__(metaclass, 'temporary_class', (), {})
|
||||
|
||||
|
||||
def add_metaclass(metaclass):
|
||||
"""Class decorator for creating a class with a metaclass."""
|
||||
def wrapper(cls):
|
||||
orig_vars = cls.__dict__.copy()
|
||||
slots = orig_vars.get('__slots__')
|
||||
if slots is not None:
|
||||
if isinstance(slots, str):
|
||||
slots = [slots]
|
||||
for slots_var in slots:
|
||||
orig_vars.pop(slots_var)
|
||||
orig_vars.pop('__dict__', None)
|
||||
orig_vars.pop('__weakref__', None)
|
||||
return metaclass(cls.__name__, cls.__bases__, orig_vars)
|
||||
return wrapper
|
||||
|
||||
|
||||
def python_2_unicode_compatible(klass):
|
||||
"""
|
||||
A decorator that defines __unicode__ and __str__ methods under Python 2.
|
||||
Under Python 3 it does nothing.
|
||||
|
||||
To support Python 2 and 3 with a single code base, define a __str__ method
|
||||
returning text and apply this decorator to the class.
|
||||
"""
|
||||
if PY2:
|
||||
if '__str__' not in klass.__dict__:
|
||||
raise ValueError("@python_2_unicode_compatible cannot be applied "
|
||||
"to %s because it doesn't define __str__()." %
|
||||
klass.__name__)
|
||||
klass.__unicode__ = klass.__str__
|
||||
klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
|
||||
return klass
|
||||
|
||||
|
||||
# Complete the moves implementation.
|
||||
# This code is at the end of this module to speed up module loading.
|
||||
# Turn this module into a package.
|
||||
__path__ = [] # required for PEP 302 and PEP 451
|
||||
__package__ = __name__ # see PEP 366 @ReservedAssignment
|
||||
if globals().get("__spec__") is not None:
|
||||
__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
|
||||
# Remove other six meta path importers, since they cause problems. This can
|
||||
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
|
||||
# this for some reason.)
|
||||
if sys.meta_path:
|
||||
for i, importer in enumerate(sys.meta_path):
|
||||
# Here's some real nastiness: Another "instance" of the six module might
|
||||
# be floating around. Therefore, we can't use isinstance() to check for
|
||||
# the six meta path importer, since the other six instance will have
|
||||
# inserted an importer with different class.
|
||||
if (type(importer).__name__ == "_SixMetaPathImporter" and
|
||||
importer.name == __name__):
|
||||
del sys.meta_path[i]
|
||||
break
|
||||
del i, importer
|
||||
# Finally, add the importer to the meta path import hook.
|
||||
sys.meta_path.append(_importer)
|
||||
@@ -24,15 +24,15 @@ import ctypes
 import uuid
 import struct
 import math
 import sys
 import functools
 from bisect import bisect_left
 
-from fdb import six
 import fdb
 
 _size_limits = tuple((1 << (i * 8)) - 1 for i in range(9))
 
+int2byte = struct.Struct(">B").pack
+
 # Define type codes:
 NULL_CODE = 0x00
 BYTES_CODE = 0x01
@@ -65,12 +65,12 @@ def _find_terminator(v, pos):
 # If encoding and sign bit is 1 (negative), flip all of the bits. Otherwise, just flip sign.
 # If decoding and sign bit is 0 (negative), flip all of the bits. Otherwise, just flip sign.
 def _float_adjust(v, encode):
-    if encode and six.indexbytes(v, 0) & 0x80 != 0x00:
-        return b"".join(map(lambda x: six.int2byte(x ^ 0xFF), six.iterbytes(v)))
-    elif not encode and six.indexbytes(v, 0) & 0x80 != 0x80:
-        return b"".join(map(lambda x: six.int2byte(x ^ 0xFF), six.iterbytes(v)))
+    if encode and v[0] & 0x80 != 0x00:
+        return b"".join(map(lambda x: int2byte(x ^ 0xFF), iter(v)))
+    elif not encode and v[0] & 0x80 != 0x80:
+        return b"".join(map(lambda x: int2byte(x ^ 0xFF), iter(v)))
     else:
-        return six.int2byte(six.indexbytes(v, 0) ^ 0x80) + v[1:]
+        return int2byte(v[0] ^ 0x80) + v[1:]
 
 
 @functools.total_ordering
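Aside (not part of the diff): the rewrite above relies on Python 3 bytes semantics, where indexing yields an int and iteration yields ints, so six.indexbytes and six.iterbytes collapse to plain indexing and iter(). The sample value is made up:

v = b"\x80\x41\x42"
assert v[0] == 0x80                          # what six.indexbytes(v, 0) returned
assert list(iter(v)) == [0x80, 0x41, 0x42]   # what six.iterbytes(v) yielded
flipped = b"".join(bytes([x ^ 0xFF]) for x in v)
assert flipped == b"\x7f\xbe\xbd"            # the flip-all-bits case of _float_adjust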
@@ -81,7 +81,7 @@ class SingleFloat(object):
             self.value = ctypes.c_float(value).value
         elif isinstance(value, ctypes.c_float):
             self.value = value.value
-        elif isinstance(value, six.integer_types):
+        elif isinstance(value, int):
             self.value = ctypes.c_float(value).value
         else:
             raise ValueError(
@@ -121,7 +121,7 @@ class Versionstamp(object):
     LENGTH = 12
     _TR_VERSION_LEN = 10
     _MAX_USER_VERSION = (1 << 16) - 1
-    _UNSET_TR_VERSION = 10 * six.int2byte(0xFF)
+    _UNSET_TR_VERSION = 10 * int2byte(0xFF)
     _STRUCT_FORMAT_STRING = ">" + str(_TR_VERSION_LEN) + "sH"
 
     @classmethod
@@ -145,11 +145,11 @@ class Versionstamp(object):
 
     @classmethod
     def validate_user_version(cls, user_version):
-        if not isinstance(user_version, six.integer_types):
+        if not isinstance(user_version, int):
             raise TypeError(
                 "Local version has illegal type "
                 + str(type(user_version))
-                + " (requires integer type)"
+                + " (requires int)"
             )
         elif user_version < 0 or user_version > cls._MAX_USER_VERSION:
             raise ValueError(
@@ -182,9 +182,10 @@ class Versionstamp(object):
         tr_version = v[start : start + cls._TR_VERSION_LEN]
         if tr_version == cls._UNSET_TR_VERSION:
             tr_version = None
-        user_version = six.indexbytes(v, start + cls._TR_VERSION_LEN) * (
-            1 << 8
-        ) + six.indexbytes(v, start + cls._TR_VERSION_LEN + 1)
+        user_version = (
+            v[start + cls._TR_VERSION_LEN] * (1 << 8)
+            + v[start + cls._TR_VERSION_LEN + 1]
+        )
         return Versionstamp(tr_version, user_version)
 
     def is_complete(self):
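Aside (not part of the diff): the two-byte arithmetic above is a big-endian 16-bit decode of the user version; int.from_bytes gives the same result. The example bytes are made up:

suffix = b"\x01\x2c"
assert suffix[0] * (1 << 8) + suffix[1] == 300
assert int.from_bytes(suffix, "big") == 300   # same big-endian decode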
@@ -262,7 +263,7 @@ class Versionstamp(object):
 
 
 def _decode(v, pos):
-    code = six.indexbytes(v, pos)
+    code = v[pos]
     if code == NULL_CODE:
         return None, pos + 1
     elif code == BYTES_CODE:
@@ -284,18 +285,18 @@ def _decode(v, pos):
             end,
         )
     elif code == POS_INT_END:  # 0x1d; Positive 9-255 byte integer
-        length = six.indexbytes(v, pos + 1)
+        length = v[pos + 1]
         val = 0
         for i in _range(length):
             val = val << 8
-            val += six.indexbytes(v, pos + 2 + i)
+            val += v[pos + 2 + i]
         return val, pos + 2 + length
     elif code == NEG_INT_START:  # 0x0b; Negative 9-255 byte integer
-        length = six.indexbytes(v, pos + 1) ^ 0xFF
+        length = v[pos + 1] ^ 0xFF
         val = 0
         for i in _range(length):
             val = val << 8
-            val += six.indexbytes(v, pos + 2 + i)
+            val += v[pos + 2 + i]
         return val - (1 << (length * 8)) + 1, pos + 2 + length
     elif code == FLOAT_CODE:
         return (
@@ -329,8 +330,8 @@ def _decode(v, pos):
         ret = []
         end_pos = pos + 1
         while end_pos < len(v):
-            if six.indexbytes(v, end_pos) == 0x00:
-                if end_pos + 1 < len(v) and six.indexbytes(v, end_pos + 1) == 0xFF:
+            if v[end_pos] == 0x00:
+                if end_pos + 1 < len(v) and v[end_pos + 1] == 0xFF:
                     ret.append(None)
                     end_pos += 2
                 else:
@@ -357,17 +358,8 @@ def _reduce_children(child_values):
     return bytes_list, version_pos
 
 
-if sys.version_info < (2, 7):
-
-    def _bit_length(x):
-        s = bin(x)  # binary representation: bin(-37) --> '-0b100101'
-        s = s.lstrip("-0b")  # remove leading zeros and minus sign
-        return len(s)
-
-else:
-
-    def _bit_length(x):
-        return x.bit_length()
+def _bit_length(x):
+    return x.bit_length()
 
 
 def _encode(value, nested=False):
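Aside (not part of the diff): int.bit_length() has been available since Python 2.7/3.1, which is why the string-based fallback can be dropped along with 2.6/2.7 support:

assert (37).bit_length() == 6
assert (-37).bit_length() == 6            # sign ignored, matching the old bin()-based fallback
assert (0).bit_length() == 0
assert ((1 << 64) - 1).bit_length() == 64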
@@ -376,85 +368,83 @@ def _encode(value, nested=False):
     # sorting need to work too!
     if value == None:  # ==, not is, because some fdb.impl.Value are equal to None
         if nested:
-            return b"".join([six.int2byte(NULL_CODE), six.int2byte(0xFF)]), -1
+            return b"".join([int2byte(NULL_CODE), int2byte(0xFF)]), -1
         else:
-            return b"".join([six.int2byte(NULL_CODE)]), -1
+            return b"".join([int2byte(NULL_CODE)]), -1
     elif isinstance(value, bytes):  # also gets non-None fdb.impl.Value
         return (
-            six.int2byte(BYTES_CODE) + value.replace(b"\x00", b"\x00\xFF") + b"\x00",
+            int2byte(BYTES_CODE) + value.replace(b"\x00", b"\x00\xFF") + b"\x00",
             -1,
         )
-    elif isinstance(value, six.text_type):
+    elif isinstance(value, str):
         return (
-            six.int2byte(STRING_CODE)
+            int2byte(STRING_CODE)
             + value.encode("utf-8").replace(b"\x00", b"\x00\xFF")
             + b"\x00",
             -1,
         )
-    elif isinstance(value, six.integer_types) and (
+    elif isinstance(value, int) and (
         not isinstance(value, bool) or (hasattr(fdb, "_version") and fdb._version < 500)
     ):
        if value == 0:
-            return b"".join([six.int2byte(INT_ZERO_CODE)]), -1
+            return b"".join([int2byte(INT_ZERO_CODE)]), -1
        elif value > 0:
            if value >= _size_limits[-1]:
                length = (_bit_length(value) + 7) // 8
-                data = [six.int2byte(POS_INT_END), six.int2byte(length)]
+                data = [int2byte(POS_INT_END), int2byte(length)]
                for i in _range(length - 1, -1, -1):
-                    data.append(six.int2byte((value >> (8 * i)) & 0xFF))
+                    data.append(int2byte((value >> (8 * i)) & 0xFF))
                return b"".join(data), -1
 
            n = bisect_left(_size_limits, value)
-            return six.int2byte(INT_ZERO_CODE + n) + struct.pack(">Q", value)[-n:], -1
+            return int2byte(INT_ZERO_CODE + n) + struct.pack(">Q", value)[-n:], -1
        else:
            if -value >= _size_limits[-1]:
                length = (_bit_length(value) + 7) // 8
                value += (1 << (length * 8)) - 1
-                data = [six.int2byte(NEG_INT_START), six.int2byte(length ^ 0xFF)]
+                data = [int2byte(NEG_INT_START), int2byte(length ^ 0xFF)]
                for i in _range(length - 1, -1, -1):
-                    data.append(six.int2byte((value >> (8 * i)) & 0xFF))
+                    data.append(int2byte((value >> (8 * i)) & 0xFF))
                return b"".join(data), -1
 
            n = bisect_left(_size_limits, -value)
            maxv = _size_limits[n]
            return (
-                six.int2byte(INT_ZERO_CODE - n) + struct.pack(">Q", maxv + value)[-n:],
+                int2byte(INT_ZERO_CODE - n) + struct.pack(">Q", maxv + value)[-n:],
                -1,
            )
     elif isinstance(value, ctypes.c_float) or isinstance(value, SingleFloat):
         return (
-            six.int2byte(FLOAT_CODE)
-            + _float_adjust(struct.pack(">f", value.value), True),
+            int2byte(FLOAT_CODE) + _float_adjust(struct.pack(">f", value.value), True),
             -1,
         )
     elif isinstance(value, ctypes.c_double):
         return (
-            six.int2byte(DOUBLE_CODE)
-            + _float_adjust(struct.pack(">d", value.value), True),
+            int2byte(DOUBLE_CODE) + _float_adjust(struct.pack(">d", value.value), True),
             -1,
         )
     elif isinstance(value, float):
         return (
-            six.int2byte(DOUBLE_CODE) + _float_adjust(struct.pack(">d", value), True),
+            int2byte(DOUBLE_CODE) + _float_adjust(struct.pack(">d", value), True),
             -1,
         )
     elif isinstance(value, uuid.UUID):
-        return six.int2byte(UUID_CODE) + value.bytes, -1
+        return int2byte(UUID_CODE) + value.bytes, -1
     elif isinstance(value, bool):
         if value:
-            return b"".join([six.int2byte(TRUE_CODE)]), -1
+            return b"".join([int2byte(TRUE_CODE)]), -1
         else:
-            return b"".join([six.int2byte(FALSE_CODE)]), -1
+            return b"".join([int2byte(FALSE_CODE)]), -1
     elif isinstance(value, Versionstamp):
         version_pos = -1 if value.is_complete() else 1
-        return six.int2byte(VERSIONSTAMP_CODE) + value.to_bytes(), version_pos
+        return int2byte(VERSIONSTAMP_CODE) + value.to_bytes(), version_pos
     elif isinstance(value, tuple) or isinstance(value, list):
         child_bytes, version_pos = _reduce_children(
             map(lambda x: _encode(x, True), value)
         )
         new_version_pos = -1 if version_pos < 0 else version_pos + 1
         return (
-            b"".join([six.int2byte(NESTED_CODE)] + child_bytes + [six.int2byte(0x00)]),
+            b"".join([int2byte(NESTED_CODE)] + child_bytes + [int2byte(0x00)]),
             new_version_pos,
         )
     else:
@@ -552,13 +542,13 @@ def _code_for(value):
         return NULL_CODE
     elif isinstance(value, bytes):
         return BYTES_CODE
-    elif isinstance(value, six.text_type):
+    elif isinstance(value, str):
         return STRING_CODE
     elif (not hasattr(fdb, "_version") or fdb._version >= 500) and isinstance(
         value, bool
     ):
         return FALSE_CODE
-    elif isinstance(value, six.integer_types):
+    elif isinstance(value, int):
         return INT_ZERO_CODE
     elif isinstance(value, ctypes.c_float) or isinstance(value, SingleFloat):
         return FLOAT_CODE
@@ -1,4 +1,4 @@
 [pycodestyle]
 max-line-length = 150
-exclude = six.py, fdboptions.py
+exclude = fdboptions.py
 ignore = E266, E402, E711, E712, E721, E722, W503, W504
@@ -18,23 +18,18 @@
 # limitations under the License.
 #
 
 import traceback
 import sys
 
 import fdb
 import fdb.directory_impl
 
-from fdb import six
-
 ops_that_create_dirs = [
-    six.u("DIRECTORY_CREATE_SUBSPACE"),
-    six.u("DIRECTORY_CREATE_LAYER"),
-    six.u("DIRECTORY_CREATE_OR_OPEN"),
-    six.u("DIRECTORY_CREATE"),
-    six.u("DIRECTORY_OPEN"),
-    six.u("DIRECTORY_MOVE"),
-    six.u("DIRECTORY_MOVE_TO"),
-    six.u("DIRECTORY_OPEN_SUBSPACE"),
+    "DIRECTORY_CREATE_SUBSPACE",
+    "DIRECTORY_CREATE_LAYER",
+    "DIRECTORY_CREATE_OR_OPEN",
+    "DIRECTORY_CREATE",
+    "DIRECTORY_OPEN",
+    "DIRECTORY_MOVE",
+    "DIRECTORY_MOVE_TO",
+    "DIRECTORY_OPEN_SUBSPACE",
 ]
 
 log_all = False
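Aside (not part of the diff): under six, u("...") exists only to mark text literals for Python 2; its Python 3 definition (visible in the deleted six.py above) returns the argument unchanged, so every six.u("OP_NAME") comparison below is equivalent to comparing against the plain string literal:

def u(s):
    # six's Python 3 definition of u(): the argument is returned unchanged
    return s

assert u("DIRECTORY_CREATE") == "DIRECTORY_CREATE"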
@@ -83,12 +78,12 @@ class DirectoryExtension:
             print("%d. %s" % (inst.index, inst.op))
 
         directory = self.dir_list[self.dir_index]
-        if inst.op == six.u("DIRECTORY_CREATE_SUBSPACE"):
+        if inst.op == "DIRECTORY_CREATE_SUBSPACE":
             path = self.pop_tuples(inst.stack)
             raw_prefix = inst.pop()
             log_op("created subspace at %r: %r" % (path, raw_prefix))
             self.append_dir(inst, fdb.Subspace(path, raw_prefix))
-        elif inst.op == six.u("DIRECTORY_CREATE_LAYER"):
+        elif inst.op == "DIRECTORY_CREATE_LAYER":
             index1, index2, allow_manual_prefixes = inst.pop(3)
             if self.dir_list[index1] is None or self.dir_list[index2] is None:
                 log_op("create directory layer: None")
@@ -112,7 +107,7 @@ class DirectoryExtension:
                     allow_manual_prefixes == 1,
                 ),
             )
-        elif inst.op == six.u("DIRECTORY_CHANGE"):
+        elif inst.op == "DIRECTORY_CHANGE":
             self.dir_index = inst.pop()
             if not self.dir_list[self.dir_index]:
                 self.dir_index = self.error_index
@@ -126,9 +121,9 @@ class DirectoryExtension:
                 "changed directory to %d (%s @%r)"
                 % (self.dir_index, clazz, new_path)
             )
-        elif inst.op == six.u("DIRECTORY_SET_ERROR_INDEX"):
+        elif inst.op == "DIRECTORY_SET_ERROR_INDEX":
             self.error_index = inst.pop()
-        elif inst.op == six.u("DIRECTORY_CREATE_OR_OPEN"):
+        elif inst.op == "DIRECTORY_CREATE_OR_OPEN":
             path = self.pop_tuples(inst.stack)
             layer = inst.pop()
             log_op(
@@ -136,7 +131,7 @@ class DirectoryExtension:
             )
             d = directory.create_or_open(inst.tr, path, layer or b"")
             self.append_dir(inst, d)
-        elif inst.op == six.u("DIRECTORY_CREATE"):
+        elif inst.op == "DIRECTORY_CREATE":
             path = self.pop_tuples(inst.stack)
             layer, prefix = inst.pop(2)
             log_op(
@@ -146,23 +141,23 @@ class DirectoryExtension:
             self.append_dir(
                 inst, directory.create(inst.tr, path, layer or b"", prefix)
             )
-        elif inst.op == six.u("DIRECTORY_OPEN"):
+        elif inst.op == "DIRECTORY_OPEN":
             path = self.pop_tuples(inst.stack)
             layer = inst.pop()
             log_op("open %r: layer=%r" % (directory.get_path() + path, layer))
             self.append_dir(inst, directory.open(inst.tr, path, layer or b""))
-        elif inst.op == six.u("DIRECTORY_MOVE"):
+        elif inst.op == "DIRECTORY_MOVE":
             old_path, new_path = self.pop_tuples(inst.stack, 2)
             log_op(
                 "move %r to %r"
                 % (directory.get_path() + old_path, directory.get_path() + new_path)
             )
             self.append_dir(inst, directory.move(inst.tr, old_path, new_path))
-        elif inst.op == six.u("DIRECTORY_MOVE_TO"):
+        elif inst.op == "DIRECTORY_MOVE_TO":
             new_absolute_path = self.pop_tuples(inst.stack)
             log_op("move %r to %r" % (directory.get_path(), new_absolute_path))
             self.append_dir(inst, directory.move_to(inst.tr, new_absolute_path))
-        elif inst.op == six.u("DIRECTORY_REMOVE"):
+        elif inst.op == "DIRECTORY_REMOVE":
             count = inst.pop()
             if count == 0:
                 log_op("remove %r" % (directory.get_path(),))
@@ -171,7 +166,7 @@ class DirectoryExtension:
                 path = self.pop_tuples(inst.stack)
                 log_op("remove %r" % (directory.get_path() + path,))
                 directory.remove(inst.tr, path)
-        elif inst.op == six.u("DIRECTORY_REMOVE_IF_EXISTS"):
+        elif inst.op == "DIRECTORY_REMOVE_IF_EXISTS":
             count = inst.pop()
             if count == 0:
                 log_op("remove_if_exists %r" % (directory.get_path(),))
@@ -180,7 +175,7 @@ class DirectoryExtension:
                 path = self.pop_tuples(inst.stack)
                 log_op("remove_if_exists %r" % (directory.get_path() + path,))
                 directory.remove_if_exists(inst.tr, path)
-        elif inst.op == six.u("DIRECTORY_LIST"):
+        elif inst.op == "DIRECTORY_LIST":
             count = inst.pop()
             if count == 0:
                 result = directory.list(inst.tr)
@@ -191,7 +186,7 @@ class DirectoryExtension:
                 log_op("list %r" % (directory.get_path() + path,))
 
             inst.push(fdb.tuple.pack(tuple(result)))
-        elif inst.op == six.u("DIRECTORY_EXISTS"):
+        elif inst.op == "DIRECTORY_EXISTS":
             count = inst.pop()
             if count == 0:
                 result = directory.exists(inst.tr)
@@ -205,10 +200,10 @@ class DirectoryExtension:
                 inst.push(1)
             else:
                 inst.push(0)
-        elif inst.op == six.u("DIRECTORY_PACK_KEY"):
+        elif inst.op == "DIRECTORY_PACK_KEY":
             key_tuple = self.pop_tuples(inst.stack)
             inst.push(directory.pack(key_tuple))
-        elif inst.op == six.u("DIRECTORY_UNPACK_KEY"):
+        elif inst.op == "DIRECTORY_UNPACK_KEY":
             key = inst.pop()
             log_op(
                 "unpack %r in subspace with prefix %r" % (key, directory.rawPrefix)
@@ -216,26 +211,26 @@ class DirectoryExtension:
             tup = directory.unpack(key)
             for t in tup:
                 inst.push(t)
-        elif inst.op == six.u("DIRECTORY_RANGE"):
+        elif inst.op == "DIRECTORY_RANGE":
             tup = self.pop_tuples(inst.stack)
             rng = directory.range(tup)
             inst.push(rng.start)
             inst.push(rng.stop)
-        elif inst.op == six.u("DIRECTORY_CONTAINS"):
+        elif inst.op == "DIRECTORY_CONTAINS":
             key = inst.pop()
             result = directory.contains(key)
             if result:
                 inst.push(1)
             else:
                 inst.push(0)
-        elif inst.op == six.u("DIRECTORY_OPEN_SUBSPACE"):
+        elif inst.op == "DIRECTORY_OPEN_SUBSPACE":
             path = self.pop_tuples(inst.stack)
             log_op("open_subspace %r (at %r)" % (path, directory.key()))
             self.append_dir(inst, directory.subspace(path))
-        elif inst.op == six.u("DIRECTORY_LOG_SUBSPACE"):
+        elif inst.op == "DIRECTORY_LOG_SUBSPACE":
             prefix = inst.pop()
             inst.tr[prefix + fdb.tuple.pack((self.dir_index,))] = directory.key()
-        elif inst.op == six.u("DIRECTORY_LOG_DIRECTORY"):
+        elif inst.op == "DIRECTORY_LOG_DIRECTORY":
             prefix = inst.pop()
             exists = directory.exists(inst.tr)
             if exists:
@@ -243,15 +238,11 @@ class DirectoryExtension:
             else:
                 children = ()
             logSubspace = fdb.Subspace((self.dir_index,), prefix)
-            inst.tr[logSubspace[six.u("path")]] = fdb.tuple.pack(
-                directory.get_path()
-            )
-            inst.tr[logSubspace[six.u("layer")]] = fdb.tuple.pack(
-                (directory.get_layer(),)
-            )
-            inst.tr[logSubspace[six.u("exists")]] = fdb.tuple.pack((int(exists),))
-            inst.tr[logSubspace[six.u("children")]] = fdb.tuple.pack(children)
-        elif inst.op == six.u("DIRECTORY_STRIP_PREFIX"):
+            inst.tr[logSubspace["path"]] = fdb.tuple.pack(directory.get_path())
+            inst.tr[logSubspace["layer"]] = fdb.tuple.pack((directory.get_layer(),))
+            inst.tr[logSubspace["exists"]] = fdb.tuple.pack((int(exists),))
+            inst.tr[logSubspace["children"]] = fdb.tuple.pack(children)
+        elif inst.op == "DIRECTORY_STRIP_PREFIX":
             s = inst.pop()
             if not s.startswith(directory.key()):
                 raise Exception(
@@ -34,7 +34,6 @@ fdb.api_version(int(sys.argv[2]))
 
 import fdb.tuple
 from directory_extension import DirectoryExtension
-from fdb import six
 from fdb.impl import strinc
 from unit_tests import run_unit_tests
@@ -186,9 +185,9 @@ class Tester:
         # if op != "PUSH" and op != "SWAP":
         #     print("%d. Instruction is %s" % (idx, op))
 
-        isDatabase = op.endswith(six.u("_DATABASE"))
-        isTenant = op.endswith(six.u("_TENANT"))
-        isSnapshot = op.endswith(six.u("_SNAPSHOT"))
+        isDatabase = op.endswith("_DATABASE")
+        isTenant = op.endswith("_TENANT")
+        isSnapshot = op.endswith("_SNAPSHOT")
 
         if isDatabase:
             op = op[:-9]
@@ -207,33 +206,33 @@
             )
 
         try:
-            if inst.op == six.u("PUSH"):
+            if inst.op == "PUSH":
                 inst.push(op_tuple[1])
-            elif inst.op == six.u("DUP"):
+            elif inst.op == "DUP":
                 inst.stack.push(*self.stack[0])
-            elif inst.op == six.u("EMPTY_STACK"):
+            elif inst.op == "EMPTY_STACK":
                 self.stack = Stack()
-            elif inst.op == six.u("SWAP"):
+            elif inst.op == "SWAP":
                 idx = inst.pop()
                 self.stack[0], self.stack[idx] = self.stack[idx], self.stack[0]
-            elif inst.op == six.u("POP"):
+            elif inst.op == "POP":
                 inst.pop()
-            elif inst.op == six.u("SUB"):
+            elif inst.op == "SUB":
                 a, b = inst.pop(2)
                 inst.push(a - b)
-            elif inst.op == six.u("CONCAT"):
+            elif inst.op == "CONCAT":
                 a, b = inst.pop(2)
                 inst.push(a + b)
-            elif inst.op == six.u("WAIT_FUTURE"):
+            elif inst.op == "WAIT_FUTURE":
                 old_idx, item = inst.pop(with_idx=True)
                 inst.stack.push(old_idx, item)
-            elif inst.op == six.u("NEW_TRANSACTION"):
+            elif inst.op == "NEW_TRANSACTION":
                 self.new_transaction()
-            elif inst.op == six.u("USE_TRANSACTION"):
+            elif inst.op == "USE_TRANSACTION":
                 self.switch_transaction(inst.pop())
-            elif inst.op == six.u("ON_ERROR"):
+            elif inst.op == "ON_ERROR":
                 inst.push(inst.tr.on_error(inst.pop()))
-            elif inst.op == six.u("GET"):
+            elif inst.op == "GET":
                 key = inst.pop()
                 num = random.randint(0, 2)
                 if num == 0:
@@ -247,15 +246,15 @@ class Tester:
inst.push(b"RESULT_NOT_PRESENT")
else:
inst.push(f)
elif inst.op == six.u("GET_ESTIMATED_RANGE_SIZE"):
elif inst.op == "GET_ESTIMATED_RANGE_SIZE":
begin, end = inst.pop(2)
obj.get_estimated_range_size_bytes(begin, end).wait()
inst.push(b"GOT_ESTIMATED_RANGE_SIZE")
elif inst.op == six.u("GET_RANGE_SPLIT_POINTS"):
elif inst.op == "GET_RANGE_SPLIT_POINTS":
begin, end, chunkSize = inst.pop(3)
obj.get_range_split_points(begin, end, chunkSize).wait()
inst.push(b"GOT_RANGE_SPLIT_POINTS")
elif inst.op == six.u("GET_KEY"):
elif inst.op == "GET_KEY":
key, or_equal, offset, prefix = inst.pop(4)
result = obj.get_key(fdb.KeySelector(key, or_equal, offset))
if result.startswith(prefix):
@@ -265,7 +264,7 @@ class Tester:
else:
inst.push(strinc(prefix))

elif inst.op == six.u("GET_RANGE"):
elif inst.op == "GET_RANGE":
begin, end, limit, reverse, mode = inst.pop(5)
if limit == 0 and mode == -1 and random.random() < 0.5:
if reverse:
@@ -276,12 +275,12 @@ class Tester:
r = obj.get_range(begin, end, limit, reverse, mode)

self.push_range(inst, r)
elif inst.op == six.u("GET_RANGE_STARTS_WITH"):
elif inst.op == "GET_RANGE_STARTS_WITH":
prefix, limit, reverse, mode = inst.pop(4)
self.push_range(
inst, obj.get_range_startswith(prefix, limit, reverse, mode)
)
elif inst.op == six.u("GET_RANGE_SELECTOR"):
elif inst.op == "GET_RANGE_SELECTOR":
(
begin_key,
begin_or_equal,
@@ -305,10 +304,10 @@ class Tester:
r = obj.get_range(beginSel, endSel, limit, reverse, mode)

self.push_range(inst, r, prefix_filter=prefix)
elif inst.op == six.u("GET_READ_VERSION"):
elif inst.op == "GET_READ_VERSION":
self.last_version = obj.get_read_version().wait()
inst.push(b"GOT_READ_VERSION")
elif inst.op == six.u("SET"):
elif inst.op == "SET":
key, value = inst.pop(2)
if random.random() < 0.5:
obj[key] = value
@@ -317,7 +316,7 @@ class Tester:

if isDatabase or isTenant:
inst.push(b"RESULT_NOT_PRESENT")
elif inst.op == six.u("LOG_STACK"):
elif inst.op == "LOG_STACK":
prefix = inst.pop()
entries = {}
while len(self.stack) > 0:
@@ -328,15 +327,15 @@ class Tester:
entries = {}

self.log_stack(self.db, prefix, entries)
elif inst.op == six.u("ATOMIC_OP"):
elif inst.op == "ATOMIC_OP":
opType, key, value = inst.pop(3)
getattr(obj, opType.lower())(key, value)

if isDatabase or isTenant:
inst.push(b"RESULT_NOT_PRESENT")
elif inst.op == six.u("SET_READ_VERSION"):
elif inst.op == "SET_READ_VERSION":
inst.tr.set_read_version(self.last_version)
elif inst.op == six.u("CLEAR"):
elif inst.op == "CLEAR":
if random.random() < 0.5:
del obj[inst.pop()]
else:
@@ -344,7 +343,7 @@ class Tester:

if isDatabase or isTenant:
inst.push(b"RESULT_NOT_PRESENT")
elif inst.op == six.u("CLEAR_RANGE"):
elif inst.op == "CLEAR_RANGE":
begin, end = inst.pop(2)
num = random.randint(0, 2)
if num == 0:
@@ -356,43 +355,43 @@ class Tester:

if isDatabase or isTenant:
inst.push(b"RESULT_NOT_PRESENT")
elif inst.op == six.u("CLEAR_RANGE_STARTS_WITH"):
elif inst.op == "CLEAR_RANGE_STARTS_WITH":
obj.clear_range_startswith(inst.pop())
if isDatabase or isTenant:
inst.push(b"RESULT_NOT_PRESENT")
elif inst.op == six.u("READ_CONFLICT_RANGE"):
elif inst.op == "READ_CONFLICT_RANGE":
inst.tr.add_read_conflict_range(inst.pop(), inst.pop())
inst.push(b"SET_CONFLICT_RANGE")
elif inst.op == six.u("WRITE_CONFLICT_RANGE"):
elif inst.op == "WRITE_CONFLICT_RANGE":
inst.tr.add_write_conflict_range(inst.pop(), inst.pop())
inst.push(b"SET_CONFLICT_RANGE")
elif inst.op == six.u("READ_CONFLICT_KEY"):
elif inst.op == "READ_CONFLICT_KEY":
inst.tr.add_read_conflict_key(inst.pop())
inst.push(b"SET_CONFLICT_KEY")
elif inst.op == six.u("WRITE_CONFLICT_KEY"):
elif inst.op == "WRITE_CONFLICT_KEY":
inst.tr.add_write_conflict_key(inst.pop())
inst.push(b"SET_CONFLICT_KEY")
elif inst.op == six.u("DISABLE_WRITE_CONFLICT"):
elif inst.op == "DISABLE_WRITE_CONFLICT":
inst.tr.options.set_next_write_no_write_conflict_range()
elif inst.op == six.u("COMMIT"):
elif inst.op == "COMMIT":
inst.push(inst.tr.commit())
elif inst.op == six.u("RESET"):
elif inst.op == "RESET":
inst.tr.reset()
elif inst.op == six.u("CANCEL"):
elif inst.op == "CANCEL":
inst.tr.cancel()
elif inst.op == six.u("GET_COMMITTED_VERSION"):
elif inst.op == "GET_COMMITTED_VERSION":
self.last_version = inst.tr.get_committed_version()
inst.push(b"GOT_COMMITTED_VERSION")
elif inst.op == six.u("GET_APPROXIMATE_SIZE"):
elif inst.op == "GET_APPROXIMATE_SIZE":
inst.tr.get_approximate_size().wait()
inst.push(b"GOT_APPROXIMATE_SIZE")
elif inst.op == six.u("GET_VERSIONSTAMP"):
elif inst.op == "GET_VERSIONSTAMP":
inst.push(inst.tr.get_versionstamp())
elif inst.op == six.u("TUPLE_PACK"):
elif inst.op == "TUPLE_PACK":
count = inst.pop()
items = inst.pop(count)
inst.push(fdb.tuple.pack(tuple(items)))
elif inst.op == six.u("TUPLE_PACK_WITH_VERSIONSTAMP"):
elif inst.op == "TUPLE_PACK_WITH_VERSIONSTAMP":
prefix = inst.pop()
count = inst.pop()
items = inst.pop(count)
@@ -413,26 +412,23 @@ class Tester:
inst.push(b"ERROR: NONE")
else:
inst.push(b"ERROR: MULTIPLE")
elif inst.op == six.u("TUPLE_UNPACK"):
elif inst.op == "TUPLE_UNPACK":
for i in fdb.tuple.unpack(inst.pop()):
inst.push(fdb.tuple.pack((i,)))
elif inst.op == six.u("TUPLE_SORT"):
elif inst.op == "TUPLE_SORT":
count = inst.pop()
items = inst.pop(count)
unpacked = map(fdb.tuple.unpack, items)
if six.PY3:
sorted_items = sorted(unpacked, key=fdb.tuple.pack)
else:
sorted_items = sorted(unpacked, cmp=fdb.tuple.compare)
sorted_items = sorted(unpacked, key=fdb.tuple.pack)
for item in sorted_items:
inst.push(fdb.tuple.pack(item))
elif inst.op == six.u("TUPLE_RANGE"):
elif inst.op == "TUPLE_RANGE":
count = inst.pop()
items = inst.pop(count)
r = fdb.tuple.range(tuple(items))
inst.push(r.start)
inst.push(r.stop)
elif inst.op == six.u("ENCODE_FLOAT"):
elif inst.op == "ENCODE_FLOAT":
f_bytes = inst.pop()
f = struct.unpack(">f", f_bytes)[0]
if (
@@ -443,43 +439,43 @@ class Tester:
):
f = int(f)
inst.push(fdb.tuple.SingleFloat(f))
elif inst.op == six.u("ENCODE_DOUBLE"):
elif inst.op == "ENCODE_DOUBLE":
d_bytes = inst.pop()
d = struct.unpack(">d", d_bytes)[0]
inst.push(d)
elif inst.op == six.u("DECODE_FLOAT"):
elif inst.op == "DECODE_FLOAT":
f = inst.pop()
f_bytes = struct.pack(">f", f.value)
inst.push(f_bytes)
elif inst.op == six.u("DECODE_DOUBLE"):
elif inst.op == "DECODE_DOUBLE":
d = inst.pop()
d_bytes = struct.pack(">d", d)
inst.push(d_bytes)
elif inst.op == six.u("START_THREAD"):
elif inst.op == "START_THREAD":
t = Tester(self.db, inst.pop())
thr = threading.Thread(target=t.run)
thr.start()
self.threads.append(thr)
elif inst.op == six.u("WAIT_EMPTY"):
elif inst.op == "WAIT_EMPTY":
prefix = inst.pop()
Tester.wait_empty(self.db, prefix)
inst.push(b"WAITED_FOR_EMPTY")
elif inst.op == six.u("TENANT_CREATE"):
elif inst.op == "TENANT_CREATE":
name = inst.pop()
fdb.tenant_management.create_tenant(self.db, name)
inst.push(b"RESULT_NOT_PRESENT")
elif inst.op == six.u("TENANT_DELETE"):
elif inst.op == "TENANT_DELETE":
name = inst.pop()
fdb.tenant_management.delete_tenant(self.db, name)
inst.push(b"RESULT_NOT_PRESENT")
elif inst.op == six.u("TENANT_SET_ACTIVE"):
elif inst.op == "TENANT_SET_ACTIVE":
name = inst.pop()
self.tenant = self.db.open_tenant(name)
self.tenant.get_id().wait()
inst.push(b"SET_ACTIVE_TENANT")
elif inst.op == six.u("TENANT_CLEAR_ACTIVE"):
elif inst.op == "TENANT_CLEAR_ACTIVE":
self.tenant = None
elif inst.op == six.u("TENANT_LIST"):
elif inst.op == "TENANT_LIST":
begin, end, limit = inst.pop(3)
tenant_list = fdb.tenant_management.list_tenants(
self.db, begin, end, limit
@@ -494,15 +490,15 @@ class Tester:
except (json.decoder.JSONDecodeError, KeyError):
assert False, "Invalid Tenant Metadata"
inst.push(fdb.tuple.pack(tuple(result)))
elif inst.op == six.u("TENANT_GET_ID"):
elif inst.op == "TENANT_GET_ID":
if self.tenant != None:
self.tenant.get_id().wait()
inst.push(b"GOT_TENANT_ID")
else:
inst.push(b"NO_ACTIVE_TENANT")
elif inst.op == six.u("UNIT_TESTS"):
elif inst.op == "UNIT_TESTS":
run_unit_tests(db)
elif inst.op.startswith(six.u("DIRECTORY_")):
elif inst.op.startswith("DIRECTORY_"):
self.directory_extension.process_instruction(inst)
else:
raise Exception("Unknown op %s" % inst.op)

@@ -21,25 +21,20 @@


import ctypes
import sys
import random
import struct
import unicodedata
import math
import uuid

import fdb.tuple
from fdb.tuple import pack, unpack, compare, SingleFloat

from fdb import six
from fdb.six import u
from fdb.tuple import pack, unpack, compare, int2byte, SingleFloat


def randomUnicode():
while True:
c = random.randint(0, 0xFFFF)
if unicodedata.category(six.unichr(c))[0] in "LMNPSZ":
return six.unichr(c)
if unicodedata.category(chr(c))[0] in "LMNPSZ":
return chr(c)


def randomElement():
@@ -50,31 +45,26 @@ def randomElement():
return b"".join([random.choice(chars) for c in range(random.randint(0, 5))])
else:
return b"".join(
[
six.int2byte(random.randint(0, 255))
for _ in range(random.randint(0, 10))
]
[int2byte(random.randint(0, 255)) for _ in range(random.randint(0, 10))]
)
elif r == 1:
if random.random() < 0.5:
chars = [
u("\x00"),
u("\x01"),
u("a"),
u("7"),
u("\xfe"),
u("\ff"),
u("\u0000"),
u("\u0001"),
u("\uffff"),
u("\uff00"),
u("\U0001f4a9"),
"\x00",
"\x01",
"a",
"7",
"\xfe",
"\ff",
"\u0000",
"\u0001",
"\uffff",
"\uff00",
"\U0001f4a9",
]
return u("").join(
[random.choice(chars) for c in range(random.randint(0, 10))]
)
return "".join([random.choice(chars) for c in range(random.randint(0, 10))])
else:
return u("").join([randomUnicode() for _ in range(random.randint(0, 10))])
return "".join([randomUnicode() for _ in range(random.randint(0, 10))])
elif r == 2:
return random.choice([-1, 1]) * min(
2 ** random.randint(0, 2040) + random.randint(-10, 10), 2**2040 - 1
@@ -96,7 +86,7 @@ def randomElement():
elif r == 6:
is_double = random.random() < 0.5
byte_str = b"".join(
[six.int2byte(random.randint(0, 255)) for _ in range(8 if is_double else 4)]
[int2byte(random.randint(0, 255)) for _ in range(8 if is_double else 4)]
)
if is_double:
return struct.unpack(">d", byte_str)[0]

@@ -65,12 +65,12 @@ Python API
.. |future-object| replace:: :ref:`Future <api-python-future>` object
.. |infrequent| replace:: *Infrequently used*.
.. |slice-defaults| replace:: The default slice begin is ``''``; the default slice end is ``'\xFF'``.
.. |byte-string| replace:: In Python 2, a byte string is a string of type ``str``. In Python 3, a byte string has type ``bytes``.
.. |byte-string| replace:: In Python 3, a byte string has type ``bytes``.

Installation
============

The FoundationDB Python API is compatible with Python 2.7 - 3.7. You will need to have a Python version within this range on your system before the FoundationDB Python API can be installed. Also please note that Python 3.7 no longer bundles a full copy of libffi, which is used for building the _ctypes module on non-macOS UNIX platforms. Hence, if you are using Python 3.7, you should make sure libffi is already installed on your system.
The FoundationDB Python API is compatible with Python 3.8 and newer versions.

On macOS, the FoundationDB Python API is installed as part of the FoundationDB installation (see :ref:`installing-client-binaries`). On Ubuntu or RHEL/CentOS, you will need to install the FoundationDB Python API manually via Python's package manager ``pip``:

@@ -530,7 +530,7 @@ A ``Transaction`` object represents a FoundationDB database transaction. All op

The most convenient way to use Transactions is using the :func:`@fdb.transactional <transactional>` decorator.

Keys and values in FoundationDB are byte strings (``str`` in Python 2.x, ``bytes`` in 3.x). To encode other data types, see the :mod:`fdb.tuple` module and :ref:`encoding-data-types`.
Keys and values in FoundationDB are byte strings (``bytes`` type in Python 3). To encode other data types, see the :mod:`fdb.tuple` module and :ref:`encoding-data-types`.
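For illustration only, a minimal sketch of the pattern described above, assuming the foundationdb package and client library are installed. The API version (710) and the "score" key layout are assumptions for the example, not taken from this change.

import fdb
import fdb.tuple

fdb.api_version(710)  # assumption: pick the API version supported by your installed client
db = fdb.open()

@fdb.transactional
def set_score(tr, name, score):
    # Keys and values are bytes; the tuple layer encodes other element types.
    tr[fdb.tuple.pack(("score", name))] = fdb.tuple.pack((score,))

@fdb.transactional
def get_score(tr, name):
    value = tr[fdb.tuple.pack(("score", name))]
    if not value.present():
        return None
    return fdb.tuple.unpack(value)[0]

set_score(db, "alice", 10)
print(get_score(db, "alice"))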

Attributes
----------
@@ -1157,11 +1157,10 @@ The tuple layer in the FoundationDB Python API supports tuples that contain elem
+-----------------------+-------------------------------------------------------------------------------+
| Byte string | Any ``value`` such that ``isinstance(value, bytes)`` |
+-----------------------+-------------------------------------------------------------------------------+
| Unicode string | Any ``value`` such that ``isinstance(value, unicode)`` |
| Unicode string | Any ``value`` such that ``isinstance(value, str)`` |
+-----------------------+-------------------------------------------------------------------------------+
| Integer | Python 2.7: Any ``value`` such that ``isinstance(value, (int,long))`` and |
| | ``-2**2040+1 <= value <= 2**2040-1``. Python 3.x: Any ``value`` such that |
| | ``isinstance(value, int)`` and ``-2**2040+1 <= value <= 2**2040-1``. |
| Integer | Any ``value`` such that ``isinstance(value, int)`` and |
| | ``-2**2040+1 <= value <= 2**2040-1``. |
+-----------------------+-------------------------------------------------------------------------------+
| Floating point number | Any ``value`` such that ``isinstance(value, fdb.tuple.SingleFloat)`` or |
| (single-precision) | ``isinstance(value, ctypes.c_float)`` |
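For illustration only, a short sketch of how the element types listed above round-trip through the tuple layer, assuming an installed client; the API version is an assumption.

import fdb
import fdb.tuple

fdb.api_version(710)  # assumption: any API version supported by the installed client

# One element of each type shown above: bytes, str, int, single- and double-precision floats.
element_tuple = (b"raw bytes", "unicode text", 2**2040 - 1, fdb.tuple.SingleFloat(1.25), 3.5)
packed = fdb.tuple.pack(element_tuple)  # order-preserving byte string
print(fdb.tuple.unpack(packed))         # recovers the original elements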

@@ -44,7 +44,7 @@ C

FoundationDB's C bindings are installed with the FoundationDB client binaries. You can find more details in the :doc:`C API Documentation <api-c>`.

Python 3.7+
Python 3.8+
-----------

The Python package is available from `PyPI <https://pypi.org/project/foundationdb/>`_
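For illustration only, a minimal sketch of the install-and-use flow referenced here, assuming the package has been installed with pip and a cluster file is reachable; the API version is an assumption.

# Assumes `pip install foundationdb` has completed and the client library is installed.
import fdb

fdb.api_version(710)  # assumption: match the API version of the installed client
db = fdb.open()       # uses the default cluster file
db[b"hello"] = b"world"
print(db[b"hello"])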

@@ -270,7 +270,6 @@
<File Id='FDBPY$(var.PyVer)TUPLE' Name='tuple.py' DiskId='1' Source='$(var.PyPath)tuple.py' KeyPath='no'/>
<File Id='FDBPY$(var.PyVer)DIR' Name='directory_impl.py' DiskId='1' Source='$(var.PyPath)directory_impl.py' KeyPath='no'/>
<File Id='FDBPY$(var.PyVer)SUBS' Name='subspace_impl.py' DiskId='1' Source='$(var.PyPath)subspace_impl.py' KeyPath='no'/>
<File Id='FDBPY$(var.PyVer)SIX' Name='six.py' DiskId='1' Source='$(var.PyPath)six.py' KeyPath='no'/>
<RemoveFile Id="Purge$(var.PyVer)PYC" Name="*.pyc" On="uninstall" />
<RemoveFile Id="Purge$(var.PyVer)PYO" Name="*.pyo" On="uninstall" />
<RemoveFile Id="Purge$(var.PyVer)Cache" Directory="PyTarget$(var.PyVer)CacheDir" Name="*.*" On="uninstall" />

@@ -284,7 +284,6 @@
<File Id='FDBPY$(var.PyVer)TUPLE' Name='tuple.py' DiskId='1' Source='$(var.PyPath)tuple.py' KeyPath='no'/>
<File Id='FDBPY$(var.PyVer)DIR' Name='directory_impl.py' DiskId='1' Source='$(var.PyPath)directory_impl.py' KeyPath='no'/>
<File Id='FDBPY$(var.PyVer)SUBS' Name='subspace_impl.py' DiskId='1' Source='$(var.PyPath)subspace_impl.py' KeyPath='no'/>
<File Id='FDBPY$(var.PyVer)SIX' Name='six.py' DiskId='1' Source='$(var.PyPath)six.py' KeyPath='no'/>
<RemoveFile Id="Purge$(var.PyVer)PYC" Name="*.pyc" On="uninstall" />
<RemoveFile Id="Purge$(var.PyVer)PYO" Name="*.pyo" On="uninstall" />
<RemoveFile Id="Purge$(var.PyVer)Cache" Directory="PyTarget$(var.PyVer)CacheDir" Name="*.*" On="uninstall" />

@@ -49,9 +49,6 @@
<Content Include="..\..\bindings\python\fdb\impl.py">
<Link>Inputs\impl.py</Link>
</Content>
<Content Include="..\..\bindings\python\fdb\six.py">
<Link>Inputs\six.py</Link>
</Content>
<Content Include="..\..\bindings\python\fdb\tuple.py">
<Link>Inputs\tuple.py</Link>
</Content>