diff --git a/gremlin/gremlin.py b/gremlin/gremlin.py
new file mode 100644
index 0000000..dfd2302
--- /dev/null
+++ b/gremlin/gremlin.py
@@ -0,0 +1,26 @@
+import mysql.connector
+
+# Establish a connection to the database
+connection = mysql.connector.connect(
+    host="server1.home",
+    user="sascha",
+    password="mgltoJtmmDnKJ86LltsGdw",
+    database="regulus"
+)
+
+# Create a cursor object to execute SQL queries
+db = connection.cursor()
+
+# Example query
+db.execute('SELECT AccountName,Street,City,ZIP FROM Accounts LIMIT 100')
+
+# Fetch the results
+accounts = db.fetchall()
+
+# Process the results
+for account in accounts:
+    print(account)
+
+# Close the cursor and connection
+db.close()
+connection.close()
\ No newline at end of file
diff --git a/src9/store/.venv/lib/python3.10/site-packages/_mysql_connector.cpython-310-x86_64-linux-gnu.so b/src9/store/.venv/lib/python3.10/site-packages/_mysql_connector.cpython-310-x86_64-linux-gnu.so
new file mode 100755
index 0000000..08108db
Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/_mysql_connector.cpython-310-x86_64-linux-gnu.so differ
diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/__init__.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/__pycache__/__init__.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000..9a03ff5
Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/__pycache__/__init__.cpython-310.pyc differ
diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__init__.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__init__.py
new file mode 100644
index 0000000..51cc97a
--- /dev/null
+++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__init__.py
@@ -0,0 +1,127 @@
+# Copyright (c) 2009, 2022, Oracle and/or its affiliates.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License, version 2.0, as
+# published by the Free Software Foundation.
+#
+# This program is also distributed with certain software (including
+# but not limited to OpenSSL) that is licensed under separate terms,
+# as designated in a particular file or component or in included license
+# documentation. The authors of MySQL hereby grant you an
+# additional permission to link the program and your derivative works
+# with the separately licensed software that they have included with
+# MySQL.
+#
+# Without limiting anything contained in the foregoing, this file,
+# which is part of MySQL Connector/Python, is also subject to the
+# Universal FOSS Exception, version 1.0, a copy of which can be found at
+# http://oss.oracle.com/licenses/universal-foss-exception.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU General Public License, version 2.0, for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+"""MySQL Connector/Python - MySQL driver written in Python."""
+
+try:
+    from .connection_cext import CMySQLConnection
+except ImportError:
+    HAVE_CEXT = False
+else:
+    HAVE_CEXT = True
+
+
+from . import version
+from .connection import MySQLConnection
+from .constants import CharacterSet, ClientFlag, FieldFlag, FieldType, RefreshOption
+from .dbapi import (
+    BINARY,
+    DATETIME,
+    NUMBER,
+    ROWID,
+    STRING,
+    Binary,
+    Date,
+    DateFromTicks,
+    Time,
+    TimeFromTicks,
+    Timestamp,
+    TimestampFromTicks,
+    apilevel,
+    paramstyle,
+    threadsafety,
+)
+from .errors import (  # pylint: disable=redefined-builtin
+    DatabaseError,
+    DataError,
+    Error,
+    IntegrityError,
+    InterfaceError,
+    InternalError,
+    NotSupportedError,
+    OperationalError,
+    PoolError,
+    ProgrammingError,
+    Warning,
+    custom_error_exception,
+)
+from .pooling import connect
+
+Connect = connect
+
+__version_info__ = version.VERSION
+"""This attribute indicates the Connector/Python version as an array
+of version components."""
+
+__version__ = version.VERSION_TEXT
+"""This attribute indicates the Connector/Python version as a string."""
+
+__all__ = [
+    "MySQLConnection",
+    "Connect",
+    "custom_error_exception",
+    # Some useful constants
+    "FieldType",
+    "FieldFlag",
+    "ClientFlag",
+    "CharacterSet",
+    "RefreshOption",
+    "HAVE_CEXT",
+    # Error handling
+    "Error",
+    "Warning",
+    "InterfaceError",
+    "DatabaseError",
+    "NotSupportedError",
+    "DataError",
+    "IntegrityError",
+    "PoolError",
+    "ProgrammingError",
+    "OperationalError",
+    "InternalError",
+    # DBAPI PEP 249 required exports
+    "connect",
+    "apilevel",
+    "threadsafety",
+    "paramstyle",
+    "Date",
+    "Time",
+    "Timestamp",
+    "Binary",
+    "DateFromTicks",
+    "DateFromTicks",
+    "TimestampFromTicks",
+    "TimeFromTicks",
+    "STRING",
+    "BINARY",
+    "NUMBER",
+    "DATETIME",
+    "ROWID",
+    # C Extension
+    "CMySQLConnection",
+]
diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/__init__.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/__init__.cpython-310.pyc
new file mode 100644
index 0000000..310a566
Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/__init__.cpython-310.pyc differ
diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/abstracts.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/abstracts.cpython-310.pyc
new file mode 100644
index 0000000..eb175fc
Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/abstracts.cpython-310.pyc differ
diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/authentication.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/authentication.cpython-310.pyc
new file mode 100644
index 0000000..dec3618
Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/authentication.cpython-310.pyc differ
diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/charsets.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/charsets.cpython-310.pyc
new file mode 100644
index 0000000..31d88a2
Binary files /dev/null
and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/charsets.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/connection.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/connection.cpython-310.pyc new file mode 100644 index 0000000..28d6472 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/connection.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/connection_cext.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/connection_cext.cpython-310.pyc new file mode 100644 index 0000000..dd14c44 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/connection_cext.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/constants.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/constants.cpython-310.pyc new file mode 100644 index 0000000..892b95d Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/constants.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/conversion.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/conversion.cpython-310.pyc new file mode 100644 index 0000000..d7e1106 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/conversion.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/cursor.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/cursor.cpython-310.pyc new file mode 100644 index 0000000..327d814 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/cursor.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/cursor_cext.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/cursor_cext.cpython-310.pyc new file mode 100644 index 0000000..f4cbcda Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/cursor_cext.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/custom_types.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/custom_types.cpython-310.pyc new file mode 100644 index 0000000..f8e7dbb Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/custom_types.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/dbapi.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/dbapi.cpython-310.pyc new file mode 100644 index 0000000..7ec4964 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/dbapi.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/errorcode.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/errorcode.cpython-310.pyc new file mode 100644 index 0000000..dd31362 
Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/errorcode.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/errors.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/errors.cpython-310.pyc new file mode 100644 index 0000000..4dd52d1 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/errors.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/logger.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/logger.cpython-310.pyc new file mode 100644 index 0000000..3635274 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/logger.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/network.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/network.cpython-310.pyc new file mode 100644 index 0000000..99b8189 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/network.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/optionfiles.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/optionfiles.cpython-310.pyc new file mode 100644 index 0000000..63283bf Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/optionfiles.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/pooling.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/pooling.cpython-310.pyc new file mode 100644 index 0000000..5200cea Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/pooling.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/protocol.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/protocol.cpython-310.pyc new file mode 100644 index 0000000..2a51da6 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/protocol.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/types.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/types.cpython-310.pyc new file mode 100644 index 0000000..6a1fd82 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/types.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/utils.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000..1746ef0 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/utils.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/version.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/version.cpython-310.pyc new file mode 100644 index 0000000..9dbe98e Binary files /dev/null and 
b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/__pycache__/version.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/abstracts.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/abstracts.py new file mode 100644 index 0000000..5898ef5 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/abstracts.py @@ -0,0 +1,2600 @@ +# Copyright (c) 2014, 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +# mypy: disable-error-code="assignment,attr-defined" + +"""Module gathering all abstract base classes.""" + +from __future__ import annotations + +import os +import re +import warnings +import weakref + +from abc import ABC, abstractmethod +from datetime import date, datetime, time, timedelta +from decimal import Decimal +from inspect import signature +from time import sleep +from types import TracebackType +from typing import ( + Any, + BinaryIO, + Callable, + Dict, + Generator, + Iterator, + List, + Mapping, + NoReturn, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +TLS_V1_3_SUPPORTED = False +try: + import ssl + + if hasattr(ssl, "HAS_TLSv1_3") and ssl.HAS_TLSv1_3: + TLS_V1_3_SUPPORTED = True +except ImportError: + # If import fails, we don't have SSL support. 
+ pass + +from .constants import ( + CONN_ATTRS_DN, + DEFAULT_CONFIGURATION, + DEPRECATED_TLS_VERSIONS, + OPENSSL_CS_NAMES, + TLS_CIPHER_SUITES, + TLS_VERSIONS, + CharacterSet, + ClientFlag, +) +from .conversion import MySQLConverter, MySQLConverterBase +from .errors import ( + DatabaseError, + Error, + InterfaceError, + NotSupportedError, + OperationalError, + ProgrammingError, +) +from .opentelemetry.constants import ( + CONNECTION_SPAN_NAME, + OPTION_CNX_SPAN, + OPTION_CNX_TRACER, + OTEL_ENABLED, +) + +if OTEL_ENABLED: + from .opentelemetry.instrumentation import ( + end_span, + record_exception_event, + set_connection_span_attrs, + trace, + ) + +from .optionfiles import read_option_files +from .types import ( + BinaryProtocolType, + DescriptionType, + HandShakeType, + MySQLConvertibleType, + RowItemType, + RowType, + StrOrBytes, + WarningType, +) +from .utils import GenericWrapper, import_object + +NAMED_TUPLE_CACHE: weakref.WeakValueDictionary[Any, Any] = weakref.WeakValueDictionary() + +DUPLICATED_IN_LIST_ERROR = ( + "The '{list}' list must not contain repeated values, the value " + "'{value}' is duplicated." +) + +TLS_VERSION_ERROR = ( + "The given tls_version: '{}' is not recognized as a valid " + "TLS protocol version (should be one of {})." +) + +TLS_VERSION_DEPRECATED_ERROR = ( + "The given tls_version: '{}' are no longer allowed (should be one of {})." +) + +TLS_VER_NO_SUPPORTED = ( + "No supported TLS protocol version found in the 'tls-versions' list '{}'. " +) + +KRB_SERVICE_PINCIPAL_ERROR = ( + 'Option "krb_service_principal" {error}, must be a string in the form ' + '"primary/instance@realm" e.g "ldap/ldapauth@MYSQL.COM" where "@realm" ' + "is optional and if it is not given will be assumed to belong to the " + "default realm, as configured in the krb5.conf file." +) + +MYSQL_PY_TYPES = ( + Decimal, + bytes, + date, + datetime, + float, + int, + str, + time, + timedelta, +) + + +class CMySQLPrepStmt(GenericWrapper): + """Structure to represent a result from `CMySQLConnection.cmd_stmt_prepare`. + It can be used consistently as a type hint. + + `_mysql_connector.MySQLPrepStmt` isn't available when the C-ext isn't built. + + In this regard, `CmdStmtPrepareResult` acts as a proxy/wrapper entity for a + `_mysql_connector.MySQLPrepStmt` instance. 
+ """ + + +class MySQLConnectionAbstract(ABC): + """Abstract class for classes connecting to a MySQL server.""" + + def __init__(self) -> None: + """Initialize""" + # opentelemetry related + self._tracer: Any = None + self._span: Any = None + self.otel_context_propagation: bool = True + + self._client_flags: int = ClientFlag.get_default() + self._charset_id: int = 45 + self._sql_mode: Optional[str] = None + self._time_zone: Optional[str] = None + self._autocommit: bool = False + self._server_version: Optional[Tuple[int, ...]] = None + self._handshake: Optional[HandShakeType] = None + self._conn_attrs: Dict[str, str] = {} + + self._user: str = "" + self._password: str = "" + self._password1: str = "" + self._password2: str = "" + self._password3: str = "" + self._database: str = "" + self._host: str = "127.0.0.1" + self._port: int = 3306 + self._unix_socket: Optional[str] = None + self._client_host: str = "" + self._client_port: int = 0 + self._ssl: Dict[str, Optional[Union[str, bool, List[str]]]] = {} + self._ssl_disabled: bool = DEFAULT_CONFIGURATION["ssl_disabled"] + self._force_ipv6: bool = False + self._oci_config_file: Optional[str] = None + self._oci_config_profile: Optional[str] = None + self._fido_callback: Optional[Union[str, Callable[[str], None]]] = None + self._webauthn_callback: Optional[Union[str, Callable[[str], None]]] = None + self._krb_service_principal: Optional[str] = None + + self._use_unicode: bool = True + self._get_warnings: bool = False + self._raise_on_warnings: bool = False + self._connection_timeout: Optional[int] = DEFAULT_CONFIGURATION[ + "connect_timeout" + ] + self._buffered: bool = False + self._unread_result: bool = False + self._have_next_result: bool = False + self._raw: bool = False + self._in_transaction: bool = False + self._allow_local_infile: bool = DEFAULT_CONFIGURATION["allow_local_infile"] + self._allow_local_infile_in_path: Optional[str] = DEFAULT_CONFIGURATION[ + "allow_local_infile_in_path" + ] + + self._prepared_statements: Any = None + self._query_attrs: Dict[str, BinaryProtocolType] = {} + + self._ssl_active: bool = False + self._auth_plugin: Optional[str] = None + self._auth_plugin_class: Optional[str] = None + self._pool_config_version: Any = None + self.converter: Optional[MySQLConverter] = None + self._converter_class: Optional[Type[MySQLConverter]] = None + self._converter_str_fallback: bool = False + self._compress: bool = False + + self._consume_results: bool = False + self._init_command: Optional[str] = None + self._character_set: CharacterSet = CharacterSet() + + def __enter__(self) -> MySQLConnectionAbstract: + return self + + def __exit__( + self, + exc_type: Type[BaseException], + exc_value: BaseException, + traceback: TracebackType, + ) -> None: + self.close() + + def get_self(self) -> MySQLConnectionAbstract: + """Returns self for `weakref.proxy`. + + This method is used when the original object is needed when using + `weakref.proxy`. 
+ """ + return self + + @property + def is_secure(self) -> bool: + """Returns `True` if is a secure connection.""" + return self._ssl_active or ( + self._unix_socket is not None and os.name == "posix" + ) + + @property + def have_next_result(self) -> bool: + """Returns If have next result.""" + return self._have_next_result + + @property + def query_attrs(self) -> List[Tuple[str, BinaryProtocolType]]: + """Returns query attributes list.""" + return list(self._query_attrs.items()) + + def query_attrs_append(self, value: Tuple[str, BinaryProtocolType]) -> None: + """Adds element to the query attributes list on the connector's side. + + If an element in the query attributes list already matches + the attribute name provided, the new element will NOT be added. + + Args: + value: key-value as a 2-tuple. + """ + attr_name, attr_value = value + if attr_name not in self._query_attrs: + self._query_attrs[attr_name] = attr_value + + def query_attrs_remove(self, name: str) -> BinaryProtocolType: + """Removes element by name from the query attributes list on the connector's side. + + If no match, `None` is returned, else the corresponding value is returned. + + Args: + name: key name. + """ + return self._query_attrs.pop(name, None) + + def query_attrs_clear(self) -> None: + """Clears query attributes list on the connector's side.""" + self._query_attrs = {} + + def _validate_tls_ciphersuites(self) -> None: + """Validates the tls_ciphersuites option.""" + tls_ciphersuites = [] + tls_cs = self._ssl["tls_ciphersuites"] + + if isinstance(tls_cs, str): + if not (tls_cs.startswith("[") and tls_cs.endswith("]")): + raise AttributeError( + f"tls_ciphersuites must be a list, found: '{tls_cs}'" + ) + tls_css = tls_cs[1:-1].split(",") + if not tls_css: + raise AttributeError( + "No valid cipher suite found in 'tls_ciphersuites' list" + ) + for _tls_cs in tls_css: + _tls_cs = tls_cs.strip().upper() + if _tls_cs: + tls_ciphersuites.append(_tls_cs) + + elif isinstance(tls_cs, (list, set)): + tls_ciphersuites = [tls_cs for tls_cs in tls_cs if tls_cs] + else: + raise AttributeError( + "tls_ciphersuites should be a list with one or more " + f"ciphersuites. Found: '{tls_cs}'" + ) + + tls_versions = ( + TLS_VERSIONS[:] + if self._ssl.get("tls_versions", None) is None + else self._ssl["tls_versions"][:] # type: ignore[index] + ) + + # A newer TLS version can use a cipher introduced on + # an older version. + tls_versions.sort(reverse=True) # type: ignore[union-attr] + newer_tls_ver = tls_versions[0] + # translated_names[0] belongs to TLSv1, TLSv1.1 and TLSv1.2 + # translated_names[1] are TLSv1.3 only + translated_names: List[List[str]] = [[], []] + iani_cipher_suites_names = {} + ossl_cipher_suites_names: List[str] = [] + + # Old ciphers can work with new TLS versions. + # Find all the ciphers introduced on previous TLS versions. 
+ for tls_ver in TLS_VERSIONS[: TLS_VERSIONS.index(newer_tls_ver) + 1]: + iani_cipher_suites_names.update(TLS_CIPHER_SUITES[tls_ver]) + ossl_cipher_suites_names.extend(OPENSSL_CS_NAMES[tls_ver]) + + for name in tls_ciphersuites: + if "-" in name and name in ossl_cipher_suites_names: + if name in OPENSSL_CS_NAMES["TLSv1.3"]: + translated_names[1].append(name) + else: + translated_names[0].append(name) + elif name in iani_cipher_suites_names: + translated_name = iani_cipher_suites_names[name] + if translated_name in translated_names: + raise AttributeError( + DUPLICATED_IN_LIST_ERROR.format( + list="tls_ciphersuites", value=translated_name + ) + ) + if name in TLS_CIPHER_SUITES["TLSv1.3"]: + translated_names[1].append(iani_cipher_suites_names[name]) + else: + translated_names[0].append(iani_cipher_suites_names[name]) + else: + raise AttributeError( + f"The value '{name}' in tls_ciphersuites is not a valid " + "cipher suite" + ) + if not translated_names[0] and not translated_names[1]: + raise AttributeError( + "No valid cipher suite found in the 'tls_ciphersuites' list" + ) + + self._ssl["tls_ciphersuites"] = [ + ":".join(translated_names[0]), + ":".join(translated_names[1]), + ] + + def _validate_tls_versions(self) -> None: + """Validates the tls_versions option.""" + tls_versions = [] + tls_version = self._ssl["tls_versions"] + + if isinstance(tls_version, str): + if not (tls_version.startswith("[") and tls_version.endswith("]")): + raise AttributeError( + f"tls_versions must be a list, found: '{tls_version}'" + ) + tls_vers = tls_version[1:-1].split(",") + for tls_ver in tls_vers: + tls_version = tls_ver.strip() + if tls_version == "": + continue + if tls_version in tls_versions: + raise AttributeError( + DUPLICATED_IN_LIST_ERROR.format( + list="tls_versions", value=tls_version + ) + ) + tls_versions.append(tls_version) + if tls_vers == ["TLSv1.3"] and not TLS_V1_3_SUPPORTED: + raise AttributeError( + TLS_VER_NO_SUPPORTED.format(tls_version, TLS_VERSIONS) + ) + elif isinstance(tls_version, list): + if not tls_version: + raise AttributeError( + "At least one TLS protocol version must be specified in " + "'tls_versions' list" + ) + for tls_ver in tls_version: + if tls_ver in tls_versions: + raise AttributeError( + DUPLICATED_IN_LIST_ERROR.format( + list="tls_versions", value=tls_ver + ) + ) + tls_versions.append(tls_ver) + elif isinstance(tls_version, set): + for tls_ver in tls_version: + tls_versions.append(tls_ver) + else: + raise AttributeError( + "tls_versions should be a list with one or more of versions " + f"in {', '.join(TLS_VERSIONS)}. 
found: '{tls_versions}'" + ) + + if not tls_versions: + raise AttributeError( + "At least one TLS protocol version must be specified " + "in 'tls_versions' list when this option is given" + ) + + use_tls_versions = [] + deprecated_tls_versions = [] + invalid_tls_versions = [] + for tls_ver in tls_versions: + if tls_ver in TLS_VERSIONS: + use_tls_versions.append(tls_ver) + if tls_ver in DEPRECATED_TLS_VERSIONS: + deprecated_tls_versions.append(tls_ver) + else: + invalid_tls_versions.append(tls_ver) + + if use_tls_versions: + if use_tls_versions == ["TLSv1.3"] and not TLS_V1_3_SUPPORTED: + raise NotSupportedError( + TLS_VER_NO_SUPPORTED.format(tls_version, TLS_VERSIONS) + ) + use_tls_versions.sort() + self._ssl["tls_versions"] = use_tls_versions + elif deprecated_tls_versions: + raise NotSupportedError( + TLS_VERSION_DEPRECATED_ERROR.format( + deprecated_tls_versions, TLS_VERSIONS + ) + ) + elif invalid_tls_versions: + raise AttributeError(TLS_VERSION_ERROR.format(tls_ver, TLS_VERSIONS)) + + @property + def user(self) -> str: + """The user name used for connecting to the MySQL server.""" + return self._user + + @property + def server_host(self) -> str: + """MySQL server IP address or name.""" + return self._host + + @property + def server_port(self) -> int: + "MySQL server TCP/IP port." + return self._port + + @property + def unix_socket(self) -> Optional[str]: + "The Unix socket file for connecting to the MySQL server." + return self._unix_socket + + @property + @abstractmethod + def database(self) -> str: + """The current database.""" + + @database.setter + def database(self, value: str) -> None: + """Sets the current database.""" + self.cmd_query(f"USE {value}") + + @property + def can_consume_results(self) -> bool: + """Returns whether to consume results.""" + return self._consume_results + + @can_consume_results.setter + def can_consume_results(self, value: bool) -> None: + """Sets if can consume results.""" + assert isinstance(value, bool) + self._consume_results = value + + @property + def pool_config_version(self) -> Any: + """Returns the pool configuration version.""" + return self._pool_config_version + + @pool_config_version.setter + def pool_config_version(self, value: Any) -> None: + """Sets the pool configuration version""" + self._pool_config_version = value + + def config(self, **kwargs: Any) -> None: + """Configures the MySQL Connection. + + This method allows you to configure the `MySQLConnection` + instance after it has been instantiated. + + Args: + **kwargs: For a complete list of possible arguments, see [1]. + + Raises: + AttributeError: When provided unsupported connection arguments. + InterfaceError: When the provided connection argument is invalid. 
+ + References: + [1]: https://dev.mysql.com/doc/connector-python/en/connector-python-connectargs.html + """ + # opentelemetry related + self._span = kwargs.pop(OPTION_CNX_SPAN, None) + self._tracer = kwargs.pop(OPTION_CNX_TRACER, None) + + config = kwargs.copy() + if "dsn" in config: + raise NotSupportedError("Data source name is not supported") + + # Read option files + config = read_option_files(**config) + + # Configure how we handle MySQL warnings + try: + self.get_warnings = config["get_warnings"] + del config["get_warnings"] + except KeyError: + pass # Leave what was set or default + try: + self.raise_on_warnings = config["raise_on_warnings"] + del config["raise_on_warnings"] + except KeyError: + pass # Leave what was set or default + + # Configure client flags + try: + default = ClientFlag.get_default() + self.set_client_flags(config["client_flags"] or default) + del config["client_flags"] + except KeyError: + pass # Missing client_flags-argument is OK + + try: + if config["compress"]: + self._compress = True + self.set_client_flags([ClientFlag.COMPRESS]) + except KeyError: + pass # Missing compress argument is OK + + self._allow_local_infile = config.get( + "allow_local_infile", DEFAULT_CONFIGURATION["allow_local_infile"] + ) + self._allow_local_infile_in_path = config.get( + "allow_local_infile_in_path", + DEFAULT_CONFIGURATION["allow_local_infile_in_path"], + ) + infile_in_path = None + if self._allow_local_infile_in_path: + infile_in_path = os.path.abspath(self._allow_local_infile_in_path) + if ( + infile_in_path + and os.path.exists(infile_in_path) + and not os.path.isdir(infile_in_path) + or os.path.islink(infile_in_path) + ): + raise AttributeError("allow_local_infile_in_path must be a directory") + if self._allow_local_infile or self._allow_local_infile_in_path: + self.set_client_flags([ClientFlag.LOCAL_FILES]) + else: + self.set_client_flags([-ClientFlag.LOCAL_FILES]) + + try: + if not config["consume_results"]: + self._consume_results = False + else: + self._consume_results = True + except KeyError: + self._consume_results = False + + # Configure auth_plugin + try: + self._auth_plugin = config["auth_plugin"] + del config["auth_plugin"] + except KeyError: + self._auth_plugin = "" + + # Disallow the usage of some default authentication plugins + if self._auth_plugin == "authentication_webauthn_client": + raise InterfaceError( + f"'{self._auth_plugin}' cannot be used as the default authentication " + "plugin" + ) + + # Set converter class + try: + self.set_converter_class(config["converter_class"]) + except KeyError: + pass # Using default converter class + except TypeError as err: + raise AttributeError( + "Converter class should be a subclass of " + "conversion.MySQLConverterBase" + ) from err + + # Compatible configuration with other drivers + compat_map = [ + # (,) + ("db", "database"), + ("username", "user"), + ("passwd", "password"), + ("connect_timeout", "connection_timeout"), + ("read_default_file", "option_files"), + ] + for compat, translate in compat_map: + try: + if translate not in config: + config[translate] = config[compat] + del config[compat] + except KeyError: + pass # Missing compat argument is OK + + # Configure login information + if "user" in config or "password" in config: + try: + user = config["user"] + del config["user"] + except KeyError: + user = self._user + try: + password = config["password"] + del config["password"] + except KeyError: + password = self._password + self.set_login(user, password) + + # Configure host information + if "host" in 
config and config["host"]: + self._host = config["host"] + + # Check network locations + try: + self._port = int(config["port"]) + del config["port"] + except KeyError: + pass # Missing port argument is OK + except ValueError as err: + raise InterfaceError("TCP/IP port number should be an integer") from err + + if "ssl_disabled" in config: + self._ssl_disabled = config.pop("ssl_disabled") + + # If an init_command is set, keep it, so we can execute it in _post_connection + if "init_command" in config: + self._init_command = config["init_command"] + del config["init_command"] + + # Other configuration + set_ssl_flag = False + for key, value in config.items(): + try: + DEFAULT_CONFIGURATION[key] + except KeyError: + raise AttributeError(f"Unsupported argument '{key}'") from None + # SSL Configuration + if key.startswith("ssl_"): + set_ssl_flag = True + self._ssl.update({key.replace("ssl_", ""): value}) + elif key.startswith("tls_"): + set_ssl_flag = True + self._ssl.update({key: value}) + else: + attribute = "_" + key + try: + setattr(self, attribute, value.strip()) + except AttributeError: + setattr(self, attribute, value) + + # Disable SSL for unix socket connections + if self._unix_socket and os.name == "posix": + self._ssl_disabled = True + + if self._ssl_disabled and self._auth_plugin == "mysql_clear_password": + raise InterfaceError( + "Clear password authentication is not supported over insecure channels" + ) + + if set_ssl_flag: + if "verify_cert" not in self._ssl: + self._ssl["verify_cert"] = DEFAULT_CONFIGURATION["ssl_verify_cert"] + if "verify_identity" not in self._ssl: + self._ssl["verify_identity"] = DEFAULT_CONFIGURATION[ + "ssl_verify_identity" + ] + # Make sure both ssl_key/ssl_cert are set, or neither (XOR) + if "ca" not in self._ssl or self._ssl["ca"] is None: + self._ssl["ca"] = "" + if bool("key" in self._ssl) != bool("cert" in self._ssl): + raise AttributeError( + "ssl_key and ssl_cert need to be both specified, or neither" + ) + # Make sure key/cert are set to None + if not set(("key", "cert")) <= set(self._ssl): + self._ssl["key"] = None + self._ssl["cert"] = None + elif (self._ssl["key"] is None) != (self._ssl["cert"] is None): + raise AttributeError( + "ssl_key and ssl_cert need to be both set, or neither" + ) + if "tls_versions" in self._ssl and self._ssl["tls_versions"] is not None: + self._validate_tls_versions() + + if ( + "tls_ciphersuites" in self._ssl + and self._ssl["tls_ciphersuites"] is not None + ): + self._validate_tls_ciphersuites() + + if self._conn_attrs is None: + self._conn_attrs = {} + elif not isinstance(self._conn_attrs, dict): + raise InterfaceError("conn_attrs must be of type dict") + else: + for attr_name, attr_value in self._conn_attrs.items(): + if attr_name in CONN_ATTRS_DN: + continue + # Validate name type + if not isinstance(attr_name, str): + raise InterfaceError( + "Attribute name should be a string, found: " + f"'{attr_name}' in '{self._conn_attrs}'" + ) + # Validate attribute name limit 32 characters + if len(attr_name) > 32: + raise InterfaceError( + f"Attribute name '{attr_name}' exceeds 32 characters limit size" + ) + # Validate names in connection attributes cannot start with "_" + if attr_name.startswith("_"): + raise InterfaceError( + "Key names in connection attributes cannot start with " + "'_', found: '{attr_name}'" + ) + # Validate value type + if not isinstance(attr_value, str): + raise InterfaceError( + f"Attribute '{attr_name}' value: '{attr_value}' must " + "be a string type" + ) + # Validate attribute value limit 1024 
characters + if len(attr_value) > 1024: + raise InterfaceError( + f"Attribute '{attr_name}' value: '{attr_value}' " + "exceeds 1024 characters limit size" + ) + + if self._client_flags & ClientFlag.CONNECT_ARGS: + self._add_default_conn_attrs() + + if "kerberos_auth_mode" in config and config["kerberos_auth_mode"] is not None: + if not isinstance(config["kerberos_auth_mode"], str): + raise InterfaceError("'kerberos_auth_mode' must be of type str") + kerberos_auth_mode = config["kerberos_auth_mode"].lower() + if kerberos_auth_mode == "sspi": + if os.name != "nt": + raise InterfaceError( + "'kerberos_auth_mode=SSPI' is only available on Windows" + ) + self._auth_plugin_class = "MySQLSSPIKerberosAuthPlugin" + elif kerberos_auth_mode == "gssapi": + self._auth_plugin_class = "MySQLKerberosAuthPlugin" + else: + raise InterfaceError( + "Invalid 'kerberos_auth_mode' mode. Please use 'SSPI' or 'GSSAPI'" + ) + + if ( + "krb_service_principal" in config + and config["krb_service_principal"] is not None + ): + self._krb_service_principal = config["krb_service_principal"] + if not isinstance(self._krb_service_principal, str): + raise InterfaceError( + KRB_SERVICE_PINCIPAL_ERROR.format(error="is not a string") + ) + if self._krb_service_principal == "": + raise InterfaceError( + KRB_SERVICE_PINCIPAL_ERROR.format( + error="can not be an empty string" + ) + ) + if "/" not in self._krb_service_principal: + raise InterfaceError( + KRB_SERVICE_PINCIPAL_ERROR.format(error="is incorrectly formatted") + ) + + if self._fido_callback: + warn_msg = ( + "The `fido_callback` connection argument is deprecated and it will be " + "removed in a future release of MySQL Connector/Python. " + "Use `webauth_callback` instead" + ) + warnings.warn(warn_msg, DeprecationWarning) + self._validate_callable("fido_callback", self._fido_callback, 1) + + if self._webauthn_callback: + self._validate_callable("webauth_callback", self._webauthn_callback, 1) + + def _add_default_conn_attrs(self) -> None: + """Adds the default connection attributes.""" + + @staticmethod + def _validate_callable( + option_name: str, callback: Union[str, Callable], num_args: int = 0 + ) -> None: + """Validates if it's a Python callable. + + Args: + option_name (str): Connection option name. + callback (str or callable): The fully qualified path to the callable or + a callable. + num_args (int): Number of positional arguments allowed. + + Raises: + ProgrammingError: If `callback` is not valid or wrong number of positional + arguments. + + .. versionadded:: 8.2.0 + """ + if isinstance(callback, str): + try: + callback = import_object(callback) + except ValueError as err: + raise ProgrammingError(f"{err}") from err + + if not callable(callback): + raise ProgrammingError(f"Expected a callable for '{option_name}'") + + # Check if the callable signature has positional arguments + num_params = len(signature(callback).parameters) + if num_params != num_args: + raise ProgrammingError( + f"'{option_name}' requires {num_args} positional argument, but the " + f"callback provided has {num_params}" + ) + + @staticmethod + def _check_server_version(server_version: StrOrBytes) -> Tuple[int, ...]: + """Checks the MySQL version. + + This method will check the MySQL version and raise an InterfaceError + when it is not supported or invalid. It will return the version + as a tuple with major, minor and patch. + + Raises InterfaceError if invalid server version. 
+ + Returns tuple + """ + if isinstance(server_version, (bytearray, bytes)): + server_version = server_version.decode() + + regex_ver = re.compile(r"^(\d{1,2})\.(\d{1,2})\.(\d{1,3})(.*)") + match = regex_ver.match(server_version) + if not match: + raise InterfaceError("Failed parsing MySQL version") + + version = tuple(int(v) for v in match.groups()[0:3]) + if version < (4, 1): + raise InterfaceError(f"MySQL Version '{server_version}' is not supported") + + return version + + def get_server_version(self) -> Optional[Tuple[int, ...]]: + """Gets the MySQL version. + + Returns: + The MySQL server version as a tuple. If not previously connected, it will + return `None`. + """ + return self._server_version + + def get_server_info(self) -> Optional[str]: + """Gets the original MySQL version information. + + Returns: + The original MySQL server as text. If not previously connected, it will + return `None`. + """ + try: + return self._handshake["server_version_original"] # type: ignore[return-value] + except (TypeError, KeyError): + return None + + @property + @abstractmethod + def in_transaction(self) -> bool: + """Returns bool to indicate whether a transaction is active for the connection. + + The value is `True` regardless of whether you start a transaction using the + `start_transaction()` API call or by directly executing an SQL statement such + as START TRANSACTION or BEGIN. + + `in_transaction` was added in MySQL Connector/Python 1.1.0. + + Examples: + ``` + >>> cnx.start_transaction() + >>> cnx.in_transaction + True + >>> cnx.commit() + >>> cnx.in_transaction + False + ``` + """ + + def set_client_flags(self, flags: Union[int, Sequence[int]]) -> int: + """Sets the client flags. + + The flags-argument can be either an int or a list (or tuple) of + ClientFlag-values. If it is an integer, it will set client_flags + to flags as is. + + If flags is a sequence, each item in the sequence sets the flag when the + value is positive or unsets it when negative (see example below). + + Args: + flags: A list (or tuple), each flag will be set or unset when it's negative. + + Returns: + integer: Client flags. + + Raises: + ProgrammingError: When the flags argument is not a set or an integer + bigger than 0. + + Examples: + ``` + For example, to unset `LONG_FLAG` and set the `FOUND_ROWS` flags: + >>> from mysql.connector.constants import ClientFlag + >>> cnx.set_client_flags([ClientFlag.FOUND_ROWS, -ClientFlag.LONG_FLAG]) + >>> cnx.reconnect() + ``` + """ + if isinstance(flags, int) and flags > 0: + self._client_flags = flags + elif isinstance(flags, (tuple, list)): + for flag in flags: + if flag < 0: + self._client_flags &= ~abs(flag) + else: + self._client_flags |= flag + else: + raise ProgrammingError("set_client_flags expect integer (>0) or set") + return self._client_flags + + def shutdown(self) -> NoReturn: + """Shuts down connection to MySQL Server. + + This method closes the socket. It raises no exceptions. + + Unlike `disconnect()`, `shutdown()` closes the client connection without + attempting to send a `QUIT` command to the server first. Thus, it will not + block if the connection is disrupted for some reason such as network failure. + """ + raise NotImplementedError + + def isset_client_flag(self, flag: int) -> bool: + """Checks if a client flag is set. + + Returns: + `True` if the client flag was set, `False` otherwise. 
+ """ + return (self._client_flags & flag) > 0 + + @property + def time_zone(self) -> str: + """Gets the current time zone.""" + return self.info_query("SELECT @@session.time_zone")[ + 0 + ] # type: ignore[return-value] + + @time_zone.setter + def time_zone(self, value: str) -> None: + """Sets the time zone.""" + self.cmd_query(f"SET @@session.time_zone = '{value}'") + self._time_zone = value + + @property + def sql_mode(self) -> str: + """Gets the SQL mode.""" + if self._sql_mode is None: + self._sql_mode = self.info_query("SELECT @@session.sql_mode")[0] + return self._sql_mode + + @sql_mode.setter + def sql_mode(self, value: Union[str, Sequence[int]]) -> None: + """Sets the SQL mode. + + This method sets the SQL Mode for the current connection. The value + argument can be either a string with comma separate mode names, or + a sequence of mode names. + + It is good practice to use the constants class `SQLMode`: + ``` + >>> from mysql.connector.constants import SQLMode + >>> cnx.sql_mode = [SQLMode.NO_ZERO_DATE, SQLMode.REAL_AS_FLOAT] + ``` + """ + if isinstance(value, (list, tuple)): + value = ",".join(value) + self.cmd_query(f"SET @@session.sql_mode = '{value}'") + self._sql_mode = value + + @abstractmethod + def info_query(self, query: str) -> Optional[RowType]: + """Sends a query which only returns 1 row. + + Shortcut for: + + ``` + cursor = self.cursor(buffered=True) + cursor.execute(query) + return cursor.fetchone() + ``` + + Args: + query: Statement to execute. + + Returns: + row: A tuple (RowType). + """ + + def set_login( + self, username: Optional[str] = None, password: Optional[str] = None + ) -> None: + """Sets login information for MySQL. + + Sets the username and/or password for the user connecting to the MySQL Server. + + Args: + username: Account's user name. + password: Account's password. + """ + if username is not None: + self._user = username.strip() + else: + self._user = "" + if password is not None: + self._password = password + else: + self._password = "" + + def set_unicode(self, value: bool = True) -> None: + """Toggles unicode mode. + + Sets whether we return string fields as unicode or not. + + Args: + value: A boolean - default is `True`. + """ + self._use_unicode = value + if self.converter: + self.converter.set_unicode(value) + + @property + def autocommit(self) -> bool: + """Gets whether autocommit is on or off.""" + value = self.info_query("SELECT @@session.autocommit")[0] + return value == 1 + + @autocommit.setter + def autocommit(self, value: bool) -> None: + """Toggles autocommit.""" + switch = "ON" if value else "OFF" + self.cmd_query(f"SET @@session.autocommit = {switch}") + self._autocommit = value + + @property + def get_warnings(self) -> bool: + """Gets whether this connection retrieves warnings automatically. + + This method returns whether this connection retrieves warnings + automatically. + + Returns `True`, or `False` when warnings are not retrieved. + """ + return self._get_warnings + + @get_warnings.setter + def get_warnings(self, value: bool) -> None: + """Sets whether warnings should be automatically retrieved. + + The toggle-argument must be a boolean. When True, cursors for this + connection will retrieve information about warnings (if any). + + Raises `ValueError` on error. + """ + if not isinstance(value, bool): + raise ValueError("Expected a boolean type") + self._get_warnings = value + + @property + def raise_on_warnings(self) -> bool: + """Gets whether this connection raises an error on warnings. 
+ + This method returns whether this connection will raise errors when + MySQL reports warnings. + + Returns `True` or `False`. + """ + return self._raise_on_warnings + + @raise_on_warnings.setter + def raise_on_warnings(self, value: bool) -> None: + """Sets whether warnings raise an error. + + The toggle-argument must be a boolean. When True, cursors for this + connection will raise an error when MySQL reports warnings. + + Raising on warnings implies retrieving warnings automatically. In + other words: warnings will be set to True. If set to False, warnings + will be also set to False. + + Raises `ValueError` on error. + """ + if not isinstance(value, bool): + raise ValueError("Expected a boolean type") + self._raise_on_warnings = value + # Don't disable warning retrieval if raising explicitly disabled + if value: + self._get_warnings = value + + @property + def unread_result(self) -> bool: + """Gets whether there is an unread result. + + This method is used by cursors to check whether another cursor still + needs to retrieve its result set. + + Returns `True`, or `False` when there is no unread result. + """ + return self._unread_result + + @unread_result.setter + def unread_result(self, value: bool) -> None: + """Sets whether there is an unread result. + + This method is used by cursors to let other cursors know there is + still a result set that needs to be retrieved. + + Raises `ValueError` on errors. + """ + if not isinstance(value, bool): + raise ValueError("Expected a boolean type") + self._unread_result = value + + @property + def charset(self) -> str: + """Returns the character set for current connection. + + This property returns the character set name of the current connection. + The server is queried when the connection is active. If not connected, + the configured character set name is returned. + + Returns a string. + """ + return self._character_set.get_info(self._charset_id)[0] + + @property + def python_charset(self) -> str: + """Returns the Python character set for current connection. + + This property returns the character set name of the current connection. + Note that, unlike property charset, this checks if the previously set + character set is supported by Python and if not, it returns the + equivalent character set that Python supports. + + Returns a string. + """ + encoding = self._character_set.get_info(self._charset_id)[0] + if encoding in ("utf8mb4", "utf8mb3", "binary"): + return "utf8" + return encoding + + def set_charset_collation( + self, charset: Optional[Union[int, str]] = None, collation: Optional[str] = None + ) -> None: + """Sets the character set and collation for the current connection. + + This method sets the character set and collation to be used for + the current connection. The charset argument can be either the + name of a character set as a string, or the numerical equivalent + as defined in constants.CharacterSet. + + When the collation is not given, the default will be looked up and + used. + + Args: + charset: Can be either the name of a character set, or the numerical + equivalent as defined in `constants.CharacterSet`. + collation: When collation is `None`, the default collation for the + character set is used. 
+ + Examples: + The following will set the collation for the latin1 character set to + `latin1_general_ci`: + ``` + >>> cnx = mysql.connector.connect(user='scott') + >>> cnx.set_charset_collation('latin1', 'latin1_general_ci') + ``` + """ + err_msg = "{} should be either integer, string or None" + if not isinstance(charset, (int, str)) and charset is not None: + raise ValueError(err_msg.format("charset")) + if not isinstance(collation, str) and collation is not None: + raise ValueError("collation should be either string or None") + + if charset: + if isinstance(charset, int): + ( + self._charset_id, + charset_name, + collation_name, + ) = self._character_set.get_charset_info(charset) + elif isinstance(charset, str): + ( + self._charset_id, + charset_name, + collation_name, + ) = self._character_set.get_charset_info(charset, collation) + else: + raise ValueError(err_msg.format("charset")) + elif collation: + ( + self._charset_id, + charset_name, + collation_name, + ) = self._character_set.get_charset_info(collation=collation) + else: + charset = DEFAULT_CONFIGURATION["charset"] + ( + self._charset_id, + charset_name, + collation_name, + ) = self._character_set.get_charset_info(charset, collation=None) + + self._execute_query(f"SET NAMES '{charset_name}' COLLATE '{collation_name}'") + + try: + # Required for C Extension + self.set_character_set_name(charset_name) + except AttributeError: + # Not required for pure Python connection + pass + + if self.converter: + self.converter.set_charset(charset_name, character_set=self._character_set) + + @property + def collation(self) -> str: + """Returns the collation for current connection. + + This property returns the collation name of the current connection. + The server is queried when the connection is active. If not connected, + the configured collation name is returned. + + Returns a string. + """ + return self._character_set.get_charset_info(self._charset_id)[2] + + @property + @abstractmethod + def connection_id(self) -> Optional[int]: + """MySQL connection ID.""" + + @abstractmethod + def _do_handshake(self) -> None: + """Gathers information of the MySQL server before authentication.""" + + @abstractmethod + def _open_connection(self) -> None: + """Opens the connection to the MySQL server.""" + + def _post_connection(self) -> None: + """Executes commands after connection has been established. + + This method executes commands after the connection has been + established. Some setting like autocommit, character set, and SQL mode + are set using this method. + """ + self.set_charset_collation(self._charset_id) + self.autocommit = self._autocommit + if self._time_zone: + self.time_zone = self._time_zone + if self._sql_mode: + self.sql_mode = self._sql_mode + if self._init_command: + self._execute_query(self._init_command) + + @abstractmethod + def disconnect(self) -> None: + """Disconnects from the MySQL server. + + This method tries to send a `QUIT` command and close the socket. It raises + no exceptions. + + `MySQLConnection.close()` is a synonymous for `MySQLConnection.disconnect()` + method name and more commonly used. + + To shut down the connection without sending a `QUIT` command first, + use `shutdown()`. + """ + + close: Callable[[], Any] = disconnect + + def connect(self, **kwargs: Any) -> None: + """Connects to the MySQL server. + + This method sets up the connection to the MySQL server. If no + arguments are given, it will use the already configured or default + values. 
+ + Args: + **kwargs: For a complete list of possible arguments, see [1]. + + Examples: + ``` + >>> cnx = MySQLConnection(user='joe', database='test') + ``` + + References: + [1]: https://dev.mysql.com/doc/connector-python/en/connector-python-connectargs.html + """ + # open connection using the default charset id + if kwargs: + self.config(**kwargs) + + self.disconnect() + self._open_connection() + + charset, collation = ( + kwargs.pop("charset", None), + kwargs.pop("collation", None), + ) + if charset or collation: + self._charset_id = self._character_set.get_charset_info(charset, collation)[ + 0 + ] + + if not self._client_flags & ClientFlag.CAN_HANDLE_EXPIRED_PASSWORDS: + self._post_connection() + else: + # the server does not allow to run any other statement different from + # ALTER when user's password has been expired - the server either + # disconnects the client or restricts the client to "sandbox mode" [1]. + # [1]: https://dev.mysql.com/doc/refman/5.7/en/expired-password-handling.html + try: + self.set_charset_collation(charset=self._charset_id) + except DatabaseError: + # get out of sandbox mode - with no FOR user clause, the statement sets + # the password for the current user. + self.cmd_query(f"SET PASSWORD = '{self._password1 or self._password}'") + + # Set charset and collation. + self.set_charset_collation(charset=self._charset_id) + + # go back to sandbox mode. + self.cmd_query("ALTER USER CURRENT_USER() PASSWORD EXPIRE") + + def reconnect(self, attempts: int = 1, delay: int = 0) -> None: + """Attempts to reconnect to the MySQL server. + + The argument `attempts` should be the number of times a reconnect + is tried. The `delay` argument is the number of seconds to wait between + each retry. + + You may want to set the number of attempts higher and use delay when + you expect the MySQL server to be down for maintenance or when you + expect the network to be temporary unavailable. + + Args: + attempts: Number of attempts to make when reconnecting. + delay: Use it (defined in seconds) if you want to wait between each retry. + + Raises: + InterfaceError: When reconnection fails. + """ + counter = 0 + span = None + + if self._tracer: + span = self._tracer.start_span( + name=CONNECTION_SPAN_NAME, kind=trace.SpanKind.CLIENT + ) + + try: + while counter != attempts: + counter = counter + 1 + try: + self.disconnect() + self.connect() + if self.is_connected(): + break + except (Error, IOError) as err: + if counter == attempts: + msg = ( + f"Can not reconnect to MySQL after {attempts} " + f"attempt(s): {err}" + ) + raise InterfaceError(msg) from err + if delay > 0: + sleep(delay) + except InterfaceError as interface_err: + if OTEL_ENABLED: + set_connection_span_attrs(self, span) + record_exception_event(span, interface_err) + end_span(span) + raise + + self._span = span + if OTEL_ENABLED: + set_connection_span_attrs(self, self._span) + + @abstractmethod + def is_connected(self) -> bool: + """Reports whether the connection to MySQL Server is available or not. + + Checks whether the connection to MySQL is available using the `ping()` method, + but unlike `ping()`, `is_connected()` returns `True` when the connection is + available, `False` otherwise + """ + + @abstractmethod + def ping(self, reconnect: bool = False, attempts: int = 1, delay: int = 0) -> None: + """Checks availability of the MySQL server. + + When reconnect is set to `True`, one or more attempts are made to try + to reconnect to the MySQL server using the reconnect()-method. 
+ + `delay` is the number of seconds to wait between each retry. + + When the connection is not available, an InterfaceError is raised. + + Args: + reconnect: If True, one or more `attempts` are made to try to reconnect + to the MySQL server, and these options are forwarded to the + `reconnect()` method. + attempts: Number of attempts to make when reconnecting. + delay: Use it (defined in seconds) if you want to wait between each retry. + + Raises: + InterfaceError: When the connection is not available. Use the + `is_connected()` method if you just want to check the + connection without raising an error. + """ + + @abstractmethod + def commit(self) -> None: + """Commits current transaction. + + This method is part of PEP 249 - Python Database API Specification v2.0. + + This method sends a COMMIT statement to the MySQL server, committing the + current transaction. Since by default Connector/Python does not autocommit. + + It is important to call this method after every transaction that modifies + data for tables that use transactional storage engines. + + Examples: + ``` + >>> stmt = "INSERT INTO employees (first_name) VALUES (%s), (%s)" + >>> cursor.execute(stmt, ('Jane', 'Mary')) + >>> cnx.commit() + ``` + """ + + @abstractmethod + def cursor( + self, + buffered: Optional[bool] = None, + raw: Optional[bool] = None, + prepared: Optional[bool] = None, + cursor_class: Optional[Type["MySQLCursorAbstract"]] = None, + dictionary: Optional[bool] = None, + named_tuple: Optional[bool] = None, + ) -> "MySQLCursorAbstract": + """Instantiates and returns a cursor. + + By default, `MySQLCursor` or `CMySQLCursor` is returned. Depending on the + options while connecting, a buffered and/or raw cursor is instantiated + instead. Also depending upon the cursor options, rows can be returned as + dictionary or named tuple. + + Dictionary and namedtuple based cursors are available with buffered output + but not raw. + + It is possible to also give a custom cursor through the `cursor_class` + parameter, but it needs to be a subclass of `mysql.connector.cursor.CursorBase` + or `mysql.connector.cursor_cext.CMySQLCursor` according to the type of + connection that's being used. + + Args: + buffered: If `True`, the cursor fetches all rows from the server after an + operation is executed. This is useful when queries return small + result sets. + raw: If `True`, the cursor skips the conversion from MySQL data types to + Python types when fetching rows. A raw cursor is usually used to get + better performance or when you want to do the conversion yourself. + prepared: If `True`, the cursor is used for executing prepared statements. + cursor_class: It can be used to pass a class to use for instantiating a + new cursor. It must be a subclass of `cursor.CursorBase` or + `cursor_cext.CMySQLCursor` according to the type of connection + that's being used. + dictionary: If `True`, the cursor returns rows as dictionaries. + named_tuple: If `True`, the cursor returns rows as named tuples. + + Returns: + cursor: A cursor object. + + Raises: + ProgrammingError: When `cursor_class` is not a subclass of + `MySQLCursorAbstract`. + ValueError: When cursor is not available. + """ + + @abstractmethod + def _execute_query(self, query: str) -> None: + """Executes a query.""" + + @abstractmethod + def rollback(self) -> None: + """Rollbacks current transaction. + + Sends a ROLLBACK statement to the MySQL server, undoing all data changes + from the current transaction. 
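[Editor's note: a hedged sketch of the `cursor()` options documented earlier in this section, reusing the `cnx` connection from the previous sketch; the `employees` table and its columns are hypothetical.]
```
cur = cnx.cursor(dictionary=True, buffered=True)
cur.execute("SELECT emp_no, first_name FROM employees LIMIT 5")
for row in cur.fetchall():
    # With dictionary=True each row is a dict keyed by column name.
    print(row["emp_no"], row["first_name"])
cur.close()

# A prepared-statement cursor reuses the server-side statement across executions.
prep = cnx.cursor(prepared=True)
prep.execute("SELECT first_name FROM employees WHERE emp_no = %s", (10001,))
print(prep.fetchone())
prep.close()
```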
By default, Connector/Python does not + autocommit, so it is possible to cancel transactions when using + transactional storage engines such as `InnoDB`. + + Examples: + ``` + >>> stmt = "INSERT INTO employees (first_name) VALUES (%s), (%s)" + >>> cursor.execute(stmt, ('Jane', 'Mary')) + >>> cnx.rollback() + ``` + """ + + def start_transaction( + self, + consistent_snapshot: bool = False, + isolation_level: Optional[str] = None, + readonly: Optional[bool] = None, + ) -> None: + """Starts a transaction. + + This method explicitly starts a transaction sending the + START TRANSACTION statement to the MySQL server. You can optionally + set whether there should be a consistent snapshot, which + isolation level you need or which access mode i.e. READ ONLY or + READ WRITE. + + Args: + consistent_snapshot: If `True`, Connector/Python sends WITH CONSISTENT + SNAPSHOT with the statement. MySQL ignores this for + isolation levels for which that option does not apply. + isolation_level: Permitted values are 'READ UNCOMMITTED', 'READ COMMITTED', + 'REPEATABLE READ', and 'SERIALIZABLE'. If the value is + `None`, no isolation level is sent, so the default level + applies. + readonly: Can be `True` to start the transaction in READ ONLY mode or + `False` to start it in READ WRITE mode. If readonly is omitted, + the server's default access mode is used. + + Raises: + ProgrammingError: When a transaction is already in progress + and when `ValueError` when `isolation_level` + specifies an Unknown level. + + Examples: + For example, to start a transaction with isolation level `SERIALIZABLE`, + you would do the following: + ``` + >>> cnx = mysql.connector.connect(...) + >>> cnx.start_transaction(isolation_level='SERIALIZABLE') + ``` + """ + if self.in_transaction: + raise ProgrammingError("Transaction already in progress") + + if isolation_level: + level = isolation_level.strip().replace("-", " ").upper() + levels = [ + "READ UNCOMMITTED", + "READ COMMITTED", + "REPEATABLE READ", + "SERIALIZABLE", + ] + + if level not in levels: + raise ValueError(f'Unknown isolation level "{isolation_level}"') + + self._execute_query(f"SET TRANSACTION ISOLATION LEVEL {level}") + + if readonly is not None: + if self._server_version < (5, 6, 5): + raise ValueError( + f"MySQL server version {self._server_version} does not " + "support this feature" + ) + + if readonly: + access_mode = "READ ONLY" + else: + access_mode = "READ WRITE" + self._execute_query(f"SET TRANSACTION {access_mode}") + + query = "START TRANSACTION" + if consistent_snapshot: + query += " WITH CONSISTENT SNAPSHOT" + self.cmd_query(query) + + def reset_session( + self, + user_variables: Optional[Dict[str, Any]] = None, + session_variables: Optional[Dict[str, Any]] = None, + ) -> None: + """Clears the current active session. + + This method resets the session state, if the MySQL server is 5.7.3 + or later active session will be reset without re-authenticating. + For other server versions session will be reset by re-authenticating. + + It is possible to provide a sequence of variables and their values to + be set after clearing the session. This is possible for both user + defined variables and session variables. + + Args: + user_variables: User variables map. + session_variables: System variables map. + + Raises: + OperationalError: If not connected. + InternalError: If there are unread results and InterfaceError on errors. 
+ + Examples: + ``` + >>> user_variables = {'var1': '1', 'var2': '10'} + >>> session_variables = {'wait_timeout': 100000, 'sql_mode': 'TRADITIONAL'} + >>> cnx.reset_session(user_variables, session_variables) + ``` + """ + if not self.is_connected(): + raise OperationalError("MySQL Connection not available") + + try: + self.cmd_reset_connection() + except (NotSupportedError, NotImplementedError): + if self._compress: + raise NotSupportedError( + "Reset session is not supported with compression for " + "MySQL server version 5.7.2 or earlier" + ) from None + self.cmd_change_user( + self._user, + self._password, + self._database, + self._charset_id, + ) + + if user_variables or session_variables: + cur = self.cursor() + if user_variables: + for key, value in user_variables.items(): + cur.execute(f"SET @`{key}` = {value}") + if session_variables: + for key, value in session_variables.items(): + cur.execute(f"SET SESSION `{key}` = {value}") + cur.close() + + def set_converter_class(self, convclass: Optional[Type[MySQLConverter]]) -> None: + """ + Sets the converter class to be used. + + Args: + convclass: Should be a class overloading methods and members of + `conversion.MySQLConverter`. + """ + if convclass and issubclass(convclass, MySQLConverterBase): + charset_name = self._character_set.get_info(self._charset_id)[0] + self._converter_class = convclass + self.converter = convclass(charset_name, self._use_unicode) + self.converter.str_fallback = self._converter_str_fallback + else: + raise TypeError( + "Converter class should be a subclass of conversion.MySQLConverterBase." + ) + + @abstractmethod + def get_row( + self, + binary: bool = False, + columns: Optional[List[DescriptionType]] = None, + raw: Optional[bool] = None, + prep_stmt: Optional[CMySQLPrepStmt] = None, + ) -> Tuple[Optional[RowType], Optional[Dict[str, Any]]]: + """Retrieves the next row of a query result set. + + Args: + binary: If `True`, read as binary result (only meaningful for pure Python + connections). + columns: Field types (only meaningful for pure Python connections and when + `binary=True`). + raw: If `True`, the converter class does not convert the parsed values. + prep_stmt: Prepared statement object (only meaningful for + C-ext connections). + + Returns: + tuple: The row as a tuple (RowType) containing byte objects, or `None` when + no more rows are available. (at position 0). EOF packet + information as a dictionary containing `status_flag` and + `warning_count` (at position 1). + + Raises: + InterfaceError: When all rows have been retrieved. + """ + + @abstractmethod + def get_rows( + self, + count: Optional[int] = None, + binary: bool = False, + columns: Optional[List[DescriptionType]] = None, + raw: Optional[bool] = None, + prep_stmt: Optional[CMySQLPrepStmt] = None, + ) -> Tuple[List[RowType], Optional[Dict[str, Any]]]: + """Gets all rows returned by the MySQL server. + + Args: + count: Used to obtain a given number of rows. If set to `None`, all + rows are fetched. + binary: If `True`, read as binary result (only meaningful for pure Python + connections). + columns: Field types (only meaningful for pure Python connections and when + `binary=True`). + raw: If `True`, the converter class does not convert the parsed values. + prep_stmt: Prepared statement object (only meaningful for + C-ext connections). + + Returns: + tuple: A list of tuples (RowType) containing the row data as byte objects, + or an empty list when no rows are available (at position 0). 
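[Editor's note: a minimal, hedged sketch of `set_converter_class()` as documented above. The subclass shown is hypothetical and changes nothing; real converters would override the relevant conversion methods of `MySQLConverter`.]
```
from mysql.connector.conversion import MySQLConverter

class MyConverter(MySQLConverter):
    # Hypothetical subclass: override the appropriate value-conversion
    # methods of MySQLConverter here to customize how column values are
    # decoded; this sketch keeps the base behaviour unchanged.
    pass

# Register the converter; a TypeError is raised if the class is not a
# subclass of conversion.MySQLConverterBase (see set_converter_class above).
cnx.set_converter_class(MyConverter)
```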
+ EOF packet information as a dictionary containing `status_flag` + and `warning_count` (at position 1). + + Raises: + InterfaceError: When all rows have been retrieved. + """ + + @abstractmethod + def cmd_init_db(self, database: str) -> Optional[Dict[str, Any]]: + """Changes the current database. + + This method makes specified database the default (current) database. + In subsequent queries, this database is the default for table references + that include no explicit database qualifier. + + Args: + database: Database to become the default (current) database. + + Returns: + ok_packet: Dictionary containing the OK packet information. + """ + + @abstractmethod + def cmd_query( + self, + query: str, + raw: Optional[bool] = False, + buffered: bool = False, + raw_as_string: bool = False, + ) -> Optional[Dict[str, Any]]: + """Sends a query to the MySQL server. + + This method sends the query to the MySQL server and returns the result. To send + multiple statements, use the `cmd_query_iter()` method instead. + + The returned dictionary contains information depending on what kind of query + was executed. If the query is a `SELECT` statement, the result contains + information about columns. Other statements return a dictionary containing + OK or EOF packet information. + + Errors received from the MySQL server are raised as exceptions. + + Arguments `raw`, `buffered` and `raw_as_string` are only meaningful + for `C-ext` connections. + + Args: + query: Statement to be executed. + raw: If `True`, the cursor skips the conversion from MySQL data types to + Python types when fetching rows. A raw cursor is usually used to get + better performance or when you want to do the conversion yourself. If + not provided, take its value from the MySQL instance. + buffered: If `True`, the cursor fetches all rows from the server after an + operation is executed. This is useful when queries return small + result sets. + raw_as_string: Is a special argument for Python v2 and returns `str` + instead of `bytearray`. + + Returns: + dictionary: `Result` or `OK packet` information + + Raises: + InterfaceError: When multiple results are found. + """ + + @abstractmethod + def cmd_query_iter( + self, statements: str + ) -> Generator[Mapping[str, Any], None, None]: + """Sends one or more statements to the MySQL server. + + Similar to the `cmd_query()` method, but instead returns a generator + object to iterate through results. It sends the statements to the + MySQL server and through the iterator you can get the results. + + Use `cmd_query_iter()` when sending multiple statements, and separate + the statements with semicolons. + + Args: + statements: Statements to be executed separated with semicolons. + + Returns: + generator: Generator object with `Result` or `OK packet` information. + + Examples: + The following example shows how to iterate through the results after + sending multiple statements: + ``` + >>> statement = 'SELECT 1; INSERT INTO t1 VALUES (); SELECT 2' + >>> for result in cnx.cmd_query_iter(statement): + >>> if 'columns' in result: + >>> columns = result['columns'] + >>> rows = cnx.get_rows() + >>> else: + >>> # do something useful with INSERT result + ``` + """ + + @abstractmethod + def cmd_refresh(self, options: int) -> Optional[Dict[str, Any]]: + """Sends the Refresh command to the MySQL server. + + `WARNING: This MySQL Server functionality is deprecated.` + + This method flushes tables or caches, or resets replication server + information. The connected user must have the RELOAD privilege. 
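[Editor's note: a hedged sketch of the low-level `cmd_query()`/`get_rows()` flow described above, reusing `cnx`. In most applications the cursor API is preferable; the result-dictionary keys follow the packet descriptions in these docstrings.]
```
result = cnx.cmd_query("SELECT 1 AS one")
if "columns" in result:
    # A result set was produced: fetch the rows and the EOF information.
    rows, eof = cnx.get_rows()
    print(rows)  # raw row data; byte objects for the pure-Python connection
    print(eof)   # dictionary with status_flag / warning_count
else:
    # Statements without a result set return OK-packet information instead.
    print(result)
```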
+ + The options argument should be a bitmask value constructed using + constants from the `constants.RefreshOption` class. + + The result is a dictionary with the OK packet information. + + Args: + options: Bitmask value constructed using constants from + the `constants.RefreshOption` class. + + Returns: + dictionary: OK packet information. + + Examples: + ``` + >>> from mysql.connector import RefreshOption + >>> refresh = RefreshOption.LOG | RefreshOption.THREADS + >>> cnx.cmd_refresh(refresh) + ``` + """ + + @abstractmethod + def cmd_quit(self) -> Optional[bytes]: + """Closes the current connection with the server. + + This method sends the `QUIT` command to the MySQL server, closing the + current connection. Since there is no response from the MySQL server, + the packet that was sent is returned. + + Returns: + packet_sent: `None` when using a C-ext connection, + else the actual packet that was sent. + """ + + @abstractmethod + def cmd_shutdown(self, shutdown_type: Optional[int] = None) -> None: + """Shuts down the MySQL Server. + + This method sends the SHUTDOWN command to the MySQL server. + The `shutdown_type` is not used, and it's kept for backward compatibility. + """ + + @abstractmethod + def cmd_statistics(self) -> Optional[Dict[str, Any]]: + """Sends the statistics command to the MySQL Server. + + Returns: + dict: Stats packet information about the MySQL server including uptime + in seconds and the number of running threads, questions, reloads, and + open tables. + """ + + @staticmethod + def cmd_process_info() -> NoReturn: + """Get the process list of the MySQL Server. + + This method is a placeholder to notify that the PROCESS_INFO command + is not supported by raising the `NotSupportedError`. + + The command + "SHOW PROCESSLIST" should be send using the cmd_query()-method or + using the `INFORMATION_SCHEMA` database. + + Raises `NotSupportedError` exception. + """ + raise NotSupportedError( + "Not implemented. Use SHOW PROCESSLIST or INFORMATION_SCHEMA" + ) + + @abstractmethod + def cmd_process_kill(self, mysql_pid: int) -> Optional[Dict[str, Any]]: + """Kills a MySQL process. + + Asks the server to kill the thread specified by `mysql_pid`. Although + still available, it is better to use the KILL SQL statement. + + Args: + mysql_pid: Process ID to be killed. + + Returns: + ok_packet: Dictionary containing the OK packet information. + + Examples: + ``` + >>> cnx.cmd_process_kill(123) # using cmd_process_kill() + >>> cnx.cmd_query('KILL 123') # alternatively (recommended) + ``` + """ + + @abstractmethod + def cmd_debug(self) -> Optional[Dict[str, Any]]: + """Instructs the server to write debugging information to the error log. + + The connected user must have the `SUPER` privilege. + + Returns: + ok_packet: Dictionary containing the EOF (end-of-file) packet information. + """ + + @abstractmethod + def cmd_ping(self) -> Optional[Dict[str, Any]]: + """Checks whether the connection to the server is working. + + This method is not to be used directly. Use `ping()` or + `is_connected()` instead. + + Returns: + ok_packet: Dictionary containing the OK packet information. + """ + + @abstractmethod + def cmd_change_user( + self, + username: str = "", + password: str = "", + database: str = "", + charset: int = 45, + password1: str = "", + password2: str = "", + password3: str = "", + oci_config_file: str = "", + oci_config_profile: str = "", + ) -> Optional[Dict[str, Any]]: + """Changes the current logged in user. 
+ + It also causes the specified database to become the default (current) + database. It is also possible to change the character set using the + charset argument. + + Args: + username: New account's username. + password: New account's password. + database: Database to become the default (current) database. + charset: Client charset (see [1]), only the lower 8-bits. + password1: New account's password factor 1 - it's used instead + of `password` if set (higher precedence). + password2: New account's password factor 2. + password3: New account's password factor 3. + oci_config_file: OCI configuration file location (path-like string). + oci_config_profile: OCI configuration profile location (path-like string). + + Returns: + ok_packet: Dictionary containing the OK packet information. + + Examples: + ``` + >>> cnx.cmd_change_user(username='', password='', database='', charset=33) + ``` + + References: + [1]: https://dev.mysql.com/doc/dev/mysql-server/latest/\ + page_protocol_basic_character_set.html#a_protocol_character_set + """ + + @abstractmethod + def cmd_stmt_prepare( + self, statement: bytes + ) -> Union[Mapping[str, Any], CMySQLPrepStmt]: + """Prepares a MySQL statement. + + Args: + statement: statement to prepare. + + Returns: + prepared_stmt: A `Prepared Statement` structure - a dictionary + is returned when using a pure Python connection, and a + `_mysql_connector.MySQLPrepStmt` object is returned when + using a C-ext connection. + + References: + [1]: https://dev.mysql.com/doc/dev/mysql-server/latest/ + page_protocol_com_stmt_prepare.html + """ + + @abstractmethod + def cmd_stmt_execute( + self, + statement_id: Union[int, CMySQLPrepStmt], + data: Sequence[BinaryProtocolType] = (), + parameters: Sequence = (), + flags: int = 0, + ) -> Optional[Union[Dict[str, Any], Tuple]]: + """Executes a prepared MySQL statement. + + Args: + statement_id: Statement ID found in the dictionary returned by + `MySQLConnection.cmd_stmt_prepare` when using a pure Python + connection, or a `_mysql_connector.MySQLPrepStmt` instance + as returned by `CMySQLConnection.cmd_stmt_prepare` when + using a C-ext connection. + data: Data sequence against which the prepared statement will be executed. + parameters: Currently unused! + flags: see [1]. + + Returns: + dictionary or tuple: `OK packet` or `Result` information. + + Notes: + The previous method's signature applies to pure Python, the C-ext has + the following signature: + + ``` + def cmd_stmt_execute( + self, statement_id: CMySQLPrepStmt, *args: Any + ) -> Optional[Union[Dict[str, Any], Tuple]]: + ``` + + You should expect a similar returned value type, however, the input + is different. In this case `data` must be provided as positional + arguments instead of a sequence. + + References: + [1]: https://dev.mysql.com/doc/dev/mysql-server/latest/\ + page_protocol_com_stmt_execute.html + """ + + @abstractmethod + def cmd_stmt_close(self, statement_id: Union[int, CMySQLPrepStmt]) -> None: + """Deallocates a prepared MySQL statement. + + Args: + statement_id: Statement ID found in the dictionary returned by + `MySQLConnection.cmd_stmt_prepare` when using a pure Python + connection, or a `_mysql_connector.MySQLPrepStmt` instance + as returned by `CMySQLConnection.cmd_stmt_prepare` when + using a C-ext connection. + """ + + @abstractmethod + def cmd_stmt_send_long_data( + self, statement_id: Union[int, CMySQLPrepStmt], param_id: int, data: BinaryIO + ) -> int: + """Sends data for a column. + + Currently, not implemented for the C-ext. 
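[Editor's note: the `cmd_stmt_prepare()`/`cmd_stmt_execute()`/`cmd_stmt_close()` trio documented above is rarely called directly; a prepared cursor drives the same flow. A hedged sketch of the high-level route, reusing `cnx`; table and values are hypothetical.]
```
# cursor(prepared=True) uses the prepared-statement commands internally.
cur = cnx.cursor(prepared=True)
stmt = "INSERT INTO employees (first_name, hire_date) VALUES (%s, %s)"

# The statement is prepared on the first execute() and reused afterwards.
cur.execute(stmt, ("Jane", "2005-02-12"))
cur.execute(stmt, ("Joe", "2006-05-23"))

cnx.commit()
cur.close()  # deallocates the server-side prepared statement
```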
+ + Args: + statement_id: Statement ID found in the dictionary returned by + `MySQLConnection.cmd_stmt_prepare` when using a pure Python + connection, or a `_mysql_connector.MySQLPrepStmt` instance + as returned by `CMySQLConnection.cmd_stmt_prepare` when + using a C-ext connection. + param_id: The parameter to supply data to [1]. + data: The actual payload to send [1]. + + Returns: + total_sent: The total number of bytes that were sent is returned. + + References: + [1]: https://dev.mysql.com/doc/dev/mysql-server/latest/\ + page_protocol_com_stmt_send_long_data.html + """ + + @abstractmethod + def cmd_stmt_reset(self, statement_id: Union[int, CMySQLPrepStmt]) -> None: + """Resets data for prepared statement sent as long data. + + Args: + statement_id: Statement ID found in the dictionary returned by + `MySQLConnection.cmd_stmt_prepare` when using a pure Python + connection, or a `_mysql_connector.MySQLPrepStmt` instance + as returned by `CMySQLConnection.cmd_stmt_prepare` when + using a C-ext connection. + """ + + @abstractmethod + def cmd_reset_connection(self) -> bool: + """Resets the session state without re-authenticating. + + Reset command only works on MySQL server 5.7.3 or later. + + This method permits the session state to be cleared without reauthenticating. + For MySQL servers older than 5.7.3 (when `COM_RESET_CONNECTION` was introduced) + , the `reset_session()` method can be used instead - that method resets the + session state by reauthenticating, which is more expensive. + + This method was added in Connector/Python 1.2.1. + + Returns: + `True` for a successful reset otherwise `False`. + """ + + +class MySQLCursorAbstract(ABC): + """Abstract cursor class + + Abstract class defining cursor class with method and members + required by the Python Database API Specification v2.0. + """ + + def __init__(self) -> None: + """Initialization""" + self._description: Optional[List[DescriptionType]] = None + self._rowcount: int = -1 + self._last_insert_id: Optional[int] = None + self._warnings: Optional[List[WarningType]] = None + self._warning_count: int = 0 + self._executed: Optional[bytes] = None + self._executed_list: List[StrOrBytes] = [] + self._stored_results: List[MySQLCursorAbstract] = [] + self.arraysize: int = 1 + + def __enter__(self) -> MySQLCursorAbstract: + return self + + def __exit__( + self, + exc_type: Type[BaseException], + exc_value: BaseException, + traceback: TracebackType, + ) -> None: + self.close() + + @abstractmethod + def callproc( + self, procname: str, args: Sequence = () + ) -> Optional[Union[Dict[str, RowItemType], RowType]]: + """Calls a stored procedure with the given arguments. + + The arguments will be set during this session, meaning they will be called like + ___arg where is an enumeration (+1) of the arguments. + + Args: + procname: The stored procedure name. + args: Sequence of parameters - it must contain one entry for each argument + that the procedure expects. + + Returns: + Does not return a value, but a result set will be available when the + CALL-statement executes successfully. + + `callproc()` returns a modified copy of the input sequence. `Input` + parameters are left untouched. `Output` and `input/output` parameters may + be replaced with new values. + + Result sets produced by the stored procedure are automatically fetched and + stored as `MySQLCursorBuffered` instances. + + The value returned (if any) is a `Dict` when cursor's subclass is + `MySQLCursorDict`, else a `Tuple` (RowType). 
+ + Raises: + InterfaceError: When something is wrong + + Examples: + 1) Defining the Stored Routine in MySQL: + ``` + CREATE PROCEDURE multiply(IN pFac1 INT, IN pFac2 INT, OUT pProd INT) + BEGIN + SET pProd := pFac1 * pFac2; + END; + ``` + + 2) Executing in Python: + ``` + >>> args = (5, 6, 0) # 0 is to hold value of the OUT parameter pProd + >>> cursor.callproc('multiply', args) + ('5', '6', 30L) + ``` + + References: + [1]: https://dev.mysql.com/doc/connector-python/en/\ + connector-python-api-mysqlcursor-callproc.html + """ + + @abstractmethod + def close(self) -> None: + """Close the cursor. + + Use close() when you are done using a cursor. This method closes the cursor, + resets all results, and ensures that the cursor object has no reference to its + original connection object. + + This method is part of PEP 249 - Python Database API Specification v2.0. + """ + + @abstractmethod + def execute( + self, + operation: str, + params: Union[ + Sequence[MySQLConvertibleType], Dict[str, MySQLConvertibleType] + ] = (), + multi: bool = False, + ) -> Optional[Generator[MySQLCursorAbstract, None, None]]: + """Executes the given operation substituting any markers with the given parameters. + + For example, getting all rows where id is 5: + ``` + >>> cursor.execute("SELECT * FROM t1 WHERE id = %s", (5,)) + ``` + + The `multi` argument should be set to `True` when executing multiple + statements in one operation. + + If warnings were generated, and `connection.get_warnings` is `True`, then + `self.warnings` will be a list containing these warnings. + + Args: + operation: Operation to be executed. + params: The parameters found in the tuple or dictionary params are bound + to the variables in the operation. Specify variables using `%s` or + `%(name)s` parameter style (that is, using format or pyformat style). + multi: If `multi` is set to `True`, `execute()` is able to execute multiple + statements specified in the operation string. + + Returns: + An iterator when `multi` is `True`, otherwise `None`. + + Raises: + InterfaceError: If `multi` is not set and multiple results are found. + + Examples: + The following example selects and inserts data in a single `execute()` + operation and displays the result of each statement: + + ``` + >>> operation = 'SELECT 1; INSERT INTO t1 VALUES (); SELECT 2' + >>> for result in cursor.execute(operation, multi=True): + >>> if result.with_rows: + >>> print("Rows produced by statement '{}':".format( + >>> result.statement)) + >>> print(result.fetchall()) + >>> else: + >>> print("Number of rows affected by statement '{}': {}".format( + >>> result.statement, result.rowcount)) + ``` + """ + + @abstractmethod + def executemany( + self, + operation: str, + seq_params: Sequence[ + Union[Sequence[MySQLConvertibleType], Dict[str, MySQLConvertibleType]] + ], + ) -> Optional[Generator[MySQLCursorAbstract, None, None]]: + """Executes the given operation multiple times. + + The `executemany()` method will execute the operation iterating + over the list of parameters in `seq_params`. + + `INSERT` statements are optimized by batching the data, that is + using the MySQL multiple rows syntax. + + Args: + operation: Operation to be executed. + seq_params: Parameters to be used when executing the operation. + + Returns: + Results are discarded. If they are needed, consider looping over data + using the `execute()` method. + + Examples: + An optimization is applied for inserts: The data values given by the + parameter sequences are batched using multiple-row syntax. 
The following + example inserts three records: + + ``` + >>> data = [ + >>> ('Jane', date(2005, 2, 12)), + >>> ('Joe', date(2006, 5, 23)), + >>> ('John', date(2010, 10, 3)), + >>> ] + >>> stmt = "INSERT INTO employees (first_name, hire_date) VALUES (%s, %s)" + >>> cursor.executemany(stmt, data) + ``` + + For the preceding example, the INSERT statement sent to MySQL is: + ``` + >>> INSERT INTO employees (first_name, hire_date) + >>> VALUES ('Jane', '2005-02-12'), ('Joe', '2006-05-23'), ('John', '2010-10-03') + ``` + """ + + @abstractmethod + def fetchone(self) -> Optional[Union[RowType, Dict[str, RowItemType]]]: + """Retrieves next row of a query result set + + Returns: + If the cursor's subclass is `MySQLCursorDict`, a dictionaries is + returned, otherwise a tuple (RowType). `None` is returned when there aren't + results to be read. + + Examples: + ``` + >>> cursor.execute("SELECT * FROM employees") + >>> row = cursor.fetchone() + >>> while row is not None: + >>> print(row) + >>> row = cursor.fetchone() + ``` + """ + + @abstractmethod + def fetchmany(self, size: int = 1) -> List[Union[RowType, Dict[str, RowItemType]]]: + """Fetches the next set of rows of a query result. + + Args: + size: The number of rows returned can be specified using the size + argument, which is one by default. + + Returns: + If the cursor's subclass is `MySQLCursorDict`, a list of dictionaries is + returned, otherwise a list of tuples (RowType). When no more rows are + available, it returns an empty list. + """ + + @abstractmethod + def fetchall(self) -> List[Union[RowType, Dict[str, RowItemType]]]: + """Fetches all (or all remaining) rows of a query result set. + + Returns: + If the cursor's subclass is `MySQLCursorDict`, a list of dictionaries is + returned, otherwise a list of tuples (RowType). + + Examples: + ``` + >>> cursor.execute("SELECT * FROM employees ORDER BY emp_no") + >>> head_rows = cursor.fetchmany(size=2) + >>> remaining_rows = cursor.fetchall() + ``` + """ + + @abstractmethod + def stored_results(self) -> Iterator[MySQLCursorAbstract]: + """Returns an iterator (of MySQLCursorAbstract subclass instances) for stored results. + + This method returns an iterator over results which are stored when + callproc() is called. The iterator will provide `MySQLCursorBuffered` + instances. + + Examples: + ``` + >>> cursor.callproc('myproc') + () + >>> for result in cursor.stored_results(): + ... print result.fetchall() + ... + [(1,)] + [(2,)] + ``` + """ + + def nextset(self) -> NoReturn: + """Not Implemented.""" + + def setinputsizes(self, sizes: Any) -> NoReturn: + """Not Implemented.""" + + def setoutputsize(self, size: Any, column: Any = None) -> NoReturn: + """Not Implemented.""" + + def reset(self, free: bool = True) -> None: + """Resets the cursor to default""" + + @property + @abstractmethod + def description(self) -> Optional[List[DescriptionType]]: + """This read-only property returns a list of tuples describing the columns in a + result set. + + A tuple is described as follows:: + ``` + (column_name, + type, + None, + None, + None, + None, + null_ok, + column_flags) # Addition to PEP-249 specs + ``` + + See [1] for more details and examples. + + Returns: + A list of tuples. + + References: + [1]: https://dev.mysql.com/doc/connector-python/en/\ + connector-python-api-mysqlcursor-description.html + """ + return self._description + + @property + @abstractmethod + def rowcount(self) -> int: + """Returns the number of rows produced or affected. 
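[Editor's note: a short, hedged sketch of the `description` and `rowcount` properties documented above, reusing `cnx`; the query and table are hypothetical.]
```
cur = cnx.cursor()
cur.execute("SELECT emp_no, first_name FROM employees LIMIT 1")
cur.fetchall()  # consume the result set

# Each entry is (column_name, type, None, None, None, None, null_ok, flags).
for column in cur.description:
    print(column[0], column[1])

print(cur.rowcount)  # number of rows produced by the SELECT
cur.close()
```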
+ + This property returns the number of rows produced by queries + such as `SELECT`, or affected rows when executing DML statements + like `INSERT` or `UPDATE`. + + Note that for non-buffered cursors it is impossible to know the + number of rows produced before having fetched them all. For those, + the number of rows will be -1 right after execution, and + incremented when fetching rows. + + Returns an integer. + """ + return self._rowcount + + @property + def lastrowid(self) -> Optional[int]: + """Returns the value generated for an AUTO_INCREMENT column. + + Returns the value generated for an AUTO_INCREMENT column by + the previous INSERT or UPDATE statement or `None` when there is + no such a value available. + + Returns a long value or `None`. + """ + return self._last_insert_id + + @property + def warnings(self) -> Optional[List[WarningType]]: + """Returns a list of tuples (WarningType) containing warnings generated + by the previously executed operation. + + Examples: + ``` + >>> cnx.get_warnings = True + >>> cursor.execute("SELECT 'a'+1") + >>> cursor.fetchall() + [(1.0,)] + >>> cursor.warnings + [(u'Warning', 1292, u"Truncated incorrect DOUBLE value: 'a'")] + ``` + """ + return self._warnings + + @property + def warning_count(self) -> int: + """Returns the number of warnings. + + This property returns the number of warnings generated by the + previously executed operation. + + Returns an integer value. + """ + return self._warning_count + + def fetchwarnings(self) -> Optional[List[WarningType]]: + """Returns a list of tuples (WarningType) containing warnings generated by + the previously executed operation. + + Examples: + ``` + >>> cnx.get_warnings = True + >>> cursor.execute("SELECT 'a'+1") + >>> cursor.fetchall() + [(1.0,)] + >>> cursor.fetchwarnings() + [(u'Warning', 1292, u"Truncated incorrect DOUBLE value: 'a'")] + ``` + """ + return self._warnings + + def get_attributes(self) -> Optional[List[Tuple[str, BinaryProtocolType]]]: + """Gets a list of query attributes from the connector's side. + + Returns: + List of existing query attributes. + """ + if hasattr(self, "_cnx"): + return self._cnx.query_attrs + if hasattr(self, "_connection"): + return self._connection.query_attrs + return None + + def add_attribute(self, name: str, value: BinaryProtocolType) -> None: + """Adds a query attribute and its value into the connector's query attributes list. + + Query attributes must be enabled on the server - they are disabled by default. A + warning is logged when setting query attributes for a server connection + that does not support them. + + Args: + name: Key name used to identify the attribute. + value: A value converted to the MySQL Binary Protocol. + + Raises: + ProgrammingError: If the value's conversion fails. + """ + if not isinstance(name, str): + raise ProgrammingError("Parameter `name` must be a string type") + if value is not None and not isinstance(value, MYSQL_PY_TYPES): + raise ProgrammingError( + f"Object {value} cannot be converted to a MySQL type" + ) + if hasattr(self, "_cnx"): + self._cnx.query_attrs_append((name, value)) + elif hasattr(self, "_connection"): + self._connection.query_attrs_append((name, value)) + + def remove_attribute(self, name: str) -> BinaryProtocolType: + """Removes a query attribute by name from the connector's query attributes list. + + If no match, `None` is returned, else the corresponding value is returned. + + Args: + name: Key name used to identify the attribute. + + Returns: + value: Attribute's value. 
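[Editor's note: a hedged sketch of the query-attribute helpers defined above, reusing `cnx`. The attribute names and values are hypothetical, and reading them back on the server assumes the server-side query attributes component is installed.]
```
cur = cnx.cursor()

# Attach attributes to be sent with subsequent statements on this connection.
cur.add_attribute("client_name", "reporting-job")
cur.add_attribute("batch_id", 42)
print(cur.get_attributes())  # [('client_name', 'reporting-job'), ('batch_id', 42)]

# Server side (assumption: the query_attributes component is installed):
#   SELECT mysql_query_attribute_string('client_name');

cur.remove_attribute("batch_id")  # returns the removed value, or None if absent
cur.clear_attributes()
cur.close()
```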
+ """ + if not isinstance(name, str): + raise ProgrammingError("Parameter `name` must be a string type") + if hasattr(self, "_cnx"): + return self._cnx.query_attrs_remove(name) + if hasattr(self, "_connection"): + return self._connection.query_attrs_remove(name) + return None + + def clear_attributes(self) -> None: + """Clears the list of query attributes on the connector's side.""" + if hasattr(self, "_cnx"): + self._cnx.query_attrs_clear() + elif hasattr(self, "_connection"): + self._connection.query_attrs_clear() diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__init__.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__init__.py new file mode 100644 index 0000000..e63924a --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__init__.py @@ -0,0 +1,255 @@ +# Copyright (c) 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""MySQL Connector/Python - MySQL driver written in Python.""" + +__all__ = ["CMySQLConnection", "MySQLConnection", "connect"] + +import random + +from typing import Any + +from ..constants import DEFAULT_CONFIGURATION +from ..errors import Error, InterfaceError, ProgrammingError +from ..pooling import ERROR_NO_CEXT +from .abstracts import MySQLConnectionAbstract +from .connection import MySQLConnection + +try: + import dns.exception + import dns.resolver +except ImportError: + HAVE_DNSPYTHON = False +else: + HAVE_DNSPYTHON = True + + +try: + from .connection_cext import CMySQLConnection +except ImportError: + CMySQLConnection = None + + +async def connect(*args: Any, **kwargs: Any) -> MySQLConnectionAbstract: + """Creates or gets a MySQL connection object. + + In its simpliest form, `connect()` will open a connection to a + MySQL server and return a `MySQLConnectionAbstract` subclass + object such as `MySQLConnection` or `CMySQLConnection`. + + When any connection pooling arguments are given, for example `pool_name` + or `pool_size`, a pool is created or a previously one is used to return + a `PooledMySQLConnection`. + + Args: + *args: N/A. + **kwargs: For a complete list of possible arguments, see [1]. 
If no arguments + are given, it uses the already configured or default values. + + Returns: + A `MySQLConnectionAbstract` subclass instance (such as `MySQLConnection` or + a `CMySQLConnection`) instance. + + Examples: + A connection with the MySQL server can be established using either the + `mysql.connector.connect()` method or a `MySQLConnectionAbstract` subclass: + ``` + >>> from mysql.connector.aio import MySQLConnection, HAVE_CEXT + >>> + >>> cnx1 = await mysql.connector.aio.connect(user='joe', database='test') + >>> cnx2 = MySQLConnection(user='joe', database='test') + >>> await cnx2.connect() + >>> + >>> cnx3 = None + >>> if HAVE_CEXT: + >>> from mysql.connector.aio import CMySQLConnection + >>> cnx3 = CMySQLConnection(user='joe', database='test') + ``` + + References: + [1]: https://dev.mysql.com/doc/connector-python/en/connector-python-connectargs.html + """ + # DNS SRV + dns_srv = kwargs.pop("dns_srv") if "dns_srv" in kwargs else False + + if not isinstance(dns_srv, bool): + raise InterfaceError("The value of 'dns-srv' must be a boolean") + + if dns_srv: + if not HAVE_DNSPYTHON: + raise InterfaceError( + "MySQL host configuration requested DNS " + "SRV. This requires the Python dnspython " + "module. Please refer to documentation" + ) + if "unix_socket" in kwargs: + raise InterfaceError( + "Using Unix domain sockets with DNS SRV lookup is not allowed" + ) + if "port" in kwargs: + raise InterfaceError( + "Specifying a port number with DNS SRV lookup is not allowed" + ) + if "failover" in kwargs: + raise InterfaceError( + "Specifying multiple hostnames with DNS SRV look up is not allowed" + ) + if "host" not in kwargs: + kwargs["host"] = DEFAULT_CONFIGURATION["host"] + + try: + srv_records = dns.resolver.query(kwargs["host"], "SRV") + except dns.exception.DNSException: + raise InterfaceError( + f"Unable to locate any hosts for '{kwargs['host']}'" + ) from None + + failover = [] + for srv in srv_records: + failover.append( + { + "host": srv.target.to_text(omit_final_dot=True), + "port": srv.port, + "priority": srv.priority, + "weight": srv.weight, + } + ) + + failover.sort(key=lambda x: (x["priority"], -x["weight"])) + kwargs["failover"] = [ + {"host": srv["host"], "port": srv["port"]} for srv in failover + ] + + # Failover + if "failover" in kwargs: + return await _get_failover_connection(**kwargs) + + # Use C Extension by default + use_pure = kwargs.get("use_pure", False) + if "use_pure" in kwargs: + del kwargs["use_pure"] # Remove 'use_pure' from kwargs + if not use_pure and CMySQLConnection is None: + raise ImportError(ERROR_NO_CEXT) + + if CMySQLConnection and not use_pure: + cnx = CMySQLConnection(*args, **kwargs) + else: + cnx = MySQLConnection(*args, **kwargs) + await cnx.connect() + return cnx + + +async def _get_failover_connection(**kwargs: Any) -> MySQLConnectionAbstract: + """Return a MySQL connection and try to failover if needed. + + An InterfaceError is raise when no MySQL is available. ValueError is + raised when the failover server configuration contains an illegal + connection argument. Supported arguments are user, password, host, port, + unix_socket and database. ValueError is also raised when the failover + argument was not provided. + + Returns MySQLConnection instance. 
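[Editor's note: a hedged sketch of the failover configuration handled by the helper above, using the asyncio `connect()` from this module; hostnames and credentials are placeholders.]
```
import asyncio

from mysql.connector.aio import connect

async def main():
    # Entries are tried by priority (100 before 50); each entry may only use
    # the supported keys listed above (user, password, host, port,
    # unix_socket, database, pool_name, pool_size, priority).
    cnx = await connect(
        user="scott",
        password="secret",
        database="test",
        failover=[
            {"host": "primary.example.com", "port": 3306, "priority": 100},
            {"host": "replica.example.com", "port": 3306, "priority": 50},
        ],
    )
    cur = await cnx.cursor()
    await cur.execute("SELECT @@hostname")
    print(await cur.fetchone())  # shows which host accepted the connection
    await cnx.close()

asyncio.run(main())
```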
+ """ + config = kwargs.copy() + try: + failover = config["failover"] + except KeyError: + raise ValueError("failover argument not provided") from None + del config["failover"] + + support_cnx_args = set( + [ + "user", + "password", + "host", + "port", + "unix_socket", + "database", + "pool_name", + "pool_size", + "priority", + ] + ) + + # First check if we can add all use the configuration + priority_count = 0 + for server in failover: + diff = set(server.keys()) - support_cnx_args + if diff: + arg = "s" if len(diff) > 1 else "" + lst = ", ".join(diff) + raise ValueError( + f"Unsupported connection argument {arg} in failover: {lst}" + ) + if hasattr(server, "priority"): + priority_count += 1 + + server["priority"] = server.get("priority", 100) + if server["priority"] < 0 or server["priority"] > 100: + raise InterfaceError( + "Priority value should be in the range of 0 to 100, " + f"got : {server['priority']}" + ) + if not isinstance(server["priority"], int): + raise InterfaceError( + "Priority value should be an integer in the range of 0 to " + f"100, got : {server['priority']}" + ) + + if 0 < priority_count < len(failover): + raise ProgrammingError( + "You must either assign no priority to any " + "of the routers or give a priority for " + "every router" + ) + + server_directory = {} + server_priority_list = [] + for server in sorted(failover, key=lambda x: x["priority"], reverse=True): + if server["priority"] not in server_directory: + server_directory[server["priority"]] = [server] + server_priority_list.append(server["priority"]) + else: + server_directory[server["priority"]].append(server) + + for priority in server_priority_list: + failover_list = server_directory[priority] + for _ in range(len(failover_list)): + last = len(failover_list) - 1 + index = random.randint(0, last) + server = failover_list.pop(index) + new_config = config.copy() + new_config.update(server) + new_config.pop("priority", None) + try: + return await connect(**new_config) + except Error: + # If we failed to connect, we try the next server + pass + + raise InterfaceError("Unable to connect to any of the target hosts") diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/__init__.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..3d852c9 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/__init__.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/abstracts.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/abstracts.cpython-310.pyc new file mode 100644 index 0000000..a8e4871 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/abstracts.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/authentication.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/authentication.cpython-310.pyc new file mode 100644 index 0000000..8f8d4e3 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/authentication.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/charsets.cpython-310.pyc 
b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/charsets.cpython-310.pyc new file mode 100644 index 0000000..7a7faa8 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/charsets.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/connection.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/connection.cpython-310.pyc new file mode 100644 index 0000000..eacce9f Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/connection.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/cursor.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/cursor.cpython-310.pyc new file mode 100644 index 0000000..d711b9a Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/cursor.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/logger.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/logger.cpython-310.pyc new file mode 100644 index 0000000..22578cb Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/logger.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/network.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/network.cpython-310.pyc new file mode 100644 index 0000000..4867a1b Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/network.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/protocol.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/protocol.cpython-310.pyc new file mode 100644 index 0000000..9ed61b4 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/protocol.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/utils.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/utils.cpython-310.pyc new file mode 100644 index 0000000..fd24cfe Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/__pycache__/utils.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/abstracts.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/abstracts.py new file mode 100644 index 0000000..0bdcd04 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/abstracts.py @@ -0,0 +1,1832 @@ +# Copyright (c) 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. 
The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +# mypy: disable-error-code="assignment,attr-defined" +# pylint: disable=dangerous-default-value + +"""Module gathering all abstract base classes.""" + +from __future__ import annotations + +__all__ = ["MySQLConnectionAbstract", "MySQLCursorAbstract", "ServerInfo"] + +import asyncio +import os +import re +import warnings +import weakref + +from abc import ABC, abstractmethod +from dataclasses import dataclass, field +from inspect import signature +from types import TracebackType +from typing import ( + TYPE_CHECKING, + Any, + AsyncGenerator, + BinaryIO, + Callable, + Dict, + Generator, + Iterator, + List, + Mapping, + NoReturn, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from ..abstracts import ( + DUPLICATED_IN_LIST_ERROR, + KRB_SERVICE_PINCIPAL_ERROR, + MYSQL_PY_TYPES, + TLS_V1_3_SUPPORTED, + TLS_VER_NO_SUPPORTED, + TLS_VERSION_DEPRECATED_ERROR, + TLS_VERSION_ERROR, +) +from ..constants import ( + CONN_ATTRS_DN, + DEFAULT_CONFIGURATION, + DEPRECATED_TLS_VERSIONS, + OPENSSL_CS_NAMES, + TLS_CIPHER_SUITES, + TLS_VERSIONS, + ClientFlag, +) +from ..conversion import MySQLConverter, MySQLConverterBase +from ..errors import ( + Error, + InterfaceError, + InternalError, + NotSupportedError, + ProgrammingError, +) +from ..types import ( + BinaryProtocolType, + DescriptionType, + EofPacketType, + HandShakeType, + OkPacketType, + ParamsSequenceType, + ResultType, + RowType, + StatsPacketType, + StrOrBytes, + WarningType, +) +from ..utils import GenericWrapper, import_object +from .authentication import MySQLAuthenticator +from .charsets import Charset, charsets +from .protocol import MySQLProtocol + +if TYPE_CHECKING: + from .network import MySQLTcpSocket, MySQLUnixSocket + + +IS_POSIX = os.name == "posix" +NAMED_TUPLE_CACHE: weakref.WeakValueDictionary[Any, Any] = weakref.WeakValueDictionary() + + +@dataclass +class ServerInfo: + """Stores the server information retrieved on the handshake. + + Also parses and validates the server version, storing it as a tuple. + """ + + protocol: int + version: str + version_tuple: Tuple[int, ...] = field(init=False) + thread_id: int + charset: int + status_flags: int + auth_plugin: str + auth_data: bytes + capabilities: int + query_attrs_is_supported: bool = False + + def __post_init__(self) -> None: + """Parse and validate server version. + + Raises: + InterfaceError: If parsing fails or MySQL version is not supported. 
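[Editor's note: a hedged illustration of the version parsing performed below, using the same regular expression; the version string is an arbitrary example.]
```
import re

version_re = re.compile(r"^(\d{1,2})\.(\d{1,2})\.(\d{1,3})(.*)")
match = version_re.match("8.0.33-0ubuntu0.22.04.2")
version_tuple = tuple(int(v) for v in match.groups()[0:3])
print(version_tuple)  # (8, 0, 33); anything below (4, 1) is rejected
```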
+ """ + version_re = re.compile(r"^(\d{1,2})\.(\d{1,2})\.(\d{1,3})(.*)") + match = version_re.match(self.version) + if not match: + raise InterfaceError("Failed parsing MySQL version") + + version = tuple(int(v) for v in match.groups()[0:3]) + if version < (4, 1): + raise InterfaceError(f"MySQL Version '{self.version}' is not supported") + self.version_tuple = version + + +class MySQLConnectionAbstract(ABC): + """Defines the MySQL connection interface.""" + + def __init__( + self, + *, + user: Optional[str] = None, + password: str = "", + host: str = "127.0.0.1", + port: int = 3306, + database: Optional[str] = None, + password1: str = "", + password2: str = "", + password3: str = "", + charset: str = "", + collation: str = "", + auth_plugin: Optional[str] = None, + client_flags: Optional[int] = None, + compress: bool = False, + consume_results: bool = False, + autocommit: bool = False, + time_zone: Optional[str] = None, + conn_attrs: Dict[str, str] = {}, + sql_mode: Optional[str] = None, + init_command: Optional[str] = None, + get_warnings: bool = False, + raise_on_warnings: bool = False, + buffered: bool = False, + raw: bool = False, + kerberos_auth_mode: Optional[str] = None, + krb_service_principal: Optional[str] = None, + fido_callback: Optional[Union[str, Callable[[str], None]]] = None, + webauthn_callback: Optional[Union[str, Callable[[str], None]]] = None, + allow_local_infile: bool = DEFAULT_CONFIGURATION["allow_local_infile"], + allow_local_infile_in_path: Optional[str] = DEFAULT_CONFIGURATION[ + "allow_local_infile_in_path" + ], + converter_class: Optional[MySQLConverter] = None, + converter_str_fallback: bool = False, + connection_timeout: int = DEFAULT_CONFIGURATION["connect_timeout"], + unix_socket: Optional[str] = None, + ssl_ca: Optional[str] = None, + ssl_cert: Optional[str] = None, + ssl_key: Optional[str] = None, + ssl_verify_cert: Optional[bool] = False, + ssl_verify_identity: Optional[bool] = False, + ssl_disabled: Optional[bool] = DEFAULT_CONFIGURATION["ssl_disabled"], + tls_versions: Optional[List[str]] = [], + loop: Optional[asyncio.AbstractEventLoop] = None, + ): + self._user: str = user + self._password: str = password + self._host: str = host + self._port: int = port + self._database: str = database + self._password1: str = password1 + self._password2: str = password2 + self._password3: str = password3 + self._unix_socket: str = unix_socket + self._connection_timeout: int = connection_timeout + self._connection_attrs: Dict[str, str] = conn_attrs + self._compress: bool = compress + self._consume_results: bool = consume_results + self._autocommit: bool = autocommit + self._time_zone: Optional[str] = time_zone + self._sql_mode: Optional[str] = sql_mode + self._init_command: Optional[str] = init_command + self._protocol: MySQLProtocol = MySQLProtocol() + self._socket: Optional[Union[MySQLTcpSocket, MySQLUnixSocket]] = None + self._charset: Optional[Charset] = None + self._charset_name: Optional[str] = charset + self._charset_collation: Optional[str] = collation + self._ssl_active: bool = False + self._ssl_disabled: bool = ssl_disabled + self._ssl_ca: Optional[str] = ssl_ca + self._ssl_cert: Optional[str] = ssl_cert + self._ssl_key: Optional[str] = ssl_key + self._ssl_verify_cert: Optional[bool] = ssl_verify_cert + self._ssl_verify_identity: Optional[bool] = ssl_verify_identity + self._tls_versions: Optional[List[str]] = tls_versions + self._tls_ciphersuites: Optional[List[str]] = [] + self._auth_plugin: Optional[str] = auth_plugin + self._auth_plugin_class: 
Optional[str] = None + self._handshake: Optional[HandShakeType] = None + self._loop: Optional[asyncio.AbstractEventLoop] = ( + loop or asyncio.get_event_loop() + ) + self._client_flags: int = client_flags or ClientFlag.get_default() + self._server_info: ServerInfo + self._cursors: weakref.WeakSet = weakref.WeakSet() + self._query_attrs: Dict[str, BinaryProtocolType] = {} + self._query_attrs_supported: int = False + self._columns_desc: List[DescriptionType] = [] + self._authenticator: MySQLAuthenticator = MySQLAuthenticator() + self._converter_class: Type[MySQLConverter] = converter_class or MySQLConverter + self._converter_str_fallback: bool = converter_str_fallback + self._kerberos_auth_mode: Optional[str] = kerberos_auth_mode + self._krb_service_principal: Optional[str] = krb_service_principal + self._allow_local_infile: bool = allow_local_infile + self._allow_local_infile_in_path: Optional[str] = allow_local_infile_in_path + self._get_warnings: bool = get_warnings + self.raise_on_warnings: bool = raise_on_warnings + self._buffered: bool = buffered + self._raw: bool = raw + self._have_next_result: bool = False + self._unread_result: bool = False + self._use_unicode: bool = True + self._in_transaction: bool = False + self._oci_config_file: Optional[str] = None + self._oci_config_profile: Optional[str] = None + self._fido_callback: Optional[Union[str, Callable[[str], None]]] = fido_callback + self._webauthn_callback: Optional[ + Union[str, Callable[[str], None]] + ] = webauthn_callback + + self.converter: Optional[MySQLConverter] = None + + self._validate_connection_options() + + async def __aenter__(self) -> MySQLConnectionAbstract: + if not self.is_socket_connected(): + await self.connect() + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + await self.close() + + def _validate_connection_options(self) -> None: + """Validate connection options.""" + if self._user: + try: + self._user = self._user.strip() + except AttributeError as err: + raise AttributeError("'user' must be a string") from err + + if self._compress: + self.set_client_flags([ClientFlag.COMPRESS]) + + if self._allow_local_infile_in_path: + infile_in_path = os.path.abspath(self._allow_local_infile_in_path) + if ( + infile_in_path + and os.path.exists(infile_in_path) + and not os.path.isdir(infile_in_path) + or os.path.islink(infile_in_path) + ): + raise AttributeError("allow_local_infile_in_path must be a directory") + if self._allow_local_infile or self._allow_local_infile_in_path: + self.set_client_flags([ClientFlag.LOCAL_FILES]) + else: + self.set_client_flags([-ClientFlag.LOCAL_FILES]) + + # Disallow the usage of some default authentication plugins + if self._auth_plugin == "authentication_webauthn_client": + raise InterfaceError( + f"'{self._auth_plugin}' cannot be used as the default authentication " + "plugin" + ) + + # Disable SSL for unix socket connections + if self._unix_socket and os.name == "posix": + self._ssl_disabled = True + + if self._ssl_disabled and self._auth_plugin == "mysql_clear_password": + raise InterfaceError( + "Clear password authentication is not supported over insecure " + " channels" + ) + + if not isinstance(self._port, int): + raise InterfaceError("TCP/IP port number should be an integer") + + if any([self._ssl_ca, self._ssl_cert, self._ssl_key]): + # Make sure both ssl_key/ssl_cert are set, or neither (XOR) + if not all([self._ssl_key, 
self._ssl_cert]): + raise AttributeError( + "ssl_key and ssl_cert need to be both specified, or neither" + ) + + if (self._ssl_key is None) != (self._ssl_cert is None): + raise AttributeError( + "ssl_key and ssl_cert need to be both set, or neither" + ) + if self._tls_versions: + self._validate_tls_versions() + + if self._tls_ciphersuites: + self._validate_tls_ciphersuites() + + if not isinstance(self._connection_attrs, dict): + raise InterfaceError("conn_attrs must be of type dict") + + for attr_name, attr_value in self._connection_attrs.items(): + if attr_name in CONN_ATTRS_DN: + continue + # Validate name type + if not isinstance(attr_name, str): + raise InterfaceError( + "Attribute name should be a string, found: " + f"'{attr_name}' in '{self._connection_attrs}'" + ) + # Validate attribute name limit 32 characters + if len(attr_name) > 32: + raise InterfaceError( + f"Attribute name '{attr_name}' exceeds 32 characters limit size" + ) + # Validate names in connection attributes cannot start with "_" + if attr_name.startswith("_"): + raise InterfaceError( + "Key names in connection attributes cannot start with " + "'_', found: '{attr_name}'" + ) + # Validate value type + if not isinstance(attr_value, str): + raise InterfaceError( + f"Attribute '{attr_name}' value: '{attr_value}' must " + "be a string type" + ) + # Validate attribute value limit 1024 characters + if len(attr_value) > 1024: + raise InterfaceError( + f"Attribute '{attr_name}' value: '{attr_value}' " + "exceeds 1024 characters limit size" + ) + + if self._client_flags & ClientFlag.CONNECT_ARGS: + self._add_default_conn_attrs() + + if self._kerberos_auth_mode: + if not isinstance(self._kerberos_auth_mode, str): + raise InterfaceError("'kerberos_auth_mode' must be of type str") + kerberos_auth_mode = self._kerberos_auth_mode.lower() + if kerberos_auth_mode == "sspi": + if os.name != "nt": + raise InterfaceError( + "'kerberos_auth_mode=SSPI' is only available on Windows" + ) + self._auth_plugin_class = "MySQLSSPIKerberosAuthPlugin" + elif kerberos_auth_mode == "gssapi": + self._auth_plugin_class = "MySQLKerberosAuthPlugin" + else: + raise InterfaceError( + "Invalid 'kerberos_auth_mode' mode. Please use 'SSPI' or 'GSSAPI'" + ) + + if self._krb_service_principal: + if not isinstance(self._krb_service_principal, str): + raise InterfaceError( + KRB_SERVICE_PINCIPAL_ERROR.format(error="is not a string") + ) + if self._krb_service_principal == "": + raise InterfaceError( + KRB_SERVICE_PINCIPAL_ERROR.format( + error="can not be an empty string" + ) + ) + if "/" not in self._krb_service_principal: + raise InterfaceError( + KRB_SERVICE_PINCIPAL_ERROR.format(error="is incorrectly formatted") + ) + + if self._fido_callback: + warn_msg = ( + "The `fido_callback` connection argument is deprecated and it will be " + "removed in a future release of MySQL Connector/Python. 
" + "Use `webauth_callback` instead" + ) + warnings.warn(warn_msg, DeprecationWarning) + self._validate_callable("fido_callback", self._fido_callback, 1) + + if self._webauthn_callback: + self._validate_callable("webauth_callback", self._webauthn_callback, 1) + + def _validate_tls_ciphersuites(self) -> None: + """Validates the tls_ciphersuites option.""" + tls_ciphersuites = [] + tls_cs = self._tls_ciphersuites + + if isinstance(tls_cs, str): + if not (tls_cs.startswith("[") and tls_cs.endswith("]")): + raise AttributeError( + f"tls_ciphersuites must be a list, found: '{tls_cs}'" + ) + tls_css = tls_cs[1:-1].split(",") + if not tls_css: + raise AttributeError( + "No valid cipher suite found in 'tls_ciphersuites' list" + ) + for _tls_cs in tls_css: + _tls_cs = tls_cs.strip().upper() + if _tls_cs: + tls_ciphersuites.append(_tls_cs) + + elif isinstance(tls_cs, (list, set)): + tls_ciphersuites = [tls_cs for tls_cs in tls_cs if tls_cs] + else: + raise AttributeError( + "tls_ciphersuites should be a list with one or more " + f"ciphersuites. Found: '{tls_cs}'" + ) + + tls_versions = ( + TLS_VERSIONS[:] if self._tls_versions is None else self._tls_versions[:] + ) + + # A newer TLS version can use a cipher introduced on + # an older version. + tls_versions.sort(reverse=True) + newer_tls_ver = tls_versions[0] + # translated_names[0] belongs to TLSv1, TLSv1.1 and TLSv1.2 + # translated_names[1] are TLSv1.3 only + translated_names: List[List[str]] = [[], []] + iani_cipher_suites_names = {} + ossl_cipher_suites_names: List[str] = [] + + # Old ciphers can work with new TLS versions. + # Find all the ciphers introduced on previous TLS versions. + for tls_ver in TLS_VERSIONS[: TLS_VERSIONS.index(newer_tls_ver) + 1]: + iani_cipher_suites_names.update(TLS_CIPHER_SUITES[tls_ver]) + ossl_cipher_suites_names.extend(OPENSSL_CS_NAMES[tls_ver]) + + for name in tls_ciphersuites: + if "-" in name and name in ossl_cipher_suites_names: + if name in OPENSSL_CS_NAMES["TLSv1.3"]: + translated_names[1].append(name) + else: + translated_names[0].append(name) + elif name in iani_cipher_suites_names: + translated_name = iani_cipher_suites_names[name] + if translated_name in translated_names: + raise AttributeError( + DUPLICATED_IN_LIST_ERROR.format( + list="tls_ciphersuites", value=translated_name + ) + ) + if name in TLS_CIPHER_SUITES["TLSv1.3"]: + translated_names[1].append(iani_cipher_suites_names[name]) + else: + translated_names[0].append(iani_cipher_suites_names[name]) + else: + raise AttributeError( + f"The value '{name}' in tls_ciphersuites is not a valid " + "cipher suite" + ) + if not translated_names[0] and not translated_names[1]: + raise AttributeError( + "No valid cipher suite found in the 'tls_ciphersuites' list" + ) + + self._tls_ciphersuites = [ + ":".join(translated_names[0]), + ":".join(translated_names[1]), + ] + + def _validate_tls_versions(self) -> None: + """Validates the tls_versions option.""" + tls_versions = [] + tls_version = self._tls_versions + + if isinstance(tls_version, str): + if not (tls_version.startswith("[") and tls_version.endswith("]")): + raise AttributeError( + f"tls_versions must be a list, found: '{tls_version}'" + ) + tls_vers = tls_version[1:-1].split(",") + for tls_ver in tls_vers: + tls_version = tls_ver.strip() + if tls_version == "": + continue + if tls_version in tls_versions: + raise AttributeError( + DUPLICATED_IN_LIST_ERROR.format( + list="tls_versions", value=tls_version + ) + ) + tls_versions.append(tls_version) + if tls_vers == ["TLSv1.3"] and not TLS_V1_3_SUPPORTED: 
+ raise AttributeError( + TLS_VER_NO_SUPPORTED.format(tls_version, TLS_VERSIONS) + ) + elif isinstance(tls_version, list): + if not tls_version: + raise AttributeError( + "At least one TLS protocol version must be specified in " + "'tls_versions' list" + ) + for tls_ver in tls_version: + if tls_ver in tls_versions: + raise AttributeError( + DUPLICATED_IN_LIST_ERROR.format( + list="tls_versions", value=tls_ver + ) + ) + tls_versions.append(tls_ver) + elif isinstance(tls_version, set): + for tls_ver in tls_version: + tls_versions.append(tls_ver) + else: + raise AttributeError( + "tls_versions should be a list with one or more of versions " + f"in {', '.join(TLS_VERSIONS)}. found: '{tls_versions}'" + ) + + if not tls_versions: + raise AttributeError( + "At least one TLS protocol version must be specified " + "in 'tls_versions' list when this option is given" + ) + + use_tls_versions = [] + deprecated_tls_versions = [] + invalid_tls_versions = [] + for tls_ver in tls_versions: + if tls_ver in TLS_VERSIONS: + use_tls_versions.append(tls_ver) + if tls_ver in DEPRECATED_TLS_VERSIONS: + deprecated_tls_versions.append(tls_ver) + else: + invalid_tls_versions.append(tls_ver) + + if use_tls_versions: + if use_tls_versions == ["TLSv1.3"] and not TLS_V1_3_SUPPORTED: + raise NotSupportedError( + TLS_VER_NO_SUPPORTED.format(tls_version, TLS_VERSIONS) + ) + use_tls_versions.sort() + self._tls_versions = use_tls_versions + elif deprecated_tls_versions: + raise NotSupportedError( + TLS_VERSION_DEPRECATED_ERROR.format( + deprecated_tls_versions, TLS_VERSIONS + ) + ) + elif invalid_tls_versions: + raise AttributeError(TLS_VERSION_ERROR.format(tls_ver, TLS_VERSIONS)) + + @staticmethod + def _validate_callable( + option_name: str, callback: Union[str, Callable], num_args: int = 0 + ) -> None: + """Validates if it's a Python callable. + + Args: + option_name (str): Connection option name. + callback (str or callable): The fully qualified path to the callable or + a callable. + num_args (int): Number of positional arguments allowed. + + Raises: + ProgrammingError: If `callback` is not valid or wrong number of positional + arguments. + + .. versionadded:: 8.2.0 + """ + if isinstance(callback, str): + try: + callback = import_object(callback) + except ValueError as err: + raise ProgrammingError(f"{err}") from err + + if not callable(callback): + raise ProgrammingError(f"Expected a callable for '{option_name}'") + + # Check if the callable signature has positional arguments + num_params = len(signature(callback).parameters) + if num_params != num_args: + raise ProgrammingError( + f"'{option_name}' requires {num_args} positional argument, but the " + f"callback provided has {num_params}" + ) + + @property + @abstractmethod + def connection_id(self) -> Optional[int]: + """MySQL connection ID.""" + + @property + def user(self) -> str: + """User used while connecting to MySQL.""" + return self._user + + @property + def server_host(self) -> str: + """MySQL server IP address or name.""" + return self._host + + @property + def server_port(self) -> int: + "MySQL server TCP/IP port." + return self._port + + @property + def unix_socket(self) -> Optional[str]: + "MySQL Unix socket file location." + return self._unix_socket + + @property + def database(self) -> str: + """Get the current database.""" + raise ProgrammingError( + "The use of async properties are not supported by Python. 
" + "Use `await get_database()` to get the database instead" + ) + + @database.setter + def database(self, value: str) -> None: + """Set the current database.""" + raise ProgrammingError( + "The use of async properties are not supported by Python. " + "Use `await set_database(name)` to set the database instead" + ) + + async def get_database(self) -> str: + """Get the current database.""" + result = await self.info_query("SELECT DATABASE()") + return result[0] # type: ignore[return-value] + + async def set_database(self, value: str) -> None: + """Set the current database.""" + await self.cmd_query(f"USE {value}") + + @property + def can_consume_results(self) -> bool: + """Returns whether to consume results""" + return self._consume_results + + @can_consume_results.setter + def can_consume_results(self, value: bool) -> None: + """Set if can consume results.""" + assert isinstance(value, bool) + self._consume_results = value + + @property + def in_transaction(self) -> bool: + """MySQL session has started a transaction.""" + return self._in_transaction + + @property + def loop(self) -> asyncio.AbstractEventLoop: + """Return the event loop.""" + return self._loop + + @property + def is_secure(self) -> bool: + """Return True if is a secure connection.""" + return self._ssl_active or (self._unix_socket is not None and IS_POSIX) + + @property + def query_attrs(self) -> List[Tuple[str, BinaryProtocolType]]: + """Returns query attributes list.""" + return list(self._query_attrs.items()) + + @property + def have_next_result(self) -> bool: + """Return if have next result.""" + return self._have_next_result + + @property + def autocommit(self) -> bool: + """Get whether autocommit is on or off.""" + raise ProgrammingError( + "The use of async properties are not supported by Python. " + "Use `await get_autocommit()` to get the autocommit instead" + ) + + @autocommit.setter + def autocommit(self, value: bool) -> None: + """Toggle autocommit.""" + raise ProgrammingError( + "The use of async properties are not supported by Python. " + "Use `await set_autocommit(value)` to set the autocommit instead" + ) + + async def get_autocommit(self) -> bool: + """Get whether autocommit is on or off.""" + value = await self.info_query("SELECT @@session.autocommit") + return value[0] == 1 + + async def set_autocommit(self, value: bool) -> None: + """Toggle autocommit.""" + switch = "ON" if value else "OFF" + await self.cmd_query(f"SET @@session.autocommit = {switch}") + self._autocommit = value + + @property + def time_zone(self) -> str: + """Gets the current time zone.""" + raise ProgrammingError( + "The use of async properties are not supported by Python. " + "Use `await get_time_zone()` to get the time zone instead" + ) + + @time_zone.setter + def time_zone(self, value: str) -> None: + """Sets the time zone.""" + raise ProgrammingError( + "The use of async properties are not supported by Python. " + "Use `await get_autocommit(value)` to get the autocommit instead" + ) + + async def get_time_zone(self) -> str: + """Gets the current time zone.""" + value = await self.info_query("SELECT @@session.time_zone") + return value[0] # type: ignore[return-value] + + async def set_time_zone(self, value: str) -> None: + """Sets the time zone.""" + await self.cmd_query(f"SET @@session.time_zone = '{value}'") + self._time_zone = value + + @property + async def sql_mode(self) -> str: + """Gets the SQL mode.""" + raise ProgrammingError( + "The use of async properties are not supported by Python. 
" + "Use `await get_sql_mode()` to get the SQL mode instead" + ) + + @sql_mode.setter + async def sql_mode(self, value: Union[str, Sequence[int]]) -> None: + """Sets the SQL mode. + + This method sets the SQL Mode for the current connection. The value + argument can be either a string with comma separate mode names, or + a sequence of mode names. + + It is good practice to use the constants class `SQLMode`: + ``` + >>> from mysql.connector.constants import SQLMode + >>> cnx.sql_mode = [SQLMode.NO_ZERO_DATE, SQLMode.REAL_AS_FLOAT] + ``` + """ + raise ProgrammingError( + "The use of async properties are not supported by Python. " + "Use `await set_sql_mode(value)` to set the SQL mode instead" + ) + + async def get_sql_mode(self) -> str: + """Gets the SQL mode.""" + if self._sql_mode is None: + self._sql_mode = (await self.info_query("SELECT @@session.sql_mode"))[0] + return self._sql_mode + + async def set_sql_mode(self, value: Union[str, Sequence[int]]) -> None: + """Sets the SQL mode. + + This method sets the SQL Mode for the current connection. The value + argument can be either a string with comma separate mode names, or + a sequence of mode names. + + It is good practice to use the constants class `SQLMode`: + ``` + >>> from mysql.connector.constants import SQLMode + >>> cnx.sql_mode = [SQLMode.NO_ZERO_DATE, SQLMode.REAL_AS_FLOAT] + ``` + """ + if isinstance(value, (list, tuple)): + value = ",".join(value) + await self.cmd_query(f"SET @@session.sql_mode = '{value}'") + self._sql_mode = value + + @property + def get_warnings(self) -> bool: + """Get whether this connection retrieves warnings automatically. + + This method returns whether this connection retrieves warnings automatically. + """ + return self._get_warnings + + @get_warnings.setter + def get_warnings(self, value: bool) -> None: + """Set whether warnings should be automatically retrieved. + + The toggle-argument must be a boolean. When True, cursors for this connection + will retrieve information about warnings (if any). + + Raises: + ValueError: When the value is not a bool type. + """ + if not isinstance(value, bool): + raise ValueError("Expected a boolean type") + self._get_warnings = value + + @property + def raise_on_warnings(self) -> bool: + """Get whether this connection raises an error on warnings. + + This method returns whether this connection will raise errors when MySQL + reports warnings. + """ + return self._raise_on_warnings + + @raise_on_warnings.setter + def raise_on_warnings(self, value: bool) -> None: + """Set whether warnings raise an error. + + The toggle-argument must be a boolean. When True, cursors for this connection + will raise an error when MySQL reports warnings. + + Raising on warnings implies retrieving warnings automatically. + In other words: warnings will be set to True. If set to False, warnings will + be also set to False. + + Raises: + ValueError: When the value is not a bool type. + """ + if not isinstance(value, bool): + raise ValueError("Expected a boolean type") + self._raise_on_warnings = value + # Don't disable warning retrieval if raising explicitly disabled + if value: + self._get_warnings = value + + @property + def unread_result(self) -> bool: + """Get whether there is an unread result. + + This method is used by cursors to check whether another cursor still needs to + retrieve its result set. + """ + return self._unread_result + + @unread_result.setter + def unread_result(self, value: bool) -> None: + """Set whether there is an unread result. 
+ + This method is used by cursors to let other cursors know there is still a + result set that needs to be retrieved. + + Raises: + ValueError: When the value is not a bool type. + """ + if not isinstance(value, bool): + raise ValueError("Expected a boolean type") + self._unread_result = value + + @property + def charset(self) -> str: + """Return the character set for current connection. + + This property returns the character set name of the current connection. + The server is queried when the connection is active. + If not connected, the configured character set name is returned. + """ + return self._charset.name if self._charset else "utf8" + + @property + def python_charset(self) -> str: + """Return the Python character set for current connection. + + This property returns the character set name of the current connection. + Note that, unlike property charset, this checks if the previously set + character set is supported by Python and if not, it returns the equivalent + character set that Python supports. + """ + if self._charset is None or self._charset.name in ( + "utf8mb4", + "utf8mb3", + "binary", + ): + return "utf8" + return self._charset.name + + @abstractmethod + def _add_default_conn_attrs(self) -> None: + """Add the default connection attributes.""" + + @abstractmethod + async def _execute_query(self, query: str) -> ResultType: + """Execute a query. + + This method simply calls cmd_query() after checking for unread result. If there + are still unread result, an InterfaceError is raised. Otherwise whatever + cmd_query() returns is returned. + """ + + async def _post_connection(self) -> None: + """Executes commands after connection has been established. + + This method executes commands after the connection has been established. + Some setting like autocommit, character set, and SQL mode are set using this + method. + """ + await self.set_charset_collation(self._charset.charset_id) + await self.set_autocommit(self._autocommit) + if self._time_zone: + await self.set_time_zone(self._time_zone) + if self._sql_mode: + await self.set_sql_mode(self._sql_mode) + if self._init_command: + await self._execute_query(self._init_command) + + async def set_charset_collation( + self, charset: Optional[Union[int, str]] = None, collation: Optional[str] = None + ) -> None: + """Set the character set and collation for the current connection. + + This method sets the character set and collation to be used for the current + connection. The charset argument can be either the name of a character set as + a string, or the numerical equivalent as defined in constants.CharacterSet. + + When the collation is not given, the default will be looked up and used. + + Args: + charset: Can be either the name of a character set, or the numerical + equivalent as defined in `constants.CharacterSet`. + collation: When collation is `None`, the default collation for the + character set is used. 
+ + Examples: + The following will set the collation for the latin1 character set to + `latin1_general_ci`: + ``` + >>> cnx = mysql.connector.connect(user='scott') + >>> cnx.set_charset_collation('latin1', 'latin1_general_ci') + ``` + """ + err_msg = "{} should be either integer, string or None" + if not isinstance(charset, (int, str)) and charset is not None: + raise ValueError(err_msg.format("charset")) + if not isinstance(collation, str) and collation is not None: + raise ValueError("collation should be either string or None") + + if charset and collation: + charset_str: str = ( + charsets.get_by_id(charset).name + if isinstance(charset, int) + else charset + ) + self._charset = charsets.get_by_name_and_collation(charset_str, collation) + elif charset: + if isinstance(charset, int): + self._charset = charsets.get_by_id(charset) + elif isinstance(charset, str): + self._charset = charsets.get_by_name(charset) + else: + raise ValueError(err_msg.format("charset")) + elif collation: + self._charset = charsets.get_by_collation(collation) + else: + charset = DEFAULT_CONFIGURATION["charset"] + self._charset = charsets.get_by_name(charset) # type: ignore[arg-type] + + self._charset_name = self._charset.name + self._charset_collation = self._charset.collation + + await self.cmd_query( + f"SET NAMES '{self._charset.name}' COLLATE '{self._charset.collation}'" + ) + try: + # Required for C Extension + self.set_character_set_name(self._charset.name) + except AttributeError: + # Not required for pure Python connection + pass + + if self.converter: + self.converter.set_charset(self._charset.name) + + def isset_client_flag(self, flag: int) -> bool: + """Checks if a client flag is set. + + Returns: + `True` if the client flag was set, `False` otherwise. + """ + return (self._client_flags & flag) > 0 + + def set_allow_local_infile_in_path(self, path: str) -> None: + """Set the path that user can upload files. + + Args: + path (str): Path that user can upload files. + """ + + self._allow_local_infile_in_path = path + + def get_self(self) -> MySQLConnectionAbstract: + """Return self for weakref.proxy. + + This method is used when the original object is needed when using + weakref.proxy. + """ + return self + + def get_server_version(self) -> Optional[Tuple[int, ...]]: + """Gets the MySQL version. + + Returns: + The MySQL server version as a tuple. If not previously connected, it will + return `None`. + """ + return self._server_info.version # type: ignore[return-value] + + def get_server_info(self) -> Optional[str]: + """Gets the original MySQL version information. + + Returns: + The original MySQL server as text. If not previously connected, it will + return `None`. + """ + try: + return self._handshake["server_version_original"] # type: ignore[return-value] + except (TypeError, KeyError): + return None + + @abstractmethod + def is_socket_connected(self) -> bool: + """Reports whether the socket is connected. + + Instead of ping the server like ``is_connected()``, it only checks if the + socket connection flag is set. + """ + + @abstractmethod + async def is_connected(self) -> bool: + """Reports whether the connection to MySQL Server is available. + + This method checks whether the connection to MySQL is available. + It is similar to ``ping()``, but unlike the ``ping()`` method, either `True` + or `False` is returned and no exception is raised. 
+ """ + + @abstractmethod + async def ping( + self, reconnect: bool = False, attempts: int = 1, delay: int = 0 + ) -> bool: + """Check availability of the MySQL server. + + When reconnect is set to `True`, one or more attempts are made to try to + reconnect to the MySQL server using the ``reconnect()`` method. + + ``delay`` is the number of seconds to wait between each retry. + + When the connection is not available, an InterfaceError is raised. Use the + ``is_connected()`` method if you just want to check the connection without + raising an error. + + Raises: + InterfaceError: On errors. + """ + + def set_client_flags(self, flags: Union[int, Sequence[int]]) -> int: + """Set the client flags. + + The flags-argument can be either an int or a list (or tuple) of ClientFlag + values. If it is an integer, it will set client_flags to flags as is. + If flags is a list or tuple, each flag will be set or unset when it's negative. + + set_client_flags([ClientFlag.FOUND_ROWS,-ClientFlag.LONG_FLAG]) + + Raises: + ProgrammingError: When the flags argument is not a set or an integer bigger + than 0. + """ + if isinstance(flags, int) and flags > 0: + self._client_flags = flags + elif isinstance(flags, (tuple, list)): + for flag in flags: + if flag < 0: + self._client_flags &= ~abs(flag) + else: + self._client_flags |= flag + else: + raise ProgrammingError("set_client_flags expect integer (>0) or set") + return self._client_flags + + def set_converter_class(self, convclass: Optional[Type[MySQLConverter]]) -> None: + """Set the converter class to be used. + + This should be a class overloading methods and members of + conversion.MySQLConverter. + + Raises: + TypeError: When the class is not a subclass of `conversion.MySQLConverter`. + """ + if convclass and issubclass(convclass, MySQLConverterBase): + self._converter_class = convclass + self.converter = convclass(self._charset.name, self._use_unicode) + self.converter.str_fallback = self._converter_str_fallback + else: + raise TypeError( + "Converter class should be a subclass of conversion.MySQLConverter" + ) + + def query_attrs_append(self, value: Tuple[str, BinaryProtocolType]) -> None: + """Add element to the query attributes list on the connector's side. + + If an element in the query attributes list already matches + the attribute name provided, the new element will NOT be added. + + Args: + value: key-value as a 2-tuple. + """ + attr_name, attr_value = value + if attr_name not in self._query_attrs: + self._query_attrs[attr_name] = attr_value + + def query_attrs_remove(self, name: str) -> BinaryProtocolType: + """Remove element by name from the query attributes list. + + If no match, `None` is returned, else the corresponding value is returned. + + Args: + name: key name. + """ + return self._query_attrs.pop(name, None) + + def query_attrs_clear(self) -> None: + """Clears query attributes list on the connector's side.""" + self._query_attrs = {} + + async def handle_unread_result(self) -> None: + """Handle unread result. + + Consume pending results if is configured for it. + + Raises: + InternalError: When there are pending results and they were not consumed. 
+ """ + if self._consume_results: + await self.consume_results() + elif self.unread_result: + raise InternalError("Unread result found") + + async def consume_results(self) -> None: + """Consume pending results.""" + if self.unread_result: + await self.get_rows() + + async def info_query(self, query: StrOrBytes) -> Optional[RowType]: + """Send a query which only returns 1 row.""" + async with await self.cursor(buffered=True) as cursor: + await cursor.execute(query) + return await cursor.fetchone() + + def add_cursor(self, cursor: MySQLCursorAbstract) -> None: + """Add cursor to the weakref set.""" + self._cursors.add(cursor) + + def remove_cursor(self, cursor: MySQLCursorAbstract) -> None: + """Remove cursor from the weakref set.""" + self._cursors.remove(cursor) + + @abstractmethod + async def connect(self) -> None: + """Connect to the MySQL server.""" + + async def reconnect(self, attempts: int = 1, delay: int = 0) -> None: + """Attempts to reconnect to the MySQL server. + + The argument `attempts` should be the number of times a reconnect is tried. + The `delay` argument is the number of seconds to wait between each retry. + + You may want to set the number of attempts higher and use delay when you expect + the MySQL server to be down for maintenance or when you expect the network to + be temporary unavailable. + + Args: + attempts: Number of attempts to make when reconnecting. + delay: Use it (defined in seconds) if you want to wait between each retry. + + Raises: + InterfaceError: When reconnection fails. + """ + counter = 0 + while counter != attempts: + counter = counter + 1 + try: + await self.disconnect() + await self.connect() + if await self.is_connected(): + break + except (Error, IOError) as err: + if counter == attempts: + msg = ( + f"Can not reconnect to MySQL after {attempts} " + f"attempt(s): {err}" + ) + raise InterfaceError(msg) from err + if delay > 0: + await asyncio.sleep(delay) + + async def shutdown(self) -> NoReturn: + """Shuts down connection to MySQL Server. + + This method closes the socket. It raises no exceptions. + + Unlike `disconnect()`, `shutdown()` closes the client connection without + attempting to send a `QUIT` command to the server first. Thus, it will not + block if the connection is disrupted for some reason such as network failure. + """ + raise NotImplementedError + + @abstractmethod + async def close(self) -> None: + """Close the connection. + + It closes any opened cursor associated to this connection, and closes the + underling socket connection. + + `MySQLConnection.close()` is a synonymous for `MySQLConnection.disconnect()` + method name and more commonly used. + + This method tries to send a `QUIT` command and close the socket. It raises + no exceptions. + """ + + disconnect: Callable[[], Any] = close + + @abstractmethod + async def cursor( + self, + buffered: Optional[bool] = None, + raw: Optional[bool] = None, + prepared: Optional[bool] = None, + cursor_class: Optional[Type[MySQLCursorAbstract]] = None, + dictionary: Optional[bool] = None, + named_tuple: Optional[bool] = None, + ) -> MySQLCursorAbstract: + """Instantiate and return a cursor. + + By default, MySQLCursor is returned. Depending on the options while + connecting, a buffered and/or raw cursor is instantiated instead. + Also depending upon the cursor options, rows can be returned as dictionary or + named tuple. 
+ + It is possible to also give a custom cursor through the cursor_class + parameter, but it needs to be a subclass of + mysql.connector.aio.abstracts.MySQLCursorAbstract. + + Raises: + ProgrammingError: When cursor_class is not a subclass of + CursorBase. + ValueError: When cursor is not available. + """ + + @abstractmethod + async def get_row( + self, + binary: bool = False, + columns: Optional[List[DescriptionType]] = None, + raw: Optional[bool] = None, + ) -> Tuple[Optional[RowType], Optional[EofPacketType]]: + """Get the next rows returned by the MySQL server. + + This method gets one row from the result set after sending, for example, the + query command. The result is a tuple consisting of the row and the EOF packet. + If no row was available in the result set, the row data will be None. + """ + + @abstractmethod + async def get_rows( + self, + count: Optional[int] = None, + binary: bool = False, + columns: Optional[List[DescriptionType]] = None, + raw: Optional[bool] = None, + prep_stmt: Any = None, + ) -> Tuple[List[RowType], Optional[EofPacketType]]: + """Get all rows returned by the MySQL server. + + This method gets all rows returned by the MySQL server after sending, for + example, the query command. The result is a tuple consisting of a list of rows + and the EOF packet. + """ + + @abstractmethod + async def commit(self) -> None: + """Commit current transaction.""" + + @abstractmethod + async def rollback(self) -> None: + """Rollback current transaction.""" + + @abstractmethod + async def cmd_reset_connection(self) -> bool: + """Resets the session state without re-authenticating. + + Reset command only works on MySQL server 5.7.3 or later. + The result is True for a successful reset otherwise False. + """ + + @abstractmethod + async def cmd_init_db(self, database: str) -> OkPacketType: + """Change the current database. + + This method changes the current (default) database by sending the INIT_DB + command. The result is a dictionary containing the OK packet information. + """ + + @abstractmethod + async def cmd_query( + self, + query: StrOrBytes, + raw: bool = False, + buffered: bool = False, + raw_as_string: bool = False, + ) -> ResultType: + """Send a query to the MySQL server. + + This method sends the query to the MySQL server and returns the result. + + If there was a text result, a tuple will be returned consisting of the number + of columns and a list containing information about these columns. + + When the query doesn't return a text result, the OK or EOF packet information + as dictionary will be returned. In case the result was an error, exception + Error will be raised. + """ + + async def cmd_query_iter( + self, statements: StrOrBytes + ) -> Generator[ResultType, None, None]: + """Send one or more statements to the MySQL server. + + Similar to the cmd_query method, but instead returns a generator + object to iterate through results. It sends the statements to the + MySQL server and through the iterator you can get the results. + + statement = 'SELECT 1; INSERT INTO t1 VALUES (); SELECT 2' + for result in await cnx.cmd_query(statement, iterate=True): + if 'columns' in result: + columns = result['columns'] + rows = await cnx.get_rows() + else: + # do something useful with INSERT result + """ + + @abstractmethod + async def cmd_stmt_fetch(self, statement_id: int, rows: int = 1) -> None: + """Fetch a MySQL statement Result Set. + + This method will send the FETCH command to MySQL together with the given + statement id and the number of rows to fetch.
+ """ + + @abstractmethod + async def cmd_stmt_prepare( + self, statement: bytes + ) -> Mapping[str, Union[int, List[DescriptionType]]]: + """Prepare a MySQL statement. + + This method will send the PREPARE command to MySQL together with the given + statement. + """ + + @abstractmethod + async def cmd_stmt_execute( + self, + statement_id: Union[int, CMySQLPrepStmt], + data: Sequence[BinaryProtocolType] = (), + parameters: Sequence = (), + flags: int = 0, + ) -> Optional[Union[Dict[str, Any], Tuple]]: + """Execute a prepared MySQL statement.""" + + @abstractmethod + async def cmd_stmt_reset(self, statement_id: int) -> None: + """Reset data for prepared statement sent as long data. + + The result is a dictionary with OK packet information. + """ + + @abstractmethod + async def cmd_stmt_close(self, statement_id: int) -> None: + """Deallocate a prepared MySQL statement. + + This method deallocates the prepared statement using the statement_id. + Note that the MySQL server does not return anything. + """ + + @abstractmethod + async def cmd_refresh(self, options: int) -> OkPacketType: + """Send the Refresh command to the MySQL server. + + This method sends the Refresh command to the MySQL server. The options + argument should be a bitwise value using constants.RefreshOption. + + Usage example: + RefreshOption = mysql.connector.RefreshOption + refresh = RefreshOption.LOG | RefreshOption.INFO + cnx.cmd_refresh(refresh) + """ + + async def cmd_stmt_send_long_data( + self, statement_id: int, param_id: int, data: BinaryIO + ) -> int: + """Send data for a column. + + This methods send data for a column (for example BLOB) for statement identified + by statement_id. The param_id indicate which parameter the data belongs too. + The data argument should be a file-like object. + + Since MySQL does not send anything back, no error is raised. When the MySQL + server is not reachable, an OperationalError is raised. + + cmd_stmt_send_long_data should be called before cmd_stmt_execute. + + The total bytes send is returned. + """ + + @abstractmethod + async def cmd_quit(self) -> bytes: + """Close the current connection with the server. + + Send the QUIT command to the MySQL server, closing the current connection. + """ + + @abstractmethod + async def cmd_shutdown(self, shutdown_type: Optional[int] = None) -> None: + """Shut down the MySQL Server. + + This method sends the SHUTDOWN command to the MySQL server. + The `shutdown_type` is not used, and it's kept for backward compatibility. + """ + + @abstractmethod + async def cmd_statistics(self) -> StatsPacketType: + """Send the statistics command to the MySQL Server. + + This method sends the STATISTICS command to the MySQL server. The result is a + dictionary with various statistical information. + """ + + @abstractmethod + async def cmd_process_kill(self, mysql_pid: int) -> OkPacketType: + """Kill a MySQL process. + + This method send the PROCESS_KILL command to the server along with the + process ID. The result is a dictionary with the OK packet information. + """ + + @abstractmethod + async def cmd_debug(self) -> EofPacketType: + """Send the DEBUG command. + + This method sends the DEBUG command to the MySQL server, which requires the + MySQL user to have SUPER privilege. The output will go to the MySQL server + error log and the result of this method is a dictionary with EOF packet + information. + """ + + @abstractmethod + async def cmd_ping(self) -> OkPacketType: + """Send the PING command. + + This method sends the PING command to the MySQL server. 
It is used to check + if the the connection is still valid. The result of this method is dictionary + with OK packet information. + """ + + +class MySQLCursorAbstract(ABC): + """Defines the MySQL cursor interface.""" + + def __init__(self, connection: MySQLConnectionAbstract): + self._connection: MySQLConnectionAbstract = connection + self._loop: asyncio.AbstractEventLoop = connection.loop + self._description: Optional[List[DescriptionType]] = None + self._last_insert_id: Optional[int] = None + self._warnings: Optional[List[WarningType]] = None + self._warning_count: int = 0 + self._executed: Optional[StrOrBytes] = None + self._executed_list: List[StrOrBytes] = [] + self._stored_results: List[Any] = [] + self._binary: bool = False + self._raw: bool = False + self._rowcount: int = -1 + self._nextrow: Tuple[Optional[RowType], Optional[EofPacketType]] = ( + None, + None, + ) + self.arraysize: int = 1 + self._connection.add_cursor(self) + + async def __aenter__(self) -> MySQLCursorAbstract: + return self + + async def __aexit__( + self, + exc_type: Optional[Type[BaseException]] = None, + exc_value: Optional[BaseException] = None, + traceback: Optional[TracebackType] = None, + ) -> None: + await self.close() + + async def __aiter__(self) -> Iterator[RowType]: + """Iterate over result set. + + Iteration over the result set which calls self.fetchone() + and returns the next row. + """ + return self # type: ignore[return-value] + + async def __next__(self) -> RowType: + """ + Used for iterating over the result set. Calles self.fetchone() + to get the next row. + """ + try: + row = await self.fetchone() + except InterfaceError: + raise StopAsyncIteration from None + if not row: + raise StopAsyncIteration + return row + + @property + def description(self) -> Optional[List[DescriptionType]]: + """Return description of columns in a result. + + This property returns a list of tuples describing the columns in in a result + set. A tuple is described as follows: + + (column_name, + type, + None, + None, + None, + None, + null_ok, + column_flags) # Addition to PEP-249 specs + + Returns a list of tuples. + """ + return self._description + + @property + def rowcount(self) -> int: + """Return the number of rows produced or affected. + + This property returns the number of rows produced by queries such as a + SELECT, or affected rows when executing DML statements like INSERT or UPDATE. + + Note that for non-buffered cursors it is impossible to know the number of rows + produced before having fetched them all. For those, the number of rows will + be -1 right after execution, and incremented when fetching rows. + """ + return self._rowcount + + @property + def lastrowid(self) -> Optional[int]: + """Return the value generated for an AUTO_INCREMENT column. + + Returns the value generated for an AUTO_INCREMENT column by the previous + INSERT or UPDATE statement or None when there is no such value available. + """ + return self._last_insert_id + + @property + def warnings(self) -> Optional[List[WarningType]]: + """Return warnings.""" + return self._warnings + + def fetchwarnings(self) -> Optional[List[WarningType]]: + """Returns Warnings.""" + return self._warnings + + @property + def warning_count(self) -> int: + """Return the number of warnings. + + This property returns the number of warnings generated by the previously + executed operation. + """ + return self._warning_count + + @property + def column_names(self) -> Tuple[str, ...]: + """Returns column names. 
+ + This property returns the columns names as a tuple. + """ + if not self.description: + return tuple() + return tuple(d[0] for d in self.description) + + @property + def statement(self) -> Optional[str]: + """Returns the executed statement + + This property returns the executed statement. + When multiple statements were executed, the current statement in the iterator + will be returned. + """ + if self._executed is None: + return None + try: + return self._executed.strip().decode() # type: ignore[union-attr] + except (AttributeError, UnicodeDecodeError): + return self._executed.strip() # type: ignore[return-value] + + @property + def with_rows(self) -> bool: + """Returns whether the cursor could have rows returned. + + This property returns True when column descriptions are available and possibly + also rows, which will need to be fetched. + """ + return bool(self._description) + + @abstractmethod + def stored_results(self) -> Iterator[MySQLCursorAbstract]: + """Returns an iterator for stored results. + + This method returns an iterator over results which are stored when callproc() + is called. The iterator will provide MySQLCursorBuffered instances. + """ + + @abstractmethod + async def execute( + self, + operation: StrOrBytes, + params: Union[Sequence[Any], Dict[str, Any]] = (), + multi: bool = False, + ) -> None: + """Executes the given operation. + + Executes the given operation substituting any markers with the given + parameters. + + For example, getting all rows where id is 5: + await cursor.execute("SELECT * FROM t1 WHERE id = %s", (5,)) + + If the `multi`` parameter is used a `ProgrammingError` is raised. The method for + executing multiple statements is `executemulti()`. + + If warnings where generated, and connection.get_warnings is True, then + self._warnings will be a list containing these warnings. + + Raises: + ProgramingError: If multi parameter is used. + """ + + async def executemulti( + self, + operation: StrOrBytes, + params: Union[Sequence[Any], Dict[str, Any]] = (), + ) -> AsyncGenerator[MySQLCursorAbstract, None]: + """Execute multiple statements. + + Executes the given operation substituting any markers with the given + parameters. + """ + + @abstractmethod + async def executemany( + self, + operation: str, + seq_params: Sequence[ParamsSequenceType], + ) -> None: + """Prepare and execute a MySQL Prepared Statement many times. + + This method will prepare the given operation and execute with each tuple found + the list seq_params. + + If the cursor instance already had a prepared statement, it is first closed. + + executemany() simply calls execute(). + """ + + @abstractmethod + async def fetchone(self) -> Optional[RowType]: + """Return next row of a query result set. + + Raises: + InterfaceError: If there is no result to fetch. + + Returns: + tuple or None: A row from query result set. + """ + + @abstractmethod + async def fetchall(self) -> List[RowType]: + """Return all rows of a query result set. + + Raises: + InterfaceError: If there is no result to fetch. + + Returns: + list: A list of tuples with all rows of a query result set. + """ + + @abstractmethod + async def fetchmany(self, size: int = 1) -> List[Sequence[Any]]: + """Return the next set of rows of a query result set. + + When no more rows are available, it returns an empty list. + The number of rows returned can be specified using the size argument, which + defaults to one. + + Returns: + list: The next set of rows of a query result set. 
+ """ + + @abstractmethod + async def close(self) -> bool: + """Close the cursor.""" + + def getlastrowid(self) -> Optional[int]: + """Return the value generated for an AUTO_INCREMENT column. + + Returns the value generated for an AUTO_INCREMENT column by the previous + INSERT or UPDATE statement. + """ + return self._last_insert_id + + async def reset(self, free: bool = True) -> Any: + """Reset the cursor to default.""" + + def get_attributes(self) -> Optional[List[Tuple[str, BinaryProtocolType]]]: + """Gets a list of query attributes from the connector's side. + + Returns: + List of existing query attributes. + """ + if hasattr(self, "_cnx"): + return self._cnx.query_attrs + if hasattr(self, "_connection"): + return self._connection.query_attrs + return None + + def add_attribute(self, name: str, value: BinaryProtocolType) -> None: + """Add a query attribute and its value into the connector's query attributes. + + Query attributes must be enabled on the server - they are disabled by default. A + warning is logged when setting query attributes for a server connection + that does not support them. + + Args: + name: Key name used to identify the attribute. + value: A value converted to the MySQL Binary Protocol. + + Raises: + ProgrammingError: If the value's conversion fails. + """ + if not isinstance(name, str): + raise ProgrammingError("Parameter `name` must be a string type") + if value is not None and not isinstance(value, MYSQL_PY_TYPES): + raise ProgrammingError( + f"Object {value} cannot be converted to a MySQL type" + ) + self._connection.query_attrs_append((name, value)) + + def remove_attribute(self, name: str) -> BinaryProtocolType: + """Removes a query attribute by name from the connector's query attributes. + + If no match, `None` is returned, else the corresponding value is returned. + + Args: + name: Key name used to identify the attribute. + + Returns: + value: Attribute's value. + """ + if not isinstance(name, str): + raise ProgrammingError("Parameter `name` must be a string type") + return self._connection.query_attrs_remove(name) + + def clear_attributes(self) -> None: + """Clears the list of query attributes on the connector's side.""" + self._connection.query_attrs_clear() + + +class CMySQLPrepStmt(GenericWrapper): + + """Structure to represent a result from `CMySQLConnection.cmd_stmt_prepare`. + It can be used consistently as a type hint. + + `_mysql_connector.MySQLPrepStmt` isn't available when the C-ext isn't built. + + In this regard, `CmdStmtPrepareResult` acts as a proxy/wrapper entity for a + `_mysql_connector.MySQLPrepStmt` instance. + """ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/authentication.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/authentication.py new file mode 100644 index 0000000..3eb8fa6 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/authentication.py @@ -0,0 +1,321 @@ +# Copyright (c) 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. 
The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""Implementing support for MySQL Authentication Plugins.""" + +from __future__ import annotations + +__all__ = ["MySQLAuthenticator"] + +import copy + +from typing import TYPE_CHECKING, Any, Dict, Optional + +from ..errors import InterfaceError, NotSupportedError, get_exception +from ..protocol import ( + AUTH_SWITCH_STATUS, + DEFAULT_CHARSET_ID, + DEFAULT_MAX_ALLOWED_PACKET, + ERR_STATUS, + EXCHANGE_FURTHER_STATUS, + MFA_STATUS, + OK_STATUS, +) +from ..types import HandShakeType +from .logger import logger +from .plugins import MySQLAuthPlugin, get_auth_plugin +from .protocol import MySQLProtocol + +if TYPE_CHECKING: + from .network import MySQLSocket + + +class MySQLAuthenticator: + """Implements the authentication phase.""" + + def __init__(self) -> None: + """Constructor.""" + self._username: str = "" + self._passwords: Dict[int, str] = {} + self._plugin_config: Dict[str, Any] = {} + self._ssl_enabled: bool = False + self._auth_strategy: Optional[MySQLAuthPlugin] = None + self._auth_plugin_class: Optional[str] = None + + @property + def ssl_enabled(self) -> bool: + """Signals whether or not SSL is enabled.""" + return self._ssl_enabled + + @property + def plugin_config(self) -> Dict[str, Any]: + """Custom arguments that are being provided to the authentication plugin. + + The parameters defined here will override the ones defined in the + auth plugin itself. + + The plugin config is a read-only property - the plugin configuration + provided when invoking `authenticate()` is recorded and can be queried + by accessing this property. + + Returns: + dict: The latest plugin configuration provided when invoking + `authenticate()`. + """ + return self._plugin_config + + def _switch_auth_strategy( + self, + new_strategy_name: str, + strategy_class: Optional[str] = None, + username: Optional[str] = None, + password_factor: int = 1, + ) -> None: + """Switch the authorization plugin. + + Args: + new_strategy_name: New authorization plugin name to switch to. + strategy_class: New authorization plugin class to switch to + (has higher precedence than the authorization plugin name). + username: Username to be used - if not defined, the username + provided when `authentication()` was invoked is used. + password_factor: Up to three levels of authentication (MFA) are allowed, + hence you can choose the password corresponding to the 1st, + 2nd, or 3rd factor - 1st is the default. 
+ """ + if username is None: + username = self._username + + if strategy_class is None: + strategy_class = self._auth_plugin_class + + logger.debug("Switching to strategy %s", new_strategy_name) + self._auth_strategy = get_auth_plugin( + plugin_name=new_strategy_name, auth_plugin_class=strategy_class + )( + username, + self._passwords.get(password_factor, ""), + ssl_enabled=self.ssl_enabled, + ) + + async def _mfa_n_factor( + self, + sock: MySQLSocket, + pkt: bytes, + ) -> Optional[bytes]: + """Handle MFA (Multi-Factor Authentication) response. + + Up to three levels of authentication (MFA) are allowed. + + Args: + sock: Pointer to the socket connection. + pkt: MFA response. + + Returns: + ok_packet: If last server's response is an OK packet. + None: If last server's response isn't an OK packet and no ERROR was raised. + + Raises: + InterfaceError: If got an invalid N factor. + errors.ErrorTypes: If got an ERROR response. + """ + n_factor = 2 + while pkt[4] == MFA_STATUS: + if n_factor not in self._passwords: + raise InterfaceError( + "Failed Multi Factor Authentication (invalid N factor)" + ) + + new_strategy_name, auth_data = MySQLProtocol.parse_auth_next_factor(pkt) + self._switch_auth_strategy(new_strategy_name, password_factor=n_factor) + logger.debug("MFA %i factor %s", n_factor, self._auth_strategy.name) + + pkt = await self._auth_strategy.auth_switch_response( + sock, auth_data, **self._plugin_config + ) + + if pkt[4] == EXCHANGE_FURTHER_STATUS: + auth_data = MySQLProtocol.parse_auth_more_data(pkt) + pkt = await self._auth_strategy.auth_more_response( + sock, auth_data, **self._plugin_config + ) + + if pkt[4] == OK_STATUS: + logger.debug("MFA completed succesfully") + return pkt + + if pkt[4] == ERR_STATUS: + raise get_exception(pkt) + + n_factor += 1 + + logger.warning("MFA terminated with a no ok packet") + return None + + async def _handle_server_response( + self, + sock: MySQLSocket, + pkt: bytes, + ) -> Optional[bytes]: + """Handle server's response. + + Args: + sock: Pointer to the socket connection. + pkt: Server's response after completing the `HandShakeResponse`. + + Returns: + ok_packet: If last server's response is an OK packet. + None: If last server's response isn't an OK packet and no ERROR was raised. + + Raises: + errors.ErrorTypes: If got an ERROR response. + NotSupportedError: If got Authentication with old (insecure) passwords. + """ + if pkt[4] == AUTH_SWITCH_STATUS and len(pkt) == 5: + raise NotSupportedError( + "Authentication with old (insecure) passwords " + "is not supported. 
For more information, lookup " + "Password Hashing in the latest MySQL manual" + ) + + if pkt[4] == AUTH_SWITCH_STATUS: + logger.debug("Server's response is an auth switch request") + new_strategy_name, auth_data = MySQLProtocol.parse_auth_switch_request(pkt) + self._switch_auth_strategy(new_strategy_name) + pkt = await self._auth_strategy.auth_switch_response( + sock, auth_data, **self._plugin_config + ) + + if pkt[4] == EXCHANGE_FURTHER_STATUS: + logger.debug("Exchanging further packets") + auth_data = MySQLProtocol.parse_auth_more_data(pkt) + pkt = await self._auth_strategy.auth_more_response( + sock, auth_data, **self._plugin_config + ) + + if pkt[4] == OK_STATUS: + logger.debug("%s completed succesfully", self._auth_strategy.name) + return pkt + + if pkt[4] == MFA_STATUS: + logger.debug("Starting multi-factor authentication") + logger.debug("MFA 1 factor %s", self._auth_strategy.name) + return await self._mfa_n_factor(sock, pkt) + + if pkt[4] == ERR_STATUS: + raise get_exception(pkt) + + return None + + async def authenticate( + self, + sock: MySQLSocket, + handshake: HandShakeType, + username: str = "", + password1: str = "", + password2: str = "", + password3: str = "", + database: Optional[str] = None, + charset: int = DEFAULT_CHARSET_ID, + client_flags: int = 0, + max_allowed_packet: int = DEFAULT_MAX_ALLOWED_PACKET, + auth_plugin: Optional[str] = None, + auth_plugin_class: Optional[str] = None, + conn_attrs: Optional[Dict[str, str]] = None, + is_change_user_request: bool = False, + **plugin_config: Any, + ) -> bytes: + """Perform the authentication phase. + + During re-authentication you must set `is_change_user_request` to True. + + Args: + sock: Pointer to the socket connection. + handshake: Initial handshake. + username: Account's username. + password1: Account's password factor 1. + password2: Account's password factor 2. + password3: Account's password factor 3. + database: Initial database name for the connection. + charset: Client charset (see [1]), only the lower 8-bits. + client_flags: Integer representing client capabilities flags. + max_allowed_packet: Maximum packet size. + auth_plugin: Authorization plugin name. + auth_plugin_class: Authorization plugin class (has higher precedence + than the authorization plugin name). + conn_attrs: Connection attributes. + is_change_user_request: Whether is a `change user request` operation or not. + plugin_config: Custom configuration to be passed to the auth plugin + when invoked. The parameters defined here will override the + ones defined in the auth plugin itself. + + Returns: + ok_packet: OK packet. + + Raises: + InterfaceError: If OK packet is NULL. 
+ + References: + [1]: https://dev.mysql.com/doc/dev/mysql-server/latest/\ + page_protocol_basic_character_set.html#a_protocol_character_set + """ + # update credentials, plugin config and plugin class + self._username = username + self._passwords = {1: password1, 2: password2, 3: password3} + self._plugin_config = copy.deepcopy(plugin_config) + self._auth_plugin_class = auth_plugin_class + + # client's handshake response + response_payload, self._auth_strategy = MySQLProtocol.make_auth( + handshake=handshake, + username=username, + password=password1, + database=database, + charset=charset, + client_flags=client_flags, + max_allowed_packet=max_allowed_packet, + auth_plugin=auth_plugin, + auth_plugin_class=auth_plugin_class, + conn_attrs=conn_attrs, + is_change_user_request=is_change_user_request, + ssl_enabled=self.ssl_enabled, + plugin_config=self.plugin_config, + ) + + # client sends transaction response + send_args = (0, 0) if is_change_user_request else (None, None) + await sock.write(response_payload, *send_args) + + # server replies back + pkt = bytes(await sock.read()) + + ok_pkt = await self._handle_server_response(sock, pkt) + if ok_pkt is None: + raise InterfaceError("Got a NULL ok_pkt") from None + + return ok_pkt diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/charsets.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/charsets.py new file mode 100644 index 0000000..56953e6 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/charsets.py @@ -0,0 +1,686 @@ +# Copyright (c) 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""This module contains the MySQL Server Character Sets.""" + +__all__ = ["Charset", "charsets"] + +from collections import defaultdict +from dataclasses import dataclass +from typing import DefaultDict, Dict, Optional, Sequence, Tuple + +from ..errors import ProgrammingError + + +@dataclass +class Charset: + """Dataclass representing a character set.""" + + charset_id: int + name: str + collation: str + is_default: bool + + +class Charsets: + """MySQL supported character sets and collations class. 
+ + This class holds the list of character sets with their collations supported by + MySQL, making available methods to get character sets by name, collation, or ID. + It uses a sparse matrix or tree-like representation using a dict in a dict to hold + the character set name and collations combinations. + The list is hardcoded, so we avoid a database query when getting the name of the + used character set or collation. + + The call of ``charsets.set_mysql_major_version()`` should be done before using any + of the retrieval methods. + + Usage: + >>> from mysql.connector.aio.charsets import charsets + >>> charsets.set_mysql_major_version(8) + >>> charsets.get_by_name("utf-8") + Charset(charset_id=255, + name='utf8mb4', + collation='utf8mb4_0900_ai_ci', + is_default=True) + """ + + def __init__(self) -> None: + self._charset_id_store: Dict[int, Charset] = {} + self._collation_store: Dict[str, Charset] = {} + self._name_store: DefaultDict[str, Dict[str, Charset]] = defaultdict(dict) + self._mysql_major_version: Optional[int] = None + + def set_mysql_major_version(self, version: int) -> None: + """Set the MySQL major version. + + Sets what tuple should be used based on the MySQL major version to store the + list of character sets and collations. + + Args: + version: The MySQL major version (i.e. 8 or 5) + """ + self._mysql_major_version = version + self._charset_id_store.clear() + self._collation_store.clear() + self._name_store.clear() + + charsets_tuple: Sequence[Tuple[int, str, str, bool]] = None + if version == 8: + charsets_tuple = MYSQL_8_CHARSETS + elif version == 5: + charsets_tuple = MYSQL_5_CHARSETS + else: + raise ProgrammingError("Invalid MySQL major version") + + for charset_id, name, collation, is_default in charsets_tuple: + charset = Charset(charset_id, name, collation, is_default) + self._charset_id_store[charset_id] = charset + self._collation_store[collation] = charset + self._name_store[name][collation] = charset + + def get_by_id(self, charset_id: int) -> Charset: + """Get character set by ID. + + Args: + charset_id: The charset ID. + + Returns: + Charset: The Charset dataclass instance. + """ + try: + return self._charset_id_store[charset_id] + except KeyError as err: + raise ProgrammingError(f"Character set ID {charset_id} unknown") from err + + def get_by_collation(self, collation: str) -> Charset: + """Get character set by collation. + + Args: + collation: The collation name. + + Returns: + Charset: The Charset dataclass instance. + """ + try: + return self._collation_store[collation] + except KeyError as err: + raise ProgrammingError(f"Collation {collation} unknown") from err + + def get_by_name(self, name: str) -> Charset: + """Get character set by name. + + Args: + name: The charset name. + + Returns: + Charset: The Charset dataclass instance. + """ + try: + if name in ("utf8", "utf-8") and self._mysql_major_version == 8: + name = "utf8mb4" + for charset in self._name_store[name].values(): + if charset.is_default: + return charset + except KeyError as err: + raise ProgrammingError(f"Character set name {name} unknown") from err + raise ProgrammingError(f"No default was found for character set '{name}'") + + def get_by_name_and_collation(self, name: str, collation: str) -> Charset: + """Get character set by name and collation. + + Args: + name: The charset name. + collation: The collation name. + + Returns: + Charset: The Charset dataclass instance. 
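For illustration, a minimal standalone sketch of how the lookup methods above resolve character sets once the major version is set. It only assumes the module-level `charsets` instance created at the bottom of this file and that the vendored package is importable:

# Illustrative sketch only; exercises the Charsets registry described above.
from mysql.connector.aio.charsets import charsets

charsets.set_mysql_major_version(8)

# On MySQL 8, "utf8"/"utf-8" are aliased to utf8mb4, and the entry flagged
# is_default=True is returned when only a name is given.
print(charsets.get_by_name("utf-8"))                                   # id 255, utf8mb4_0900_ai_ci
print(charsets.get_by_id(255).collation)                               # utf8mb4_0900_ai_ci
print(charsets.get_by_collation("latin1_bin").charset_id)              # 47
print(charsets.get_by_name_and_collation("utf8mb4", "utf8mb4_bin"))    # id 46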
+ """ + try: + return self._name_store[name][collation] + except KeyError as err: + raise ProgrammingError( + f"Character set name '{name}' with collation '{collation}' not found" + ) from err + + +MYSQL_8_CHARSETS = ( + (1, "big5", "big5_chinese_ci", True), + (2, "latin2", "latin2_czech_cs", False), + (3, "dec8", "dec8_swedish_ci", True), + (4, "cp850", "cp850_general_ci", True), + (5, "latin1", "latin1_german1_ci", False), + (6, "hp8", "hp8_english_ci", True), + (7, "koi8r", "koi8r_general_ci", True), + (8, "latin1", "latin1_swedish_ci", True), + (9, "latin2", "latin2_general_ci", True), + (10, "swe7", "swe7_swedish_ci", True), + (11, "ascii", "ascii_general_ci", True), + (12, "ujis", "ujis_japanese_ci", True), + (13, "sjis", "sjis_japanese_ci", True), + (14, "cp1251", "cp1251_bulgarian_ci", False), + (15, "latin1", "latin1_danish_ci", False), + (16, "hebrew", "hebrew_general_ci", True), + (18, "tis620", "tis620_thai_ci", True), + (19, "euckr", "euckr_korean_ci", True), + (20, "latin7", "latin7_estonian_cs", False), + (21, "latin2", "latin2_hungarian_ci", False), + (22, "koi8u", "koi8u_general_ci", True), + (23, "cp1251", "cp1251_ukrainian_ci", False), + (24, "gb2312", "gb2312_chinese_ci", True), + (25, "greek", "greek_general_ci", True), + (26, "cp1250", "cp1250_general_ci", True), + (27, "latin2", "latin2_croatian_ci", False), + (28, "gbk", "gbk_chinese_ci", True), + (29, "cp1257", "cp1257_lithuanian_ci", False), + (30, "latin5", "latin5_turkish_ci", True), + (31, "latin1", "latin1_german2_ci", False), + (32, "armscii8", "armscii8_general_ci", True), + (33, "utf8mb3", "utf8mb3_general_ci", True), + (34, "cp1250", "cp1250_czech_cs", False), + (35, "ucs2", "ucs2_general_ci", True), + (36, "cp866", "cp866_general_ci", True), + (37, "keybcs2", "keybcs2_general_ci", True), + (38, "macce", "macce_general_ci", True), + (39, "macroman", "macroman_general_ci", True), + (40, "cp852", "cp852_general_ci", True), + (41, "latin7", "latin7_general_ci", True), + (42, "latin7", "latin7_general_cs", False), + (43, "macce", "macce_bin", False), + (44, "cp1250", "cp1250_croatian_ci", False), + (45, "utf8mb4", "utf8mb4_general_ci", False), + (46, "utf8mb4", "utf8mb4_bin", False), + (47, "latin1", "latin1_bin", False), + (48, "latin1", "latin1_general_ci", False), + (49, "latin1", "latin1_general_cs", False), + (50, "cp1251", "cp1251_bin", False), + (51, "cp1251", "cp1251_general_ci", True), + (52, "cp1251", "cp1251_general_cs", False), + (53, "macroman", "macroman_bin", False), + (54, "utf16", "utf16_general_ci", True), + (55, "utf16", "utf16_bin", False), + (56, "utf16le", "utf16le_general_ci", True), + (57, "cp1256", "cp1256_general_ci", True), + (58, "cp1257", "cp1257_bin", False), + (59, "cp1257", "cp1257_general_ci", True), + (60, "utf32", "utf32_general_ci", True), + (61, "utf32", "utf32_bin", False), + (62, "utf16le", "utf16le_bin", False), + (63, "binary", "binary", True), + (64, "armscii8", "armscii8_bin", False), + (65, "ascii", "ascii_bin", False), + (66, "cp1250", "cp1250_bin", False), + (67, "cp1256", "cp1256_bin", False), + (68, "cp866", "cp866_bin", False), + (69, "dec8", "dec8_bin", False), + (70, "greek", "greek_bin", False), + (71, "hebrew", "hebrew_bin", False), + (72, "hp8", "hp8_bin", False), + (73, "keybcs2", "keybcs2_bin", False), + (74, "koi8r", "koi8r_bin", False), + (75, "koi8u", "koi8u_bin", False), + (76, "utf8mb3", "utf8mb3_tolower_ci", False), + (77, "latin2", "latin2_bin", False), + (78, "latin5", "latin5_bin", False), + (79, "latin7", "latin7_bin", False), + (80, "cp850", 
"cp850_bin", False), + (81, "cp852", "cp852_bin", False), + (82, "swe7", "swe7_bin", False), + (83, "utf8mb3", "utf8mb3_bin", False), + (84, "big5", "big5_bin", False), + (85, "euckr", "euckr_bin", False), + (86, "gb2312", "gb2312_bin", False), + (87, "gbk", "gbk_bin", False), + (88, "sjis", "sjis_bin", False), + (89, "tis620", "tis620_bin", False), + (90, "ucs2", "ucs2_bin", False), + (91, "ujis", "ujis_bin", False), + (92, "geostd8", "geostd8_general_ci", True), + (93, "geostd8", "geostd8_bin", False), + (94, "latin1", "latin1_spanish_ci", False), + (95, "cp932", "cp932_japanese_ci", True), + (96, "cp932", "cp932_bin", False), + (97, "eucjpms", "eucjpms_japanese_ci", True), + (98, "eucjpms", "eucjpms_bin", False), + (99, "cp1250", "cp1250_polish_ci", False), + (101, "utf16", "utf16_unicode_ci", False), + (102, "utf16", "utf16_icelandic_ci", False), + (103, "utf16", "utf16_latvian_ci", False), + (104, "utf16", "utf16_romanian_ci", False), + (105, "utf16", "utf16_slovenian_ci", False), + (106, "utf16", "utf16_polish_ci", False), + (107, "utf16", "utf16_estonian_ci", False), + (108, "utf16", "utf16_spanish_ci", False), + (109, "utf16", "utf16_swedish_ci", False), + (110, "utf16", "utf16_turkish_ci", False), + (111, "utf16", "utf16_czech_ci", False), + (112, "utf16", "utf16_danish_ci", False), + (113, "utf16", "utf16_lithuanian_ci", False), + (114, "utf16", "utf16_slovak_ci", False), + (115, "utf16", "utf16_spanish2_ci", False), + (116, "utf16", "utf16_roman_ci", False), + (117, "utf16", "utf16_persian_ci", False), + (118, "utf16", "utf16_esperanto_ci", False), + (119, "utf16", "utf16_hungarian_ci", False), + (120, "utf16", "utf16_sinhala_ci", False), + (121, "utf16", "utf16_german2_ci", False), + (122, "utf16", "utf16_croatian_ci", False), + (123, "utf16", "utf16_unicode_520_ci", False), + (124, "utf16", "utf16_vietnamese_ci", False), + (128, "ucs2", "ucs2_unicode_ci", False), + (129, "ucs2", "ucs2_icelandic_ci", False), + (130, "ucs2", "ucs2_latvian_ci", False), + (131, "ucs2", "ucs2_romanian_ci", False), + (132, "ucs2", "ucs2_slovenian_ci", False), + (133, "ucs2", "ucs2_polish_ci", False), + (134, "ucs2", "ucs2_estonian_ci", False), + (135, "ucs2", "ucs2_spanish_ci", False), + (136, "ucs2", "ucs2_swedish_ci", False), + (137, "ucs2", "ucs2_turkish_ci", False), + (138, "ucs2", "ucs2_czech_ci", False), + (139, "ucs2", "ucs2_danish_ci", False), + (140, "ucs2", "ucs2_lithuanian_ci", False), + (141, "ucs2", "ucs2_slovak_ci", False), + (142, "ucs2", "ucs2_spanish2_ci", False), + (143, "ucs2", "ucs2_roman_ci", False), + (144, "ucs2", "ucs2_persian_ci", False), + (145, "ucs2", "ucs2_esperanto_ci", False), + (146, "ucs2", "ucs2_hungarian_ci", False), + (147, "ucs2", "ucs2_sinhala_ci", False), + (148, "ucs2", "ucs2_german2_ci", False), + (149, "ucs2", "ucs2_croatian_ci", False), + (150, "ucs2", "ucs2_unicode_520_ci", False), + (151, "ucs2", "ucs2_vietnamese_ci", False), + (159, "ucs2", "ucs2_general_mysql500_ci", False), + (160, "utf32", "utf32_unicode_ci", False), + (161, "utf32", "utf32_icelandic_ci", False), + (162, "utf32", "utf32_latvian_ci", False), + (163, "utf32", "utf32_romanian_ci", False), + (164, "utf32", "utf32_slovenian_ci", False), + (165, "utf32", "utf32_polish_ci", False), + (166, "utf32", "utf32_estonian_ci", False), + (167, "utf32", "utf32_spanish_ci", False), + (168, "utf32", "utf32_swedish_ci", False), + (169, "utf32", "utf32_turkish_ci", False), + (170, "utf32", "utf32_czech_ci", False), + (171, "utf32", "utf32_danish_ci", False), + (172, "utf32", "utf32_lithuanian_ci", False), 
+ (173, "utf32", "utf32_slovak_ci", False), + (174, "utf32", "utf32_spanish2_ci", False), + (175, "utf32", "utf32_roman_ci", False), + (176, "utf32", "utf32_persian_ci", False), + (177, "utf32", "utf32_esperanto_ci", False), + (178, "utf32", "utf32_hungarian_ci", False), + (179, "utf32", "utf32_sinhala_ci", False), + (180, "utf32", "utf32_german2_ci", False), + (181, "utf32", "utf32_croatian_ci", False), + (182, "utf32", "utf32_unicode_520_ci", False), + (183, "utf32", "utf32_vietnamese_ci", False), + (192, "utf8mb3", "utf8mb3_unicode_ci", False), + (193, "utf8mb3", "utf8mb3_icelandic_ci", False), + (194, "utf8mb3", "utf8mb3_latvian_ci", False), + (195, "utf8mb3", "utf8mb3_romanian_ci", False), + (196, "utf8mb3", "utf8mb3_slovenian_ci", False), + (197, "utf8mb3", "utf8mb3_polish_ci", False), + (198, "utf8mb3", "utf8mb3_estonian_ci", False), + (199, "utf8mb3", "utf8mb3_spanish_ci", False), + (200, "utf8mb3", "utf8mb3_swedish_ci", False), + (201, "utf8mb3", "utf8mb3_turkish_ci", False), + (202, "utf8mb3", "utf8mb3_czech_ci", False), + (203, "utf8mb3", "utf8mb3_danish_ci", False), + (204, "utf8mb3", "utf8mb3_lithuanian_ci", False), + (205, "utf8mb3", "utf8mb3_slovak_ci", False), + (206, "utf8mb3", "utf8mb3_spanish2_ci", False), + (207, "utf8mb3", "utf8mb3_roman_ci", False), + (208, "utf8mb3", "utf8mb3_persian_ci", False), + (209, "utf8mb3", "utf8mb3_esperanto_ci", False), + (210, "utf8mb3", "utf8mb3_hungarian_ci", False), + (211, "utf8mb3", "utf8mb3_sinhala_ci", False), + (212, "utf8mb3", "utf8mb3_german2_ci", False), + (213, "utf8mb3", "utf8mb3_croatian_ci", False), + (214, "utf8mb3", "utf8mb3_unicode_520_ci", False), + (215, "utf8mb3", "utf8mb3_vietnamese_ci", False), + (223, "utf8mb3", "utf8mb3_general_mysql500_ci", False), + (224, "utf8mb4", "utf8mb4_unicode_ci", False), + (225, "utf8mb4", "utf8mb4_icelandic_ci", False), + (226, "utf8mb4", "utf8mb4_latvian_ci", False), + (227, "utf8mb4", "utf8mb4_romanian_ci", False), + (228, "utf8mb4", "utf8mb4_slovenian_ci", False), + (229, "utf8mb4", "utf8mb4_polish_ci", False), + (230, "utf8mb4", "utf8mb4_estonian_ci", False), + (231, "utf8mb4", "utf8mb4_spanish_ci", False), + (232, "utf8mb4", "utf8mb4_swedish_ci", False), + (233, "utf8mb4", "utf8mb4_turkish_ci", False), + (234, "utf8mb4", "utf8mb4_czech_ci", False), + (235, "utf8mb4", "utf8mb4_danish_ci", False), + (236, "utf8mb4", "utf8mb4_lithuanian_ci", False), + (237, "utf8mb4", "utf8mb4_slovak_ci", False), + (238, "utf8mb4", "utf8mb4_spanish2_ci", False), + (239, "utf8mb4", "utf8mb4_roman_ci", False), + (240, "utf8mb4", "utf8mb4_persian_ci", False), + (241, "utf8mb4", "utf8mb4_esperanto_ci", False), + (242, "utf8mb4", "utf8mb4_hungarian_ci", False), + (243, "utf8mb4", "utf8mb4_sinhala_ci", False), + (244, "utf8mb4", "utf8mb4_german2_ci", False), + (245, "utf8mb4", "utf8mb4_croatian_ci", False), + (246, "utf8mb4", "utf8mb4_unicode_520_ci", False), + (247, "utf8mb4", "utf8mb4_vietnamese_ci", False), + (248, "gb18030", "gb18030_chinese_ci", True), + (249, "gb18030", "gb18030_bin", False), + (250, "gb18030", "gb18030_unicode_520_ci", False), + (255, "utf8mb4", "utf8mb4_0900_ai_ci", True), + (256, "utf8mb4", "utf8mb4_de_pb_0900_ai_ci", False), + (257, "utf8mb4", "utf8mb4_is_0900_ai_ci", False), + (258, "utf8mb4", "utf8mb4_lv_0900_ai_ci", False), + (259, "utf8mb4", "utf8mb4_ro_0900_ai_ci", False), + (260, "utf8mb4", "utf8mb4_sl_0900_ai_ci", False), + (261, "utf8mb4", "utf8mb4_pl_0900_ai_ci", False), + (262, "utf8mb4", "utf8mb4_et_0900_ai_ci", False), + (263, "utf8mb4", "utf8mb4_es_0900_ai_ci", False), 
+ (264, "utf8mb4", "utf8mb4_sv_0900_ai_ci", False), + (265, "utf8mb4", "utf8mb4_tr_0900_ai_ci", False), + (266, "utf8mb4", "utf8mb4_cs_0900_ai_ci", False), + (267, "utf8mb4", "utf8mb4_da_0900_ai_ci", False), + (268, "utf8mb4", "utf8mb4_lt_0900_ai_ci", False), + (269, "utf8mb4", "utf8mb4_sk_0900_ai_ci", False), + (270, "utf8mb4", "utf8mb4_es_trad_0900_ai_ci", False), + (271, "utf8mb4", "utf8mb4_la_0900_ai_ci", False), + (273, "utf8mb4", "utf8mb4_eo_0900_ai_ci", False), + (274, "utf8mb4", "utf8mb4_hu_0900_ai_ci", False), + (275, "utf8mb4", "utf8mb4_hr_0900_ai_ci", False), + (277, "utf8mb4", "utf8mb4_vi_0900_ai_ci", False), + (278, "utf8mb4", "utf8mb4_0900_as_cs", False), + (279, "utf8mb4", "utf8mb4_de_pb_0900_as_cs", False), + (280, "utf8mb4", "utf8mb4_is_0900_as_cs", False), + (281, "utf8mb4", "utf8mb4_lv_0900_as_cs", False), + (282, "utf8mb4", "utf8mb4_ro_0900_as_cs", False), + (283, "utf8mb4", "utf8mb4_sl_0900_as_cs", False), + (284, "utf8mb4", "utf8mb4_pl_0900_as_cs", False), + (285, "utf8mb4", "utf8mb4_et_0900_as_cs", False), + (286, "utf8mb4", "utf8mb4_es_0900_as_cs", False), + (287, "utf8mb4", "utf8mb4_sv_0900_as_cs", False), + (288, "utf8mb4", "utf8mb4_tr_0900_as_cs", False), + (289, "utf8mb4", "utf8mb4_cs_0900_as_cs", False), + (290, "utf8mb4", "utf8mb4_da_0900_as_cs", False), + (291, "utf8mb4", "utf8mb4_lt_0900_as_cs", False), + (292, "utf8mb4", "utf8mb4_sk_0900_as_cs", False), + (293, "utf8mb4", "utf8mb4_es_trad_0900_as_cs", False), + (294, "utf8mb4", "utf8mb4_la_0900_as_cs", False), + (296, "utf8mb4", "utf8mb4_eo_0900_as_cs", False), + (297, "utf8mb4", "utf8mb4_hu_0900_as_cs", False), + (298, "utf8mb4", "utf8mb4_hr_0900_as_cs", False), + (300, "utf8mb4", "utf8mb4_vi_0900_as_cs", False), + (303, "utf8mb4", "utf8mb4_ja_0900_as_cs", False), + (304, "utf8mb4", "utf8mb4_ja_0900_as_cs_ks", False), + (305, "utf8mb4", "utf8mb4_0900_as_ci", False), + (306, "utf8mb4", "utf8mb4_ru_0900_ai_ci", False), + (307, "utf8mb4", "utf8mb4_ru_0900_as_cs", False), + (308, "utf8mb4", "utf8mb4_zh_0900_as_cs", False), + (309, "utf8mb4", "utf8mb4_0900_bin", False), + (310, "utf8mb4", "utf8mb4_nb_0900_ai_ci", False), + (311, "utf8mb4", "utf8mb4_nb_0900_as_cs", False), + (312, "utf8mb4", "utf8mb4_nn_0900_ai_ci", False), + (313, "utf8mb4", "utf8mb4_nn_0900_as_cs", False), + (314, "utf8mb4", "utf8mb4_sr_latn_0900_ai_ci", False), + (315, "utf8mb4", "utf8mb4_sr_latn_0900_as_cs", False), + (316, "utf8mb4", "utf8mb4_bs_0900_ai_ci", False), + (317, "utf8mb4", "utf8mb4_bs_0900_as_cs", False), + (318, "utf8mb4", "utf8mb4_bg_0900_ai_ci", False), + (319, "utf8mb4", "utf8mb4_bg_0900_as_cs", False), + (320, "utf8mb4", "utf8mb4_gl_0900_ai_ci", False), + (321, "utf8mb4", "utf8mb4_gl_0900_as_cs", False), + (322, "utf8mb4", "utf8mb4_mn_cyrl_0900_ai_ci", False), + (323, "utf8mb4", "utf8mb4_mn_cyrl_0900_as_cs", False), +) + +MYSQL_5_CHARSETS = ( + (1, "big5", "big5_chinese_ci", True), + (2, "latin2", "latin2_czech_cs", False), + (3, "dec8", "dec8_swedish_ci", True), + (4, "cp850", "cp850_general_ci", True), + (5, "latin1", "latin1_german1_ci", False), + (6, "hp8", "hp8_english_ci", True), + (7, "koi8r", "koi8r_general_ci", True), + (8, "latin1", "latin1_swedish_ci", True), + (9, "latin2", "latin2_general_ci", True), + (10, "swe7", "swe7_swedish_ci", True), + (11, "ascii", "ascii_general_ci", True), + (12, "ujis", "ujis_japanese_ci", True), + (13, "sjis", "sjis_japanese_ci", True), + (14, "cp1251", "cp1251_bulgarian_ci", False), + (15, "latin1", "latin1_danish_ci", False), + (16, "hebrew", "hebrew_general_ci", True), + (18, 
"tis620", "tis620_thai_ci", True), + (19, "euckr", "euckr_korean_ci", True), + (20, "latin7", "latin7_estonian_cs", False), + (21, "latin2", "latin2_hungarian_ci", False), + (22, "koi8u", "koi8u_general_ci", True), + (23, "cp1251", "cp1251_ukrainian_ci", False), + (24, "gb2312", "gb2312_chinese_ci", True), + (25, "greek", "greek_general_ci", True), + (26, "cp1250", "cp1250_general_ci", True), + (27, "latin2", "latin2_croatian_ci", False), + (28, "gbk", "gbk_chinese_ci", True), + (29, "cp1257", "cp1257_lithuanian_ci", False), + (30, "latin5", "latin5_turkish_ci", True), + (31, "latin1", "latin1_german2_ci", False), + (32, "armscii8", "armscii8_general_ci", True), + (33, "utf8", "utf8_general_ci", True), + (34, "cp1250", "cp1250_czech_cs", False), + (35, "ucs2", "ucs2_general_ci", True), + (36, "cp866", "cp866_general_ci", True), + (37, "keybcs2", "keybcs2_general_ci", True), + (38, "macce", "macce_general_ci", True), + (39, "macroman", "macroman_general_ci", True), + (40, "cp852", "cp852_general_ci", True), + (41, "latin7", "latin7_general_ci", True), + (42, "latin7", "latin7_general_cs", False), + (43, "macce", "macce_bin", False), + (44, "cp1250", "cp1250_croatian_ci", False), + (45, "utf8mb4", "utf8mb4_general_ci", True), + (46, "utf8mb4", "utf8mb4_bin", False), + (47, "latin1", "latin1_bin", False), + (48, "latin1", "latin1_general_ci", False), + (49, "latin1", "latin1_general_cs", False), + (50, "cp1251", "cp1251_bin", False), + (51, "cp1251", "cp1251_general_ci", True), + (52, "cp1251", "cp1251_general_cs", False), + (53, "macroman", "macroman_bin", False), + (54, "utf16", "utf16_general_ci", True), + (55, "utf16", "utf16_bin", False), + (56, "utf16le", "utf16le_general_ci", True), + (57, "cp1256", "cp1256_general_ci", True), + (58, "cp1257", "cp1257_bin", False), + (59, "cp1257", "cp1257_general_ci", True), + (60, "utf32", "utf32_general_ci", True), + (61, "utf32", "utf32_bin", False), + (62, "utf16le", "utf16le_bin", False), + (63, "binary", "binary", True), + (64, "armscii8", "armscii8_bin", False), + (65, "ascii", "ascii_bin", False), + (66, "cp1250", "cp1250_bin", False), + (67, "cp1256", "cp1256_bin", False), + (68, "cp866", "cp866_bin", False), + (69, "dec8", "dec8_bin", False), + (70, "greek", "greek_bin", False), + (71, "hebrew", "hebrew_bin", False), + (72, "hp8", "hp8_bin", False), + (73, "keybcs2", "keybcs2_bin", False), + (74, "koi8r", "koi8r_bin", False), + (75, "koi8u", "koi8u_bin", False), + (77, "latin2", "latin2_bin", False), + (78, "latin5", "latin5_bin", False), + (79, "latin7", "latin7_bin", False), + (80, "cp850", "cp850_bin", False), + (81, "cp852", "cp852_bin", False), + (82, "swe7", "swe7_bin", False), + (83, "utf8", "utf8_bin", False), + (84, "big5", "big5_bin", False), + (85, "euckr", "euckr_bin", False), + (86, "gb2312", "gb2312_bin", False), + (87, "gbk", "gbk_bin", False), + (88, "sjis", "sjis_bin", False), + (89, "tis620", "tis620_bin", False), + (90, "ucs2", "ucs2_bin", False), + (91, "ujis", "ujis_bin", False), + (92, "geostd8", "geostd8_general_ci", True), + (93, "geostd8", "geostd8_bin", False), + (94, "latin1", "latin1_spanish_ci", False), + (95, "cp932", "cp932_japanese_ci", True), + (96, "cp932", "cp932_bin", False), + (97, "eucjpms", "eucjpms_japanese_ci", True), + (98, "eucjpms", "eucjpms_bin", False), + (99, "cp1250", "cp1250_polish_ci", False), + (101, "utf16", "utf16_unicode_ci", False), + (102, "utf16", "utf16_icelandic_ci", False), + (103, "utf16", "utf16_latvian_ci", False), + (104, "utf16", "utf16_romanian_ci", False), + (105, "utf16", 
"utf16_slovenian_ci", False), + (106, "utf16", "utf16_polish_ci", False), + (107, "utf16", "utf16_estonian_ci", False), + (108, "utf16", "utf16_spanish_ci", False), + (109, "utf16", "utf16_swedish_ci", False), + (110, "utf16", "utf16_turkish_ci", False), + (111, "utf16", "utf16_czech_ci", False), + (112, "utf16", "utf16_danish_ci", False), + (113, "utf16", "utf16_lithuanian_ci", False), + (114, "utf16", "utf16_slovak_ci", False), + (115, "utf16", "utf16_spanish2_ci", False), + (116, "utf16", "utf16_roman_ci", False), + (117, "utf16", "utf16_persian_ci", False), + (118, "utf16", "utf16_esperanto_ci", False), + (119, "utf16", "utf16_hungarian_ci", False), + (120, "utf16", "utf16_sinhala_ci", False), + (121, "utf16", "utf16_german2_ci", False), + (122, "utf16", "utf16_croatian_ci", False), + (123, "utf16", "utf16_unicode_520_ci", False), + (124, "utf16", "utf16_vietnamese_ci", False), + (128, "ucs2", "ucs2_unicode_ci", False), + (129, "ucs2", "ucs2_icelandic_ci", False), + (130, "ucs2", "ucs2_latvian_ci", False), + (131, "ucs2", "ucs2_romanian_ci", False), + (132, "ucs2", "ucs2_slovenian_ci", False), + (133, "ucs2", "ucs2_polish_ci", False), + (134, "ucs2", "ucs2_estonian_ci", False), + (135, "ucs2", "ucs2_spanish_ci", False), + (136, "ucs2", "ucs2_swedish_ci", False), + (137, "ucs2", "ucs2_turkish_ci", False), + (138, "ucs2", "ucs2_czech_ci", False), + (139, "ucs2", "ucs2_danish_ci", False), + (140, "ucs2", "ucs2_lithuanian_ci", False), + (141, "ucs2", "ucs2_slovak_ci", False), + (142, "ucs2", "ucs2_spanish2_ci", False), + (143, "ucs2", "ucs2_roman_ci", False), + (144, "ucs2", "ucs2_persian_ci", False), + (145, "ucs2", "ucs2_esperanto_ci", False), + (146, "ucs2", "ucs2_hungarian_ci", False), + (147, "ucs2", "ucs2_sinhala_ci", False), + (148, "ucs2", "ucs2_german2_ci", False), + (149, "ucs2", "ucs2_croatian_ci", False), + (150, "ucs2", "ucs2_unicode_520_ci", False), + (151, "ucs2", "ucs2_vietnamese_ci", False), + (159, "ucs2", "ucs2_general_mysql500_ci", False), + (160, "utf32", "utf32_unicode_ci", False), + (161, "utf32", "utf32_icelandic_ci", False), + (162, "utf32", "utf32_latvian_ci", False), + (163, "utf32", "utf32_romanian_ci", False), + (164, "utf32", "utf32_slovenian_ci", False), + (165, "utf32", "utf32_polish_ci", False), + (166, "utf32", "utf32_estonian_ci", False), + (167, "utf32", "utf32_spanish_ci", False), + (168, "utf32", "utf32_swedish_ci", False), + (169, "utf32", "utf32_turkish_ci", False), + (170, "utf32", "utf32_czech_ci", False), + (171, "utf32", "utf32_danish_ci", False), + (172, "utf32", "utf32_lithuanian_ci", False), + (173, "utf32", "utf32_slovak_ci", False), + (174, "utf32", "utf32_spanish2_ci", False), + (175, "utf32", "utf32_roman_ci", False), + (176, "utf32", "utf32_persian_ci", False), + (177, "utf32", "utf32_esperanto_ci", False), + (178, "utf32", "utf32_hungarian_ci", False), + (179, "utf32", "utf32_sinhala_ci", False), + (180, "utf32", "utf32_german2_ci", False), + (181, "utf32", "utf32_croatian_ci", False), + (182, "utf32", "utf32_unicode_520_ci", False), + (183, "utf32", "utf32_vietnamese_ci", False), + (192, "utf8", "utf8_unicode_ci", False), + (193, "utf8", "utf8_icelandic_ci", False), + (194, "utf8", "utf8_latvian_ci", False), + (195, "utf8", "utf8_romanian_ci", False), + (196, "utf8", "utf8_slovenian_ci", False), + (197, "utf8", "utf8_polish_ci", False), + (198, "utf8", "utf8_estonian_ci", False), + (199, "utf8", "utf8_spanish_ci", False), + (200, "utf8", "utf8_swedish_ci", False), + (201, "utf8", "utf8_turkish_ci", False), + (202, "utf8", 
"utf8_czech_ci", False), + (203, "utf8", "utf8_danish_ci", False), + (204, "utf8", "utf8_lithuanian_ci", False), + (205, "utf8", "utf8_slovak_ci", False), + (206, "utf8", "utf8_spanish2_ci", False), + (207, "utf8", "utf8_roman_ci", False), + (208, "utf8", "utf8_persian_ci", False), + (209, "utf8", "utf8_esperanto_ci", False), + (210, "utf8", "utf8_hungarian_ci", False), + (211, "utf8", "utf8_sinhala_ci", False), + (212, "utf8", "utf8_german2_ci", False), + (213, "utf8", "utf8_croatian_ci", False), + (214, "utf8", "utf8_unicode_520_ci", False), + (215, "utf8", "utf8_vietnamese_ci", False), + (223, "utf8", "utf8_general_mysql500_ci", False), + (224, "utf8mb4", "utf8mb4_unicode_ci", False), + (225, "utf8mb4", "utf8mb4_icelandic_ci", False), + (226, "utf8mb4", "utf8mb4_latvian_ci", False), + (227, "utf8mb4", "utf8mb4_romanian_ci", False), + (228, "utf8mb4", "utf8mb4_slovenian_ci", False), + (229, "utf8mb4", "utf8mb4_polish_ci", False), + (230, "utf8mb4", "utf8mb4_estonian_ci", False), + (231, "utf8mb4", "utf8mb4_spanish_ci", False), + (232, "utf8mb4", "utf8mb4_swedish_ci", False), + (233, "utf8mb4", "utf8mb4_turkish_ci", False), + (234, "utf8mb4", "utf8mb4_czech_ci", False), + (235, "utf8mb4", "utf8mb4_danish_ci", False), + (236, "utf8mb4", "utf8mb4_lithuanian_ci", False), + (237, "utf8mb4", "utf8mb4_slovak_ci", False), + (238, "utf8mb4", "utf8mb4_spanish2_ci", False), + (239, "utf8mb4", "utf8mb4_roman_ci", False), + (240, "utf8mb4", "utf8mb4_persian_ci", False), + (241, "utf8mb4", "utf8mb4_esperanto_ci", False), + (242, "utf8mb4", "utf8mb4_hungarian_ci", False), + (243, "utf8mb4", "utf8mb4_sinhala_ci", False), + (244, "utf8mb4", "utf8mb4_german2_ci", False), + (245, "utf8mb4", "utf8mb4_croatian_ci", False), + (246, "utf8mb4", "utf8mb4_unicode_520_ci", False), + (247, "utf8mb4", "utf8mb4_vietnamese_ci", False), + (248, "gb18030", "gb18030_chinese_ci", True), + (249, "gb18030", "gb18030_bin", False), + (250, "gb18030", "gb18030_unicode_520_ci", False), +) + +charsets = Charsets() diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/connection.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/connection.py new file mode 100644 index 0000000..bacb62d --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/connection.py @@ -0,0 +1,1337 @@ +# Copyright (c) 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +# mypy: disable-error-code="arg-type,operator,attr-defined,assignment" + +"""Implemention of the communication with MySQL servers in pure Python.""" + +__all__ = ["MySQLConnection"] + +import asyncio +import contextlib +import datetime +import getpass +import os +import socket +import struct +import warnings + +from decimal import Decimal +from io import IOBase +from typing import ( + Any, + AsyncGenerator, + BinaryIO, + Callable, + Dict, + List, + Mapping, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from .. import version +from ..constants import ( + ClientFlag, + FieldType, + RefreshOption, + ServerCmd, + ServerFlag, + flag_is_set, +) +from ..errors import ( + DatabaseError, + Error, + InterfaceError, + InternalError, + NotSupportedError, + OperationalError, + ProgrammingError, + get_exception, +) +from ..types import ( + BinaryProtocolType, + DescriptionType, + EofPacketType, + OkPacketType, + ResultType, + RowType, + StatsPacketType, + StrOrBytes, +) +from ..utils import get_platform, int1store, int4store, lc_int +from .abstracts import MySQLConnectionAbstract, MySQLCursorAbstract, ServerInfo +from .charsets import charsets +from .cursor import ( + MySQLCursor, + MySQLCursorBuffered, + MySQLCursorBufferedDict, + MySQLCursorBufferedNamedTuple, + MySQLCursorBufferedRaw, + MySQLCursorDict, + MySQLCursorNamedTuple, + MySQLCursorPrepared, + MySQLCursorPreparedDict, + MySQLCursorPreparedNamedTuple, + MySQLCursorPreparedRaw, + MySQLCursorRaw, +) +from .logger import logger +from .network import MySQLTcpSocket, MySQLUnixSocket + + +class MySQLConnection(MySQLConnectionAbstract): + """Implementation of the pure Python MySQL connection.""" + + _mfa_nfactor: int = 1 + + @property + def connection_id(self) -> Optional[int]: + """MySQL connection ID.""" + if self._handshake: + return self._handshake.get("server_threadid") # type: ignore[return-value] + return None + + async def connect(self) -> None: + try: + if self._unix_socket and os.name == "posix": + self._socket = MySQLUnixSocket(unix_socket=self._unix_socket) + else: + self._socket = MySQLTcpSocket(host=self._host, port=self._port) + await asyncio.wait_for( + self._socket.open_connection(), self._connection_timeout + ) + await self._do_handshake() + await self._do_auth() + except (asyncio.CancelledError, asyncio.TimeoutError): + raise + except Exception: + await self._socket.close_connection() + raise + + if self._client_flags & ClientFlag.COMPRESS: + # Update the network layer accordingly + self._socket.switch_to_compressed_mode() + + # Set converter class + try: + self.set_converter_class(self._converter_class) + except TypeError as err: + raise AttributeError( + "Converter classA should be a subclass of " + "conversion.MySQLConverterBase" + ) from err + + # Post connection settings + await self._post_connection() + + def _add_default_conn_attrs(self) -> None: + """Add the default connection attributes.""" + platform = get_platform() + license_chunks = version.LICENSE.split(" ") + if license_chunks[0] == "GPLv2": + client_license = "GPL-2.0" + else: + client_license = "Commercial" + default_conn_attrs = { + "_pid": str(os.getpid()), + "_platform": platform["arch"], + "_source_host": socket.gethostname(), + "_client_name": "mysql-connector-python", + "_client_license": client_license, + "_client_version": 
".".join([str(x) for x in version.VERSION[0:3]]), + "_os": platform["version"], + } + + self._connection_attrs.update((default_conn_attrs)) + + async def _execute_query(self, query: str) -> ResultType: + """Execute a query. + + This method simply calls cmd_query() after checking for unread result. If there + are still unread result, an InterfaceError is raised. Otherwise whatever + cmd_query() returns is returned. + """ + await self.handle_unread_result() + return await self.cmd_query(query) + + async def _do_handshake(self) -> None: + """Get the handshake from the MySQL server.""" + packet = await self._socket.read() + logger.debug("Protocol::Handshake packet: %s", packet) + if packet[4] == 255: + raise get_exception(packet) + + self._handshake = self._protocol.parse_handshake(packet) + + self._server_info = ServerInfo( + protocol=self._handshake["protocol"], + version=self._handshake["server_version_original"], + thread_id=self._handshake["server_threadid"], + charset=self._handshake["charset"], + status_flags=self._handshake["server_status"], + auth_plugin=self._handshake["auth_plugin"], + auth_data=self._handshake["auth_data"], + capabilities=self._handshake["capabilities"], + ) + + # Set the charsets for the correspondent server major version + charsets.set_mysql_major_version(self._server_info.version_tuple[0]) + logger.debug("Protocol::Handshake charset: %s", self._server_info.charset) + + # Set charset if provided else use the server default + if self._charset_name and self._charset_collation: + self._charset = charsets.get_by_name_and_collation( + self._charset_name, self._charset_collation + ) + elif self._charset_name: + self._charset = charsets.get_by_name(self._charset_name) + self._charset_collation = self._charset.collation + elif self._charset_collation: + self._charset = charsets.get_by_collation(self._charset_collation) + self._charset_name = self._charset.name + else: + # The default charset from the server handshake should be used instead, + # as `charsets.get_by_id(self._server_info.charset)`. + # The charset id 45 is used to be aligned with the current implementation. + self._charset = charsets.get_by_id(45) + self._charset_name = self._charset.name + self._charset_collation = self._charset.collation + + if not self._handshake["capabilities"] & ClientFlag.SSL: + if self._auth_plugin == "mysql_clear_password" and not self.is_secure: + raise InterfaceError( + "Clear password authentication is not supported over " + "insecure channels" + ) + if self._ssl_verify_cert: + raise InterfaceError( + "SSL is required but the server doesn't support it", + errno=2026, + ) + self._client_flags &= ~ClientFlag.SSL + elif not self._ssl_disabled: + self._client_flags |= ClientFlag.SSL + + if self._handshake["capabilities"] & ClientFlag.PLUGIN_AUTH: + self.set_client_flags([ClientFlag.PLUGIN_AUTH]) + + if self._handshake["capabilities"] & ClientFlag.CLIENT_QUERY_ATTRIBUTES: + self._server_info.query_attrs_is_supported = True + self.set_client_flags([ClientFlag.CLIENT_QUERY_ATTRIBUTES]) + + if self._handshake["capabilities"] & ClientFlag.MULTI_FACTOR_AUTHENTICATION: + self.set_client_flags([ClientFlag.MULTI_FACTOR_AUTHENTICATION]) + + async def _do_auth(self) -> None: + """Authenticate with the MySQL server. + + Authentication happens in two parts. We first send a response to the + handshake. The MySQL server will then send either an AuthSwitchRequest + or an error packet. + + Raises NotSupportedError when we get the old, insecure password + reply back. 
Raises any error coming from MySQL. + """ + if ( # pylint: disable=too-many-boolean-expressions + self._auth_plugin + and self._auth_plugin.startswith("authentication_oci") + or ( + self._auth_plugin + and self._auth_plugin.startswith("authentication_kerberos") + and os.name == "nt" + ) + ) and not self._user: + self._user = getpass.getuser() + logger.debug( + "MySQL user is empty, OS user: %s will be used for %s", + self._user, + self._auth_plugin, + ) + + password = ( + self._password1 + if self._password1 and self._password != self._password1 + else self._password + ) + + self._ssl_active = False + if not self._ssl_disabled and (self._client_flags & ClientFlag.SSL): + ssl_context = self._socket.build_ssl_context( + ssl_ca=self._ssl_ca, + ssl_cert=self._ssl_cert, + ssl_key=self._ssl_key, + ssl_verify_cert=self._ssl_verify_cert, + ssl_verify_identity=self._ssl_verify_identity, + tls_versions=self._tls_versions, + tls_cipher_suites=self._tls_ciphersuites, + ) + packet: bytes = self._protocol.make_auth_ssl( + charset=self._charset.charset_id, client_flags=self._client_flags + ) + await self._socket.write(packet) + await self._socket.switch_to_ssl(ssl_context) + self._ssl_active = True + + ok_pkt = await self._authenticator.authenticate( + sock=self._socket, + handshake=self._handshake, + username=self._user, + password1=password, + password2=self._password2, + password3=self._password3, + database=self._database, + charset=self._charset.charset_id, + client_flags=self._client_flags, + auth_plugin=self._auth_plugin, + auth_plugin_class=self._auth_plugin_class, + conn_attrs=self._connection_attrs, + krb_service_principal=self._krb_service_principal, + oci_config_file=self._oci_config_file, + oci_config_profile=self._oci_config_profile, + webauthn_callback=self._webauthn_callback, + fido_callback=self._fido_callback, + ) + self._handle_ok(ok_pkt) + + if not (self._client_flags & ClientFlag.CONNECT_WITH_DB) and self._database: + await self.cmd_init_db(self._database) + + def _handle_ok(self, packet: bytes) -> OkPacketType: + """Handle a MySQL OK packet. + + This method handles a MySQL OK packet. When the packet is found to be an Error + packet, an error will be raised. If the packet is neither an OK or an Error + packet, InterfaceError will be raised. + """ + if packet[4] == 0: + ok_pkt = self._protocol.parse_ok(packet) + self._handle_server_status(ok_pkt["status_flag"]) + return ok_pkt + if packet[4] == 255: + raise get_exception(packet) + raise InterfaceError("Expected OK packet") + + def _handle_server_status(self, flags: int) -> None: + """Handle the server flags found in MySQL packets. + + This method handles the server flags send by MySQL OK and EOF packets. + It, for example, checks whether there exists more result sets or whether there + is an ongoing transaction. + """ + self._have_next_result = flag_is_set(ServerFlag.MORE_RESULTS_EXISTS, flags) + self._in_transaction = flag_is_set(ServerFlag.STATUS_IN_TRANS, flags) + + def _handle_eof(self, packet: bytes) -> EofPacketType: + """Handle a MySQL EOF packet. + + This method handles a MySQL EOF packet. When the packet is found to be an Error + packet, an error will be raised. If the packet is neither and OK or an Error + packet, InterfaceError will be raised. 
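The `_handle_ok`, `_handle_eof` and `_handle_result` helpers in this file all branch on the first payload byte (offset 4, just after the 4-byte packet header). A self-contained sketch of that dispatch, using the same byte values these methods check:

# Illustrative sketch of the packet-type dispatch used by the _handle_* helpers.
def classify_packet(packet: bytes) -> str:
    if not packet or len(packet) < 5:
        return "empty/truncated response"
    header_byte = packet[4]              # first byte of the payload
    if header_byte == 0:
        return "OK"
    if header_byte == 251:
        return "LOCAL INFILE request"
    if header_byte == 254:
        return "EOF"
    if header_byte == 255:
        return "ERR"
    return "column count / data packet"

# A minimal OK packet: 3-byte length, sequence id, then the 0x00 header.
print(classify_packet(bytes([7, 0, 0, 1, 0, 0, 0, 2, 0, 0, 0])))       # OK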
+ """ + if packet[4] == 254: + eof = self._protocol.parse_eof(packet) + self._handle_server_status(eof["status_flag"]) + return eof + if packet[4] == 255: + raise get_exception(packet) + raise InterfaceError("Expected EOF packet") + + async def _handle_load_data_infile(self, filename: str) -> OkPacketType: + """Handle a LOAD DATA INFILE LOCAL request.""" + file_name = os.path.abspath(filename) + if os.path.islink(file_name): + raise OperationalError("Use of symbolic link is not allowed") + if not self._allow_local_infile and not self._allow_local_infile_in_path: + raise DatabaseError( + "LOAD DATA LOCAL INFILE file request rejected due to " + "restrictions on access." + ) + if not self._allow_local_infile and self._allow_local_infile_in_path: + # Validate filename is inside of allow_local_infile_in_path path. + infile_path = os.path.abspath(self._allow_local_infile_in_path) + c_path = None + try: + c_path = os.path.commonpath([infile_path, file_name]) + except ValueError as err: + err_msg = ( + "{} while loading file `{}` and path `{}` given" + " in allow_local_infile_in_path" + ) + raise InterfaceError( + err_msg.format(str(err), file_name, infile_path) + ) from err + + if c_path != infile_path: + err_msg = ( + "The file `{}` is not found in the given " + "allow_local_infile_in_path {}" + ) + raise DatabaseError(err_msg.format(file_name, infile_path)) + + try: + data_file = open(file_name, "rb") # pylint: disable=consider-using-with + return self._handle_ok( + await self._send_data(data_file, send_empty_packet=True) + ) + except IOError: + # Send a empty packet to cancel the operation + try: + await self._socket.write(b"") + except AttributeError as err: + raise OperationalError("MySQL Connection not available") from err + raise InterfaceError(f"File '{file_name}' could not be read") from None + finally: + try: + data_file.close() + except (IOError, NameError): + pass + + async def _handle_result(self, packet: bytes) -> ResultType: + """Handle a MySQL Result. + + This method handles a MySQL result, for example, after sending the query + command. OK and EOF packets will be handled and returned. + If the packet is an Error packet, an Error-exception will be raised. + + The dictionary returned of: + - columns: column information + - eof: the EOF-packet information + """ + if not packet or len(packet) < 4: + raise InterfaceError("Empty response") + if packet[4] == 0: + return self._handle_ok(packet) + if packet[4] == 251: + filename = packet[5:].decode() + return await self._handle_load_data_infile(filename) + if packet[4] == 254: + return self._handle_eof(packet) + if packet[4] == 255: + raise get_exception(packet) + + # We have a text result set + column_count = self._protocol.parse_column_count(packet) + if not column_count or not isinstance(column_count, int): + raise InterfaceError("Illegal result set") + + self._columns_desc = [ + None, + ] * column_count + for i in range(0, column_count): + self._columns_desc[i] = self._protocol.parse_column( + await self._socket.read(), self.python_charset + ) + + eof = self._handle_eof(await self._socket.read()) + self.unread_result = True + return {"columns": self._columns_desc, "eof": eof} + + def _handle_binary_ok(self, packet: bytes) -> Dict[str, int]: + """Handle a MySQL Binary Protocol OK packet + + This method handles a MySQL Binary Protocol OK packet. When the + packet is found to be an Error packet, an error will be raised. If + the packet is neither an OK or an Error packet, InterfaceError + will be raised. 
+ + Returns a dict() + """ + if packet[4] == 0: + return self._protocol.parse_binary_prepare_ok(packet) + if packet[4] == 255: + raise get_exception(packet) + raise InterfaceError("Expected Binary OK packet") + + async def _handle_binary_result( + self, packet: bytes + ) -> Union[OkPacketType, Tuple[int, List[DescriptionType], EofPacketType]]: + """Handle a MySQL Result + + This method handles a MySQL result, for example, after sending the + query command. OK and EOF packets will be handled and returned. If + the packet is an Error packet, an Error exception will be raised. + + The tuple returned by this method consist of: + - the number of columns in the result, + - a list of tuples with information about the columns, + - the EOF packet information as a dictionary. + + Returns tuple() or dict() + """ + if not packet or len(packet) < 4: + raise InterfaceError("Empty response") + if packet[4] == 0: + return self._handle_ok(packet) + if packet[4] == 254: + return self._handle_eof(packet) + if packet[4] == 255: + raise get_exception(packet) + + # We have a binary result set + column_count = self._protocol.parse_column_count(packet) + if not column_count or not isinstance(column_count, int): + raise InterfaceError("Illegal result set.") + + columns: List[DescriptionType] = [None] * column_count + for i in range(0, column_count): + columns[i] = self._protocol.parse_column( + await self._socket.read(), self.python_charset + ) + + eof = self._handle_eof(await self._socket.read()) + return (column_count, columns, eof) + + async def _send_cmd( + self, + command: int, + argument: Optional[bytes] = None, + packet_number: int = 0, + packet: Optional[bytes] = None, + expect_response: bool = True, + compressed_packet_number: int = 0, + ) -> Optional[bytearray]: + """Send a command to the MySQL server. + + This method sends a command with an optional argument. + If packet is not None, it will be sent and the argument will be ignored. + + The packet_number is optional and should usually not be used. + + Some commands might not result in the MySQL server returning a response. If a + command does not return anything, you should set expect_response to False. + The _send_cmd method will then return None instead of a MySQL packet. + """ + await self.handle_unread_result() + + try: + await self._socket.write( + self._protocol.make_command(command, packet or argument), + packet_number, + compressed_packet_number, + ) + except AttributeError as err: + raise OperationalError("MySQL Connection not available") from err + + if not expect_response: + return None + return await self._socket.read() + + async def _send_data( + self, data_file: BinaryIO, send_empty_packet: bool = False + ) -> bytearray: + """Send data to the MySQL server + + This method accepts a file-like object and sends its data + as is to the MySQL server. If the send_empty_packet is + True, it will send an extra empty package (for example + when using LOAD LOCAL DATA INFILE). + + Returns a MySQL packet. 
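`_handle_load_data_infile` above only allows `LOAD DATA LOCAL INFILE` for files located under `allow_local_infile_in_path`, by comparing the `os.path.commonpath` of the requested file with the allowed directory. The same containment check in isolation (paths are hypothetical):

# Illustrative sketch of the allow_local_infile_in_path containment check.
import os

def is_inside(allowed_dir: str, requested_file: str) -> bool:
    allowed_dir = os.path.abspath(allowed_dir)
    requested_file = os.path.abspath(requested_file)
    try:
        # commonpath raises ValueError e.g. for paths on different drives
        return os.path.commonpath([allowed_dir, requested_file]) == allowed_dir
    except ValueError:
        return False

print(is_inside("/var/imports", "/var/imports/2024/data.csv"))   # True
print(is_inside("/var/imports", "/etc/passwd"))                  # False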
+ """ + await self.handle_unread_result() + + if not hasattr(data_file, "read"): + raise ValueError("expecting a file-like object") + + chunk_size = 131072 # 128 KB + try: + buf = data_file.read(chunk_size - 16) + while buf: + await self._socket.write(buf) + buf = data_file.read(chunk_size - 16) + except AttributeError as err: + raise OperationalError("MySQL Connection not available") from err + + if send_empty_packet: + try: + await self._socket.write(b"") + except AttributeError as err: + raise OperationalError("MySQL Connection not available") from err + + res = await self._socket.read() + return res + + def is_socket_connected(self) -> bool: + """Reports whether the socket is connected. + + Instead of ping the server like ``is_connected()``, it only checks if the + socket connection flag is set. + """ + return bool(self._socket and self._socket.is_connected()) + + async def is_connected(self) -> bool: + """Reports whether the connection to MySQL Server is available. + + This method checks whether the connection to MySQL is available. + It is similar to ``ping()``, but unlike the ``ping()`` method, either `True` + or `False` is returned and no exception is raised. + """ + try: + await self.cmd_ping() + except Error: + return False + return True + + async def ping( + self, reconnect: bool = False, attempts: int = 1, delay: int = 0 + ) -> None: + """Check availability of the MySQL server. + + When reconnect is set to `True`, one or more attempts are made to try to + reconnect to the MySQL server using the ``reconnect()`` method. + + ``delay`` is the number of seconds to wait between each retry. + + When the connection is not available, an InterfaceError is raised. Use the + ``is_connected()`` method if you just want to check the connection without + raising an error. + + Raises: + InterfaceError: On errors. + """ + try: + await self.cmd_ping() + except Error as err: + if reconnect: + await self.reconnect(attempts=attempts, delay=delay) + else: + raise InterfaceError("Connection to MySQL is not available") from err + + async def shutdown(self) -> None: + """Shut down connection to MySQL Server. + + This method closes the socket. It raises no exceptions. + + Unlike `disconnect()`, `shutdown()` closes the client connection without + attempting to send a QUIT command to the server first. Thus, it will not + block if the connection is disrupted for some reason such as network failure. + """ + if not self._socket: + return + + try: + await self._socket.close_connection() + except Exception: # pylint: disable=broad-exception-caught + pass # Getting an exception would mean we are disconnected. + + async def close(self) -> None: + """Close the connection. + + It closes any opened cursor associated to this connection, and closes the + underling socket connection. + + `MySQLConnection.close()` is a synonymous for `MySQLConnection.disconnect()` + method name and more commonly used. + + This method tries to send a `QUIT` command and close the socket. It raises + no exceptions. 
+ """ + for cursor in self._cursors: + await cursor.close() + self._cursors.clear() + + if self._socket and self._socket.is_connected(): + with contextlib.suppress(Error): + await self.cmd_quit() + + if self._socket: + await self._socket.close_connection() + self._socket = None + + disconnect: Callable[[], Any] = close + + async def cursor( + self, + buffered: Optional[bool] = None, + raw: Optional[bool] = None, + prepared: Optional[bool] = None, + cursor_class: Optional[Type[MySQLCursorAbstract]] = None, + dictionary: Optional[bool] = None, + named_tuple: Optional[bool] = None, + ) -> MySQLCursor: + """Instantiate and return a cursor. + + By default, MySQLCursor is returned. Depending on the options while + connecting, a buffered and/or raw cursor is instantiated instead. + Also depending upon the cursor options, rows can be returned as dictionary or + named tuple. + + It is possible to also give a custom cursor through the cursor_class + parameter, but it needs to be a subclass of + mysql.connector.aio.abstracts.MySQLCursorAbstract. + + Raises: + ProgrammingError: When cursor_class is not a subclass of + CursorBase. + ValueError: When cursor is not available. + """ + if not self._socket or not self._socket.is_connected(): + raise OperationalError("MySQL Connection not available") + + await self.handle_unread_result() + + if cursor_class is not None: + if not issubclass(cursor_class, MySQLCursor): + raise ProgrammingError( + "Cursor class needs be to subclass of MySQLCursorAbstract" + ) + return (cursor_class)(self) + + buffered = buffered if buffered is not None else self._buffered + raw = raw if raw is not None else self._raw + + cursor_type = 0 + if buffered is True: + cursor_type |= 1 + if raw is True: + cursor_type |= 2 + if dictionary is True: + cursor_type |= 4 + if named_tuple is True: + cursor_type |= 8 + if prepared is True: + cursor_type |= 16 + + types = { + 0: MySQLCursor, + 1: MySQLCursorBuffered, + 2: MySQLCursorRaw, + 3: MySQLCursorBufferedRaw, + 4: MySQLCursorDict, + 5: MySQLCursorBufferedDict, + 8: MySQLCursorNamedTuple, + 9: MySQLCursorBufferedNamedTuple, + 16: MySQLCursorPrepared, + 18: MySQLCursorPreparedRaw, + 20: MySQLCursorPreparedDict, + 24: MySQLCursorPreparedNamedTuple, + } + try: + return (types[cursor_type])(self) + except KeyError: + args = ("buffered", "raw", "dictionary", "named_tuple", "prepared") + criteria = ", ".join( + [args[i] for i in range(5) if cursor_type & (1 << i) != 0] + ) + raise ValueError( + f"Cursor not available with given criteria: {criteria}" + ) from None + + async def get_row( + self, + binary: bool = False, + columns: Optional[List[DescriptionType]] = None, + raw: Optional[bool] = None, + ) -> Tuple[Optional[RowType], Optional[EofPacketType]]: + """Get the next rows returned by the MySQL server. + + This method gets one row from the result set after sending, for example, the + query command. The result is a tuple consisting of the row and the EOF packet. + If no row was available in the result set, the row data will be None. + """ + rows, eof = await self.get_rows( + count=1, binary=binary, columns=columns, raw=raw + ) + if rows: + return (rows[0], eof) + return (None, eof) + + async def get_rows( + self, + count: Optional[int] = None, + binary: bool = False, + columns: Optional[List[DescriptionType]] = None, + raw: Optional[bool] = None, + prep_stmt: Any = None, + ) -> Tuple[List[RowType], Optional[EofPacketType]]: + """Get all rows returned by the MySQL server. 
+ + This method gets all rows returned by the MySQL server after sending, for + example, the query command. The result is a tuple consisting of a list of rows + and the EOF packet. + """ + if raw is None: + raw = self._raw + + if not self.unread_result: + raise InternalError("No result set available") + + rows: Tuple[List[Tuple[Any, ...]], Optional[EofPacketType]] = ([], None) + try: + if binary: + charset = self.charset + if charset == "utf8mb4": + charset = "utf8" + rows = await self._protocol.read_binary_result( + self._socket, columns, count, charset + ) + else: + rows = await self._protocol.read_text_result( + self._socket, self._server_info.version, count=count + ) + except Error as err: + self.unread_result = False + raise err + + rows, eof_p = rows + if ( + not (binary or raw) + and self._columns_desc is not None + and rows + and hasattr(self, "converter") + ): + row_to_python = self.converter.row_to_python + rows = [row_to_python(row, self._columns_desc) for row in rows] + + if eof_p is not None: + self._handle_server_status( + eof_p["status_flag"] + if "status_flag" in eof_p + else eof_p["server_status"] + ) + self.unread_result = False + + return rows, eof_p + + async def commit(self) -> None: + """Commit current transaction.""" + await self._execute_query("COMMIT") + + async def rollback(self) -> None: + """Rollback current transaction.""" + if self.unread_result: + await self.get_rows() + await self._execute_query("ROLLBACK") + + async def cmd_reset_connection(self) -> bool: + """Resets the session state without re-authenticating. + + Reset command only works on MySQL server 5.7.3 or later. + The result is True for a successful reset otherwise False. + """ + try: + self._handle_ok(await self._send_cmd(ServerCmd.RESET_CONNECTION)) + await self._post_connection() + return True + except (NotSupportedError, OperationalError): + return False + + async def cmd_init_db(self, database: str) -> OkPacketType: + """Change the current database. + + This method changes the current (default) database by sending the INIT_DB + command. The result is a dictionary containing the OK packet infawaitormation. + """ + return self._handle_ok( + await self._send_cmd(ServerCmd.INIT_DB, database.encode("utf-8")) + ) + + async def cmd_query( + self, + query: StrOrBytes, + raw: bool = False, + buffered: bool = False, + raw_as_string: bool = False, + ) -> ResultType: + """Send a query to the MySQL server. + + This method send the query to the MySQL server and returns the result. + + If there was a text result, a tuple will be returned consisting of the number + of columns and a list containing information about these columns. + + When the query doesn't return a text result, the OK or EOF packet information + as dictionary will be returned. In case the result was an error, exception + Error will be raised. 
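A minimal sketch of the low-level `cmd_query()` / `get_rows()` flow described here, assuming an already-connected `MySQLConnection` instance `cnx`:

# Illustrative sketch only; application code normally goes through a cursor instead.
async def run_query(cnx) -> None:
    result = await cnx.cmd_query("SELECT 1 AS one, 'a' AS letter")
    if "columns" in result:
        # Text result set: column metadata is returned first, rows are read separately.
        rows, eof = await cnx.get_rows()
        print(result["columns"], rows, eof)
    else:
        # No result set: `result` is the parsed OK packet (affected rows, etc.).
        print(result)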
+ """ + if not isinstance(query, bytearray): + if isinstance(query, str): + query = query.encode() + query = bytearray(query) + # Prepare query attrs + charset = self._charset.name if self._charset.name != "utf8mb4" else "utf8" + packet = bytearray() + if not self._server_info.query_attrs_is_supported and self._query_attrs: + warnings.warn( + "This version of the server does not support Query Attributes", + category=Warning, + ) + if self._client_flags & ClientFlag.CLIENT_QUERY_ATTRIBUTES: + names = [] + types = [] + values: List[bytes] = [] + null_bitmap = [0] * ((len(self._query_attrs) + 7) // 8) + for pos, attr_tuple in enumerate(self._query_attrs): + value = attr_tuple[1] + flags = 0 + if value is None: + null_bitmap[(pos // 8)] |= 1 << (pos % 8) + types.append(int1store(FieldType.NULL) + int1store(flags)) + continue + if isinstance(value, int): + ( + packed, + field_type, + flags, + ) = self._protocol.prepare_binary_integer(value) + values.append(packed) + elif isinstance(value, str): + value = value.encode(charset) + values.append(lc_int(len(value)) + value) + field_type = FieldType.VARCHAR + elif isinstance(value, bytes): + values.append(lc_int(len(value)) + value) + field_type = FieldType.BLOB + elif isinstance(value, Decimal): + values.append( + lc_int(len(str(value).encode(charset))) + + str(value).encode(charset) + ) + field_type = FieldType.DECIMAL + elif isinstance(value, float): + values.append(struct.pack(" parameter_count Number of parameters + packet.extend(lc_int(len(self._query_attrs))) + # int parameter_set_count Number of parameter sets. + # Currently always 1 + packet.extend(lc_int(1)) + if values: + packet.extend( + b"".join([struct.pack("B", bit) for bit in null_bitmap]) + + int1store(1) + ) + for _type, name in zip(types, names): + packet.extend(_type) + packet.extend(name) + + for value in values: + packet.extend(value) + + packet.extend(query) + query = bytes(packet) + try: + result = await self._handle_result( + await self._send_cmd(ServerCmd.QUERY, query) + ) + except ProgrammingError as err: + if err.errno == 3948 and "Loading local data is disabled" in err.msg: + err_msg = ( + "LOAD DATA LOCAL INFILE file request rejected due " + "to restrictions on access." + ) + raise DatabaseError(err_msg) from err + raise + if self._have_next_result: + raise InterfaceError( + "Use cmd_query_iter for statements with multiple queries." + ) + + return result + + async def cmd_query_iter( # type: ignore[override] + self, statements: StrOrBytes + ) -> AsyncGenerator[ResultType, None]: + """Send one or more statements to the MySQL server. + + Similar to the cmd_query method, but instead returns a generator + object to iterate through results. It sends the statements to the + MySQL server and through the iterator you can get the results. + + statement = 'SELECT 1; INSERT INTO t1 VALUES (); SELECT 2' + for result in await cnx.cmd_query(statement, iterate=True): + if 'columns' in result: + columns = result['columns'] + rows = await cnx.get_rows() + else: + # do something useful with INSERT result + """ + packet = bytearray() + if not isinstance(statements, bytearray): + if isinstance(statements, str): + statements = statements.encode("utf8") + statements = bytearray(statements) + + if self._client_flags & ClientFlag.CLIENT_QUERY_ATTRIBUTES: + # int parameter_count Number of parameters + packet.extend(lc_int(0)) + # int parameter_set_count Number of parameter sets. 
+ # Currently always 1 + packet.extend(lc_int(1)) + + packet.extend(statements) + query = bytes(packet) + # Handle the first query result + yield await self._handle_result(await self._send_cmd(ServerCmd.QUERY, query)) + + # Handle next results, if any + while self._have_next_result: + await self.handle_unread_result() + yield await self._handle_result(await self._socket.read()) + + async def cmd_stmt_fetch(self, statement_id: int, rows: int = 1) -> None: + """Fetch a MySQL statement Result Set. + + This method will send the FETCH command to MySQL together with the given + statement id and the number of rows to fetch. + """ + packet = self._protocol.make_stmt_fetch(statement_id, rows) + self.unread_result = False + await self._send_cmd(ServerCmd.STMT_FETCH, packet, expect_response=False) + self.unread_result = True + + async def cmd_stmt_prepare( + self, statement: bytes + ) -> Mapping[str, Union[int, List[DescriptionType]]]: + """Prepare a MySQL statement. + + This method will send the PREPARE command to MySQL together with the given + statement. + """ + packet = await self._send_cmd(ServerCmd.STMT_PREPARE, statement) + result = self._handle_binary_ok(packet) + + result["columns"] = [] + result["parameters"] = [] + if result["num_params"] > 0: + for _ in range(0, result["num_params"]): + result["parameters"].append( + self._protocol.parse_column( + await self._socket.read(), self.python_charset + ) + ) + self._handle_eof(await self._socket.read()) + if result["num_columns"] > 0: + for _ in range(0, result["num_columns"]): + result["columns"].append( + self._protocol.parse_column( + await self._socket.read(), self.python_charset + ) + ) + self._handle_eof(await self._socket.read()) + + return result + + async def cmd_stmt_execute( + self, + statement_id: int, # type: ignore[override] + data: Sequence[BinaryProtocolType] = (), + parameters: Sequence[Any] = (), + flags: int = 0, + ) -> Union[OkPacketType, Tuple[int, List[DescriptionType], EofPacketType]]: + """Execute a prepared MySQL statement.""" + parameters = list(parameters) + long_data_used = {} + + if data: + for param_id, _ in enumerate(parameters): + if isinstance(data[param_id], IOBase): + binary = True + try: + binary = "b" not in data[param_id].mode # type: ignore[union-attr] + except AttributeError: + pass + await self.cmd_stmt_send_long_data( + statement_id, param_id, data[param_id] + ) + long_data_used[param_id] = (binary,) + if not self._query_attrs_supported and self._query_attrs: + warnings.warn( + "This version of the server does not support Query Attributes", + category=Warning, + ) + if self._client_flags & ClientFlag.CLIENT_QUERY_ATTRIBUTES: + execute_packet = self._protocol.make_stmt_execute( + statement_id, + data, + tuple(parameters), + flags, + long_data_used, + self.charset, + self._query_attrs, + self._converter_str_fallback, + ) + else: + execute_packet = self._protocol.make_stmt_execute( + statement_id, + data, + tuple(parameters), + flags, + long_data_used, + self.charset, + converter_str_fallback=self._converter_str_fallback, + ) + packet = await self._send_cmd(ServerCmd.STMT_EXECUTE, packet=execute_packet) + result = await self._handle_binary_result(packet) + return result + + async def cmd_stmt_reset(self, statement_id: int) -> None: + """Reset data for prepared statement sent as long data. + + The result is a dictionary with OK packet information. 
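+
+        Illustrative sketch of where a reset fits in the low-level
+        prepared-statement flow (statement, table and file name are
+        hypothetical): after streaming a parameter with
+        `cmd_stmt_send_long_data()`, a reset discards that data so the
+        statement can be executed again with fresh values.
+
+            prep = await cnx.cmd_stmt_prepare(b"INSERT INTO t1 (doc) VALUES (?)")
+            with open("data.bin", "rb") as blob:
+                await cnx.cmd_stmt_send_long_data(prep["statement_id"], 0, blob)
+            await cnx.cmd_stmt_reset(prep["statement_id"])
+            await cnx.cmd_stmt_close(prep["statement_id"])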
+ """ + self._handle_ok( + await self._send_cmd(ServerCmd.STMT_RESET, int4store(statement_id)), + ) + + async def cmd_stmt_close(self, statement_id: int) -> None: + """Deallocate a prepared MySQL statement. + + This method deallocates the prepared statement using the statement_id. + Note that the MySQL server does not return anything. + """ + await self._send_cmd( + ServerCmd.STMT_CLOSE, + int4store(statement_id), + expect_response=False, + ) + + async def cmd_refresh(self, options: int) -> OkPacketType: + """Send the Refresh command to the MySQL server. + + This method sends the Refresh command to the MySQL server. The options + argument should be a bitwise value using constants.RefreshOption. + + Usage example: + RefreshOption = mysql.connector.RefreshOption + refresh = RefreshOption.LOG | RefreshOption.INFO + cnx.cmd_refresh(refresh) + """ + if not options & ( + RefreshOption.GRANT + | RefreshOption.LOG + | RefreshOption.TABLES + | RefreshOption.HOST + | RefreshOption.STATUS + | RefreshOption.REPLICA + ): + raise ValueError("Invalid command REFRESH option") + + if options & RefreshOption.GRANT: + res = await self.cmd_query("FLUSH PRIVILEGES") + if options & RefreshOption.LOG: + res = await self.cmd_query("FLUSH LOGS") + if options & RefreshOption.TABLES: + res = await self.cmd_query("FLUSH TABLES") + if options & RefreshOption.HOST: + res = await self.cmd_query("TRUNCATE TABLE performance_schema.host_cache") + if options & RefreshOption.STATUS: + res = await self.cmd_query("FLUSH STATUS") + if options & RefreshOption.REPLICA: + res = await self.cmd_query( + "RESET SLAVE" + if self._server_info.version_tuple < (8, 0, 22) + else "RESET REPLICA" + ) + + return res # type: ignore[return-value] + + async def cmd_stmt_send_long_data( + self, statement_id: int, param_id: int, data: BinaryIO + ) -> int: + """Send data for a column. + + This methods send data for a column (for example BLOB) for statement identified + by statement_id. The param_id indicate which parameter the data belongs too. + The data argument should be a file-like object. + + Since MySQL does not send anything back, no error is raised. When the MySQL + server is not reachable, an OperationalError is raised. + + cmd_stmt_send_long_data should be called before cmd_stmt_execute. + + The total bytes send is returned. + """ + chunk_size = 131072 # 128 KB + total_sent = 0 + try: + buf = data.read(chunk_size) + while buf: + packet = self._protocol.prepare_stmt_send_long_data( + statement_id, param_id, buf + ) + await self._send_cmd( + ServerCmd.STMT_SEND_LONG_DATA, + packet=packet, + expect_response=False, + ) + total_sent += len(buf) + buf = data.read(chunk_size) + except AttributeError as err: + raise OperationalError("MySQL Connection not available") from err + + return total_sent + + async def cmd_quit(self) -> bytes: + """Close the current connection with the server. + + Send the QUIT command to the MySQL server, closing the current connection. + """ + await self.handle_unread_result() + packet = self._protocol.make_command(ServerCmd.QUIT) + await self._socket.write(packet) + return packet + + async def cmd_shutdown(self, shutdown_type: Optional[int] = None) -> None: + """Shut down the MySQL Server. + + This method sends the SHUTDOWN command to the MySQL server. + The `shutdown_type` is not used, and it's kept for backward compatibility. + """ + await self.cmd_query("SHUTDOWN") + + async def cmd_statistics(self) -> StatsPacketType: + """Send the statistics command to the MySQL Server. 
+ + This method sends the STATISTICS command to the MySQL server. The result is a + dictionary with various statistical information. + """ + await self.handle_unread_result() + + packet = self._protocol.make_command(ServerCmd.STATISTICS) + await self._socket.write(packet, 0, 0) + return self._protocol.parse_statistics(await self._socket.read()) + + async def cmd_process_kill(self, mysql_pid: int) -> OkPacketType: + """Kill a MySQL process. + + This method send the PROCESS_KILL command to the server along with the + process ID. The result is a dictionary with the OK packet information. + """ + if not isinstance(mysql_pid, int): + raise ValueError("MySQL PID must be int") + return await self.cmd_query(f"KILL {mysql_pid}") # type: ignore[return-value] + + async def cmd_debug(self) -> EofPacketType: + """Send the DEBUG command. + + This method sends the DEBUG command to the MySQL server, which requires the + MySQL user to have SUPER privilege. The output will go to the MySQL server + error log and the result of this method is a dictionary with EOF packet + information. + """ + return self._handle_eof(await self._send_cmd(ServerCmd.DEBUG)) + + async def cmd_ping(self) -> OkPacketType: + """Send the PING command. + + This method sends the PING command to the MySQL server. It is used to check + if the the connection is still valid. The result of this method is dictionary + with OK packet information. + """ + return self._handle_ok(await self._send_cmd(ServerCmd.PING)) + + async def cmd_change_user( + self, + user: str = "", + password: str = "", + database: str = "", + charset: int = 45, + password1: str = "", + password2: str = "", + password3: str = "", + oci_config_file: str = "", + oci_config_profile: str = "", + ) -> Optional[OkPacketType]: + """Change the current logged in user. + + This method allows to change the current logged in user information. + The result is a dictionary with OK packet information. 
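+
+        Illustrative sketch (user, password and schema are placeholders, not
+        real credentials):
+
+            await cnx.cmd_change_user(
+                user="app_user",
+                password="app_password",
+                database="app_db",
+                charset=45,  # 45 = utf8mb4_general_ci, the default above
+            )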
+ """ + if not isinstance(charset, int): + raise ValueError("charset must be an integer") + if charset < 0: + raise ValueError("charset should be either zero or a postive integer") + + self._mfa_nfactor = 1 + self._user = user + self._password = password + self._password1 = password1 + self._password2 = password2 + self._password3 = password3 + + if self._password1 and password != self._password1: + password = self._password1 + + await self.handle_unread_result() + + if self._compress: + raise NotSupportedError("Change user is not supported with compression") + + if oci_config_file: + self._oci_config_file = oci_config_file + + self._oci_config_profile = oci_config_profile + + packet = self._protocol.make_auth( + handshake=self._handshake, + username=self._user, + password=self._password, + database=self._database, + charset=charset, + client_flags=self._client_flags, + ssl_enabled=self._ssl_active, + auth_plugin=self._auth_plugin, + conn_attrs=self._connection_attrs, + auth_plugin_class=self._auth_plugin_class, + ) + logger.debug("Protocol::HandshakeResponse packet: %s", packet) + await self._socket.write(packet[0]) + + ok_packet = self._handle_ok(await self._socket.read()) + + if not (self._client_flags & ClientFlag.CONNECT_WITH_DB) and database: + await self.cmd_init_db(database) + + self._charset = charsets.get_by_id(charset) + self._charset_name = self._charset.name + + # return ok_pkt + return self._handle_ok(ok_packet) diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/cursor.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/cursor.py new file mode 100644 index 0000000..c73b455 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/cursor.py @@ -0,0 +1,1498 @@ +# Copyright (c) 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +# mypy: disable-error-code="assignment,arg-type,attr-defined,index,override,call-overload" + +"""Implementation of cursor classes in pure Python.""" + +__all__ = ["MySQLCursor"] + +import re +import warnings + +from collections import namedtuple +from decimal import Decimal +from typing import ( + Any, + AsyncGenerator, + Dict, + Iterator, + List, + NoReturn, + Optional, + Sequence, + Tuple, + Union, +) + +from ..constants import ServerFlag +from ..cursor import ( + MAX_RESULTS, + RE_PY_MAPPING_PARAM, + RE_PY_PARAM, + RE_SQL_COMMENT, + RE_SQL_FIND_PARAM, + RE_SQL_INSERT_STMT, + RE_SQL_INSERT_VALUES, + RE_SQL_ON_DUPLICATE, + RE_SQL_PYTHON_CAPTURE_PARAM_NAME, + RE_SQL_PYTHON_REPLACE_PARAM, + is_eol_comment, + parse_multi_statement_query, +) +from ..errors import ( + Error, + InterfaceError, + NotSupportedError, + ProgrammingError, + get_mysql_exception, +) +from ..types import ( + DescriptionType, + EofPacketType, + ParamsDictType, + ParamsSequenceOrDictType, + ParamsSequenceType, + ResultType, + RowItemType, + RowType, + StrOrBytes, + WarningType, +) +from .abstracts import NAMED_TUPLE_CACHE, MySQLConnectionAbstract, MySQLCursorAbstract + +ERR_NO_RESULT_TO_FETCH = "No result set to fetch from" + + +class _ParamSubstitutor: + """Substitute parameters into a SQL statement.""" + + def __init__(self, params: Sequence[bytes]) -> None: + self.params: Sequence[bytes] = params + self.index: int = 0 + + def __call__(self, matchobj: re.Match) -> bytes: + index = self.index + self.index += 1 + try: + return bytes(self.params[index]) + except IndexError: + raise ProgrammingError( + "Not enough parameters for the SQL statement" + ) from None + + @property + def remaining(self) -> int: + """Return the number of parameters remaining to be substituted.""" + return len(self.params) - self.index + + +def _bytestr_format_dict(bytestr: bytes, value_dict: Dict[bytes, bytes]) -> bytes: + """ + >>> _bytestr_format_dict(b'%(a)s', {b'a': b'foobar'}) + b'foobar + >>> _bytestr_format_dict(b'%%(a)s', {b'a': b'foobar'}) + b'%%(a)s' + >>> _bytestr_format_dict(b'%%%(a)s', {b'a': b'foobar'}) + b'%%foobar' + >>> _bytestr_format_dict(b'%(x)s %(y)s', + ... {b'x': b'x=%(y)s', b'y': b'y=%(x)s'}) + b'x=%(y)s y=%(x)s' + """ + + def replace(matchobj: re.Match) -> bytes: + """Replace pattern.""" + value: Optional[bytes] = None + groups = matchobj.groupdict() + if groups["conversion_type"] == b"%": + value = b"%" + if groups["conversion_type"] == b"s": + key = groups["mapping_key"] + value = value_dict[key] + if value is None: + raise ValueError( + f"Unsupported conversion_type: {groups['conversion_type']}" + ) + return value + + stmt = RE_PY_MAPPING_PARAM.sub(replace, bytestr) + return stmt + + +class MySQLCursor(MySQLCursorAbstract): + """Default cursor for interacting with MySQL. + + This cursor will execute statements and handle the result. It will not automatically + fetch all rows. + + MySQLCursor should be inherited whenever other functionallity is required. + An example would to change the fetch* member functions to return dictionaries instead + of lists of values. + + Implements the Python Database API Specification v2.0 (PEP-249). 
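+
+    Illustrative usage sketch (the connection object `cnx` and table `t1` are
+    placeholders):
+
+        async with await cnx.cursor() as cur:
+            await cur.execute("SELECT col1, col2 FROM t1 WHERE col1 = %s", (5,))
+            rows = await cur.fetchall()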
+ """ + + async def __anext__(self) -> RowType: + res = await self.fetchone() + if res is not None: + return res + raise StopAsyncIteration + + async def close(self) -> bool: + if not self._connection: + return False + self._connection.remove_cursor(self) + await self._connection.handle_unread_result() + await self._reset_result() + self._connection = None + return True + + def _process_params_dict( + self, params: ParamsDictType + ) -> Dict[bytes, Union[bytes, Decimal]]: + """Process query parameters given as dictionary.""" + res: Dict[bytes, Any] = {} + try: + to_mysql = self._connection.converter.to_mysql + escape = self._connection.converter.escape + quote = self._connection.converter.quote + for key, value in params.items(): + conv = value + conv = to_mysql(conv) + conv = escape(conv) + if not isinstance(value, Decimal): + conv = quote(conv) + res[key.encode()] = conv + except Exception as err: + raise ProgrammingError( + f"Failed processing pyformat-parameters; {err}" + ) from err + return res + + def _process_params( + self, params: ParamsSequenceType + ) -> Tuple[Union[bytes, Decimal], ...]: + """Process query parameters.""" + result = params[:] + try: + to_mysql = self._connection.converter.to_mysql + escape = self._connection.converter.escape + quote = self._connection.converter.quote + result = [to_mysql(value) for value in result] + result = [escape(value) for value in result] + result = [ + quote(value) if not isinstance(params[i], Decimal) else value + for i, value in enumerate(result) + ] + except Exception as err: + raise ProgrammingError( + f"Failed processing format-parameters; {err}" + ) from err + return tuple(result) + + async def _execute_iter( + self, query_iter: AsyncGenerator[ResultType, None] + ) -> AsyncGenerator[MySQLCursorAbstract, None]: + """Generator returns MySQLCursor objects for multiple statements + + This method is only used when multiple statements are executed + by the `cursor.execute(multi_stmt_query, multi=True)` method. + + It matches the given `query_iter` (result of `MySQLConnection.cmd_query_iter()`) + and the list of statements that were executed. + + How does this method work? To properly map each statement (stmt) to a result, + the following facts must be considered: + + 1. Read operations such as `SELECT` produce a non-empty result + (calling `next(query_iter)` gets a result that includes at least one column). + 2. Write operatios such as `INSERT` produce an empty result + (calling `next(query_iter)` gets a result with no columns - aka empty). + 3. End-of-line (EOL) comments do not produce a result, unless is the last stmt + in which case produces an empty result. + 4. Calling procedures such as `CALL my_proc` produce a sequence `(1)*0` which + means it may produce zero or more non-empty results followed by just one + empty result. In other words, a callproc stmt always terminates with an + empty result. E.g., `my_proc` includes an update + select + select + update, + then the result sequence will be `110` - note how the write ops results get + annulated, just the read ops results are produced. Other examples: + * insert + insert -> 0 + * select + select + insert + select -> 1110 + * select -> 10 + Observe how 0 indicates the end of the result sequence. This property is + vital to know what result corresponds to what callproc stmt. + + In this regard, the implementation is composed of: + 1. 
Parsing: the multi-statement is broken down into single statements, and then + for each of these, leading white spaces are removed (including + jumping line, vertical line, tab, etc.). Also, EOL comments are removed from + the stream, except when the comment is the last statement of the + multi-statement string. + 2. Mapping: the facts described above as used as "game rules" to properly match + statements and results. In case, if we run out of statements before running out + of results we use a sentinel named "stmt_overflow!" to indicate that the mapping + went wrong. + + Acronyms + 1: a non-empty result + 2: an empty result + """ + executed_list = parse_multi_statement_query(multi_stmt=self._executed) + self._executed = None + stmt = executed_list.popleft() if executed_list else b"stmt_overflow!" + async for result in query_iter: + await self._reset_result() + await self._handle_result(result) + + if is_eol_comment(stmt): + continue + + self._executed = stmt.rstrip() + yield self + + if not stmt.upper().startswith(b"CALL") or "columns" not in result: + stmt = executed_list.popleft() if executed_list else b"stmt_overflow!" + + async def _fetch_row(self, raw: bool = False) -> Optional[RowType]: + """Return the next row in the result set.""" + if not self._connection.unread_result: + return None + row = None + + if self._nextrow == (None, None): + (row, eof) = await self._connection.get_row( + binary=self._binary, columns=self.description, raw=raw + ) + else: + (row, eof) = self._nextrow + + if row: + self._nextrow = await self._connection.get_row( + binary=self._binary, columns=self.description, raw=raw + ) + eof = self._nextrow[1] + if eof is not None: + await self._handle_eof(eof) + if self._rowcount == -1: + self._rowcount = 1 + else: + self._rowcount += 1 + if eof: + await self._handle_eof(eof) + + return row + + async def _handle_result(self, result: ResultType) -> None: + """Handle the result after a command was send. + + The result can be either an OK-packet or a dictionary containing column/eof + information. + + Raises: + InterfaceError: When result is not a dict() or result is invalid. + """ + if not isinstance(result, dict): + raise InterfaceError("Result was not a dict()") + + if "columns" in result: + # Weak test, must be column/eof information + self._description = result["columns"] + self._connection.unread_result = True + await self._handle_resultset() + elif "affected_rows" in result: + # Weak test, must be an OK-packet + self._connection.unread_result = False + await self._handle_noresultset(result) + else: + raise InterfaceError("Invalid result") + + async def _handle_resultset(self) -> None: + """Handle the result set. + + This method handles the result set and is called after reading and storing + column information in _handle_result(). For non-buffering cursors, this method + is usually doing nothing. + """ + + async def _handle_noresultset(self, res: ResultType) -> None: + """Handle result of execute() when there is no result set. + + Raises: + ProgrammingError: When failing handling a non-resultset. + """ + try: + self._rowcount = res["affected_rows"] + self._last_insert_id = res["insert_id"] + self._warning_count = res["warning_count"] + except (KeyError, TypeError) as err: + raise ProgrammingError(f"Failed handling non-resultset; {err}") from None + + await self._handle_warnings() + + async def _handle_warnings(self) -> None: + """Handle possible warnings after all results are consumed. + + Raises: + Error: Also raises exceptions if raise_on_warnings is set. 
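+
+        Sketch of the connection settings this behaviour depends on (it is
+        assumed here that both are settable attributes on the connection, as in
+        the classic connector):
+
+            cnx.get_warnings = True        # fetch warnings via SHOW WARNINGS
+            cnx.raise_on_warnings = True   # escalate the first warning to an error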
+ """ + if self._connection.get_warnings and self._warning_count: + self._warnings = await self._fetch_warnings() + + if not self._warnings: + return + + err = get_mysql_exception( + self._warnings[0][1], + self._warnings[0][2], + warning=not self._connection.raise_on_warnings, + ) + + if self._connection.raise_on_warnings: + raise err + + warnings.warn(err, stacklevel=4) + + async def _handle_eof(self, eof: EofPacketType) -> None: + """Handle EOF packet.""" + self._connection.unread_result = False + self._nextrow = (None, None) + self._warning_count = eof["warning_count"] + await self._handle_warnings() + + async def _reset_result(self) -> None: + """Reset the cursor to default.""" + self._description = None + self._warnings = None + self._warning_count = 0 + self._executed = None + self._executed_list = [] + self._stored_results = [] + self._rowcount = -1 + self._nextrow = (None, None) + await self.reset() + + def _have_unread_result(self) -> bool: + """Check whether there is an unread result.""" + try: + return self._connection.unread_result + except AttributeError: + return False + + def _check_executed(self) -> None: + """Check if the statement has been executed. + + Raises: + InterfaceError: If the statement has not been executed. + """ + if self._executed is None: + raise InterfaceError(ERR_NO_RESULT_TO_FETCH) + + def _prepare_statement( + self, + operation: StrOrBytes, + params: Union[Sequence[Any], Dict[str, Any]] = (), + ) -> bytes: + """Prepare SQL statement for execution. + + Converts the SQL statement to bytes and replaces the parameters in the + placeholders. + + Raises: + ProgrammingError: On converting to bytes, missing parameters or invalid + parameters type. + """ + try: + stmt = ( + operation + if isinstance(operation, (bytes, bytearray)) + else operation.encode(self._connection.python_charset) + ) + except (UnicodeDecodeError, UnicodeEncodeError) as err: + raise ProgrammingError(str(err)) from err + + if params: + if isinstance(params, dict): + stmt = _bytestr_format_dict(stmt, self._process_params_dict(params)) + elif isinstance(params, (list, tuple)): + psub = _ParamSubstitutor(self._process_params(params)) + stmt = RE_PY_PARAM.sub(psub, stmt) + if psub.remaining != 0: + raise ProgrammingError( + "Not all parameters were used in the SQL statement" + ) + else: + raise ProgrammingError( + f"Could not process parameters: {type(params).__name__}({params})," + " it must be of type list, tuple or dict" + ) + + return stmt + + async def _fetch_warnings(self) -> Optional[List[WarningType]]: + """Fetch warnings doing a SHOW WARNINGS.""" + result = [] + async with await self._connection.cursor(raw=False) as cur: + await cur.execute("SHOW WARNINGS") + result = await cur.fetchall() + return result if result else None # type: ignore[return-value] + + def _batch_insert( + self, operation: str, seq_params: Sequence[ParamsSequenceOrDictType] + ) -> Optional[bytes]: + """Implements multi row insert""" + + def remove_comments(match: re.Match) -> str: + """Remove comments from INSERT statements. + + This function is used while removing comments from INSERT + statements. If the matched string is a comment not enclosed + by quotes, it returns an empty string, else the string itself. 
+ """ + if match.group(1): + return "" + return match.group(2) + + tmp = re.sub( + RE_SQL_ON_DUPLICATE, + "", + re.sub(RE_SQL_COMMENT, remove_comments, operation), + ) + + matches = re.search(RE_SQL_INSERT_VALUES, tmp) + if not matches: + raise InterfaceError( + "Failed rewriting statement for multi-row INSERT. Check SQL syntax" + ) + fmt = matches.group(1).encode(self._connection.python_charset) + values = [] + + try: + stmt = operation.encode(self._connection.python_charset) + for params in seq_params: + tmp = fmt + if isinstance(params, dict): + tmp = _bytestr_format_dict(tmp, self._process_params_dict(params)) + else: + psub = _ParamSubstitutor(self._process_params(params)) + tmp = RE_PY_PARAM.sub(psub, tmp) + if psub.remaining != 0: + raise ProgrammingError( + "Not all parameters were used in the SQL statement" + ) + values.append(tmp) + if fmt in stmt: + stmt = stmt.replace(fmt, b",".join(values), 1) + self._executed = stmt + return stmt + return None + except (UnicodeDecodeError, UnicodeEncodeError) as err: + raise ProgrammingError(str(err)) from err + except Error: + raise + except Exception as err: + raise InterfaceError(f"Failed executing the operation; {err}") from None + + def stored_results(self) -> Iterator[MySQLCursorAbstract]: + """Returns an iterator for stored results. + + This method returns an iterator over results which are stored when callproc() + is called. The iterator will provide MySQLCursorBuffered instances. + """ + return iter(self._stored_results) + + async def callproc( + self, + procname: str, + args: Sequence[Any] = (), + ) -> Optional[Union[Dict[str, RowItemType], RowType]]: + """Calls a stored procedure with the given arguments + + The arguments will be set during this session, meaning + they will be called like ___arg where + is an enumeration (+1) of the arguments. + + Coding Example: + 1) Defining the Stored Routine in MySQL: + CREATE PROCEDURE multiply(IN pFac1 INT, IN pFac2 INT, OUT pProd INT) + BEGIN + SET pProd := pFac1 * pFac2; + END + + 2) Executing in Python: + args = (5, 5, 0) # 0 is to hold pprod + await cursor.callproc('multiply', args) + print(await cursor.fetchone()) + + For OUT and INOUT parameters the user should provide the + type of the parameter as well. The argument should be a + tuple with first item as the value of the parameter to pass + and second argument the type of the argument. + + In the above example, one can call callproc method like: + args = (5, 5, (0, 'INT')) + await cursor.callproc('multiply', args) + + The type of the argument given in the tuple will be used by + the MySQL CAST function to convert the values in the corresponding + MySQL type (See CAST in MySQL Reference for more information) + + Does not return a value, but a result set will be + available when the CALL-statement execute successfully. + Raises exceptions when something is wrong. + """ + if not procname or not isinstance(procname, str): + raise ValueError("procname must be a string") + + if not isinstance(args, (tuple, list)): + raise ValueError("args must be a sequence") + + self._stored_results = [] + + results = [] + try: + argnames = [] + argtypes = [] + + # MySQL itself does support calling procedures with their full + # name .. It's necessary to split + # by '.' and grab the procedure name from procname. 
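+            # For a hypothetical procname such as "mydb.multiply", the session
+            # variables built below are named @_multiply_arg1, @_multiply_arg2, ...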
+ procname_abs = procname.split(".")[-1] + if args: + argvalues = [] + for idx, arg in enumerate(args): + argname = f"@_{procname_abs}_arg{idx + 1}" + argnames.append(argname) + if isinstance(arg, tuple): + argtypes.append(f" CAST({argname} AS {arg[1]})") + argvalues.append(arg[0]) + else: + argtypes.append(argname) + argvalues.append(arg) + + placeholders = ",".join(f"{arg}=%s" for arg in argnames) + await self.execute(f"SET {placeholders}", argvalues) + + call = f"CALL {procname}({','.join(argnames)})" + + # We disable consuming results temporary to make sure we + # getting all results + can_consume_results = self._connection.can_consume_results + async for result in self._connection.cmd_query_iter(call): + self._connection.can_consume_results = False + if isinstance(self, (MySQLCursorDict, MySQLCursorBufferedDict)): + cursor_class = MySQLCursorBufferedDict + elif isinstance( + self, + (MySQLCursorNamedTuple, MySQLCursorBufferedNamedTuple), + ): + cursor_class = MySQLCursorBufferedNamedTuple + elif self._raw: + cursor_class = MySQLCursorBufferedRaw + else: + cursor_class = MySQLCursorBuffered + # pylint: disable=protected-access + + # cursor_class = MySQLCursorBuffered + cur = cursor_class(self._connection.get_self()) + cur._executed = f"(a result of {call})" + await cur._handle_result(result) + # pylint: enable=protected-access + if cur.warnings is not None: + self._warnings = cur.warnings + if "columns" in result: + results.append(cur) + + self._connection.can_consume_results = can_consume_results + if argnames: + # Create names aliases to be compatible with namedtuples + args = [ + f"{name} AS {alias}" + for name, alias in zip( + argtypes, [arg.lstrip("@_") for arg in argnames] + ) + ] + select = f"SELECT {','.join(args)}" + await self.execute(select) + self._stored_results = results + return await self.fetchone() + + self._stored_results = results + return tuple() + except Error: + raise + except Exception as err: + raise InterfaceError(f"Failed calling stored routine; {err}") from None + + async def execute( + self, + operation: StrOrBytes, + params: Union[Sequence[Any], Dict[str, Any]] = (), + multi: bool = False, + ) -> None: + """Executes the given operation. + + Executes the given operation substituting any markers with the given parameters. + + For example, getting all rows where id is 5: + await cursor.execute("SELECT * FROM t1 WHERE id = %s", (5,)) + + If the `multi`` parameter is used a `ProgrammingError` is raised. The method for + executing multiple statements is `executemulti()`. + + If warnings where generated, and connection.get_warnings is True, then + self._warnings will be a list containing these warnings. + + Raises: + ProgramingError: If multi parameter is used. + """ + if not self._connection: + raise ProgrammingError("Cursor is not connected") + + if not operation: + return None + + if multi: + raise ProgrammingError( + "The `multi` parameter cannot be used in the Connector/Python Asyncio " + "implementation. 
Please use `executemulti()` for executing multiple " + "statements" + ) + + await self._connection.handle_unread_result() + await self._reset_result() + + stmt = self._prepare_statement(operation, params) + self._executed = stmt + + try: + await self._handle_result(await self._connection.cmd_query(stmt)) + except InterfaceError as err: + if self._connection.have_next_result: + raise InterfaceError( + "Use `executemulti()` when executing multiple statements" + ) from err + raise + return None + + async def executemulti( + self, + operation: StrOrBytes, + params: Union[Sequence[Any], Dict[str, Any]] = (), + ) -> AsyncGenerator[MySQLCursorAbstract, None]: + """Execute multiple statements. + + Executes the given operation substituting any markers with the given + parameters. + """ + + if not self._connection: + raise ProgrammingError("Cursor is not connected") + + if not operation: + raise StopAsyncIteration + + await self._connection.handle_unread_result() + await self._reset_result() + + stmt = self._prepare_statement(operation, params) + self._executed = stmt + self._executed_list = [] + + async for result in self._execute_iter(self._connection.cmd_query_iter(stmt)): + yield result + + async def executemany( + self, + operation: str, + seq_params: Sequence[ParamsSequenceType], + ) -> None: + """Prepare and execute a MySQL Prepared Statement many times. + + This method will prepare the given operation and execute with each tuple found + the list seq_params. + + If the cursor instance already had a prepared statement, it is first closed. + """ + if not operation or not seq_params: + return None + await self._connection.handle_unread_result() + + try: + _ = iter(seq_params) + except TypeError as err: + raise ProgrammingError("Parameters for query must be an Iterable") from err + + # Optimize INSERTs by batching them + if re.match(RE_SQL_INSERT_STMT, operation): + if not seq_params: + self._rowcount = 0 + return None + stmt = self._batch_insert(operation, seq_params) + if stmt is not None: + self._executed = stmt + return await self.execute(stmt) + + rowcnt = 0 + try: + for params in seq_params: + await self.execute(operation, params) + if self.with_rows and self._have_unread_result(): + await self.fetchall() + rowcnt += self._rowcount + except (ValueError, TypeError) as err: + raise ProgrammingError(f"Failed executing the operation; {err}") from None + self._rowcount = rowcnt + + async def fetchone(self) -> Optional[RowType]: + """Return next row of a query result set. + + Raises: + InterfaceError: If there is no result to fetch. + + Returns: + tuple or None: A row from query result set. + """ + if self._executed is None: + raise InterfaceError(ERR_NO_RESULT_TO_FETCH) + return await self._fetch_row() + + async def fetchall(self) -> List[RowType]: + """Return all rows of a query result set. + + Returns: + list: A list of tuples with all rows of a query result set. + """ + if self._executed is None: + raise InterfaceError(ERR_NO_RESULT_TO_FETCH) + + if not self._connection.unread_result: + return [] + + rows, eof = await self._connection.get_rows() + if self._nextrow[0]: + rows.insert(0, self._nextrow[0]) + + await self._handle_eof(eof) + rowcount = len(rows) + if rowcount >= 0 and self._rowcount == -1: + self._rowcount = 0 + self._rowcount += rowcount + return rows + + async def fetchmany(self, size: Optional[int] = None) -> List[RowType]: + """Return the next set of rows of a query result set. + + When no more rows are available, it returns an empty list. 
+ The number of rows returned can be specified using the size argument, which + defaults to one. + + Returns: + list: The next set of rows of a query result set. + """ + self._check_executed() + res = [] + cnt = size or self.arraysize + while cnt > 0: + cnt -= 1 + row = await self.fetchone() + if row: + res.append(row) + + return res + + +class MySQLCursorBuffered(MySQLCursor): + """Cursor which fetches rows within execute().""" + + def __init__(self, connection: MySQLConnectionAbstract) -> None: + super().__init__(connection) + self._rows: Optional[List[RowType]] = None + self._next_row: int = 0 + + async def _handle_resultset(self) -> None: + """Handle the result set. + + This method handles the result set and is called after reading and storing + column information in _handle_result(). For non-buffering cursors, this method + is usually doing nothing. + """ + self._rows, eof = await self._connection.get_rows() + self._rowcount = len(self._rows) + await self._handle_eof(eof) + self._next_row = 0 + self._connection.unread_result = False + + async def _fetch_row(self, raw: bool = False) -> Optional[RowType]: + """Return the next row in the result set.""" + row = None + try: + row = self._rows[self._next_row] + except (IndexError, TypeError): + return None + self._next_row += 1 + return row + + async def reset(self, free: bool = True) -> None: + """Reset the cursor to default.""" + self._rows = None + + async def fetchone(self) -> Optional[RowType]: + """Return next row of a query result set. + + Returns: + tuple or None: A row from query result set. + """ + self._check_executed() + return await self._fetch_row() + + async def fetchall(self) -> List[RowType]: + """Return all rows of a query result set. + + Returns: + list: A list of tuples with all rows of a query result set. + """ + if self._executed is None or self._rows is None: + raise InterfaceError(ERR_NO_RESULT_TO_FETCH) + res = [] + res = self._rows[self._next_row :] + self._next_row = len(self._rows) + return res + + @property + def with_rows(self) -> bool: + return self._rows is not None + + +class MySQLCursorRaw(MySQLCursor): + """Skip conversion from MySQL datatypes to Python types when fetching rows.""" + + def __init__(self, connection: MySQLConnectionAbstract) -> None: + super().__init__(connection) + self._raw: bool = True + + async def fetchone(self) -> Optional[RowType]: + """Return next row of a query result set. + + Returns: + tuple or None: A row from query result set. + """ + self._check_executed() + return await self._fetch_row(raw=True) + + async def fetchall(self) -> List[RowType]: + """Return all rows of a query result set. + + Returns: + list: A list of tuples with all rows of a query result set. + """ + self._check_executed() + if not self._have_unread_result(): + return [] + rows, eof = await self._connection.get_rows(raw=True) + if self._nextrow[0]: + rows.insert(0, self._nextrow[0]) + await self._handle_eof(eof) + rowcount = len(rows) + if rowcount >= 0 and self._rowcount == -1: + self._rowcount = 0 + self._rowcount += rowcount + return rows + + +class MySQLCursorBufferedRaw(MySQLCursorBuffered): + """ + Cursor which skips conversion from MySQL datatypes to Python types when + fetching rows and fetches rows within execute(). + """ + + _raw: bool = True + + @property + def with_rows(self) -> bool: + return self._rows is not None + + +class MySQLCursorDict(MySQLCursor): + """ + Cursor fetching rows as dictionaries. + + The fetch methods of this class will return dictionaries instead of tuples. 
+ Each row is a dictionary that looks like: + row = { + "col1": value1, + "col2": value2 + } + """ + + def _row_to_python( + self, + rowdata: RowType, + desc: Optional[List[DescriptionType]] = None, # pylint: disable=unused-argument + ) -> Optional[Dict[str, RowItemType]]: + """Convert a MySQL text result row to Python types + + Returns a dictionary. + """ + return dict(zip(self.column_names, rowdata)) if rowdata else None + + async def fetchone(self) -> Optional[Dict[str, RowItemType]]: + """Return next row of a query result set. + + Returns: + dict or None: A dict from query result set. + """ + return self._row_to_python(await super().fetchone(), self.description) + + async def fetchall(self) -> List[Optional[Dict[str, RowItemType]]]: + """Return all rows of a query result set. + + Returns: + list: A list of dictionaries with all rows of a query + result set where column names are used as keys. + """ + return [ + self._row_to_python(row, self.description) + for row in await super().fetchall() + if row + ] + + +class MySQLCursorNamedTuple(MySQLCursor): + """ + Cursor fetching rows as named tuple. + + The fetch methods of this class will return namedtuples instead of tuples. + Each row is returned as a namedtuple and the values can be accessed as: + row.col1, row.col2 + """ + + def _row_to_python( + self, + rowdata: RowType, + desc: Optional[List[DescriptionType]] = None, # pylint: disable=unused-argument + ) -> Optional[RowType]: + """Convert a MySQL text result row to Python types + + Returns a named tuple. + """ + row = rowdata + + if row: + columns = tuple(self.column_names) + try: + named_tuple = NAMED_TUPLE_CACHE[columns] + except KeyError: + named_tuple = namedtuple("Row", columns) # type:ignore[no-redef, misc] + NAMED_TUPLE_CACHE[columns] = named_tuple + return named_tuple(*row) + return None + + async def fetchone(self) -> Optional[RowType]: + """Return next row of a query result set. + + Returns: + tuple or None: A row from query result set. + """ + row = await super().fetchone() + if not row: + return None + return ( + self._row_to_python(row, self.description) + if hasattr(self._connection, "converter") + else row + ) + + async def fetchall(self) -> List[Optional[RowType]]: + """Return all rows of a query result set. + + Returns: + list: A list of tuples with all rows of a query result set. + """ + return [ + self._row_to_python(row, self.description) + for row in await super().fetchall() + if row + ] + + +class MySQLCursorBufferedDict(MySQLCursorDict, MySQLCursorBuffered): + """ + Buffered Cursor fetching rows as dictionaries. + """ + + async def fetchone(self) -> Optional[Dict[str, RowItemType]]: + """Return next row of a query result set. + + Returns: + tuple or None: A row from query result set. + """ + self._check_executed() + row = await self._fetch_row() + if row: + return self._row_to_python(row, self.description) + return None + + async def fetchall(self) -> List[Optional[Dict[str, RowItemType]]]: + """Return all rows of a query result set. + + Returns: + list: A list of tuples with all rows of a query result set. + """ + if self._executed is None or self._rows is None: + raise InterfaceError(ERR_NO_RESULT_TO_FETCH) + res = [] + for row in self._rows[self._next_row :]: + res.append(self._row_to_python(row, self.description)) + self._next_row = len(self._rows) + return res + + +class MySQLCursorBufferedNamedTuple(MySQLCursorNamedTuple, MySQLCursorBuffered): + """ + Buffered Cursor fetching rows as named tuple. 
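+
+    Illustrative sketch (assumes the connection's cursor() factory accepts
+    `named_tuple` and `buffered` flags, as in the classic connector; table `t1`
+    is hypothetical):
+
+        cur = await cnx.cursor(named_tuple=True, buffered=True)
+        await cur.execute("SELECT col1, col2 FROM t1")
+        for row in await cur.fetchall():
+            print(row.col1, row.col2)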
+ """ + + async def fetchone(self) -> Optional[RowType]: + """Return next row of a query result set. + + Returns: + tuple or None: A row from query result set. + """ + self._check_executed() + row = await self._fetch_row() + if row: + return self._row_to_python(row, self.description) + return None + + async def fetchall(self) -> List[Optional[RowType]]: + """Return all rows of a query result set. + + Returns: + list: A list of tuples with all rows of a query result set. + """ + if self._executed is None or self._rows is None: + raise InterfaceError(ERR_NO_RESULT_TO_FETCH) + res = [] + for row in self._rows[self._next_row :]: + res.append(self._row_to_python(row, self.description)) + self._next_row = len(self._rows) + return res + + +class MySQLCursorPrepared(MySQLCursor): + """Cursor using MySQL Prepared Statements""" + + def __init__(self, connection: MySQLConnectionAbstract): + super().__init__(connection) + self._rows: Optional[List[RowType]] = None + self._next_row: int = 0 + self._prepared: Optional[Dict[str, Union[int, List[DescriptionType]]]] = None + self._binary: bool = True + self._have_result: Optional[bool] = None + self._last_row_sent: bool = False + self._cursor_exists: bool = False + + async def reset(self, free: bool = True) -> None: + if self._prepared: + try: + await self._connection.cmd_stmt_close(self._prepared["statement_id"]) + except Error: + # We tried to deallocate, but it's OK when we fail. + pass + self._prepared = None + self._last_row_sent = False + self._cursor_exists = False + + async def _handle_noresultset(self, res: ResultType) -> None: + self._handle_server_status( + res.get("status_flag", res.get("server_status", 0)), + ) + await super()._handle_noresultset(res) + + def _handle_server_status(self, flags: int) -> None: + self._cursor_exists = flags & ServerFlag.STATUS_CURSOR_EXISTS != 0 + self._last_row_sent = flags & ServerFlag.STATUS_LAST_ROW_SENT != 0 + + async def _handle_eof(self, eof: EofPacketType) -> None: + self._handle_server_status( + eof.get("status_flag", eof.get("server_status", 0)), + ) + await super()._handle_eof(eof) + + async def callproc(self, procname: Any, args: Any = ()) -> NoReturn: + """Calls a stored procedue + + Not supported with MySQLCursorPrepared. + """ + raise NotSupportedError() + + async def executemulti( + self, + operation: StrOrBytes, + params: Union[Sequence[Any], Dict[str, Any]] = (), + ) -> AsyncGenerator[MySQLCursorAbstract, None]: + """Execute multiple statements. + + Executes the given operation substituting any markers with the given + parameters. + """ + raise NotSupportedError() + + async def close(self) -> None: + """Close the cursor + + This method will try to deallocate the prepared statement and close + the cursor. + """ + await self.reset() + await super().close() + + def _row_to_python(self, rowdata: Any, desc: Any = None) -> Any: + """Convert row data from MySQL to Python types + + The conversion is done while reading binary data in the protocol module. 
+ """ + + async def _handle_result(self, result: ResultType) -> None: + """Handle result after execution""" + if isinstance(result, dict): + self._connection.unread_result = False + self._have_result = False + await self._handle_noresultset(result) + else: + self._description = result[1] + self._connection.unread_result = True + self._have_result = True + if "status_flag" in result[2]: # type: ignore[operator] + self._handle_server_status(result[2]["status_flag"]) + elif "server_status" in result[2]: # type: ignore[operator] + self._handle_server_status(result[2]["server_status"]) + + async def execute( + self, + operation: StrOrBytes, + params: Optional[ParamsSequenceOrDictType] = None, + multi: bool = False, + ) -> None: # multi is unused + """Prepare and execute a MySQL Prepared Statement + + This method will prepare the given operation and execute it using + the optionally given parameters. + + If the cursor instance already had a prepared statement, it is + first closed. + + Note: argument "multi" is unused. + """ + if not self._connection: + raise ProgrammingError("Cursor is not connected") + + if not operation: + return None + + charset = self._connection.charset + if charset == "utf8mb4": + charset = "utf8" + + if not isinstance(operation, str): + try: + operation = operation.decode(charset) + except UnicodeDecodeError as err: + raise ProgrammingError(str(err)) from err + + if isinstance(params, dict): + replacement_keys = re.findall(RE_SQL_PYTHON_CAPTURE_PARAM_NAME, operation) + try: + # Replace params dict with params tuple in correct order + params = tuple(params[key] for key in replacement_keys) + except KeyError as err: + raise ProgrammingError( + "Not all placeholders were found in the parameters dict" + ) from err + # Convert %(name)s to ? before sending it to MySQL + operation = re.sub(RE_SQL_PYTHON_REPLACE_PARAM, "?", operation) + + if operation is not self._executed: + if self._prepared: + await self._connection.cmd_stmt_close(self._prepared["statement_id"]) + self._executed = operation + + try: + operation = operation.encode(charset) + except UnicodeEncodeError as err: + raise ProgrammingError(str(err)) from err + + if b"%s" in operation: + # Convert %s to ? before sending it to MySQL + operation = re.sub(RE_SQL_FIND_PARAM, b"?", operation) + + try: + self._prepared = await self._connection.cmd_stmt_prepare(operation) + except Error: + self._executed = None + raise + + await self._connection.cmd_stmt_reset(self._prepared["statement_id"]) + + if self._prepared["parameters"] and not params: + return + if params: + if not isinstance(params, (tuple, list)): + raise ProgrammingError( + errno=1210, + msg="Incorrect type of argument: " + f"{type(params).__name__}({params})" + ", it must be of type tuple or list the argument given to " + "the prepared statement", + ) + if len(self._prepared["parameters"]) != len(params): + raise ProgrammingError( + errno=1210, + msg="Incorrect number of arguments executing prepared statement", + ) + + if params is None: + params = () + res = await self._connection.cmd_stmt_execute( + self._prepared["statement_id"], + data=params, + parameters=self._prepared["parameters"], + ) + await self._handle_result(res) + + async def executemany( + self, + operation: str, + seq_params: Sequence[ParamsSequenceType], + ) -> None: + """Prepare and execute a MySQL Prepared Statement many times + + This method will prepare the given operation and execute with each + tuple found the list seq_params. 
+ + If the cursor instance already had a prepared statement, it is + first closed. + + executemany() simply calls execute(). + """ + if not operation or not seq_params: + return None + await self._connection.handle_unread_result() + + try: + _ = iter(seq_params) + except TypeError as err: + raise ProgrammingError("Parameters for query must be an Iterable") from err + + rowcnt = 0 + try: + for params in seq_params: + await self.execute(operation, params) + if self.with_rows and self._have_unread_result(): + await self.fetchall() + rowcnt += self._rowcount + except (ValueError, TypeError) as err: + raise InterfaceError(f"Failed executing the operation; {err}") from None + self._rowcount = rowcnt + + async def fetchone(self) -> Optional[RowType]: + """Return next row of a query result set. + + Returns: + tuple or None: A row from query result set. + """ + self._check_executed() + if self._cursor_exists: + await self._connection.cmd_stmt_fetch(self._prepared["statement_id"]) + return await self._fetch_row() or None + + async def fetchmany(self, size: Optional[int] = None) -> List[RowType]: + """Return the next set of rows of a query result set. + + When no more rows are available, it returns an empty list. + The number of rows returned can be specified using the size argument, + which defaults to one. + + Returns: + list: The next set of rows of a query result set. + """ + self._check_executed() + res = [] + cnt = size or self.arraysize + while cnt > 0 and self._have_unread_result(): + cnt -= 1 + row = await self._fetch_row() + if row: + res.append(row) + return res + + async def fetchall(self) -> List[RowType]: + """Return all rows of a query result set. + + Returns: + list: A list of tuples with all rows of a query result set. + """ + self._check_executed() + rows = [] + if self._nextrow[0]: + rows.append(self._nextrow[0]) + while self._have_unread_result(): + if self._cursor_exists: + await self._connection.cmd_stmt_fetch( + self._prepared["statement_id"], MAX_RESULTS + ) + tmp, eof = await self._connection.get_rows( + binary=self._binary, columns=self.description + ) + rows.extend(tmp) + await self._handle_eof(eof) + self._rowcount = len(rows) + return rows + + +class MySQLCursorPreparedDict(MySQLCursorDict, MySQLCursorPrepared): # type: ignore[misc] + """ + This class is a blend of features from MySQLCursorDict and MySQLCursorPrepared + + Multiple inheritance in python is allowed but care must be taken + when assuming methods resolution. In the case of multiple + inheritance, a given attribute is first searched in the current + class if it's not found then it's searched in the parent classes. + The parent classes are searched in a left-right fashion and each + class is searched once. + Based on python's attribute resolution, in this case, attributes + are searched as follows: + 1. MySQLCursorPreparedDict (current class) + 2. MySQLCursorDict (left parent class) + 3. MySQLCursorPrepared (right parent class) + 4. MySQLCursor (base class) + """ + + async def fetchmany( + self, size: Optional[int] = None + ) -> List[Dict[str, RowItemType]]: + """Return the next set of rows of a query result set. + + When no more rows are available, it returns an empty list. + The number of rows returned can be specified using the size argument, + which defaults to one. + + Returns: + list: The next set of rows of a query result set represented + as a list of dictionaries where column names are used as keys. 
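+
+        Illustrative sketch (assumes the connection's cursor() factory accepts
+        `prepared` and `dictionary` flags, as in the classic connector; the
+        statement is hypothetical):
+
+            cur = await cnx.cursor(prepared=True, dictionary=True)
+            await cur.execute("SELECT col1, col2 FROM t1 WHERE col1 = %s", (5,))
+            rows = await cur.fetchmany(10)  # e.g. [{"col1": 5, "col2": ...}, ...]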
+ """ + return [ + self._row_to_python(row, self.description) + for row in await super().fetchmany(size=size) + if row + ] + + +class MySQLCursorPreparedNamedTuple(MySQLCursorNamedTuple, MySQLCursorPrepared): + """ + This class is a blend of features from MySQLCursorNamedTuple and MySQLCursorPrepared + """ + + async def fetchmany(self, size: Optional[int] = None) -> List[RowType]: + """Return the next set of rows of a query result set. + + When no more rows are available, it returns an empty list. + The number of rows returned can be specified using the size argument, + which defaults to one. + + Returns: + list: The next set of rows of a query result set represented + as a list of named tuples where column names are used as names. + """ + return [ + self._row_to_python(row, self.description) + for row in await super().fetchmany(size=size) + if row + ] + + +class MySQLCursorPreparedRaw(MySQLCursorPrepared): + """ + This class is a blend of features from MySQLCursorRaw and MySQLCursorPrepared + """ + + _raw: bool = True + + async def fetchone(self) -> Optional[RowType]: + """Return next row of a query result set. + + Returns: + tuple or None: A row from query result set. + """ + self._check_executed() + if self._cursor_exists: + await self._connection.cmd_stmt_fetch(self._prepared["statement_id"]) + return await self._fetch_row(raw=self._raw) or None + + async def fetchmany(self, size: Optional[int] = None) -> List[RowType]: + """Return the next set of rows of a query result set. + + When no more rows are available, it returns an empty list. + The number of rows returned can be specified using the size argument, + which defaults to one. + + Returns: + list: The next set of rows of a query result set. + """ + self._check_executed() + res = [] + cnt = size or self.arraysize + while cnt > 0 and self._have_unread_result(): + cnt -= 1 + row = await self._fetch_row(raw=self._raw) + if row: + res.append(row) + return res + + async def fetchall(self) -> List[RowType]: + """Return all rows of a query result set. + + Returns: + list: A list of tuples with all rows of a query result set. + """ + self._check_executed() + rows = [] + if self._nextrow[0]: + rows.append(self._nextrow[0]) + while self._have_unread_result(): + if self._cursor_exists: + await self._connection.cmd_stmt_fetch( + self._prepared["statement_id"], MAX_RESULTS + ) + tmp, eof = await self._connection.get_rows( + raw=self._raw, binary=self._binary, columns=self.description + ) + rows.extend(tmp) + await self._handle_eof(eof) + self._rowcount = len(rows) + return rows diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/logger.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/logger.py new file mode 100644 index 0000000..9843bf2 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/logger.py @@ -0,0 +1,33 @@ +# Copyright (c) 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. 
The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""Setup of the `mysql.connector.aio` logger.""" + +import logging + +logger = logging.getLogger("mysql.connector.aio") diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/network.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/network.py new file mode 100644 index 0000000..d07c5f2 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/network.py @@ -0,0 +1,670 @@ +# Copyright (c) 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +# pylint: disable=dangerous-default-value + +"""Module implementing low-level socket communication with MySQL servers.""" + + +__all__ = ["MySQLTcpSocket", "MySQLUnixSocket"] + +import asyncio +import struct +import zlib + +try: + import ssl + + TLS_VERSIONS = { + "TLSv1": ssl.PROTOCOL_TLSv1, + "TLSv1.1": ssl.PROTOCOL_TLSv1_1, + "TLSv1.2": ssl.PROTOCOL_TLSv1_2, + "TLSv1.3": ssl.PROTOCOL_TLS, + } +except ImportError: + ssl = None + +from abc import ABC, abstractmethod +from collections import deque +from typing import Any, Deque, List, Optional, Tuple + +from ..errors import ( + InterfaceError, + NotSupportedError, + OperationalError, + ProgrammingError, +) +from ..network import ( + COMPRESSED_PACKET_HEADER_LENGTH, + MAX_PAYLOAD_LENGTH, + MIN_COMPRESS_LENGTH, + PACKET_HEADER_LENGTH, +) +from .utils import StreamWriter, open_connection + + +def _strioerror(err: IOError) -> str: + """Reformat the IOError error message. + + This function reformats the IOError error message. + """ + return str(err) if not err.errno else f"{err.errno} {err.strerror}" + + +class NetworkBroker(ABC): + """Broker class interface. + + The network object is a broker used as a delegate by a socket object. Whenever the + socket wants to deliver or get packets to or from the MySQL server it needs to rely + on its network broker (netbroker). + + The netbroker sends `payloads` and receives `packets`. + + A packet is a bytes sequence, it has a header and body (referred to as payload). + The first `PACKET_HEADER_LENGTH` or `COMPRESSED_PACKET_HEADER_LENGTH` + (as appropriate) bytes correspond to the `header`, the remaining ones represent the + `payload`. + + The maximum payload length allowed to be sent per packet to the server is + `MAX_PAYLOAD_LENGTH`. When `send` is called with a payload whose length is greater + than `MAX_PAYLOAD_LENGTH` the netbroker breaks it down into packets, so the caller + of `send` can provide payloads of arbitrary length. + + Finally, data received by the netbroker comes directly from the server, expect to + get a packet for each call to `recv`. The received packet contains a header and + payload, the latter respecting `MAX_PAYLOAD_LENGTH`. + """ + + @abstractmethod + async def write( + self, + writer: StreamWriter, + address: str, + payload: bytes, + packet_number: Optional[int] = None, + compressed_packet_number: Optional[int] = None, + ) -> None: + """Send `payload` to the MySQL server. + + If provided a payload whose length is greater than `MAX_PAYLOAD_LENGTH`, it is + broken down into packets. + + Args: + sock: Object holding the socket connection. + address: Socket's location. + payload: Packet's body to send. + packet_number: Sequence id (packet ID) to attach to the header when sending + plain packets. + compressed_packet_number: Same as `packet_number` but used when sending + compressed packets. + + Raises: + :class:`OperationalError`: If something goes wrong while sending packets to + the MySQL server. + """ + + @abstractmethod + async def read(self, reader: asyncio.StreamReader, address: str) -> bytearray: + """Get the next available packet from the MySQL server. + + Args: + sock: Object holding the socket connection. + address: Socket's location. + + Returns: + packet: A packet from the MySQL server. 
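# NOTE (editorial sketch, not part of this patch): the plain-protocol framing the broker
# interface above describes is a 4-byte header (3-byte little-endian payload length plus a
# 1-byte sequence id) followed by the payload; the payload bytes below are made up.
import struct

payload = b"\x03SELECT 1"  # a COM_QUERY payload, used here only as an example
seq_id = 0

header = struct.pack("<I", len(payload))[:3] + struct.pack("<B", seq_id)
packet = header + payload

# Recovering the header fields again, in the spirit of NetworkBrokerPlain.get_header():
payload_len = struct.unpack("<I", packet[:3] + b"\x00")[0]
sequence_id = packet[3]
assert payload_len == len(payload) and sequence_id == seq_id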
+ + Raises: + :class:`OperationalError`: If something goes wrong while receiving packets + from the MySQL server. + :class:`InterfaceError`: If something goes wrong while receiving packets + from the MySQL server. + """ + + +class NetworkBrokerPlain(NetworkBroker): + """Broker class for MySQL socket communication.""" + + def __init__(self) -> None: + self._pktnr: int = -1 # packet number + + @staticmethod + def get_header(pkt: bytes) -> Tuple[int, int]: + """Recover the header information from a packet.""" + if len(pkt) < PACKET_HEADER_LENGTH: + raise ValueError("Can't recover header info from an incomplete packet") + + pll, seqid = ( + struct.unpack(" None: + """Set the given packet id, if any, else increment packet id.""" + if next_id is None: + self._pktnr += 1 + else: + self._pktnr = next_id + self._pktnr %= 256 + + async def _write_pkt(self, writer: StreamWriter, address: str, pkt: bytes) -> None: + """Write packet to the comm channel.""" + try: + writer.write(pkt) + await writer.drain() + except IOError as err: + raise OperationalError( + errno=2055, values=(address, _strioerror(err)) + ) from err + except AttributeError as err: + raise OperationalError(errno=2006) from err + + async def _read_chunk( + self, reader: asyncio.StreamReader, size: int = 0 + ) -> bytearray: + """Read `size` bytes from the comm channel.""" + pkt = bytearray(b"") + while len(pkt) < size: + chunk = await reader.read(size - len(pkt)) + if not chunk: + raise InterfaceError(errno=2013) + pkt += chunk + return pkt + + async def write( + self, + writer: StreamWriter, + address: str, + payload: bytes, + packet_number: Optional[int] = None, + compressed_packet_number: Optional[int] = None, + ) -> None: + """Send payload to the MySQL server. + + If provided a payload whose length is greater than `MAX_PAYLOAD_LENGTH`, it is + broken down into packets. + """ + self._set_next_pktnr(packet_number) + + # If the payload is larger than or equal to MAX_PAYLOAD_LENGTH the length is + # set to 2^24 - 1 (ff ff ff) and additional packets are sent with the rest of + # the payload until the payload of a packet is less than MAX_PAYLOAD_LENGTH. + offset = 0 + for _ in range(len(payload) // MAX_PAYLOAD_LENGTH): + # payload_len, sequence_id, payload + await self._write_pkt( + writer, + address, + b"\xff" * 3 + + struct.pack(" bytearray: + """Receive `one` packet from the MySQL server.""" + try: + # Read the header of the MySQL packet. + header = await self._read_chunk(reader, size=PACKET_HEADER_LENGTH) + + # Pull the payload length and sequence id. + payload_len, self._pktnr = self.get_header(header) + + # Read the payload, and return packet. + return header + await self._read_chunk(reader, size=payload_len) + except IOError as err: + raise OperationalError( + errno=2055, values=(address, _strioerror(err)) + ) from err + + +class NetworkBrokerCompressed(NetworkBrokerPlain): + """Broker class for MySQL socket communication.""" + + def __init__(self) -> None: + super().__init__() + self._compressed_pktnr = -1 + self._queue_read: Deque[bytearray] = deque() + + @staticmethod + def _prepare_packets(payload: bytes, pktnr: int) -> List[bytes]: + """Prepare a payload for sending to the MySQL server.""" + offset = 0 + pkts = [] + + # If the payload is larger than or equal to MAX_PAYLOAD_LENGTH the length is + # set to 2^24 - 1 (ff ff ff) and additional packets are sent with the rest of + # the payload until the payload of a packet is less than MAX_PAYLOAD_LENGTH. 
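# NOTE (editorial sketch, not part of this patch): the slicing arithmetic behind
# NetworkBrokerPlain.write() above. Payloads of MAX_PAYLOAD_LENGTH bytes or more are cut
# into full-size chunks announced with a 0xff 0xff 0xff length field, followed by one
# trailing chunk (possibly empty) that tells the server no more chunks follow.
MAX_PAYLOAD_LENGTH = 2**24 - 1


def split_payload(payload: bytes) -> list[bytes]:
    chunks = []
    offset = 0
    for _ in range(len(payload) // MAX_PAYLOAD_LENGTH):
        chunks.append(payload[offset : offset + MAX_PAYLOAD_LENGTH])
        offset += MAX_PAYLOAD_LENGTH
    chunks.append(payload[offset:])  # remainder; empty if the length was an exact multiple
    return chunks


assert [len(c) for c in split_payload(b"x" * (MAX_PAYLOAD_LENGTH + 10))] == [
    MAX_PAYLOAD_LENGTH,
    10,
]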
+ for _ in range(len(payload) // MAX_PAYLOAD_LENGTH): + # payload length + sequence id + payload + pkts.append( + b"\xff" * 3 + + struct.pack(" Tuple[int, int, int]: # type: ignore[override] + """Recover the header information from a packet.""" + if len(pkt) < COMPRESSED_PACKET_HEADER_LENGTH: + raise ValueError("Can't recover header info from an incomplete packet") + + compressed_pll, seqid, uncompressed_pll = ( + struct.unpack(" None: + """Set the given packet id, if any, else increment packet id.""" + if next_id is None: + self._compressed_pktnr += 1 + else: + self._compressed_pktnr = next_id + self._compressed_pktnr %= 256 + + async def _write_pkt(self, writer: StreamWriter, address: str, pkt: bytes) -> None: + """Compress packet and write it to the comm channel.""" + compressed_pkt = zlib.compress(pkt) + pkt = ( + struct.pack(" None: + """Send `payload` as compressed packets to the MySQL server. + + If provided a payload whose length is greater than `MAX_PAYLOAD_LENGTH`, it is + broken down into packets. + """ + # Get next packet numbers. + self._set_next_pktnr(packet_number) + self._set_next_compressed_pktnr(compressed_packet_number) + + payload_prep = bytearray(b"").join(self._prepare_packets(payload, self._pktnr)) + if len(payload) >= MAX_PAYLOAD_LENGTH - PACKET_HEADER_LENGTH: + # Sending a MySQL payload of the size greater or equal to 2^24 - 5 via + # compression leads to at least one extra compressed packet WHY? let's say + # len(payload) is MAX_PAYLOAD_LENGTH - 3; when preparing the payload, a + # header of size PACKET_HEADER_LENGTH is pre-appended to the payload. + # This means that len(payload_prep) is + # MAX_PAYLOAD_LENGTH - 3 + PACKET_HEADER_LENGTH = MAX_PAYLOAD_LENGTH + 1 + # surpassing the maximum allowed payload size per packet. + offset = 0 + + # Send several MySQL packets. + for _ in range(len(payload_prep) // MAX_PAYLOAD_LENGTH): + await self._write_pkt( + writer, address, payload_prep[offset : offset + MAX_PAYLOAD_LENGTH] + ) + self._set_next_compressed_pktnr() + offset += MAX_PAYLOAD_LENGTH + await self._write_pkt(writer, address, payload_prep[offset:]) + else: + # Send one MySQL packet. + # For small packets it may be too costly to compress the packet. + # Usually payloads less than 50 bytes (MIN_COMPRESS_LENGTH) aren't + # compressed (see MySQL source code Documentation). + if len(payload) > MIN_COMPRESS_LENGTH: + # Perform compression. + await self._write_pkt(writer, address, payload_prep) + else: + # Skip compression. + await super()._write_pkt( + writer, + address, + struct.pack(" None: + """Handle reading of a compressed packet.""" + # compressed_pll stands for compressed payload length. + pkt = bytearray( + zlib.decompress(await super()._read_chunk(reader, size=compressed_pll)) + ) + offset = 0 + while offset < len(pkt): + # pll stands for payload length + pll = struct.unpack( + " len(pkt) - offset: + # More bytes need to be consumed. + # Read the header of the next MySQL packet. + header = await super()._read_chunk( + reader, size=COMPRESSED_PACKET_HEADER_LENGTH + ) + + # compressed payload length, sequence id, uncompressed payload length. + ( + compressed_pll, + self._compressed_pktnr, + uncompressed_pll, + ) = self.get_header(header) + compressed_pkt = await super()._read_chunk(reader, size=compressed_pll) + + # Recalling that if uncompressed payload length == 0, the packet comes + # in uncompressed, so no decompression is needed. 
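# NOTE (editorial sketch, not part of this patch): the 7-byte header used by the compressed
# protocol handled above: 3-byte little-endian compressed-payload length, 1-byte sequence
# id, and 3-byte little-endian uncompressed-payload length, where 0 means the body was sent
# uncompressed (small payloads, roughly below MIN_COMPRESS_LENGTH, are not worth compressing).
import struct
import zlib

MIN_COMPRESS_LENGTH = 50  # threshold taken from the comments in the surrounding code


def make_compressed_packet(plain_packet: bytes, seq_id: int) -> bytes:
    if len(plain_packet) > MIN_COMPRESS_LENGTH:
        body = zlib.compress(plain_packet)
        uncompressed_len = len(plain_packet)
    else:
        body = plain_packet
        uncompressed_len = 0  # signals to the peer that the body is not compressed
    return (
        struct.pack("<I", len(body))[:3]
        + struct.pack("<B", seq_id)
        + struct.pack("<I", uncompressed_len)[:3]
        + body
    )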
+ pkt += ( + compressed_pkt + if uncompressed_pll == 0 + else zlib.decompress(compressed_pkt) + ) + + self._queue_read.append(pkt[offset : offset + PACKET_HEADER_LENGTH + pll]) + offset += PACKET_HEADER_LENGTH + pll + + async def read(self, reader: asyncio.StreamReader, address: str) -> bytearray: + """Receive `one` or `several` packets from the MySQL server, enqueue them, and + return the packet at the head. + """ + if not self._queue_read: + try: + # Read the header of the next MySQL packet. + header = await super()._read_chunk( + reader, size=COMPRESSED_PACKET_HEADER_LENGTH + ) + + # compressed payload length, sequence id, uncompressed payload length + ( + compressed_pll, + self._compressed_pktnr, + uncompressed_pll, + ) = self.get_header(header) + + if uncompressed_pll == 0: + # Packet is not compressed, so just store it. + self._queue_read.append( + await super()._read_chunk(reader, size=compressed_pll) + ) + else: + # Packet comes in compressed, further action is needed. + await self._read_compressed_pkt(reader, compressed_pll) + except IOError as err: + raise OperationalError( + errno=2055, values=(address, _strioerror(err)) + ) from err + + if not self._queue_read: + return None + + pkt = self._queue_read.popleft() + self._pktnr = pkt[3] + + return pkt + + +class MySQLSocket(ABC): + """MySQL socket communication interface. + + Examples: + Subclasses: network.MySQLTCPSocket and network.MySQLUnixSocket. + """ + + def __init__(self) -> None: + """Network layer where transactions are made with plain (uncompressed) packets + is enabled by default. + """ + self._reader: Optional[asyncio.StreamReader] = None + self._writer: Optional[StreamWriter] = None + self._connection_timeout: Optional[int] = None + self._address: Optional[str] = None + self._netbroker: NetworkBroker = NetworkBrokerPlain() + self._is_connected: bool = False + + @property + def address(self) -> str: + """Socket location.""" + return self._address + + @abstractmethod + async def open_connection(self, **kwargs: Any) -> None: + """Open the socket.""" + + async def close_connection(self) -> None: + """Close the connection.""" + if self._writer: + self._writer.close() + # Without transport.abort(), an error is raised when using SSL + if self._writer.transport is not None: + self._writer.transport.abort() + await self._writer.wait_closed() + self._is_connected = False + + def is_connected(self) -> bool: + """Check if the socket is connected. + + Return: + bool: Returns `True` if the socket is connected to MySQL server. + """ + return self._is_connected + + def set_connection_timeout(self, timeout: int) -> None: + """Set the connection timeout.""" + self._connection_timeout = timeout + + def switch_to_compressed_mode(self) -> None: + """Enable network layer where transactions are made with compressed packets.""" + self._netbroker = NetworkBrokerCompressed() + + async def switch_to_ssl(self, ssl_context: ssl.SSLContext) -> None: + """Upgrade an existing stream-based connection to TLS. + + The `start_tls()` method from `asyncio.streams.StreamWriter` is only available + in Python 3.11. This method is used as a workaround. + + The MySQL TLS negotiation happens in the middle of the TCP connection. + Therefore, passing a socket to open connection will cause it to negotiate + TLS on an existing connection. + + Args: + ssl_context: The SSL Context to be used. + + Raises: + RuntimeError: If the transport does not expose the socket instance. 
+ """ + # Ensure that self._writer is already created + assert self._writer is not None + + socket = self._writer.transport.get_extra_info("socket") + if socket.family == 1: # socket.AF_UNIX + raise ProgrammingError("SSL is not supported when using Unix sockets") + + await self._writer.start_tls(ssl_context) + + async def write( + self, + payload: bytes, + packet_number: Optional[int] = None, + compressed_packet_number: Optional[int] = None, + ) -> None: + """Send packets to the MySQL server.""" + await self._netbroker.write( + self._writer, + self.address, + payload, + packet_number=packet_number, + compressed_packet_number=compressed_packet_number, + ) + + async def read(self) -> bytearray: + """Read packets from the MySQL server.""" + return await self._netbroker.read(self._reader, self.address) + + def build_ssl_context( + self, + ssl_ca: Optional[str] = None, + ssl_cert: Optional[str] = None, + ssl_key: Optional[str] = None, + ssl_verify_cert: Optional[bool] = False, + ssl_verify_identity: Optional[bool] = False, + tls_versions: Optional[List[str]] = [], + tls_cipher_suites: Optional[List[str]] = [], + ) -> ssl.SSLContext: + """Build a SSLContext.""" + if not self._reader: + raise InterfaceError(errno=2048) + + if ssl is None: + raise RuntimeError("Python installation has no SSL support") + + try: + if tls_versions: + tls_versions.sort(reverse=True) + tls_version = tls_versions[0] + ssl_protocol = TLS_VERSIONS[tls_version] + context = ssl.SSLContext(ssl_protocol) + + if tls_version == "TLSv1.3": + if "TLSv1.2" not in tls_versions: + context.options |= ssl.OP_NO_TLSv1_2 + if "TLSv1.1" not in tls_versions: + context.options |= ssl.OP_NO_TLSv1_1 + if "TLSv1" not in tls_versions: + context.options |= ssl.OP_NO_TLSv1 + else: + context = ssl.create_default_context() + + context.check_hostname = ssl_verify_identity + + if ssl_verify_cert: + context.verify_mode = ssl.CERT_REQUIRED + elif ssl_verify_identity: + context.verify_mode = ssl.CERT_OPTIONAL + else: + context.verify_mode = ssl.CERT_NONE + + context.load_default_certs() + + if ssl_ca: + try: + context.load_verify_locations(ssl_ca) + except (IOError, ssl.SSLError) as err: + raise InterfaceError(f"Invalid CA Certificate: {err}") from err + if ssl_cert: + try: + context.load_cert_chain(ssl_cert, ssl_key) + except (IOError, ssl.SSLError) as err: + raise InterfaceError(f"Invalid Certificate/Key: {err}") from err + + if tls_cipher_suites: + context.set_ciphers(":".join(tls_cipher_suites)) + + return context + except NameError as err: + raise NotSupportedError("Python installation has no SSL support") from err + except ( + IOError, + NotImplementedError, + ssl.CertificateError, + ssl.SSLError, + ) as err: + raise InterfaceError(str(err)) from err + + +class MySQLTcpSocket(MySQLSocket): + """MySQL socket class using TCP/IP. + + Args: + host: MySQL host name. + port: MySQL port. + force_ipv6: Force IPv6 usage. + """ + + def __init__( + self, host: str = "127.0.0.1", port: int = 3306, force_ipv6: bool = False + ): + super().__init__() + self._host: str = host + self._port: int = port + self._force_ipv6: bool = force_ipv6 + self._address: str = f"{host}:{port}" + + async def open_connection(self, **kwargs: Any) -> None: + """Open TCP/IP connection.""" + self._reader, self._writer = await open_connection( + host=self._host, port=self._port, **kwargs + ) + self._is_connected = True + + +class MySQLUnixSocket(MySQLSocket): + """MySQL socket class using UNIX sockets. + + Args: + unix_socket: UNIX socket file path. 
+ """ + + def __init__(self, unix_socket: str = "/tmp/mysql.sock"): + super().__init__() + self._address: str = unix_socket + + async def open_connection(self, **kwargs: Any) -> None: + """Open UNIX socket connection.""" + ( + self._reader, + self._writer, + ) = await asyncio.open_unix_connection( # type: ignore[assignment] + path=self._address, **kwargs + ) + self._is_connected = True diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__init__.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__init__.py new file mode 100644 index 0000000..0463074 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__init__.py @@ -0,0 +1,162 @@ +# Copyright (c) 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""Base Authentication Plugin class.""" + +__all__ = ["MySQLAuthPlugin", "get_auth_plugin"] + +import importlib + +from abc import ABC, abstractmethod +from functools import lru_cache +from typing import TYPE_CHECKING, Any, Optional, Type + +from mysql.connector.errors import NotSupportedError, ProgrammingError +from mysql.connector.logger import logger + +if TYPE_CHECKING: + from ..network import MySQLSocket + +DEFAULT_PLUGINS_PKG = "mysql.connector.aio.plugins" + + +class MySQLAuthPlugin(ABC): + """Authorization plugin interface.""" + + def __init__( + self, + username: str, + password: str, + ssl_enabled: bool = False, + ) -> None: + """Constructor.""" + self._username: str = "" if username is None else username + self._password: str = "" if password is None else password + self._ssl_enabled: bool = ssl_enabled + + @property + def ssl_enabled(self) -> bool: + """Signals whether or not SSL is enabled.""" + return self._ssl_enabled + + @property + @abstractmethod + def requires_ssl(self) -> bool: + """Signals whether or not SSL is required.""" + + @property + @abstractmethod + def name(self) -> str: + """Plugin official name.""" + + @abstractmethod + def auth_response(self, auth_data: bytes, **kwargs: Any) -> Optional[bytes]: + """Make the client's authorization response. + + Args: + auth_data: Authorization data. 
+ kwargs: Custom configuration to be passed to the auth plugin + when invoked. The parameters defined here will override the ones + defined in the auth plugin itself. + + Returns: + packet: Client's authorization response. + """ + + async def auth_more_response( + self, sock: "MySQLSocket", auth_data: bytes, **kwargs: Any + ) -> bytes: + """Handles server's `auth more data` response. + + Args: + sock: Pointer to the socket connection. + auth_data: Authentication method data (from a packet representing + an `auth more data` response). + kwargs: Custom configuration to be passed to the auth plugin + when invoked. The parameters defined here will override the ones + defined in the auth plugin itself. + + Returns: + packet: Last server's response after back-and-forth communication. + """ + raise NotImplementedError + + @abstractmethod + async def auth_switch_response( + self, sock: "MySQLSocket", auth_data: bytes, **kwargs: Any + ) -> bytes: + """Handles server's `auth switch request` response. + + Args: + sock: Pointer to the socket connection. + auth_data: Plugin provided data (extracted from a packet + representing an `auth switch request` response). + kwargs: Custom configuration to be passed to the auth plugin + when invoked. The parameters defined here will override the ones + defined in the auth plugin itself. + + Returns: + packet: Last server's response after back-and-forth communication. + """ + + +@lru_cache(maxsize=10, typed=False) +def get_auth_plugin( + plugin_name: str, + auth_plugin_class: Optional[str] = None, +) -> Type[MySQLAuthPlugin]: + """Return authentication class based on plugin name + + This function returns the class for the authentication plugin plugin_name. + The returned class is a subclass of BaseAuthPlugin. + + Args: + plugin_name (str): Authentication plugin name. + auth_plugin_class (str): Authentication plugin class name. + + Raises: + NotSupportedError: When plugin_name is not supported. + + Returns: + Subclass of `MySQLAuthPlugin`. 
+ """ + package = DEFAULT_PLUGINS_PKG + if plugin_name: + try: + logger.info("package: %s", package) + logger.info("plugin_name: %s", plugin_name) + plugin_module = importlib.import_module(f".{plugin_name}", package) + if not auth_plugin_class or not hasattr(plugin_module, auth_plugin_class): + auth_plugin_class = plugin_module.AUTHENTICATION_PLUGIN_CLASS + logger.info("AUTHENTICATION_PLUGIN_CLASS: %s", auth_plugin_class) + return getattr(plugin_module, auth_plugin_class) + except ModuleNotFoundError as err: + logger.warning("Requested Module was not found: %s", err) + except ValueError as err: + raise ProgrammingError(f"Invalid module name: {err}") from err + raise NotSupportedError(f"Authentication plugin '{plugin_name}' is not supported") diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/__init__.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..d6abf4d Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/__init__.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/authentication_kerberos_client.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/authentication_kerberos_client.cpython-310.pyc new file mode 100644 index 0000000..efcf7a2 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/authentication_kerberos_client.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/authentication_ldap_sasl_client.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/authentication_ldap_sasl_client.cpython-310.pyc new file mode 100644 index 0000000..4198484 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/authentication_ldap_sasl_client.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/authentication_oci_client.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/authentication_oci_client.cpython-310.pyc new file mode 100644 index 0000000..562970a Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/authentication_oci_client.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/authentication_webauthn_client.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/authentication_webauthn_client.cpython-310.pyc new file mode 100644 index 0000000..7d418a9 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/authentication_webauthn_client.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/caching_sha2_password.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/caching_sha2_password.cpython-310.pyc new file mode 100644 index 0000000..ac0eb66 Binary files /dev/null and 
b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/caching_sha2_password.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/mysql_clear_password.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/mysql_clear_password.cpython-310.pyc new file mode 100644 index 0000000..0d1734c Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/mysql_clear_password.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/mysql_native_password.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/mysql_native_password.cpython-310.pyc new file mode 100644 index 0000000..9b6ac5b Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/mysql_native_password.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/sha256_password.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/sha256_password.cpython-310.pyc new file mode 100644 index 0000000..0f7a69c Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/__pycache__/sha256_password.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/authentication_kerberos_client.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/authentication_kerberos_client.py new file mode 100644 index 0000000..e67c0d6 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/authentication_kerberos_client.py @@ -0,0 +1,575 @@ +# Copyright (c) 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +# mypy: disable-error-code="str-bytes-safe,misc" + +"""Kerberos Authentication Plugin.""" + +import getpass +import os +import struct + +from abc import abstractmethod +from pathlib import Path +from typing import TYPE_CHECKING, Any, Optional, Tuple + +from mysql.connector.errors import InterfaceError, ProgrammingError +from mysql.connector.logger import logger + +from ..authentication import ERR_STATUS + +if TYPE_CHECKING: + from ..network import MySQLSocket + +try: + import gssapi +except ImportError: + gssapi = None + if os.name != "nt": + raise ProgrammingError( + "Module gssapi is required for GSSAPI authentication " + "mechanism but was not found. Unable to authenticate " + "with the server" + ) from None + +try: + import sspi + import sspicon +except ImportError: + sspi = None + sspicon = None + +from . import MySQLAuthPlugin + +AUTHENTICATION_PLUGIN_CLASS = ( + "MySQLSSPIKerberosAuthPlugin" if os.name == "nt" else "MySQLKerberosAuthPlugin" +) + + +class MySQLBaseKerberosAuthPlugin(MySQLAuthPlugin): + """Base class for the MySQL Kerberos authentication plugin.""" + + @property + def name(self) -> str: + """Plugin official name.""" + return "authentication_kerberos_client" + + @property + def requires_ssl(self) -> bool: + """Signals whether or not SSL is required.""" + return False + + @abstractmethod + def auth_continue( + self, tgt_auth_challenge: Optional[bytes] + ) -> Tuple[Optional[bytes], bool]: + """Continue with the Kerberos TGT service request. + + With the TGT authentication service given response generate a TGT + service request. This method must be invoked sequentially (in a loop) + until the security context is completed and an empty response needs to + be send to acknowledge the server. + + Args: + tgt_auth_challenge: the challenge for the negotiation. + + Returns: + tuple (bytearray TGS service request, + bool True if context is completed otherwise False). + """ + + async def auth_switch_response( + self, sock: "MySQLSocket", auth_data: bytes, **kwargs: Any + ) -> bytes: + """Handles server's `auth switch request` response. + + Args: + sock: Pointer to the socket connection. + auth_data: Plugin provided data (extracted from a packet + representing an `auth switch request` response). + kwargs: Custom configuration to be passed to the auth plugin + when invoked. The parameters defined here will override the ones + defined in the auth plugin itself. + + Returns: + packet: Last server's response after back-and-forth + communication. 
+ """ + logger.debug("# auth_data: %s", auth_data) + response = self.auth_response(auth_data, ignore_auth_data=False, **kwargs) + if response is None: + raise InterfaceError("Got a NULL auth response") + + logger.debug("# request: %s size: %s", response, len(response)) + await sock.write(response) + + packet = await sock.read() + logger.debug("# server response packet: %s", packet) + + if packet != ERR_STATUS: + rcode_size = 5 # Reader size for the response status code + logger.debug("# Continue with GSSAPI authentication") + logger.debug("# Response header: %s", packet[: rcode_size + 1]) + logger.debug("# Response size: %s", len(packet)) + logger.debug("# Negotiate a service request") + complete = False + tries = 0 + + while not complete and tries < 5: + logger.debug("%s Attempt %s %s", "-" * 20, tries + 1, "-" * 20) + logger.debug("<< Server response: %s", packet) + logger.debug("# Response code: %s", packet[: rcode_size + 1]) + token, complete = self.auth_continue(packet[rcode_size:]) + if token: + await sock.write(token) + if complete: + break + packet = await sock.read() + + logger.debug(">> Response to server: %s", token) + tries += 1 + + if not complete: + raise InterfaceError( + f"Unable to fulfill server request after {tries} " + f"attempts. Last server response: {packet}" + ) + + logger.debug( + "Last response from server: %s length: %d", + packet, + len(packet), + ) + + # Receive OK packet from server. + packet = await sock.read() + logger.debug("<< Ok packet from server: %s", packet) + + return bytes(packet) + + +# pylint: disable=c-extension-no-member,no-member +class MySQLKerberosAuthPlugin(MySQLBaseKerberosAuthPlugin): + """Implement the MySQL Kerberos authentication plugin.""" + + context: Optional[gssapi.SecurityContext] = None + + @staticmethod + def get_user_from_credentials() -> str: + """Get user from credentials without realm.""" + try: + creds = gssapi.Credentials(usage="initiate") + user = str(creds.name) + if user.find("@") != -1: + user, _ = user.split("@", 1) + return user + except gssapi.raw.misc.GSSError: + return getpass.getuser() + + @staticmethod + def get_store() -> dict: + """Get a credentials store dictionary. + + Returns: + dict: Credentials store dictionary with the krb5 ccache name. + + Raises: + InterfaceError: If 'KRB5CCNAME' environment variable is empty. + """ + krb5ccname = os.environ.get( + "KRB5CCNAME", + f"/tmp/krb5cc_{os.getuid()}" + if os.name == "posix" + else Path("%TEMP%").joinpath("krb5cc"), + ) + if not krb5ccname: + raise InterfaceError( + "The 'KRB5CCNAME' environment variable is set to empty" + ) + logger.debug("Using krb5 ccache name: FILE:%s", krb5ccname) + store = {b"ccache": f"FILE:{krb5ccname}".encode("utf-8")} + return store + + def _acquire_cred_with_password(self, upn: str) -> gssapi.raw.creds.Creds: + """Acquire and store credentials through provided password. + + Args: + upn (str): User Principal Name. + + Returns: + gssapi.raw.creds.Creds: GSSAPI credentials. 
+ """ + logger.debug("Attempt to acquire credentials through provided password") + user = gssapi.Name(upn, gssapi.NameType.user) + password = self._password.encode("utf-8") + + try: + acquire_cred_result = gssapi.raw.acquire_cred_with_password( + user, password, usage="initiate" + ) + creds = acquire_cred_result.creds + gssapi.raw.store_cred_into( + self.get_store(), + creds=creds, + mech=gssapi.MechType.kerberos, + overwrite=True, + set_default=True, + ) + except gssapi.raw.misc.GSSError as err: + raise ProgrammingError( + f"Unable to acquire credentials with the given password: {err}" + ) from err + return creds + + @staticmethod + def _parse_auth_data(packet: bytes) -> Tuple[str, str]: + """Parse authentication data. + + Get the SPN and REALM from the authentication data packet. + + Format: + SPN string length two bytes + + SPN string + + UPN realm string length two bytes + + UPN realm string + + Returns: + tuple: With 'spn' and 'realm'. + """ + spn_len = struct.unpack(" Optional[bytes]: + """Prepare the first message to the server.""" + spn = None + realm = None + + if auth_data and not kwargs.get("ignore_auth_data", True): + try: + spn, realm = self._parse_auth_data(auth_data) + except struct.error as err: + raise InterruptedError(f"Invalid authentication data: {err}") from err + + if spn is None: + return self._password.encode() + b"\x00" + + upn = f"{self._username}@{realm}" if self._username else None + + logger.debug("Service Principal: %s", spn) + logger.debug("Realm: %s", realm) + + try: + # Attempt to retrieve credentials from cache file + creds: Any = gssapi.Credentials(usage="initiate") + creds_upn = str(creds.name) + + logger.debug("Cached credentials found") + logger.debug("Cached credentials UPN: %s", creds_upn) + + # Remove the realm from user + if creds_upn.find("@") != -1: + creds_user, creds_realm = creds_upn.split("@", 1) + else: + creds_user = creds_upn + creds_realm = None + + upn = f"{self._username}@{realm}" if self._username else creds_upn + + # The user from cached credentials matches with the given user? 
+ if self._username and self._username != creds_user: + logger.debug( + "The user from cached credentials doesn't match with the " + "given user" + ) + if self._password is not None: + creds = self._acquire_cred_with_password(upn) + if creds_realm and creds_realm != realm and self._password is not None: + creds = self._acquire_cred_with_password(upn) + except gssapi.raw.exceptions.ExpiredCredentialsError as err: + if upn and self._password is not None: + creds = self._acquire_cred_with_password(upn) + else: + raise InterfaceError(f"Credentials has expired: {err}") from err + except gssapi.raw.misc.GSSError as err: + if upn and self._password is not None: + creds = self._acquire_cred_with_password(upn) + else: + raise InterfaceError( + f"Unable to retrieve cached credentials error: {err}" + ) from err + + flags = ( + gssapi.RequirementFlag.mutual_authentication, + gssapi.RequirementFlag.extended_error, + gssapi.RequirementFlag.delegate_to_peer, + ) + name = gssapi.Name(spn, name_type=gssapi.NameType.kerberos_principal) + cname = name.canonicalize(gssapi.MechType.kerberos) + self.context = gssapi.SecurityContext( + name=cname, creds=creds, flags=sum(flags), usage="initiate" + ) + + try: + initial_client_token: Optional[bytes] = self.context.step() + except gssapi.raw.misc.GSSError as err: + raise InterfaceError(f"Unable to initiate security context: {err}") from err + + logger.debug("Initial client token: %s", initial_client_token) + return initial_client_token + + def auth_continue( + self, tgt_auth_challenge: Optional[bytes] + ) -> Tuple[Optional[bytes], bool]: + """Continue with the Kerberos TGT service request. + + With the TGT authentication service given response generate a TGT + service request. This method must be invoked sequentially (in a loop) + until the security context is completed and an empty response needs to + be send to acknowledge the server. + + Args: + tgt_auth_challenge: the challenge for the negotiation. + + Returns: + tuple (bytearray TGS service request, + bool True if context is completed otherwise False). + """ + logger.debug("tgt_auth challenge: %s", tgt_auth_challenge) + + resp: Optional[bytes] = self.context.step(tgt_auth_challenge) + + logger.debug("Context step response: %s", resp) + logger.debug("Context completed?: %s", self.context.complete) + + return resp, self.context.complete + + def auth_accept_close_handshake(self, message: bytes) -> bytes: + """Accept handshake and generate closing handshake message for server. + + This method verifies the server authenticity from the given message + and included signature and generates the closing handshake for the + server. + + When this method is invoked the security context is already established + and the client and server can send GSSAPI formated secure messages. + + To finish the authentication handshake the server sends a message + with the security layer availability and the maximum buffer size. + + Since the connector only uses the GSSAPI authentication mechanism to + authenticate the user with the server, the server will verify clients + message signature and terminate the GSSAPI authentication and send two + messages; an authentication acceptance b'\x01\x00\x00\x08\x01' and a + OK packet (that must be received after sent the returned message from + this method). + + Args: + message: a wrapped gssapi message from the server. + + Returns: + bytearray (closing handshake message to be send to the server). 
+ """ + if not self.context.complete: + raise ProgrammingError("Security context is not completed") + logger.debug("Server message: %s", message) + logger.debug("GSSAPI flags in use: %s", self.context.actual_flags) + try: + unwraped = self.context.unwrap(message) + logger.debug("Unwraped: %s", unwraped) + except gssapi.raw.exceptions.BadMICError as err: + logger.debug("Unable to unwrap server message: %s", err) + raise InterfaceError(f"Unable to unwrap server message: {err}") from err + + logger.debug("Unwrapped server message: %s", unwraped) + # The message contents for the clients closing message: + # - security level 1 byte, must be always 1. + # - conciliated buffer size 3 bytes, without importance as no + # further GSSAPI messages will be sends. + response = bytearray(b"\x01\x00\x00\00") + # Closing handshake must not be encrypted. + logger.debug("Message response: %s", response) + wraped = self.context.wrap(response, encrypt=False) + logger.debug( + "Wrapped message response: %s, length: %d", + wraped[0], + len(wraped[0]), + ) + + return wraped.message + + +class MySQLSSPIKerberosAuthPlugin(MySQLBaseKerberosAuthPlugin): + """Implement the MySQL Kerberos authentication plugin with Windows SSPI""" + + context: Any = None + clientauth: Any = None + + @staticmethod + def _parse_auth_data(packet: bytes) -> Tuple[str, str]: + """Parse authentication data. + + Get the SPN and REALM from the authentication data packet. + + Format: + SPN string length two bytes + + SPN string + + UPN realm string length two bytes + + UPN realm string + + Returns: + tuple: With 'spn' and 'realm'. + """ + spn_len = struct.unpack(" Optional[bytes]: + """Prepare the first message to the server. + + Args: + kwargs: + ignore_auth_data (bool): if True, the provided auth data is ignored. + """ + logger.debug("auth_response for sspi") + spn = None + realm = None + + if auth_data and not kwargs.get("ignore_auth_data", True): + try: + spn, realm = self._parse_auth_data(auth_data) + except struct.error as err: + raise InterruptedError(f"Invalid authentication data: {err}") from err + + logger.debug("Service Principal: %s", spn) + logger.debug("Realm: %s", realm) + + if sspicon is None or sspi is None: + raise ProgrammingError( + 'Package "pywin32" (Python for Win32 (pywin32) extensions)' + " is not installed." + ) + + flags = (sspicon.ISC_REQ_MUTUAL_AUTH, sspicon.ISC_REQ_DELEGATE) + + if self._username and self._password: + _auth_info = (self._username, realm, self._password) + else: + _auth_info = None + + targetspn = spn + logger.debug("targetspn: %s", targetspn) + logger.debug("_auth_info is None: %s", _auth_info is None) + + # The Security Support Provider Interface (SSPI) is an interface + # that allows us to choose from a set of SSPs available in the + # system; the idea of SSPI is to keep interface consistent no + # matter what back end (a.k.a., SSP) we choose. + + # When using SSPI we should not use Kerberos directly as SSP, + # as remarked in [2], but we can use it indirectly via another + # SSP named Negotiate that acts as an application layer between + # SSPI and the other SSPs [1]. + + # Negotiate can select between Kerberos and NTLM on the fly; + # it chooses Kerberos unless it cannot be used by one of the + # systems involved in the authentication or the calling + # application did not provide sufficient information to use + # Kerberos. 
+ + # prefix: https://docs.microsoft.com/en-us/windows/win32/secauthn + # [1] prefix/microsoft-negotiate?source=recommendations + # [2] prefix/microsoft-kerberos?source=recommendations + self.clientauth = sspi.ClientAuth( + "Negotiate", + targetspn=targetspn, + auth_info=_auth_info, + scflags=sum(flags), + datarep=sspicon.SECURITY_NETWORK_DREP, + ) + + try: + data = None + err, out_buf = self.clientauth.authorize(data) + logger.debug("Context step err: %s", err) + logger.debug("Context step out_buf: %s", out_buf) + logger.debug("Context completed?: %s", self.clientauth.authenticated) + initial_client_token = out_buf[0].Buffer + logger.debug("pkg_info: %s", self.clientauth.pkg_info) + except Exception as err: + raise InterfaceError(f"Unable to initiate security context: {err}") from err + + logger.debug("Initial client token: %s", initial_client_token) + return initial_client_token + + def auth_continue( + self, tgt_auth_challenge: Optional[bytes] + ) -> Tuple[Optional[bytes], bool]: + """Continue with the Kerberos TGT service request. + + With the TGT authentication service given response generate a TGT + service request. This method must be invoked sequentially (in a loop) + until the security context is completed and an empty response needs to + be send to acknowledge the server. + + Args: + tgt_auth_challenge: the challenge for the negotiation. + + Returns: + tuple (bytearray TGS service request, + bool True if context is completed otherwise False). + """ + logger.debug("tgt_auth challenge: %s", tgt_auth_challenge) + + err, out_buf = self.clientauth.authorize(tgt_auth_challenge) + + logger.debug("Context step err: %s", err) + logger.debug("Context step out_buf: %s", out_buf) + resp = out_buf[0].Buffer + logger.debug("Context step resp: %s", resp) + logger.debug("Context completed?: %s", self.clientauth.authenticated) + + return resp, self.clientauth.authenticated diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/authentication_ldap_sasl_client.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/authentication_ldap_sasl_client.py new file mode 100644 index 0000000..fb4b44f --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/authentication_ldap_sasl_client.py @@ -0,0 +1,595 @@ +# Copyright (c) 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""LDAP SASL Authentication Plugin.""" + +import hmac + +from base64 import b64decode, b64encode +from hashlib import sha1, sha256 +from typing import TYPE_CHECKING, Any, Callable, List, Optional, Tuple +from uuid import uuid4 + +from mysql.connector.authentication import ERR_STATUS +from mysql.connector.errors import InterfaceError, ProgrammingError +from mysql.connector.logger import logger +from mysql.connector.types import StrOrBytes +from mysql.connector.utils import ( + normalize_unicode_string as norm_ustr, + validate_normalized_unicode_string as valid_norm, +) + +if TYPE_CHECKING: + from ..network import MySQLSocket + +try: + import gssapi +except ImportError: + raise ProgrammingError( + "Module gssapi is required for GSSAPI authentication " + "mechanism but was not found. Unable to authenticate " + "with the server" + ) from None + +from . import MySQLAuthPlugin + +AUTHENTICATION_PLUGIN_CLASS = "MySQLLdapSaslPasswordAuthPlugin" + + +# pylint: disable=c-extension-no-member,no-member +class MySQLLdapSaslPasswordAuthPlugin(MySQLAuthPlugin): + """Class implementing the MySQL ldap sasl authentication plugin. + + The MySQL's ldap sasl authentication plugin support two authentication + methods SCRAM-SHA-1 and GSSAPI (using Kerberos). This implementation only + support SCRAM-SHA-1 and SCRAM-SHA-256. + + SCRAM-SHA-1 amd SCRAM-SHA-256 + This method requires 2 messages from client and 2 responses from + server. + + The first message from client will be generated by prepare_password(), + after receive the response from the server, it is required that this + response is passed back to auth_continue() which will return the + second message from the client. After send this second message to the + server, the second server respond needs to be passed to auth_finalize() + to finish the authentication process. + """ + + sasl_mechanisms: List[str] = ["SCRAM-SHA-1", "SCRAM-SHA-256", "GSSAPI"] + def_digest_mode: Callable = sha1 + client_nonce: Optional[str] = None + client_salt: Any = None + server_salt: Optional[str] = None + krb_service_principal: Optional[str] = None + iterations: int = 0 + server_auth_var: Optional[str] = None + target_name: Optional[gssapi.Name] = None + ctx: gssapi.SecurityContext = None + servers_first: Optional[str] = None + server_nonce: Optional[str] = None + + @staticmethod + def _xor(bytes1: bytes, bytes2: bytes) -> bytes: + return bytes([b1 ^ b2 for b1, b2 in zip(bytes1, bytes2)]) + + def _hmac(self, password: bytes, salt: bytes) -> bytes: + digest_maker = hmac.new(password, salt, self.def_digest_mode) + return digest_maker.digest() + + def _hi(self, password: str, salt: bytes, count: int) -> bytes: + """Prepares Hi + Hi(password, salt, iterations) where Hi(p,s,i) is defined as + PBKDF2 (HMAC, p, s, i, output length of H). 
+ """ + pw = password.encode() + hi = self._hmac(pw, salt + b"\x00\x00\x00\x01") + aux = hi + for _ in range(count - 1): + aux = self._hmac(pw, aux) + hi = self._xor(hi, aux) + return hi + + @staticmethod + def _normalize(string: str) -> str: + norm_str = norm_ustr(string) + broken_rule = valid_norm(norm_str) + if broken_rule is not None: + raise InterfaceError(f"broken_rule: {broken_rule}") + return norm_str + + def _first_message(self) -> bytes: + """This method generates the first message to the server to start the + + The client-first message consists of a gs2-header, + the desired username, and a randomly generated client nonce cnonce. + + The first message from the server has the form: + b'n,a=,n=,r= + + Returns client's first message + """ + cfm_fprnat = "n,a={user_name},n={user_name},r={client_nonce}" + self.client_nonce = str(uuid4()).replace("-", "") + cfm: StrOrBytes = cfm_fprnat.format( + user_name=self._normalize(self._username), + client_nonce=self.client_nonce, + ) + + if isinstance(cfm, str): + cfm = cfm.encode("utf8") + return cfm + + def _first_message_krb(self) -> Optional[bytes]: + """Get a TGT Authentication request and initiates security context. + + This method will contact the Kerberos KDC in order of obtain a TGT. + """ + user_name = gssapi.raw.names.import_name( + self._username.encode("utf8"), name_type=gssapi.NameType.user + ) + + # Use defaults store = {'ccache': 'FILE:/tmp/krb5cc_1000'}#, + # 'keytab':'/etc/some.keytab' } + # Attempt to retrieve credential from default cache file. + try: + cred: Any = gssapi.Credentials() + logger.debug( + "# Stored credentials found, if password was given it will be ignored." + ) + try: + # validate credentials has not expired. + cred.lifetime + except gssapi.raw.exceptions.ExpiredCredentialsError as err: + logger.warning(" Credentials has expired: %s", err) + cred.acquire(user_name) + raise InterfaceError(f"Credentials has expired: {err}") from err + except gssapi.raw.misc.GSSError as err: + if not self._password: + raise InterfaceError( + f"Unable to retrieve stored credentials error: {err}" + ) from err + try: + logger.debug("# Attempt to retrieve credentials with given password") + acquire_cred_result = gssapi.raw.acquire_cred_with_password( + user_name, + self._password.encode("utf8"), + usage="initiate", + ) + cred = acquire_cred_result[0] + except gssapi.raw.misc.GSSError as err2: + raise ProgrammingError( + f"Unable to retrieve credentials with the given password: {err2}" + ) from err + + flags_l = ( + gssapi.RequirementFlag.mutual_authentication, + gssapi.RequirementFlag.extended_error, + gssapi.RequirementFlag.delegate_to_peer, + ) + + if self.krb_service_principal: + service_principal = self.krb_service_principal + else: + service_principal = "ldap/ldapauth" + logger.debug("# service principal: %s", service_principal) + servk = gssapi.Name( + service_principal, name_type=gssapi.NameType.kerberos_principal + ) + self.target_name = servk + self.ctx = gssapi.SecurityContext( + name=servk, creds=cred, flags=sum(flags_l), usage="initiate" + ) + + try: + # step() returns bytes | None, see documentation, + # so this method could return a NULL payload. 
+ # ref: https://pythongssapi.github.io/ + # suffix: python-gssapi/latest/gssapi.html#gssapi.sec_contexts.SecurityContext + initial_client_token = self.ctx.step() + except gssapi.raw.misc.GSSError as err: + raise InterfaceError(f"Unable to initiate security context: {err}") from err + + logger.debug("# initial client token: %s", initial_client_token) + return initial_client_token + + def auth_continue_krb( + self, tgt_auth_challenge: Optional[bytes] + ) -> Tuple[Optional[bytes], bool]: + """Continue with the Kerberos TGT service request. + + With the TGT authentication service given response generate a TGT + service request. This method must be invoked sequentially (in a loop) + until the security context is completed and an empty response needs to + be send to acknowledge the server. + + Args: + tgt_auth_challenge the challenge for the negotiation. + + Returns: tuple (bytearray TGS service request, + bool True if context is completed otherwise False). + """ + logger.debug("tgt_auth challenge: %s", tgt_auth_challenge) + + resp = self.ctx.step(tgt_auth_challenge) + logger.debug("# context step response: %s", resp) + logger.debug("# context completed?: %s", self.ctx.complete) + + return resp, self.ctx.complete + + def auth_accept_close_handshake(self, message: bytes) -> bytes: + """Accept handshake and generate closing handshake message for server. + + This method verifies the server authenticity from the given message + and included signature and generates the closing handshake for the + server. + + When this method is invoked the security context is already established + and the client and server can send GSSAPI formated secure messages. + + To finish the authentication handshake the server sends a message + with the security layer availability and the maximum buffer size. + + Since the connector only uses the GSSAPI authentication mechanism to + authenticate the user with the server, the server will verify clients + message signature and terminate the GSSAPI authentication and send two + messages; an authentication acceptance b'\x01\x00\x00\x08\x01' and a + OK packet (that must be received after sent the returned message from + this method). + + Args: + message a wrapped hssapi message from the server. + + Returns: bytearray closing handshake message to be send to the server. + """ + if not self.ctx.complete: + raise ProgrammingError("Security context is not completed.") + logger.debug("# servers message: %s", message) + logger.debug("# GSSAPI flags in use: %s", self.ctx.actual_flags) + try: + unwraped = self.ctx.unwrap(message) + logger.debug("# unwraped: %s", unwraped) + except gssapi.raw.exceptions.BadMICError as err: + raise InterfaceError(f"Unable to unwrap server message: {err}") from err + + logger.debug("# unwrapped server message: %s", unwraped) + # The message contents for the clients closing message: + # - security level 1 byte, must be always 1. + # - conciliated buffer size 3 bytes, without importance as no + # further GSSAPI messages will be sends. + response = bytearray(b"\x01\x00\x00\00") + # Closing handshake must not be encrypted. + logger.debug("# message response: %s", response) + wraped = self.ctx.wrap(response, encrypt=False) + logger.debug( + "# wrapped message response: %s, length: %d", + wraped[0], + len(wraped[0]), + ) + + return wraped.message + + def auth_response( + self, + auth_data: bytes, + **kwargs: Any, + ) -> Optional[bytes]: + """This method will prepare the fist message to the server. + + Returns bytes to send to the server as the first message. 
+ """ + # pylint: disable=attribute-defined-outside-init + self._auth_data = auth_data + + auth_mechanism = self._auth_data.decode() + logger.debug("read_method_name_from_server: %s", auth_mechanism) + if auth_mechanism not in self.sasl_mechanisms: + auth_mechanisms = '", "'.join(self.sasl_mechanisms[:-1]) + raise InterfaceError( + f'The sasl authentication method "{auth_mechanism}" requested ' + f'from the server is not supported. Only "{auth_mechanisms}" ' + f'and "{self.sasl_mechanisms[-1]}" are supported' + ) + + if b"GSSAPI" in self._auth_data: + return self._first_message_krb() + + if self._auth_data == b"SCRAM-SHA-256": + self.def_digest_mode = sha256 + + return self._first_message() + + def _second_message(self) -> bytes: + """This method generates the second message to the server + + Second message consist on the concatenation of the client and the + server nonce, and cproof. + + c=>,r=,p= + where: + : xor(, ) + + : hmac(salted_password, b"Client Key") + : hmac(, ) + : h() + : ,, + c=,r= + : n=r= + """ + if not self._auth_data: + raise InterfaceError("Missing authentication data (seed)") + + passw = self._normalize(self._password) + salted_password = self._hi(passw, b64decode(self.server_salt), self.iterations) + logger.debug("salted_password: %s", b64encode(salted_password).decode()) + + client_key = self._hmac(salted_password, b"Client Key") + logger.debug("client_key: %s", b64encode(client_key).decode()) + + stored_key = self.def_digest_mode(client_key).digest() + logger.debug("stored_key: %s", b64encode(stored_key).decode()) + + server_key = self._hmac(salted_password, b"Server Key") + logger.debug("server_key: %s", b64encode(server_key).decode()) + + client_first_no_header = ",".join( + [ + f"n={self._normalize(self._username)}", + f"r={self.client_nonce}", + ] + ) + logger.debug("client_first_no_header: %s", client_first_no_header) + + client_header = b64encode( + f"n,a={self._normalize(self._username)},".encode() + ).decode() + + auth_msg = ",".join( + [ + client_first_no_header, + self.servers_first, + f"c={client_header}", + f"r={self.server_nonce}", + ] + ) + logger.debug("auth_msg: %s", auth_msg) + + client_signature = self._hmac(stored_key, auth_msg.encode()) + logger.debug("client_signature: %s", b64encode(client_signature).decode()) + + client_proof = self._xor(client_key, client_signature) + logger.debug("client_proof: %s", b64encode(client_proof).decode()) + + self.server_auth_var = b64encode( + self._hmac(server_key, auth_msg.encode()) + ).decode() + logger.debug("server_auth_var: %s", self.server_auth_var) + + msg = ",".join( + [ + f"c={client_header}", + f"r={self.server_nonce}", + f"p={b64encode(client_proof).decode()}", + ] + ) + logger.debug("second_message: %s", msg) + return msg.encode() + + def _validate_first_reponse(self, servers_first: bytes) -> None: + """Validates first message from the server. + + Extracts the server's salt and iterations from the servers 1st response. 
+ First message from the server is in the form: + ,i= + """ + if not servers_first or not isinstance(servers_first, (bytearray, bytes)): + raise InterfaceError(f"Unexpected server message: {repr(servers_first)}") + try: + servers_first_str = servers_first.decode() + self.servers_first = servers_first_str + r_server_nonce, s_salt, i_counter = servers_first_str.split(",") + except ValueError: + raise InterfaceError( + f"Unexpected server message: {servers_first_str}" + ) from None + if ( + not r_server_nonce.startswith("r=") + or not s_salt.startswith("s=") + or not i_counter.startswith("i=") + ): + raise InterfaceError( + f"Incomplete reponse from the server: {servers_first_str}" + ) + if self.client_nonce in r_server_nonce: + self.server_nonce = r_server_nonce[2:] + logger.debug("server_nonce: %s", self.server_nonce) + else: + raise InterfaceError( + "Unable to authenticate response: response not well formed " + f"{servers_first_str}" + ) + self.server_salt = s_salt[2:] + logger.debug( + "server_salt: %s length: %s", + self.server_salt, + len(self.server_salt), + ) + try: + i_counter = i_counter[2:] + logger.debug("iterations: %s", i_counter) + self.iterations = int(i_counter) + except Exception as err: + raise InterfaceError( + f"Unable to authenticate: iterations not found {servers_first_str}" + ) from err + + def auth_continue(self, servers_first_response: bytes) -> bytes: + """return the second message from the client. + + Returns bytes to send to the server as the second message. + """ + self._validate_first_reponse(servers_first_response) + return self._second_message() + + def _validate_second_reponse(self, servers_second: bytearray) -> bool: + """Validates second message from the server. + + The client and the server prove to each other they have the same Auth + variable. + + The second message from the server consist of the server's proof: + server_proof = HMAC(, ) + where: + : hmac(, b"Server Key") + : ,, + c=,r= + + Our server_proof must be equal to the Auth variable send on this second + response. + """ + if ( + not servers_second + or not isinstance(servers_second, bytearray) + or len(servers_second) <= 2 + or not servers_second.startswith(b"v=") + ): + raise InterfaceError("The server's proof is not well formated") + server_var = servers_second[2:].decode() + logger.debug("server auth variable: %s", server_var) + return self.server_auth_var == server_var + + def auth_finalize(self, servers_second_response: bytearray) -> bool: + """finalize the authentication process. + + Raises InterfaceError if the ervers_second_response is invalid. + + Returns True in successful authentication False otherwise. + """ + if not self._validate_second_reponse(servers_second_response): + raise InterfaceError( + "Authentication failed: Unable to proof server identity" + ) + return True + + @property + def name(self) -> str: + """Plugin official name.""" + return "authentication_ldap_sasl_client" + + @property + def requires_ssl(self) -> bool: + """Signals whether or not SSL is required.""" + return False + + async def auth_switch_response( + self, sock: "MySQLSocket", auth_data: bytes, **kwargs: Any + ) -> bytes: + """Handles server's `auth switch request` response. + + Args: + sock: Pointer to the socket connection. + auth_data: Plugin provided data (extracted from a packet + representing an `auth switch request` response). + kwargs: Custom configuration to be passed to the auth plugin + when invoked. The parameters defined here will override the ones + defined in the auth plugin itself. 
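[Editor's aside] The proof exchange implemented above follows RFC 5802: the client sends ClientProof = ClientKey XOR HMAC(StoredKey, AuthMessage), and it only trusts the server if the v=... value equals HMAC(ServerKey, AuthMessage). A minimal sketch of both quantities, assuming SHA-256 and a salted password produced by the Hi()/PBKDF2 step sketched earlier:

import hashlib
import hmac

def scram_proofs(salted_password: bytes, auth_message: bytes) -> tuple[bytes, bytes]:
    # ClientKey/ServerKey are HMACs of the salted password with fixed labels.
    client_key = hmac.new(salted_password, b"Client Key", hashlib.sha256).digest()
    server_key = hmac.new(salted_password, b"Server Key", hashlib.sha256).digest()
    stored_key = hashlib.sha256(client_key).digest()
    # ClientProof proves knowledge of the password; ServerSignature is what the
    # server must send back (the v=... value) for the client to accept it.
    client_signature = hmac.new(stored_key, auth_message, hashlib.sha256).digest()
    client_proof = bytes(k ^ s for k, s in zip(client_key, client_signature))
    server_signature = hmac.new(server_key, auth_message, hashlib.sha256).digest()
    return client_proof, server_signature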
+ + Returns: + packet: Last server's response after back-and-forth + communication. + """ + logger.debug("# auth_data: %s", auth_data) + self.krb_service_principal = kwargs.get("krb_service_principal") + + response = self.auth_response(auth_data, **kwargs) + if response is None: + raise InterfaceError("Got a NULL auth response") + + logger.debug("# request: %s size: %s", response, len(response)) + await sock.write(response) + + packet = await sock.read() + logger.debug("# server response packet: %s", packet) + + if len(packet) >= 6 and packet[5] == 114 and packet[6] == 61: # 'r' and '=' + # Continue with sasl authentication + dec_response = packet[5:] + cresponse = self.auth_continue(dec_response) + await sock.write(cresponse) + packet = await sock.read() + if packet[5] == 118 and packet[6] == 61: # 'v' and '=' + if self.auth_finalize(packet[5:]): + # receive packed OK + packet = await sock.read() + elif auth_data == b"GSSAPI" and packet[4] != ERR_STATUS: + rcode_size = 5 # header size for the response status code. + logger.debug("# Continue with sasl GSSAPI authentication") + logger.debug("# response header: %s", packet[: rcode_size + 1]) + logger.debug("# response size: %s", len(packet)) + + logger.debug("# Negotiate a service request") + complete = False + tries = 0 # To avoid a infinite loop attempt no more than feedback messages + while not complete and tries < 5: + logger.debug("%s Attempt %s %s", "-" * 20, tries + 1, "-" * 20) + logger.debug("<< server response: %s", packet) + logger.debug("# response code: %s", packet[: rcode_size + 1]) + step, complete = self.auth_continue_krb(packet[rcode_size:]) + logger.debug(" >> response to server: %s", step) + await sock.write(step or b"") + packet = await sock.read() + tries += 1 + if not complete: + raise InterfaceError( + f"Unable to fulfill server request after {tries} " + f"attempts. Last server response: {packet}" + ) + logger.debug( + " last GSSAPI response from server: %s length: %d", + packet, + len(packet), + ) + last_step = self.auth_accept_close_handshake(packet[rcode_size:]) + logger.debug( + " >> last response to server: %s length: %d", + last_step, + len(last_step), + ) + await sock.write(last_step) + # Receive final handshake from server + packet = await sock.read() + logger.debug("<< final handshake from server: %s", packet) + + # receive OK packet from server. + packet = await sock.read() + logger.debug("<< ok packet from server: %s", packet) + + return bytes(packet) + + +# pylint: enable=c-extension-no-member,no-member diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/authentication_oci_client.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/authentication_oci_client.py new file mode 100644 index 0000000..0ef58f2 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/authentication_oci_client.py @@ -0,0 +1,234 @@ +# Copyright (c) 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. 
The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +# mypy: disable-error-code="arg-type,union-attr,call-arg" + +"""OCI Authentication Plugin.""" + +import json +import os + +from base64 import b64encode +from pathlib import Path +from typing import TYPE_CHECKING, Any, Dict, Optional + +from mysql.connector import errors +from mysql.connector.logger import logger + +if TYPE_CHECKING: + from ..network import MySQLSocket + +try: + from cryptography.exceptions import UnsupportedAlgorithm + from cryptography.hazmat.primitives import hashes, serialization + from cryptography.hazmat.primitives.asymmetric import padding + from cryptography.hazmat.primitives.asymmetric.types import PRIVATE_KEY_TYPES +except ImportError: + raise errors.ProgrammingError("Package 'cryptography' is not installed") from None + +try: + from oci import config, exceptions +except ImportError: + raise errors.ProgrammingError( + "Package 'oci' (Oracle Cloud Infrastructure Python SDK) is not installed" + ) from None + +from . import MySQLAuthPlugin + +AUTHENTICATION_PLUGIN_CLASS = "MySQLOCIAuthPlugin" +OCI_SECURITY_TOKEN_MAX_SIZE = 10 * 1024 # In bytes +OCI_SECURITY_TOKEN_TOO_LARGE = "Ephemeral security token is too large (10KB max)" +OCI_SECURITY_TOKEN_FILE_NOT_AVAILABLE = ( + "Ephemeral security token file ('security_token_file') could not be read" +) +OCI_PROFILE_MISSING_PROPERTIES = ( + "OCI configuration file does not contain a 'fingerprint' or 'key_file' entry" +) + + +class MySQLOCIAuthPlugin(MySQLAuthPlugin): + """Implement the MySQL OCI IAM authentication plugin.""" + + context: Any = None + oci_config_profile: str = "DEFAULT" + oci_config_file: str = config.DEFAULT_LOCATION + + @staticmethod + def _prepare_auth_response(signature: bytes, oci_config: Dict[str, Any]) -> str: + """Prepare client's authentication response + + Prepares client's authentication response in JSON format + Args: + signature (bytes): server's nonce to be signed by client. + oci_config (dict): OCI configuration object. + + Returns: + str: JSON string with the following format: + {"fingerprint": str, "signature": str, "token": base64.base64.base64} + + Raises: + ProgrammingError: If the ephemeral security token file can't be open or the + token is too large. + """ + signature_64 = b64encode(signature) + auth_response = { + "fingerprint": oci_config["fingerprint"], + "signature": signature_64.decode(), + } + + # The security token, if it exists, should be a JWT (JSON Web Token), consisted + # of a base64-encoded header, body, and signature, separated by '.', + # e.g. 
"Base64.Base64.Base64", stored in a file at the path specified by the + # security_token_file configuration property + if oci_config.get("security_token_file"): + try: + security_token_file = Path(oci_config["security_token_file"]) + # Check if token exceeds the maximum size + if security_token_file.stat().st_size > OCI_SECURITY_TOKEN_MAX_SIZE: + raise errors.ProgrammingError(OCI_SECURITY_TOKEN_TOO_LARGE) + auth_response["token"] = security_token_file.read_text(encoding="utf-8") + except (OSError, UnicodeError) as err: + raise errors.ProgrammingError( + OCI_SECURITY_TOKEN_FILE_NOT_AVAILABLE + ) from err + return json.dumps(auth_response, separators=(",", ":")) + + @staticmethod + def _get_private_key(key_path: str) -> PRIVATE_KEY_TYPES: + """Get the private_key form the given location""" + try: + with open(os.path.expanduser(key_path), "rb") as key_file: + private_key = serialization.load_pem_private_key( + key_file.read(), + password=None, + ) + except (TypeError, OSError, ValueError, UnsupportedAlgorithm) as err: + raise errors.ProgrammingError( + "An error occurred while reading the API_KEY from " + f'"{key_path}": {err}' + ) + + return private_key + + def _get_valid_oci_config(self) -> Dict[str, Any]: + """Get a valid OCI config from the given configuration file path""" + error_list = [] + req_keys = { + "fingerprint": (lambda x: len(x) > 32), + "key_file": (lambda x: os.path.exists(os.path.expanduser(x))), + } + + oci_config: Dict[str, Any] = {} + try: + # key_file is validated by oci.config if present + oci_config = config.from_file( + self.oci_config_file or config.DEFAULT_LOCATION, + self.oci_config_profile or "DEFAULT", + ) + for req_key, req_value in req_keys.items(): + try: + # Verify parameter in req_key is present and valid + if oci_config[req_key] and not req_value(oci_config[req_key]): + error_list.append(f'Parameter "{req_key}" is invalid') + except KeyError: + error_list.append(f"Does not contain parameter {req_key}") + except ( + exceptions.ConfigFileNotFound, + exceptions.InvalidConfig, + exceptions.InvalidKeyFilePath, + exceptions.InvalidPrivateKey, + exceptions.ProfileNotFound, + ) as err: + error_list.append(str(err)) + + # Raise errors if any + if error_list: + raise errors.ProgrammingError( + f"Invalid oci-config-file: {self.oci_config_file}. " + f"Errors found: {error_list}" + ) + + return oci_config + + @property + def name(self) -> str: + """Plugin official name.""" + return "authentication_oci_client" + + @property + def requires_ssl(self) -> bool: + """Signals whether or not SSL is required.""" + return False + + def auth_response(self, auth_data: bytes, **kwargs: Any) -> Optional[bytes]: + """Prepare authentication string for the server.""" + logger.debug("server nonce: %s, len %d", auth_data, len(auth_data)) + + oci_config = self._get_valid_oci_config() + + private_key = self._get_private_key(oci_config["key_file"]) + signature = private_key.sign(auth_data, padding.PKCS1v15(), hashes.SHA256()) + + auth_response = self._prepare_auth_response(signature, oci_config) + logger.debug("authentication response: %s", auth_response) + return auth_response.encode() + + async def auth_switch_response( + self, sock: "MySQLSocket", auth_data: bytes, **kwargs: Any + ) -> bytes: + """Handles server's `auth switch request` response. + + Args: + sock: Pointer to the socket connection. + auth_data: Plugin provided data (extracted from a packet + representing an `auth switch request` response). + kwargs: Custom configuration to be passed to the auth plugin + when invoked. 
The parameters defined here will override the ones + defined in the auth plugin itself. + + Returns: + packet: Last server's response after back-and-forth + communication. + """ + self.oci_config_file = kwargs.get("oci_config_file", "DEFAULT") + self.oci_config_profile = kwargs.get( + "oci_config_profile", config.DEFAULT_LOCATION + ) + logger.debug("# oci configuration file path: %s", self.oci_config_file) + + response = self.auth_response(auth_data, **kwargs) + if response is None: + raise errors.InterfaceError("Got a NULL auth response") + + logger.debug("# request: %s size: %s", response, len(response)) + await sock.write(response) + + packet = await sock.read() + logger.debug("# server response packet: %s", packet) + + return bytes(packet) diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/authentication_webauthn_client.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/authentication_webauthn_client.py new file mode 100644 index 0000000..a4dfdbe --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/authentication_webauthn_client.py @@ -0,0 +1,291 @@ +# Copyright (c) 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""WebAuthn Authentication Plugin.""" + + +from typing import TYPE_CHECKING, Any, Callable, Optional + +from mysql.connector import errors, utils + +from ..logger import logger +from . import MySQLAuthPlugin + +if TYPE_CHECKING: + from ..network import MySQLSocket + +try: + from fido2.cbor import dump_bytes as cbor_dump_bytes + from fido2.client import Fido2Client, UserInteraction + from fido2.hid import CtapHidDevice + from fido2.webauthn import PublicKeyCredentialRequestOptions +except ImportError as import_err: + raise errors.ProgrammingError( + "Module fido2 is required for WebAuthn authentication mechanism but was " + "not found. 
Unable to authenticate with the server" + ) from import_err + +try: + from fido2.pcsc import CtapPcscDevice + + CTAP_PCSC_DEVICE_AVAILABLE = True +except ModuleNotFoundError: + CTAP_PCSC_DEVICE_AVAILABLE = False + + +AUTHENTICATION_PLUGIN_CLASS = "MySQLWebAuthnAuthPlugin" + + +class ClientInteraction(UserInteraction): + """Provides user interaction to the Client.""" + + def __init__(self, callback: Optional[Callable] = None): + self.callback = callback + self.msg = ( + "Please insert FIDO device and perform gesture action for authentication " + "to complete." + ) + + def prompt_up(self) -> None: + """Prompt message for the user interaction with the FIDO device.""" + if self.callback is None: + print(self.msg) + else: + self.callback(self.msg) + + +class MySQLWebAuthnAuthPlugin(MySQLAuthPlugin): + """Class implementing the MySQL WebAuthn authentication plugin.""" + + client: Optional[Fido2Client] = None + callback: Optional[Callable] = None + options: dict = {"rpId": None, "challenge": None, "allowCredentials": []} + + @property + def name(self) -> str: + """Plugin official name.""" + return "authentication_webauthn_client" + + @property + def requires_ssl(self) -> bool: + """Signals whether or not SSL is required.""" + return False + + def get_assertion_response( + self, credential_id: Optional[bytearray] = None + ) -> bytes: + """Get assertion from authenticator and return the response. + + Args: + credential_id (Optional[bytearray]): The credential ID. + + Returns: + bytearray: The response packet with the data from the assertion. + """ + if self.client is None: + raise errors.InterfaceError("No WebAuthn client found") + + if credential_id is not None: + # If credential_id is not None, it's because the FIDO device does not + # support resident keys and the credential_id was requested from the server + self.options["allowCredentials"] = [ + { + "id": credential_id, + "type": "public-key", + } + ] + + # Get assertion from authenticator + assertion = self.client.get_assertion( + PublicKeyCredentialRequestOptions.from_dict(self.options) + ) + number_of_assertions = len(assertion.get_assertions()) + client_data_json = b"" + + # Build response packet + # + # Format: + # int<1> 0x02 (2) status tag + # int number of assertions length encoded number of assertions + # string authenticator data variable length raw binary string + # string signed challenge variable length raw binary string + # ... + # ... 
+ # string authenticator data variable length raw binary string + # string signed challenge variable length raw binary string + # string ClientDataJSON variable length raw binary string + packet = utils.lc_int(2) + packet += utils.lc_int(number_of_assertions) + + # Add authenticator data and signed challenge for each assertion + for i in range(number_of_assertions): + assertion_response = assertion.get_response(i) + + # string authenticator_data + authenticator_data = cbor_dump_bytes(assertion_response.authenticator_data) + + # string signed_challenge + signature = assertion_response.signature + + packet += utils.lc_int(len(authenticator_data)) + packet += authenticator_data + packet += utils.lc_int(len(signature)) + packet += signature + + # string client_data_json + client_data_json = assertion_response.client_data + + packet += utils.lc_int(len(client_data_json)) + packet += client_data_json + + logger.debug("WebAuthn - payload response packet: %s", packet) + return packet + + def auth_response(self, auth_data: bytes, **kwargs: Any) -> Optional[bytes]: + """Find authenticator device and check if supports resident keys. + + It also creates a Fido2Client using the relying party ID from the server. + + Raises: + InterfaceError: When the FIDO device is not found. + + Returns: + bytes: 2 if the authenticator supports resident keys else 1. + """ + try: + packets, capability = utils.read_int(auth_data, 1) + challenge, rp_id = utils.read_lc_string_list(packets) + self.options["challenge"] = challenge + self.options["rpId"] = rp_id.decode() + logger.debug("WebAuthn - capability: %d", capability) + logger.debug("WebAuthn - challenge: %s", self.options["challenge"]) + logger.debug("WebAuthn - relying party id: %s", self.options["rpId"]) + except ValueError as err: + raise errors.InterfaceError( + "Unable to parse MySQL WebAuthn authentication data" + ) from err + + # Locate a device + device = next(CtapHidDevice.list_devices(), None) + if device is not None: + logger.debug("WebAuthn - Use USB HID channel") + elif CTAP_PCSC_DEVICE_AVAILABLE: + device = next(CtapPcscDevice.list_devices(), None) + + if device is None: + raise errors.InterfaceError("No FIDO device found") + + # Set up a FIDO 2 client using the origin relying party id + self.client = Fido2Client( + device, + f"https://{self.options['rpId']}", + user_interaction=ClientInteraction(self.callback), + ) + + if not self.client.info.options.get("rk"): + logger.debug("WebAuthn - Authenticator doesn't support resident keys") + return b"1" + + logger.debug("WebAuthn - Authenticator with support for resident key found") + return b"2" + + async def auth_more_response( + self, sock: "MySQLSocket", auth_data: bytes, **kwargs: Any + ) -> bytes: + """Handles server's `auth more data` response. + + Args: + sock: Pointer to the socket connection. + auth_data: Authentication method data (from a packet representing + an `auth more data` response). + kwargs: Custom configuration to be passed to the auth plugin + when invoked. The parameters defined here will override the ones + defined in the auth plugin itself. + + Returns: + packet: Last server's response after back-and-forth + communication. 
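[Editor's aside] The response packet above is assembled with utils.lc_int(), i.e. MySQL length-encoded integers. As a rough sketch (not necessarily the connector's exact helper), the wire format encodes small values in one byte and larger ones behind a 0xFC/0xFD/0xFE marker:

import struct

def lc_int(value: int) -> bytes:
    # MySQL length-encoded integer: <251 fits in one byte, larger values get a
    # marker byte followed by 2, 3, or 8 little-endian bytes.
    if value < 251:
        return struct.pack("<B", value)
    if value < 2**16:
        return b"\xfc" + struct.pack("<H", value)
    if value < 2**24:
        return b"\xfd" + struct.pack("<I", value)[:3]
    return b"\xfe" + struct.pack("<Q", value)

assert lc_int(2) == b"\x02"
assert lc_int(300) == b"\xfc\x2c\x01"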
+ """ + _, credential_id = utils.read_lc_string(auth_data) + + response = self.get_assertion_response(credential_id) + + logger.debug("WebAuthn - request: %s size: %s", response, len(response)) + await sock.write(response) + + pkt = bytes(await sock.read()) + logger.debug("WebAuthn - server response packet: %s", pkt) + + return pkt + + async def auth_switch_response( + self, sock: "MySQLSocket", auth_data: bytes, **kwargs: Any + ) -> bytes: + """Handles server's `auth switch request` response. + + Args: + sock: Pointer to the socket connection. + auth_data: Plugin provided data (extracted from a packet + representing an `auth switch request` response). + kwargs: Custom configuration to be passed to the auth plugin + when invoked. The parameters defined here will override the ones + defined in the auth plugin itself. + + Returns: + packet: Last server's response after back-and-forth + communication. + """ + webauth_callback = kwargs.get("webauthn_callback") or kwargs.get( + "fido_callback" + ) + self.callback = ( + utils.import_object(webauth_callback) + if isinstance(webauth_callback, str) + else webauth_callback + ) + + response = self.auth_response(auth_data) + credential_id = None + + if response == b"1": + # Authenticator doesn't support resident keys, request credential_id + logger.debug("WebAuthn - request credential_id") + await sock.write(utils.lc_int(int(response))) + + # return a packet representing an `auth more data` response + return bytes(await sock.read()) + + response = self.get_assertion_response(credential_id) + + logger.debug("WebAuthn - request: %s size: %s", response, len(response)) + await sock.write(response) + + pkt = bytes(await sock.read()) + logger.debug("WebAuthn - server response packet: %s", pkt) + + return pkt diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/caching_sha2_password.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/caching_sha2_password.py new file mode 100644 index 0000000..a0bbf5c --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/caching_sha2_password.py @@ -0,0 +1,160 @@ +# Copyright (c) 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""Caching SHA2 Password Authentication Plugin.""" + +import struct + +from hashlib import sha256 +from typing import TYPE_CHECKING, Any, Optional + +from mysql.connector.errors import InterfaceError +from mysql.connector.logger import logger + +from . import MySQLAuthPlugin + +if TYPE_CHECKING: + from ..network import MySQLSocket + +AUTHENTICATION_PLUGIN_CLASS = "MySQLCachingSHA2PasswordAuthPlugin" + + +class MySQLCachingSHA2PasswordAuthPlugin(MySQLAuthPlugin): + """Class implementing the MySQL caching_sha2_password authentication plugin + + Note that encrypting using RSA is not supported since the Python + Standard Library does not provide this OpenSSL functionality. + """ + + perform_full_authentication: int = 4 + + def _scramble(self, auth_data: bytes) -> bytes: + """Return a scramble of the password using a Nonce sent by the + server. + + The scramble is of the form: + XOR(SHA2(password), SHA2(SHA2(SHA2(password)), Nonce)) + """ + if not auth_data: + raise InterfaceError("Missing authentication data (seed)") + + if not self._password: + return b"" + + hash1 = sha256(self._password.encode()).digest() + hash2 = sha256() + hash2.update(sha256(hash1).digest()) + hash2.update(auth_data) + hash2_digest = hash2.digest() + xored = [h1 ^ h2 for (h1, h2) in zip(hash1, hash2_digest)] + hash3 = struct.pack("32B", *xored) + return hash3 + + @property + def name(self) -> str: + """Plugin official name.""" + return "caching_sha2_password" + + @property + def requires_ssl(self) -> bool: + """Signals whether or not SSL is required.""" + return False + + def auth_response(self, auth_data: bytes, **kwargs: Any) -> Optional[bytes]: + """Make the client's authorization response. + + Args: + auth_data: Authorization data. + kwargs: Custom configuration to be passed to the auth plugin + when invoked. The parameters defined here will override the ones + defined in the auth plugin itself. + + Returns: + packet: Client's authorization response. + """ + if not auth_data: + return None + if len(auth_data) > 1: + return self._scramble(auth_data) + if auth_data[0] == self.perform_full_authentication: + # return password as clear text. + return self._password.encode() + b"\x00" + + return None + + async def auth_more_response( + self, sock: "MySQLSocket", auth_data: bytes, **kwargs: Any + ) -> bytes: + """Handles server's `auth more data` response. + + Args: + sock: Pointer to the socket connection. + auth_data: Authentication method data (from a packet representing + an `auth more data` response). + kwargs: Custom configuration to be passed to the auth plugin + when invoked. The parameters defined here will override the ones + defined in the auth plugin itself. + + Returns: + packet: Last server's response after back-and-forth + communication. + """ + response = self.auth_response(auth_data, **kwargs) + if response: + await sock.write(response) + + return bytes(await sock.read()) + + async def auth_switch_response( + self, sock: "MySQLSocket", auth_data: bytes, **kwargs: Any + ) -> bytes: + """Handles server's `auth switch request` response. + + Args: + sock: Pointer to the socket connection. + auth_data: Plugin provided data (extracted from a packet + representing an `auth switch request` response). + kwargs: Custom configuration to be passed to the auth plugin + when invoked. 
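[Editor's aside] The _scramble() defined below this plugin's class docstring can be exercised on its own; a small sketch with a made-up 20-byte nonce shows the XOR(SHA256(password), SHA256(SHA256(SHA256(password)) + nonce)) construction end to end:

from hashlib import sha256

def caching_sha2_scramble(password: bytes, nonce: bytes) -> bytes:
    # XOR(SHA256(password), SHA256(SHA256(SHA256(password)) + nonce))
    hash1 = sha256(password).digest()
    hash2 = sha256(sha256(hash1).digest() + nonce).digest()
    return bytes(h1 ^ h2 for h1, h2 in zip(hash1, hash2))

# Illustrative values only; a real nonce comes from the server's handshake packet.
assert len(caching_sha2_scramble(b"s3cret", bytes(range(20)))) == 32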
The parameters defined here will override the ones + defined in the auth plugin itself. + + Returns: + packet: Last server's response after back-and-forth + communication. + """ + response = self.auth_response(auth_data, **kwargs) + if response is None: + raise InterfaceError("Got a NULL auth response") + + logger.debug("# request: %s size: %s", response, len(response)) + await sock.write(response) + + pkt = bytes(await sock.read()) + logger.debug("# server response packet: %s", pkt) + + return pkt diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/mysql_clear_password.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/mysql_clear_password.py new file mode 100644 index 0000000..f27bf9a --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/mysql_clear_password.py @@ -0,0 +1,105 @@ +# Copyright (c) 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""Clear Password Authentication Plugin.""" + +from typing import TYPE_CHECKING, Any, Optional + +from mysql.connector import errors +from mysql.connector.logger import logger + +from . import MySQLAuthPlugin + +if TYPE_CHECKING: + from ..network import MySQLSocket + +AUTHENTICATION_PLUGIN_CLASS = "MySQLClearPasswordAuthPlugin" + + +class MySQLClearPasswordAuthPlugin(MySQLAuthPlugin): + """Class implementing the MySQL Clear Password authentication plugin""" + + def _prepare_password(self) -> bytes: + """Prepare and return password as as clear text. + + Returns: + bytes: Prepared password. + """ + return self._password.encode() + b"\x00" + + @property + def name(self) -> str: + """Plugin official name.""" + return "mysql_clear_password" + + @property + def requires_ssl(self) -> bool: + """Signals whether or not SSL is required.""" + return False + + def auth_response(self, auth_data: bytes, **kwargs: Any) -> Optional[bytes]: + """Return the prepared password to send to MySQL. + + Raises: + InterfaceError: When SSL is required by not enabled. + + Returns: + str: The prepared password. 
+ """ + if self.requires_ssl and not self._ssl_enabled: + raise errors.InterfaceError(f"{self.name} requires SSL") + return self._prepare_password() + + async def auth_switch_response( + self, sock: "MySQLSocket", auth_data: bytes, **kwargs: Any + ) -> bytes: + """Handles server's `auth switch request` response. + + Args: + sock: Pointer to the socket connection. + auth_data: Plugin provided data (extracted from a packet + representing an `auth switch request` response). + kwargs: Custom configuration to be passed to the auth plugin + when invoked. The parameters defined here will override the ones + defined in the auth plugin itself. + + Returns: + packet: Last server's response after back-and-forth + communication. + """ + response = self.auth_response(auth_data, **kwargs) + if response is None: + raise errors.InterfaceError("Got a NULL auth response") + + logger.debug("# request: %s size: %s", response, len(response)) + await sock.write(response) + + pkt = bytes(await sock.read()) + logger.debug("# server response packet: %s", pkt) + + return pkt diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/mysql_native_password.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/mysql_native_password.py new file mode 100644 index 0000000..77ee254 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/mysql_native_password.py @@ -0,0 +1,121 @@ +# Copyright (c) 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""Native Password Authentication Plugin.""" + +import struct + +from hashlib import sha1 +from typing import TYPE_CHECKING, Any, Optional + +from mysql.connector.errors import InterfaceError +from mysql.connector.logger import logger + +from . 
import MySQLAuthPlugin + +if TYPE_CHECKING: + from ..network import MySQLSocket + +AUTHENTICATION_PLUGIN_CLASS = "MySQLNativePasswordAuthPlugin" + + +class MySQLNativePasswordAuthPlugin(MySQLAuthPlugin): + """Class implementing the MySQL Native Password authentication plugin""" + + def _prepare_password(self, auth_data: bytes) -> bytes: + """Prepares and returns password as native MySQL 4.1+ password""" + if not auth_data: + raise InterfaceError("Missing authentication data (seed)") + + if not self._password: + return b"" + + hash4 = None + try: + hash1 = sha1(self._password.encode()).digest() + hash2 = sha1(hash1).digest() + hash3 = sha1(auth_data + hash2).digest() + xored = [h1 ^ h3 for (h1, h3) in zip(hash1, hash3)] + hash4 = struct.pack("20B", *xored) + except (struct.error, TypeError) as err: + raise InterfaceError(f"Failed scrambling password; {err}") from err + + return hash4 + + @property + def name(self) -> str: + """Plugin official name.""" + return "mysql_native_password" + + @property + def requires_ssl(self) -> bool: + """Signals whether or not SSL is required.""" + return False + + def auth_response(self, auth_data: bytes, **kwargs: Any) -> Optional[bytes]: + """Make the client's authorization response. + + Args: + auth_data: Authorization data. + kwargs: Custom configuration to be passed to the auth plugin + when invoked. The parameters defined here will override the ones + defined in the auth plugin itself. + + Returns: + packet: Client's authorization response. + """ + return self._prepare_password(auth_data) + + async def auth_switch_response( + self, sock: "MySQLSocket", auth_data: bytes, **kwargs: Any + ) -> bytes: + """Handles server's `auth switch request` response. + + Args: + sock: Pointer to the socket connection. + auth_data: Plugin provided data (extracted from a packet + representing an `auth switch request` response). + kwargs: Custom configuration to be passed to the auth plugin + when invoked. The parameters defined here will override the ones + defined in the auth plugin itself. + + Returns: + packet: Last server's response after back-and-forth + communication. + """ + response = self.auth_response(auth_data, **kwargs) + if response is None: + raise InterfaceError("Got a NULL auth response") + + logger.debug("# request: %s size: %s", response, len(response)) + await sock.write(response) + + pkt = bytes(await sock.read()) + logger.debug("# server response packet: %s", pkt) + + return pkt diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/sha256_password.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/sha256_password.py new file mode 100644 index 0000000..0779a7f --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/plugins/sha256_password.py @@ -0,0 +1,109 @@ +# Copyright (c) 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. 
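[Editor's aside] For comparison, the mysql_native_password scramble above is the older SHA-1 based 4.1+ construction; a standalone sketch with illustrative inputs:

from hashlib import sha1

def native_password_scramble(password: bytes, auth_data: bytes) -> bytes:
    # SHA1(password) XOR SHA1(auth_data + SHA1(SHA1(password)))
    hash1 = sha1(password).digest()
    hash2 = sha1(hash1).digest()
    hash3 = sha1(auth_data + hash2).digest()
    return bytes(h1 ^ h3 for h1, h3 in zip(hash1, hash3))

# Illustrative 20-byte seed; the real one arrives in the server's handshake.
assert len(native_password_scramble(b"s3cret", bytes(range(20)))) == 20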
+# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""SHA256 Password Authentication Plugin.""" + +from typing import TYPE_CHECKING, Any, Optional + +from mysql.connector import errors +from mysql.connector.logger import logger + +from . import MySQLAuthPlugin + +if TYPE_CHECKING: + from ..network import MySQLSocket + +AUTHENTICATION_PLUGIN_CLASS = "MySQLSHA256PasswordAuthPlugin" + + +class MySQLSHA256PasswordAuthPlugin(MySQLAuthPlugin): + """Class implementing the MySQL SHA256 authentication plugin + + Note that encrypting using RSA is not supported since the Python + Standard Library does not provide this OpenSSL functionality. + """ + + def _prepare_password(self) -> bytes: + """Prepare and return password as as clear text. + + Returns: + password (bytes): Prepared password. + """ + return self._password.encode() + b"\x00" + + @property + def name(self) -> str: + """Plugin official name.""" + return "sha256_password" + + @property + def requires_ssl(self) -> bool: + """Signals whether or not SSL is required.""" + return True + + def auth_response(self, auth_data: bytes, **kwargs: Any) -> Optional[bytes]: + """Return the prepared password to send to MySQL. + + Raises: + InterfaceError: When SSL is required by not enabled. + + Returns: + str: The prepared password. + """ + if self.requires_ssl and not self.ssl_enabled: + raise errors.InterfaceError(f"{self.name} requires SSL") + return self._prepare_password() + + async def auth_switch_response( + self, sock: "MySQLSocket", auth_data: bytes, **kwargs: Any + ) -> bytes: + """Handles server's `auth switch request` response. + + Args: + sock: Pointer to the socket connection. + auth_data: Plugin provided data (extracted from a packet + representing an `auth switch request` response). + kwargs: Custom configuration to be passed to the auth plugin + when invoked. The parameters defined here will override the ones + defined in the auth plugin itself. + + Returns: + packet: Last server's response after back-and-forth + communication. + """ + response = self.auth_response(auth_data, **kwargs) + if response is None: + raise errors.InterfaceError("Got a NULL auth response") + + logger.debug("# request: %s size: %s", response, len(response)) + await sock.write(response) + + pkt = bytes(await sock.read()) + logger.debug("# server response packet: %s", pkt) + + return pkt diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/protocol.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/protocol.py new file mode 100644 index 0000000..26d1c35 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/protocol.py @@ -0,0 +1,320 @@ +# Copyright (c) 2023, Oracle and/or its affiliates. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""Implements the MySQL Client/Server protocol.""" + +__all__ = ["MySQLProtocol"] + +import struct + +from typing import Any, Dict, List, Optional, Tuple + +from ..constants import ClientFlag, ServerCmd +from ..errors import InterfaceError, ProgrammingError, get_exception +from ..logger import logger +from ..protocol import ( + DEFAULT_CHARSET_ID, + DEFAULT_MAX_ALLOWED_PACKET, + MySQLProtocol as _MySQLProtocol, +) +from ..types import BinaryProtocolType, DescriptionType, EofPacketType, HandShakeType +from ..utils import int1store, read_lc_string_list +from .network import MySQLSocket +from .plugins import MySQLAuthPlugin, get_auth_plugin +from .plugins.caching_sha2_password import MySQLCachingSHA2PasswordAuthPlugin + + +class MySQLProtocol(_MySQLProtocol): + """Implements MySQL client/server protocol. + + Create and parses MySQL packets. + """ + + @staticmethod + def auth_plugin_first_response( # type: ignore[override] + auth_data: bytes, + username: str, + password: str, + client_flags: int, + auth_plugin: str, + auth_plugin_class: Optional[str] = None, + ssl_enabled: bool = False, + plugin_config: Optional[Dict[str, Any]] = None, + ) -> Tuple[bytes, MySQLAuthPlugin]: + """Prepare the first authentication response. + + Args: + auth_data: Authorization data from initial handshake. + username: Account's username. + password: Account's password. + client_flags: Integer representing client capabilities flags. + auth_plugin: Authorization plugin name. + auth_plugin_class: Authorization plugin class (has higher precedence + than the authorization plugin name). + ssl_enabled: Whether SSL is enabled or not. + plugin_config: Custom configuration to be passed to the auth plugin + when invoked. The parameters defined here will override + the ones defined in the auth plugin itself. + + Returns: + auth_response: Authorization plugin response. + auth_strategy: Authorization plugin instance created based + on the provided `auth_plugin` and `auth_plugin_class` + parameters. + + Raises: + InterfaceError: If authentication fails or when got a NULL auth response. 
+ """ + if not password: + # return auth response and an arbitrary auth strategy + return b"\x00", MySQLCachingSHA2PasswordAuthPlugin( + username, password, ssl_enabled=ssl_enabled + ) + + if plugin_config is None: + plugin_config = {} + + try: + auth_strategy = get_auth_plugin(auth_plugin, auth_plugin_class)( + username, password, ssl_enabled=ssl_enabled + ) + auth_response = auth_strategy.auth_response(auth_data, **plugin_config) + except (TypeError, InterfaceError) as err: + raise InterfaceError(f"Failed authentication: {err}") from err + + if auth_response is None: + raise InterfaceError( + "Got NULL auth response while authenticating with " + f"plugin {auth_strategy.name}" + ) + + auth_response = ( + int1store(len(auth_response)) + auth_response + if client_flags & ClientFlag.SECURE_CONNECTION + else auth_response + b"\x00" + ) + + return auth_response, auth_strategy + + @staticmethod + def make_auth( # type: ignore[override] + handshake: HandShakeType, + username: str, + password: str, + database: Optional[str] = None, + charset: int = DEFAULT_CHARSET_ID, + client_flags: int = 0, + max_allowed_packet: int = DEFAULT_MAX_ALLOWED_PACKET, + auth_plugin: Optional[str] = None, + auth_plugin_class: Optional[str] = None, + conn_attrs: Optional[Dict[str, str]] = None, + is_change_user_request: bool = False, + ssl_enabled: bool = False, + plugin_config: Optional[Dict[str, Any]] = None, + ) -> Tuple[bytes, MySQLAuthPlugin]: + """Make a MySQL Authentication packet. + + Args: + handshake: Initial handshake. + username: Account's username. + password: Account's password. + database: Initial database name for the connection + charset: Client charset (see [2]), only the lower 8-bits. + client_flags: Integer representing client capabilities flags. + max_allowed_packet: Maximum packet size. + auth_plugin: Authorization plugin name. + auth_plugin_class: Authorization plugin class (has higher precedence + than the authorization plugin name). + conn_attrs: Connection attributes. + is_change_user_request: Whether is a `change user request` operation or not. + ssl_enabled: Whether SSL is enabled or not. + plugin_config: Custom configuration to be passed to the auth plugin + when invoked. The parameters defined here will override + the one defined in the auth plugin itself. + + Returns: + handshake_response: Handshake response as per [1]. + auth_strategy: Authorization plugin instance created based + on the provided `auth_plugin` and `auth_plugin_class`. + + Raises: + ProgrammingError: Handshake misses authentication info. 
+ + References: + [1]: https://dev.mysql.com/doc/dev/mysql-server/latest/\ + page_protocol_connection_phase_packets_protocol_handshake_response.html + + [2]: https://dev.mysql.com/doc/dev/mysql-server/latest/\ + page_protocol_basic_character_set.html#a_protocol_character_set + """ + b_username = username.encode() + response_payload = [] + + if is_change_user_request: + logger.debug("Got a `change user` request") + + logger.debug("Starting authorization phase") + if handshake is None: + raise ProgrammingError("Got a NULL handshake") from None + + if handshake.get("auth_data") is None: + raise ProgrammingError("Handshake misses authentication info") from None + + try: + auth_plugin = auth_plugin or handshake["auth_plugin"] # type: ignore[assignment] + except (TypeError, KeyError) as err: + raise ProgrammingError( + f"Handshake misses authentication plugin info ({err})" + ) from None + + logger.debug("The provided initial strategy is %s", auth_plugin) + + if is_change_user_request: + response_payload.append( + struct.pack( + f" Tuple[List[Tuple[BinaryProtocolType, ...]], Optional[EofPacketType],]: + """Read MySQL binary protocol result. + + Reads all or given number of binary resultset rows from the socket. + """ + rows = [] + eof = None + values = None + i = 0 + while True: + if eof or i == count: + break + packet = await sock.read() + if packet[4] == 254: + eof = self.parse_eof(packet) + values = None + elif packet[4] == 0: + eof = None + values = self._parse_binary_values(columns, packet[5:], charset) + if eof is None and values is not None: + rows.append(values) + elif eof is None and values is None: + raise get_exception(packet) + i += 1 + return (rows, eof) + + # pylint: disable=invalid-overridden-method + async def read_text_result( # type: ignore[override] + self, sock: MySQLSocket, version: Tuple[int, ...], count: int = 1 + ) -> Tuple[List[Tuple[Optional[bytes], ...]], Optional[EofPacketType],]: + """Read MySQL text result. + + Reads all or given number of rows from the socket. + + Returns a tuple with 2 elements: a list with all rows and + the EOF packet. + """ + # Keep unused 'version' for API backward compatibility + _ = version + rows = [] + eof = None + rowdata = None + i = 0 + while True: + if eof or i == count: + break + packet = await sock.read() + if packet.startswith(b"\xff\xff\xff"): + datas = [packet[4:]] + packet = await sock.read() + while packet.startswith(b"\xff\xff\xff"): + datas.append(packet[4:]) + packet = await sock.read() + datas.append(packet[4:]) + rowdata = read_lc_string_list(bytearray(b"").join(datas)) + elif packet[4] == 254 and packet[0] < 7: + eof = self.parse_eof(packet) + rowdata = None + else: + eof = None + rowdata = read_lc_string_list(packet[4:]) + if eof is None and rowdata is not None: + rows.append(rowdata) + elif eof is None and rowdata is None: + raise get_exception(packet) + i += 1 + return rows, eof diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/utils.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/utils.py new file mode 100644 index 0000000..f79eaa4 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/aio/utils.py @@ -0,0 +1,153 @@ +# Copyright (c) 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. 
+# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +# mypy: disable-error-code="attr-defined" +# pylint: disable=protected-access + +"""Utilities.""" + +__all__ = ["to_thread", "open_connection"] + +import asyncio +import contextvars +import functools + +try: + import ssl +except ImportError: + ssl = None + +from typing import TYPE_CHECKING, Any, Callable, Tuple + +if TYPE_CHECKING: + __all__.append("StreamWriter") + + +class StreamReaderProtocol(asyncio.StreamReaderProtocol): + """Extends asyncio.streams.StreamReaderProtocol for adding start_tls(). + + The ``start_tls()`` is based on ``asyncio.streams.StreamWriter`` introduced + in Python 3.11. It provides the same functionality for older Python versions. + """ + + def _replace_writer(self, writer: asyncio.StreamWriter) -> None: + """Replace stream writer. + + Args: + writer: Stream Writer. + """ + transport = writer.transport + self._stream_writer = writer + self._transport = transport + self._over_ssl = transport.get_extra_info("sslcontext") is not None + + +class StreamWriter(asyncio.streams.StreamWriter): + """Extends asyncio.streams.StreamWriter for adding start_tls(). + + The ``start_tls()`` is based on ``asyncio.streams.StreamWriter`` introduced + in Python 3.11. It provides the same functionality for older Python versions. + """ + + async def start_tls( + self, + ssl_context: ssl.SSLContext, + *, + server_hostname: str = None, + ssl_handshake_timeout: int = None, + ) -> None: + """Upgrade an existing stream-based connection to TLS. + + Args: + ssl_context: Configured SSL context. + server_hostname: Server host name. + ssl_handshake_timeout: SSL handshake timeout. + """ + server_side = self._protocol._client_connected_cb is not None + protocol = self._protocol + await self.drain() + new_transport = await self._loop.start_tls( + # pylint: disable=access-member-before-definition + self._transport, # type: ignore[has-type] + protocol, + ssl_context, + server_side=server_side, + server_hostname=server_hostname, + ssl_handshake_timeout=ssl_handshake_timeout, + ) + self._transport = ( # pylint: disable=attribute-defined-outside-init + new_transport + ) + protocol._replace_writer(self) + + +async def open_connection( + host: str = None, port: int = None, *, limit: int = 2**16, **kwds: Any +) -> Tuple[asyncio.StreamReader, StreamWriter]: + """A wrapper for create_connection() returning a (reader, writer) pair. 
+ + This function is based on ``asyncio.streams.open_connection`` and adds a custom + stream reader. + + MySQL expects TLS negotiation to happen in the middle of a TCP connection, not at + the start. + This function in conjunction with ``_StreamReaderProtocol`` and ``_StreamWriter`` + allows the TLS negotiation on an existing connection. + + Args: + host: Server host name. + port: Server port. + limit: The buffer size limit used by the returned ``StreamReader`` instance. + By default the limit is set to 64 KiB. + + Returns: + tuple: Returns a pair of reader and writer objects that are instances of + ``StreamReader`` and ``StreamWriter`` classes. + """ + loop = asyncio.get_running_loop() + reader = asyncio.streams.StreamReader(limit=limit, loop=loop) + protocol = StreamReaderProtocol(reader, loop=loop) + transport, _ = await loop.create_connection(lambda: protocol, host, port, **kwds) + writer = StreamWriter(transport, protocol, reader, loop) + return reader, writer + + +async def to_thread(func: Callable, *args: Any, **kwargs: Any) -> asyncio.Future: + """Asynchronously run function ``func`` in a separate thread. + + This function is based on ``asyncio.to_thread()`` introduced in Python 3.9, which + provides the same functionality for older Python versions. + + Returns: + coroutine: A coroutine that can be awaited to get the eventual result of + ``func``. + """ + loop = asyncio.get_running_loop() + ctx = contextvars.copy_context() + func_call = functools.partial(ctx.run, func, *args, **kwargs) + return await loop.run_in_executor(None, func_call) diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/authentication.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/authentication.py new file mode 100644 index 0000000..a3c4cc4 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/authentication.py @@ -0,0 +1,375 @@ +# Copyright (c) 2014, 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. 
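# Illustrative sketch (annotation, not part of the vendored module): the pair returned by
# `open_connection()` above uses the custom `StreamWriter`, so an already-established
# plain TCP stream can be upgraded to TLS mid-connection with `start_tls()`, which is
# what the MySQL protocol requires. The host name below is a placeholder.
import asyncio
import ssl

async def demo() -> None:
    reader, writer = await open_connection("db.example.com", 3306)  # helper defined above
    # the plain-text part of the MySQL handshake would be exchanged here
    await writer.start_tls(ssl.create_default_context(), server_hostname="db.example.com")
    writer.close()

asyncio.run(demo())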
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""Implementing support for MySQL Authentication Plugins""" +from __future__ import annotations + +import copy + +from typing import TYPE_CHECKING, Any, Dict, Optional + +from .errors import InterfaceError, NotSupportedError, get_exception +from .logger import logger +from .plugins import MySQLAuthPlugin, get_auth_plugin +from .protocol import ( + AUTH_SWITCH_STATUS, + DEFAULT_CHARSET_ID, + DEFAULT_MAX_ALLOWED_PACKET, + ERR_STATUS, + EXCHANGE_FURTHER_STATUS, + MFA_STATUS, + OK_STATUS, + MySQLProtocol, +) +from .types import HandShakeType + +if TYPE_CHECKING: + from .network import MySQLSocket + + +class MySQLAuthenticator: + """Implements the authentication phase.""" + + def __init__(self) -> None: + """Constructor.""" + self._username: str = "" + self._passwords: Dict[int, str] = {} + self._plugin_config: Dict[str, Any] = {} + self._ssl_enabled: bool = False + self._auth_strategy: Optional[MySQLAuthPlugin] = None + self._auth_plugin_class: Optional[str] = None + + @property + def ssl_enabled(self) -> bool: + """Signals whether or not SSL is enabled.""" + return self._ssl_enabled + + @property + def plugin_config(self) -> Dict[str, Any]: + """Custom arguments that are being provided to the authentication plugin when called. + + The parameters defined here will override the ones defined in the + auth plugin itself. + + The plugin config is a read-only property - the plugin configuration + provided when invoking `authenticate()` is recorded and can be queried + by accessing this property. + + Returns: + dict: The latest plugin configuration provided when invoking + `authenticate()`. + """ + return self._plugin_config + + def setup_ssl( + self, + sock: MySQLSocket, + host: str, + ssl_options: Optional[Dict[str, Any]], + charset: int = DEFAULT_CHARSET_ID, + client_flags: int = 0, + max_allowed_packet: int = DEFAULT_MAX_ALLOWED_PACKET, + ) -> bytes: + """Sets up an SSL communication channel. + + Args: + sock: Pointer to the socket connection. + host: Server host name. + ssl_options: SSL and TLS connection options (see + `network.MySQLSocket.build_ssl_context`). + charset: Client charset (see [1]), only the lower 8-bits. + client_flags: Integer representing client capabilities flags. + max_allowed_packet: Maximum packet size. + + Returns: + ssl_request_payload: Payload used to carry out SSL authentication. 
+ + References: + [1]: https://dev.mysql.com/doc/dev/mysql-server/latest/\ + page_protocol_basic_character_set.html#a_protocol_character_set + """ + if ssl_options is None: + ssl_options = {} + + # SSL connection request packet + ssl_request_payload = MySQLProtocol.make_auth_ssl( + charset=charset, + client_flags=client_flags, + max_allowed_packet=max_allowed_packet, + ) + sock.send(ssl_request_payload) + + logger.debug("Building SSL context") + ssl_context = sock.build_ssl_context( + ssl_ca=ssl_options.get("ca"), + ssl_cert=ssl_options.get("cert"), + ssl_key=ssl_options.get("key"), + ssl_verify_cert=ssl_options.get("verify_cert", False), + ssl_verify_identity=ssl_options.get("verify_identity", False), + tls_versions=ssl_options.get("tls_versions"), + tls_cipher_suites=ssl_options.get("tls_ciphersuites"), + ) + + logger.debug("Switching to SSL") + sock.switch_to_ssl(ssl_context, host) + + logger.debug("SSL has been enabled") + self._ssl_enabled = True + + return ssl_request_payload + + def _switch_auth_strategy( + self, + new_strategy_name: str, + strategy_class: Optional[str] = None, + username: Optional[str] = None, + password_factor: int = 1, + ) -> None: + """Switches the authorization plugin. + + Args: + new_strategy_name: New authorization plugin name to switch to. + strategy_class: New authorization plugin class to switch to + (has higher precedence than the authorization plugin name). + username: Username to be used - if not defined, the username + provided when `authentication()` was invoked is used. + password_factor: Up to three levels of authentication (MFA) are allowed, + hence you can choose the password corresponding to the 1st, + 2nd, or 3rd factor - 1st is the default. + """ + if username is None: + username = self._username + + if strategy_class is None: + strategy_class = self._auth_plugin_class + + logger.debug("Switching to strategy %s", new_strategy_name) + self._auth_strategy = get_auth_plugin( + plugin_name=new_strategy_name, auth_plugin_class=strategy_class + )( + username, + self._passwords.get(password_factor, ""), + ssl_enabled=self.ssl_enabled, + ) + + def _mfa_n_factor( + self, + sock: MySQLSocket, + pkt: bytes, + ) -> Optional[bytes]: + """Handles MFA (Multi-Factor Authentication) response. + + Up to three levels of authentication (MFA) are allowed. + + Args: + sock: Pointer to the socket connection. + pkt: MFA response. + + Returns: + ok_packet: If last server's response is an OK packet. + None: If last server's response isn't an OK packet and no ERROR was raised. + + Raises: + InterfaceError: If got an invalid N factor. + errors.ErrorTypes: If got an ERROR response. 
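# Illustrative sketch (annotation, not part of the vendored module): the per-factor
# passwords consumed by `_mfa_n_factor()` above are typically supplied as the
# `password1`/`password2`/`password3` connection options; host, account and secrets
# below are placeholders.
import mysql.connector

cnx = mysql.connector.connect(
    host="db.example.com",
    user="app_user",
    password1="first-factor-secret",   # factor 1 (same as the plain `password` option)
    password2="second-factor-secret",  # factor 2, only if the account requires MFA
    password3="third-factor-secret",   # factor 3
    database="appdb",
)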
+ """ + n_factor = 2 + while pkt[4] == MFA_STATUS: + if n_factor not in self._passwords: + raise InterfaceError( + "Failed Multi Factor Authentication (invalid N factor)" + ) + + new_strategy_name, auth_data = MySQLProtocol.parse_auth_next_factor(pkt) + self._switch_auth_strategy(new_strategy_name, password_factor=n_factor) + logger.debug("MFA %i factor %s", n_factor, self._auth_strategy.name) + + pkt = self._auth_strategy.auth_switch_response( + sock, auth_data, **self._plugin_config + ) + + if pkt[4] == EXCHANGE_FURTHER_STATUS: + auth_data = MySQLProtocol.parse_auth_more_data(pkt) + pkt = self._auth_strategy.auth_more_response( + sock, auth_data, **self._plugin_config + ) + + if pkt[4] == OK_STATUS: + logger.debug("MFA completed succesfully") + return pkt + + if pkt[4] == ERR_STATUS: + raise get_exception(pkt) + + n_factor += 1 + + logger.warning("MFA terminated with a no ok packet") + return None + + def _handle_server_response( + self, + sock: MySQLSocket, + pkt: bytes, + ) -> Optional[bytes]: + """Handles server's response. + + Args: + sock: Pointer to the socket connection. + pkt: Server's response after completing the `HandShakeResponse`. + + Returns: + ok_packet: If last server's response is an OK packet. + None: If last server's response isn't an OK packet and no ERROR was raised. + + Raises: + errors.ErrorTypes: If got an ERROR response. + NotSupportedError: If got Authentication with old (insecure) passwords. + """ + if pkt[4] == AUTH_SWITCH_STATUS and len(pkt) == 5: + raise NotSupportedError( + "Authentication with old (insecure) passwords " + "is not supported. For more information, lookup " + "Password Hashing in the latest MySQL manual" + ) + + if pkt[4] == AUTH_SWITCH_STATUS: + logger.debug("Server's response is an auth switch request") + new_strategy_name, auth_data = MySQLProtocol.parse_auth_switch_request(pkt) + self._switch_auth_strategy(new_strategy_name) + pkt = self._auth_strategy.auth_switch_response( + sock, auth_data, **self._plugin_config + ) + + if pkt[4] == EXCHANGE_FURTHER_STATUS: + logger.debug("Exchanging further packets") + auth_data = MySQLProtocol.parse_auth_more_data(pkt) + pkt = self._auth_strategy.auth_more_response( + sock, auth_data, **self._plugin_config + ) + + if pkt[4] == OK_STATUS: + logger.debug("%s completed succesfully", self._auth_strategy.name) + return pkt + + if pkt[4] == MFA_STATUS: + logger.debug("Starting multi-factor authentication") + logger.debug("MFA 1 factor %s", self._auth_strategy.name) + return self._mfa_n_factor(sock, pkt) + + if pkt[4] == ERR_STATUS: + raise get_exception(pkt) + + return None + + def authenticate( + self, + sock: MySQLSocket, + handshake: HandShakeType, + username: str = "", + password1: str = "", + password2: str = "", + password3: str = "", + database: Optional[str] = None, + charset: int = DEFAULT_CHARSET_ID, + client_flags: int = 0, + max_allowed_packet: int = DEFAULT_MAX_ALLOWED_PACKET, + auth_plugin: Optional[str] = None, + auth_plugin_class: Optional[str] = None, + conn_attrs: Optional[Dict[str, str]] = None, + is_change_user_request: bool = False, + **plugin_config: Any, + ) -> bytes: + """Performs the authentication phase. + + During re-authentication you must set `is_change_user_request` to True. + + Args: + sock: Pointer to the socket connection. + handshake: Initial handshake. + username: Account's username. + password1: Account's password factor 1. + password2: Account's password factor 2. + password3: Account's password factor 3. + database: Initial database name for the connection. 
+ charset: Client charset (see [1]), only the lower 8-bits. + client_flags: Integer representing client capabilities flags. + max_allowed_packet: Maximum packet size. + auth_plugin: Authorization plugin name. + auth_plugin_class: Authorization plugin class (has higher precedence + than the authorization plugin name). + conn_attrs: Connection attributes. + is_change_user_request: Whether is a `change user request` operation or not. + plugin_config: Custom configuration to be passed to the auth plugin + when invoked. The parameters defined here will override the + ones defined in the auth plugin itself. + + Returns: + ok_packet: OK packet. + + Raises: + InterfaceError: If OK packet is NULL. + + References: + [1]: https://dev.mysql.com/doc/dev/mysql-server/latest/\ + page_protocol_basic_character_set.html#a_protocol_character_set + """ + # update credentials, plugin config and plugin class + self._username = username + self._passwords = {1: password1, 2: password2, 3: password3} + self._plugin_config = copy.deepcopy(plugin_config) + self._auth_plugin_class = auth_plugin_class + + # client's handshake response + response_payload, self._auth_strategy = MySQLProtocol.make_auth( + handshake=handshake, + username=username, + password=password1, + database=database, + charset=charset, + client_flags=client_flags, + max_allowed_packet=max_allowed_packet, + auth_plugin=auth_plugin, + auth_plugin_class=auth_plugin_class, + conn_attrs=conn_attrs, + is_change_user_request=is_change_user_request, + ssl_enabled=self.ssl_enabled, + plugin_config=self.plugin_config, + ) + + # client sends transaction response + send_args = (0, 0) if is_change_user_request else (None, None) + sock.send(response_payload, *send_args) + + # server replies back + pkt = bytes(sock.recv()) + + ok_pkt = self._handle_server_response(sock, pkt) + if ok_pkt is None: + raise InterfaceError("Got a NULL ok_pkt") from None + + return ok_pkt diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/charsets.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/charsets.py new file mode 100644 index 0000000..5ce9b1a --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/charsets.py @@ -0,0 +1,620 @@ +# -*- coding: utf-8 -*- # pylint: disable=missing-module-docstring + +# Copyright (c) 2013, 2022, Oracle and/or its affiliates. All rights reserved. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. 
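# Illustrative sketch (annotation, not part of the vendored module): a connection object
# drives `MySQLAuthenticator.authenticate()` above roughly as follows once the server's
# initial handshake packet has been parsed; `sock`, `handshake`, `client_flags` and the
# credentials are placeholders.
authenticator = MySQLAuthenticator()
ok_packet = authenticator.authenticate(
    sock=sock,                  # an open MySQLSocket
    handshake=handshake,        # parsed initial handshake from the server
    username="app_user",
    password1="secret",
    database="appdb",
    client_flags=client_flags,  # negotiated capability flags
)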
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +from typing import List, Optional, Tuple + +"""This module contains the MySQL Server Character Sets.""" # pylint: disable=pointless-string-statement + +# This file was auto-generated. +_GENERATED_ON: str = "2022-05-09" +_MYSQL_VERSION: Tuple[int, int, int] = (8, 0, 30) + +MYSQL_CHARACTER_SETS: List[Optional[Tuple[str, str, bool]]] = [ + # (character set name, collation, default) + None, + ("big5", "big5_chinese_ci", True), # 1 + ("latin2", "latin2_czech_cs", False), # 2 + ("dec8", "dec8_swedish_ci", True), # 3 + ("cp850", "cp850_general_ci", True), # 4 + ("latin1", "latin1_german1_ci", False), # 5 + ("hp8", "hp8_english_ci", True), # 6 + ("koi8r", "koi8r_general_ci", True), # 7 + ("latin1", "latin1_swedish_ci", True), # 8 + ("latin2", "latin2_general_ci", True), # 9 + ("swe7", "swe7_swedish_ci", True), # 10 + ("ascii", "ascii_general_ci", True), # 11 + ("ujis", "ujis_japanese_ci", True), # 12 + ("sjis", "sjis_japanese_ci", True), # 13 + ("cp1251", "cp1251_bulgarian_ci", False), # 14 + ("latin1", "latin1_danish_ci", False), # 15 + ("hebrew", "hebrew_general_ci", True), # 16 + None, + ("tis620", "tis620_thai_ci", True), # 18 + ("euckr", "euckr_korean_ci", True), # 19 + ("latin7", "latin7_estonian_cs", False), # 20 + ("latin2", "latin2_hungarian_ci", False), # 21 + ("koi8u", "koi8u_general_ci", True), # 22 + ("cp1251", "cp1251_ukrainian_ci", False), # 23 + ("gb2312", "gb2312_chinese_ci", True), # 24 + ("greek", "greek_general_ci", True), # 25 + ("cp1250", "cp1250_general_ci", True), # 26 + ("latin2", "latin2_croatian_ci", False), # 27 + ("gbk", "gbk_chinese_ci", True), # 28 + ("cp1257", "cp1257_lithuanian_ci", False), # 29 + ("latin5", "latin5_turkish_ci", True), # 30 + ("latin1", "latin1_german2_ci", False), # 31 + ("armscii8", "armscii8_general_ci", True), # 32 + ("utf8mb3", "utf8mb3_general_ci", True), # 33 + ("cp1250", "cp1250_czech_cs", False), # 34 + ("ucs2", "ucs2_general_ci", True), # 35 + ("cp866", "cp866_general_ci", True), # 36 + ("keybcs2", "keybcs2_general_ci", True), # 37 + ("macce", "macce_general_ci", True), # 38 + ("macroman", "macroman_general_ci", True), # 39 + ("cp852", "cp852_general_ci", True), # 40 + ("latin7", "latin7_general_ci", True), # 41 + ("latin7", "latin7_general_cs", False), # 42 + ("macce", "macce_bin", False), # 43 + ("cp1250", "cp1250_croatian_ci", False), # 44 + ("utf8mb4", "utf8mb4_general_ci", False), # 45 + ("utf8mb4", "utf8mb4_bin", False), # 46 + ("latin1", "latin1_bin", False), # 47 + ("latin1", "latin1_general_ci", False), # 48 + ("latin1", "latin1_general_cs", False), # 49 + ("cp1251", "cp1251_bin", False), # 50 + ("cp1251", "cp1251_general_ci", True), # 51 + ("cp1251", "cp1251_general_cs", False), # 52 + ("macroman", "macroman_bin", False), # 53 + ("utf16", "utf16_general_ci", True), # 54 + ("utf16", "utf16_bin", False), # 55 + ("utf16le", "utf16le_general_ci", True), # 56 + ("cp1256", "cp1256_general_ci", True), # 57 + ("cp1257", "cp1257_bin", False), # 58 + ("cp1257", "cp1257_general_ci", True), # 59 + ("utf32", "utf32_general_ci", True), # 60 + ("utf32", "utf32_bin", False), # 61 + ("utf16le", "utf16le_bin", False), # 62 + ("binary", "binary", True), # 63 + ("armscii8", "armscii8_bin", False), # 64 + ("ascii", "ascii_bin", False), # 65 + ("cp1250", "cp1250_bin", False), # 66 + ("cp1256", "cp1256_bin", False), # 67 + ("cp866", 
"cp866_bin", False), # 68 + ("dec8", "dec8_bin", False), # 69 + ("greek", "greek_bin", False), # 70 + ("hebrew", "hebrew_bin", False), # 71 + ("hp8", "hp8_bin", False), # 72 + ("keybcs2", "keybcs2_bin", False), # 73 + ("koi8r", "koi8r_bin", False), # 74 + ("koi8u", "koi8u_bin", False), # 75 + ("utf8mb3", "utf8mb3_tolower_ci", False), # 76 + ("latin2", "latin2_bin", False), # 77 + ("latin5", "latin5_bin", False), # 78 + ("latin7", "latin7_bin", False), # 79 + ("cp850", "cp850_bin", False), # 80 + ("cp852", "cp852_bin", False), # 81 + ("swe7", "swe7_bin", False), # 82 + ("utf8mb3", "utf8mb3_bin", False), # 83 + ("big5", "big5_bin", False), # 84 + ("euckr", "euckr_bin", False), # 85 + ("gb2312", "gb2312_bin", False), # 86 + ("gbk", "gbk_bin", False), # 87 + ("sjis", "sjis_bin", False), # 88 + ("tis620", "tis620_bin", False), # 89 + ("ucs2", "ucs2_bin", False), # 90 + ("ujis", "ujis_bin", False), # 91 + ("geostd8", "geostd8_general_ci", True), # 92 + ("geostd8", "geostd8_bin", False), # 93 + ("latin1", "latin1_spanish_ci", False), # 94 + ("cp932", "cp932_japanese_ci", True), # 95 + ("cp932", "cp932_bin", False), # 96 + ("eucjpms", "eucjpms_japanese_ci", True), # 97 + ("eucjpms", "eucjpms_bin", False), # 98 + ("cp1250", "cp1250_polish_ci", False), # 99 + None, + ("utf16", "utf16_unicode_ci", False), # 101 + ("utf16", "utf16_icelandic_ci", False), # 102 + ("utf16", "utf16_latvian_ci", False), # 103 + ("utf16", "utf16_romanian_ci", False), # 104 + ("utf16", "utf16_slovenian_ci", False), # 105 + ("utf16", "utf16_polish_ci", False), # 106 + ("utf16", "utf16_estonian_ci", False), # 107 + ("utf16", "utf16_spanish_ci", False), # 108 + ("utf16", "utf16_swedish_ci", False), # 109 + ("utf16", "utf16_turkish_ci", False), # 110 + ("utf16", "utf16_czech_ci", False), # 111 + ("utf16", "utf16_danish_ci", False), # 112 + ("utf16", "utf16_lithuanian_ci", False), # 113 + ("utf16", "utf16_slovak_ci", False), # 114 + ("utf16", "utf16_spanish2_ci", False), # 115 + ("utf16", "utf16_roman_ci", False), # 116 + ("utf16", "utf16_persian_ci", False), # 117 + ("utf16", "utf16_esperanto_ci", False), # 118 + ("utf16", "utf16_hungarian_ci", False), # 119 + ("utf16", "utf16_sinhala_ci", False), # 120 + ("utf16", "utf16_german2_ci", False), # 121 + ("utf16", "utf16_croatian_ci", False), # 122 + ("utf16", "utf16_unicode_520_ci", False), # 123 + ("utf16", "utf16_vietnamese_ci", False), # 124 + None, + None, + None, + ("ucs2", "ucs2_unicode_ci", False), # 128 + ("ucs2", "ucs2_icelandic_ci", False), # 129 + ("ucs2", "ucs2_latvian_ci", False), # 130 + ("ucs2", "ucs2_romanian_ci", False), # 131 + ("ucs2", "ucs2_slovenian_ci", False), # 132 + ("ucs2", "ucs2_polish_ci", False), # 133 + ("ucs2", "ucs2_estonian_ci", False), # 134 + ("ucs2", "ucs2_spanish_ci", False), # 135 + ("ucs2", "ucs2_swedish_ci", False), # 136 + ("ucs2", "ucs2_turkish_ci", False), # 137 + ("ucs2", "ucs2_czech_ci", False), # 138 + ("ucs2", "ucs2_danish_ci", False), # 139 + ("ucs2", "ucs2_lithuanian_ci", False), # 140 + ("ucs2", "ucs2_slovak_ci", False), # 141 + ("ucs2", "ucs2_spanish2_ci", False), # 142 + ("ucs2", "ucs2_roman_ci", False), # 143 + ("ucs2", "ucs2_persian_ci", False), # 144 + ("ucs2", "ucs2_esperanto_ci", False), # 145 + ("ucs2", "ucs2_hungarian_ci", False), # 146 + ("ucs2", "ucs2_sinhala_ci", False), # 147 + ("ucs2", "ucs2_german2_ci", False), # 148 + ("ucs2", "ucs2_croatian_ci", False), # 149 + ("ucs2", "ucs2_unicode_520_ci", False), # 150 + ("ucs2", "ucs2_vietnamese_ci", False), # 151 + None, + None, + None, + None, + None, + None, + None, + 
("ucs2", "ucs2_general_mysql500_ci", False), # 159 + ("utf32", "utf32_unicode_ci", False), # 160 + ("utf32", "utf32_icelandic_ci", False), # 161 + ("utf32", "utf32_latvian_ci", False), # 162 + ("utf32", "utf32_romanian_ci", False), # 163 + ("utf32", "utf32_slovenian_ci", False), # 164 + ("utf32", "utf32_polish_ci", False), # 165 + ("utf32", "utf32_estonian_ci", False), # 166 + ("utf32", "utf32_spanish_ci", False), # 167 + ("utf32", "utf32_swedish_ci", False), # 168 + ("utf32", "utf32_turkish_ci", False), # 169 + ("utf32", "utf32_czech_ci", False), # 170 + ("utf32", "utf32_danish_ci", False), # 171 + ("utf32", "utf32_lithuanian_ci", False), # 172 + ("utf32", "utf32_slovak_ci", False), # 173 + ("utf32", "utf32_spanish2_ci", False), # 174 + ("utf32", "utf32_roman_ci", False), # 175 + ("utf32", "utf32_persian_ci", False), # 176 + ("utf32", "utf32_esperanto_ci", False), # 177 + ("utf32", "utf32_hungarian_ci", False), # 178 + ("utf32", "utf32_sinhala_ci", False), # 179 + ("utf32", "utf32_german2_ci", False), # 180 + ("utf32", "utf32_croatian_ci", False), # 181 + ("utf32", "utf32_unicode_520_ci", False), # 182 + ("utf32", "utf32_vietnamese_ci", False), # 183 + None, + None, + None, + None, + None, + None, + None, + None, + ("utf8mb3", "utf8mb3_unicode_ci", False), # 192 + ("utf8mb3", "utf8mb3_icelandic_ci", False), # 193 + ("utf8mb3", "utf8mb3_latvian_ci", False), # 194 + ("utf8mb3", "utf8mb3_romanian_ci", False), # 195 + ("utf8mb3", "utf8mb3_slovenian_ci", False), # 196 + ("utf8mb3", "utf8mb3_polish_ci", False), # 197 + ("utf8mb3", "utf8mb3_estonian_ci", False), # 198 + ("utf8mb3", "utf8mb3_spanish_ci", False), # 199 + ("utf8mb3", "utf8mb3_swedish_ci", False), # 200 + ("utf8mb3", "utf8mb3_turkish_ci", False), # 201 + ("utf8mb3", "utf8mb3_czech_ci", False), # 202 + ("utf8mb3", "utf8mb3_danish_ci", False), # 203 + ("utf8mb3", "utf8mb3_lithuanian_ci", False), # 204 + ("utf8mb3", "utf8mb3_slovak_ci", False), # 205 + ("utf8mb3", "utf8mb3_spanish2_ci", False), # 206 + ("utf8mb3", "utf8mb3_roman_ci", False), # 207 + ("utf8mb3", "utf8mb3_persian_ci", False), # 208 + ("utf8mb3", "utf8mb3_esperanto_ci", False), # 209 + ("utf8mb3", "utf8mb3_hungarian_ci", False), # 210 + ("utf8mb3", "utf8mb3_sinhala_ci", False), # 211 + ("utf8mb3", "utf8mb3_german2_ci", False), # 212 + ("utf8mb3", "utf8mb3_croatian_ci", False), # 213 + ("utf8mb3", "utf8mb3_unicode_520_ci", False), # 214 + ("utf8mb3", "utf8mb3_vietnamese_ci", False), # 215 + None, + None, + None, + None, + None, + None, + None, + ("utf8mb3", "utf8mb3_general_mysql500_ci", False), # 223 + ("utf8mb4", "utf8mb4_unicode_ci", False), # 224 + ("utf8mb4", "utf8mb4_icelandic_ci", False), # 225 + ("utf8mb4", "utf8mb4_latvian_ci", False), # 226 + ("utf8mb4", "utf8mb4_romanian_ci", False), # 227 + ("utf8mb4", "utf8mb4_slovenian_ci", False), # 228 + ("utf8mb4", "utf8mb4_polish_ci", False), # 229 + ("utf8mb4", "utf8mb4_estonian_ci", False), # 230 + ("utf8mb4", "utf8mb4_spanish_ci", False), # 231 + ("utf8mb4", "utf8mb4_swedish_ci", False), # 232 + ("utf8mb4", "utf8mb4_turkish_ci", False), # 233 + ("utf8mb4", "utf8mb4_czech_ci", False), # 234 + ("utf8mb4", "utf8mb4_danish_ci", False), # 235 + ("utf8mb4", "utf8mb4_lithuanian_ci", False), # 236 + ("utf8mb4", "utf8mb4_slovak_ci", False), # 237 + ("utf8mb4", "utf8mb4_spanish2_ci", False), # 238 + ("utf8mb4", "utf8mb4_roman_ci", False), # 239 + ("utf8mb4", "utf8mb4_persian_ci", False), # 240 + ("utf8mb4", "utf8mb4_esperanto_ci", False), # 241 + ("utf8mb4", "utf8mb4_hungarian_ci", False), # 242 + ("utf8mb4", 
"utf8mb4_sinhala_ci", False), # 243 + ("utf8mb4", "utf8mb4_german2_ci", False), # 244 + ("utf8mb4", "utf8mb4_croatian_ci", False), # 245 + ("utf8mb4", "utf8mb4_unicode_520_ci", False), # 246 + ("utf8mb4", "utf8mb4_vietnamese_ci", False), # 247 + ("gb18030", "gb18030_chinese_ci", True), # 248 + ("gb18030", "gb18030_bin", False), # 249 + ("gb18030", "gb18030_unicode_520_ci", False), # 250 + None, + None, + None, + None, + ("utf8mb4", "utf8mb4_0900_ai_ci", True), # 255 + ("utf8mb4", "utf8mb4_de_pb_0900_ai_ci", False), # 256 + ("utf8mb4", "utf8mb4_is_0900_ai_ci", False), # 257 + ("utf8mb4", "utf8mb4_lv_0900_ai_ci", False), # 258 + ("utf8mb4", "utf8mb4_ro_0900_ai_ci", False), # 259 + ("utf8mb4", "utf8mb4_sl_0900_ai_ci", False), # 260 + ("utf8mb4", "utf8mb4_pl_0900_ai_ci", False), # 261 + ("utf8mb4", "utf8mb4_et_0900_ai_ci", False), # 262 + ("utf8mb4", "utf8mb4_es_0900_ai_ci", False), # 263 + ("utf8mb4", "utf8mb4_sv_0900_ai_ci", False), # 264 + ("utf8mb4", "utf8mb4_tr_0900_ai_ci", False), # 265 + ("utf8mb4", "utf8mb4_cs_0900_ai_ci", False), # 266 + ("utf8mb4", "utf8mb4_da_0900_ai_ci", False), # 267 + ("utf8mb4", "utf8mb4_lt_0900_ai_ci", False), # 268 + ("utf8mb4", "utf8mb4_sk_0900_ai_ci", False), # 269 + ("utf8mb4", "utf8mb4_es_trad_0900_ai_ci", False), # 270 + ("utf8mb4", "utf8mb4_la_0900_ai_ci", False), # 271 + None, + ("utf8mb4", "utf8mb4_eo_0900_ai_ci", False), # 273 + ("utf8mb4", "utf8mb4_hu_0900_ai_ci", False), # 274 + ("utf8mb4", "utf8mb4_hr_0900_ai_ci", False), # 275 + None, + ("utf8mb4", "utf8mb4_vi_0900_ai_ci", False), # 277 + ("utf8mb4", "utf8mb4_0900_as_cs", False), # 278 + ("utf8mb4", "utf8mb4_de_pb_0900_as_cs", False), # 279 + ("utf8mb4", "utf8mb4_is_0900_as_cs", False), # 280 + ("utf8mb4", "utf8mb4_lv_0900_as_cs", False), # 281 + ("utf8mb4", "utf8mb4_ro_0900_as_cs", False), # 282 + ("utf8mb4", "utf8mb4_sl_0900_as_cs", False), # 283 + ("utf8mb4", "utf8mb4_pl_0900_as_cs", False), # 284 + ("utf8mb4", "utf8mb4_et_0900_as_cs", False), # 285 + ("utf8mb4", "utf8mb4_es_0900_as_cs", False), # 286 + ("utf8mb4", "utf8mb4_sv_0900_as_cs", False), # 287 + ("utf8mb4", "utf8mb4_tr_0900_as_cs", False), # 288 + ("utf8mb4", "utf8mb4_cs_0900_as_cs", False), # 289 + ("utf8mb4", "utf8mb4_da_0900_as_cs", False), # 290 + ("utf8mb4", "utf8mb4_lt_0900_as_cs", False), # 291 + ("utf8mb4", "utf8mb4_sk_0900_as_cs", False), # 292 + ("utf8mb4", "utf8mb4_es_trad_0900_as_cs", False), # 293 + ("utf8mb4", "utf8mb4_la_0900_as_cs", False), # 294 + None, + ("utf8mb4", "utf8mb4_eo_0900_as_cs", False), # 296 + ("utf8mb4", "utf8mb4_hu_0900_as_cs", False), # 297 + ("utf8mb4", "utf8mb4_hr_0900_as_cs", False), # 298 + None, + ("utf8mb4", "utf8mb4_vi_0900_as_cs", False), # 300 + None, + None, + ("utf8mb4", "utf8mb4_ja_0900_as_cs", False), # 303 + ("utf8mb4", "utf8mb4_ja_0900_as_cs_ks", False), # 304 + ("utf8mb4", "utf8mb4_0900_as_ci", False), # 305 + ("utf8mb4", "utf8mb4_ru_0900_ai_ci", False), # 306 + ("utf8mb4", "utf8mb4_ru_0900_as_cs", False), # 307 + ("utf8mb4", "utf8mb4_zh_0900_as_cs", False), # 308 + ("utf8mb4", "utf8mb4_0900_bin", False), # 309 + ("utf8mb4", "utf8mb4_nb_0900_ai_ci", False), # 310 + ("utf8mb4", "utf8mb4_nb_0900_as_cs", False), # 311 + ("utf8mb4", "utf8mb4_nn_0900_ai_ci", False), # 312 + ("utf8mb4", "utf8mb4_nn_0900_as_cs", False), # 313 + ("utf8mb4", "utf8mb4_sr_latn_0900_ai_ci", False), # 314 + ("utf8mb4", "utf8mb4_sr_latn_0900_as_cs", False), # 315 + ("utf8mb4", "utf8mb4_bs_0900_ai_ci", False), # 316 + ("utf8mb4", "utf8mb4_bs_0900_as_cs", False), # 317 + ("utf8mb4", "utf8mb4_bg_0900_ai_ci", False), # 
318 + ("utf8mb4", "utf8mb4_bg_0900_as_cs", False), # 319 + ("utf8mb4", "utf8mb4_gl_0900_ai_ci", False), # 320 + ("utf8mb4", "utf8mb4_gl_0900_as_cs", False), # 321 + ("utf8mb4", "utf8mb4_mn_cyrl_0900_ai_ci", False), # 322 + ("utf8mb4", "utf8mb4_mn_cyrl_0900_as_cs", False), # 323 +] + +MYSQL_CHARACTER_SETS_57: List[Optional[Tuple[str, str, bool]]] = [ + # (character set name, collation, default) + None, + ("big5", "big5_chinese_ci", True), # 1 + ("latin2", "latin2_czech_cs", False), # 2 + ("dec8", "dec8_swedish_ci", True), # 3 + ("cp850", "cp850_general_ci", True), # 4 + ("latin1", "latin1_german1_ci", False), # 5 + ("hp8", "hp8_english_ci", True), # 6 + ("koi8r", "koi8r_general_ci", True), # 7 + ("latin1", "latin1_swedish_ci", True), # 8 + ("latin2", "latin2_general_ci", True), # 9 + ("swe7", "swe7_swedish_ci", True), # 10 + ("ascii", "ascii_general_ci", True), # 11 + ("ujis", "ujis_japanese_ci", True), # 12 + ("sjis", "sjis_japanese_ci", True), # 13 + ("cp1251", "cp1251_bulgarian_ci", False), # 14 + ("latin1", "latin1_danish_ci", False), # 15 + ("hebrew", "hebrew_general_ci", True), # 16 + None, + ("tis620", "tis620_thai_ci", True), # 18 + ("euckr", "euckr_korean_ci", True), # 19 + ("latin7", "latin7_estonian_cs", False), # 20 + ("latin2", "latin2_hungarian_ci", False), # 21 + ("koi8u", "koi8u_general_ci", True), # 22 + ("cp1251", "cp1251_ukrainian_ci", False), # 23 + ("gb2312", "gb2312_chinese_ci", True), # 24 + ("greek", "greek_general_ci", True), # 25 + ("cp1250", "cp1250_general_ci", True), # 26 + ("latin2", "latin2_croatian_ci", False), # 27 + ("gbk", "gbk_chinese_ci", True), # 28 + ("cp1257", "cp1257_lithuanian_ci", False), # 29 + ("latin5", "latin5_turkish_ci", True), # 30 + ("latin1", "latin1_german2_ci", False), # 31 + ("armscii8", "armscii8_general_ci", True), # 32 + ("utf8", "utf8_general_ci", True), # 33 + ("cp1250", "cp1250_czech_cs", False), # 34 + ("ucs2", "ucs2_general_ci", True), # 35 + ("cp866", "cp866_general_ci", True), # 36 + ("keybcs2", "keybcs2_general_ci", True), # 37 + ("macce", "macce_general_ci", True), # 38 + ("macroman", "macroman_general_ci", True), # 39 + ("cp852", "cp852_general_ci", True), # 40 + ("latin7", "latin7_general_ci", True), # 41 + ("latin7", "latin7_general_cs", False), # 42 + ("macce", "macce_bin", False), # 43 + ("cp1250", "cp1250_croatian_ci", False), # 44 + ("utf8mb4", "utf8mb4_general_ci", True), # 45 + ("utf8mb4", "utf8mb4_bin", False), # 46 + ("latin1", "latin1_bin", False), # 47 + ("latin1", "latin1_general_ci", False), # 48 + ("latin1", "latin1_general_cs", False), # 49 + ("cp1251", "cp1251_bin", False), # 50 + ("cp1251", "cp1251_general_ci", True), # 51 + ("cp1251", "cp1251_general_cs", False), # 52 + ("macroman", "macroman_bin", False), # 53 + ("utf16", "utf16_general_ci", True), # 54 + ("utf16", "utf16_bin", False), # 55 + ("utf16le", "utf16le_general_ci", True), # 56 + ("cp1256", "cp1256_general_ci", True), # 57 + ("cp1257", "cp1257_bin", False), # 58 + ("cp1257", "cp1257_general_ci", True), # 59 + ("utf32", "utf32_general_ci", True), # 60 + ("utf32", "utf32_bin", False), # 61 + ("utf16le", "utf16le_bin", False), # 62 + ("binary", "binary", True), # 63 + ("armscii8", "armscii8_bin", False), # 64 + ("ascii", "ascii_bin", False), # 65 + ("cp1250", "cp1250_bin", False), # 66 + ("cp1256", "cp1256_bin", False), # 67 + ("cp866", "cp866_bin", False), # 68 + ("dec8", "dec8_bin", False), # 69 + ("greek", "greek_bin", False), # 70 + ("hebrew", "hebrew_bin", False), # 71 + ("hp8", "hp8_bin", False), # 72 + ("keybcs2", "keybcs2_bin", False), # 
73 + ("koi8r", "koi8r_bin", False), # 74 + ("koi8u", "koi8u_bin", False), # 75 + None, + ("latin2", "latin2_bin", False), # 77 + ("latin5", "latin5_bin", False), # 78 + ("latin7", "latin7_bin", False), # 79 + ("cp850", "cp850_bin", False), # 80 + ("cp852", "cp852_bin", False), # 81 + ("swe7", "swe7_bin", False), # 82 + ("utf8", "utf8_bin", False), # 83 + ("big5", "big5_bin", False), # 84 + ("euckr", "euckr_bin", False), # 85 + ("gb2312", "gb2312_bin", False), # 86 + ("gbk", "gbk_bin", False), # 87 + ("sjis", "sjis_bin", False), # 88 + ("tis620", "tis620_bin", False), # 89 + ("ucs2", "ucs2_bin", False), # 90 + ("ujis", "ujis_bin", False), # 91 + ("geostd8", "geostd8_general_ci", True), # 92 + ("geostd8", "geostd8_bin", False), # 93 + ("latin1", "latin1_spanish_ci", False), # 94 + ("cp932", "cp932_japanese_ci", True), # 95 + ("cp932", "cp932_bin", False), # 96 + ("eucjpms", "eucjpms_japanese_ci", True), # 97 + ("eucjpms", "eucjpms_bin", False), # 98 + ("cp1250", "cp1250_polish_ci", False), # 99 + None, + ("utf16", "utf16_unicode_ci", False), # 101 + ("utf16", "utf16_icelandic_ci", False), # 102 + ("utf16", "utf16_latvian_ci", False), # 103 + ("utf16", "utf16_romanian_ci", False), # 104 + ("utf16", "utf16_slovenian_ci", False), # 105 + ("utf16", "utf16_polish_ci", False), # 106 + ("utf16", "utf16_estonian_ci", False), # 107 + ("utf16", "utf16_spanish_ci", False), # 108 + ("utf16", "utf16_swedish_ci", False), # 109 + ("utf16", "utf16_turkish_ci", False), # 110 + ("utf16", "utf16_czech_ci", False), # 111 + ("utf16", "utf16_danish_ci", False), # 112 + ("utf16", "utf16_lithuanian_ci", False), # 113 + ("utf16", "utf16_slovak_ci", False), # 114 + ("utf16", "utf16_spanish2_ci", False), # 115 + ("utf16", "utf16_roman_ci", False), # 116 + ("utf16", "utf16_persian_ci", False), # 117 + ("utf16", "utf16_esperanto_ci", False), # 118 + ("utf16", "utf16_hungarian_ci", False), # 119 + ("utf16", "utf16_sinhala_ci", False), # 120 + ("utf16", "utf16_german2_ci", False), # 121 + ("utf16", "utf16_croatian_ci", False), # 122 + ("utf16", "utf16_unicode_520_ci", False), # 123 + ("utf16", "utf16_vietnamese_ci", False), # 124 + None, + None, + None, + ("ucs2", "ucs2_unicode_ci", False), # 128 + ("ucs2", "ucs2_icelandic_ci", False), # 129 + ("ucs2", "ucs2_latvian_ci", False), # 130 + ("ucs2", "ucs2_romanian_ci", False), # 131 + ("ucs2", "ucs2_slovenian_ci", False), # 132 + ("ucs2", "ucs2_polish_ci", False), # 133 + ("ucs2", "ucs2_estonian_ci", False), # 134 + ("ucs2", "ucs2_spanish_ci", False), # 135 + ("ucs2", "ucs2_swedish_ci", False), # 136 + ("ucs2", "ucs2_turkish_ci", False), # 137 + ("ucs2", "ucs2_czech_ci", False), # 138 + ("ucs2", "ucs2_danish_ci", False), # 139 + ("ucs2", "ucs2_lithuanian_ci", False), # 140 + ("ucs2", "ucs2_slovak_ci", False), # 141 + ("ucs2", "ucs2_spanish2_ci", False), # 142 + ("ucs2", "ucs2_roman_ci", False), # 143 + ("ucs2", "ucs2_persian_ci", False), # 144 + ("ucs2", "ucs2_esperanto_ci", False), # 145 + ("ucs2", "ucs2_hungarian_ci", False), # 146 + ("ucs2", "ucs2_sinhala_ci", False), # 147 + ("ucs2", "ucs2_german2_ci", False), # 148 + ("ucs2", "ucs2_croatian_ci", False), # 149 + ("ucs2", "ucs2_unicode_520_ci", False), # 150 + ("ucs2", "ucs2_vietnamese_ci", False), # 151 + None, + None, + None, + None, + None, + None, + None, + ("ucs2", "ucs2_general_mysql500_ci", False), # 159 + ("utf32", "utf32_unicode_ci", False), # 160 + ("utf32", "utf32_icelandic_ci", False), # 161 + ("utf32", "utf32_latvian_ci", False), # 162 + ("utf32", "utf32_romanian_ci", False), # 163 + ("utf32", 
"utf32_slovenian_ci", False), # 164 + ("utf32", "utf32_polish_ci", False), # 165 + ("utf32", "utf32_estonian_ci", False), # 166 + ("utf32", "utf32_spanish_ci", False), # 167 + ("utf32", "utf32_swedish_ci", False), # 168 + ("utf32", "utf32_turkish_ci", False), # 169 + ("utf32", "utf32_czech_ci", False), # 170 + ("utf32", "utf32_danish_ci", False), # 171 + ("utf32", "utf32_lithuanian_ci", False), # 172 + ("utf32", "utf32_slovak_ci", False), # 173 + ("utf32", "utf32_spanish2_ci", False), # 174 + ("utf32", "utf32_roman_ci", False), # 175 + ("utf32", "utf32_persian_ci", False), # 176 + ("utf32", "utf32_esperanto_ci", False), # 177 + ("utf32", "utf32_hungarian_ci", False), # 178 + ("utf32", "utf32_sinhala_ci", False), # 179 + ("utf32", "utf32_german2_ci", False), # 180 + ("utf32", "utf32_croatian_ci", False), # 181 + ("utf32", "utf32_unicode_520_ci", False), # 182 + ("utf32", "utf32_vietnamese_ci", False), # 183 + None, + None, + None, + None, + None, + None, + None, + None, + ("utf8", "utf8_unicode_ci", False), # 192 + ("utf8", "utf8_icelandic_ci", False), # 193 + ("utf8", "utf8_latvian_ci", False), # 194 + ("utf8", "utf8_romanian_ci", False), # 195 + ("utf8", "utf8_slovenian_ci", False), # 196 + ("utf8", "utf8_polish_ci", False), # 197 + ("utf8", "utf8_estonian_ci", False), # 198 + ("utf8", "utf8_spanish_ci", False), # 199 + ("utf8", "utf8_swedish_ci", False), # 200 + ("utf8", "utf8_turkish_ci", False), # 201 + ("utf8", "utf8_czech_ci", False), # 202 + ("utf8", "utf8_danish_ci", False), # 203 + ("utf8", "utf8_lithuanian_ci", False), # 204 + ("utf8", "utf8_slovak_ci", False), # 205 + ("utf8", "utf8_spanish2_ci", False), # 206 + ("utf8", "utf8_roman_ci", False), # 207 + ("utf8", "utf8_persian_ci", False), # 208 + ("utf8", "utf8_esperanto_ci", False), # 209 + ("utf8", "utf8_hungarian_ci", False), # 210 + ("utf8", "utf8_sinhala_ci", False), # 211 + ("utf8", "utf8_german2_ci", False), # 212 + ("utf8", "utf8_croatian_ci", False), # 213 + ("utf8", "utf8_unicode_520_ci", False), # 214 + ("utf8", "utf8_vietnamese_ci", False), # 215 + None, + None, + None, + None, + None, + None, + None, + ("utf8", "utf8_general_mysql500_ci", False), # 223 + ("utf8mb4", "utf8mb4_unicode_ci", False), # 224 + ("utf8mb4", "utf8mb4_icelandic_ci", False), # 225 + ("utf8mb4", "utf8mb4_latvian_ci", False), # 226 + ("utf8mb4", "utf8mb4_romanian_ci", False), # 227 + ("utf8mb4", "utf8mb4_slovenian_ci", False), # 228 + ("utf8mb4", "utf8mb4_polish_ci", False), # 229 + ("utf8mb4", "utf8mb4_estonian_ci", False), # 230 + ("utf8mb4", "utf8mb4_spanish_ci", False), # 231 + ("utf8mb4", "utf8mb4_swedish_ci", False), # 232 + ("utf8mb4", "utf8mb4_turkish_ci", False), # 233 + ("utf8mb4", "utf8mb4_czech_ci", False), # 234 + ("utf8mb4", "utf8mb4_danish_ci", False), # 235 + ("utf8mb4", "utf8mb4_lithuanian_ci", False), # 236 + ("utf8mb4", "utf8mb4_slovak_ci", False), # 237 + ("utf8mb4", "utf8mb4_spanish2_ci", False), # 238 + ("utf8mb4", "utf8mb4_roman_ci", False), # 239 + ("utf8mb4", "utf8mb4_persian_ci", False), # 240 + ("utf8mb4", "utf8mb4_esperanto_ci", False), # 241 + ("utf8mb4", "utf8mb4_hungarian_ci", False), # 242 + ("utf8mb4", "utf8mb4_sinhala_ci", False), # 243 + ("utf8mb4", "utf8mb4_german2_ci", False), # 244 + ("utf8mb4", "utf8mb4_croatian_ci", False), # 245 + ("utf8mb4", "utf8mb4_unicode_520_ci", False), # 246 + ("utf8mb4", "utf8mb4_vietnamese_ci", False), # 247 + ("gb18030", "gb18030_chinese_ci", True), # 248 + ("gb18030", "gb18030_bin", False), # 249 + ("gb18030", "gb18030_unicode_520_ci", False), # 250 +] diff --git 
a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/connection.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/connection.py new file mode 100644 index 0000000..ef209e7 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/connection.py @@ -0,0 +1,1543 @@ +# Copyright (c) 2009, 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +# mypy: disable-error-code="arg-type,operator,attr-defined,assignment" + +"""Implementing communication with MySQL servers.""" +from __future__ import annotations + +import datetime +import getpass +import os +import socket +import struct +import sys +import warnings + +from decimal import Decimal +from io import IOBase +from typing import ( + TYPE_CHECKING, + Any, + BinaryIO, + Dict, + Generator, + List, + Mapping, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from . 
import version +from .abstracts import MySQLConnectionAbstract +from .authentication import MySQLAuthenticator, get_auth_plugin +from .constants import ( + ClientFlag, + FieldType, + RefreshOption, + ServerCmd, + ServerFlag, + flag_is_set, +) +from .conversion import MySQLConverter +from .cursor import ( + CursorBase, + MySQLCursor, + MySQLCursorBuffered, + MySQLCursorBufferedDict, + MySQLCursorBufferedNamedTuple, + MySQLCursorBufferedRaw, + MySQLCursorDict, + MySQLCursorNamedTuple, + MySQLCursorPrepared, + MySQLCursorPreparedDict, + MySQLCursorPreparedNamedTuple, + MySQLCursorPreparedRaw, + MySQLCursorRaw, +) +from .errors import ( + DatabaseError, + Error, + InterfaceError, + InternalError, + NotSupportedError, + OperationalError, + ProgrammingError, + get_exception, +) +from .logger import logger +from .network import MySQLSocket, MySQLTCPSocket, MySQLUnixSocket +from .opentelemetry.constants import OTEL_ENABLED +from .opentelemetry.context_propagation import with_context_propagation +from .protocol import MySQLProtocol +from .types import ( + BinaryProtocolType, + DescriptionType, + EofPacketType, + HandShakeType, + OkPacketType, + ResultType, + RowType, + StatsPacketType, + StrOrBytes, +) +from .utils import get_platform, int1store, int4store, lc_int + +if TYPE_CHECKING: + from .abstracts import CMySQLPrepStmt + +if OTEL_ENABLED: + from .opentelemetry.instrumentation import end_span, record_exception_event + + +class MySQLConnection(MySQLConnectionAbstract): + """Connection to a MySQL Server""" + + def __init__(self, **kwargs: Any) -> None: + self._protocol: Optional[MySQLProtocol] = None + self._socket: Optional[MySQLSocket] = None + self._handshake: Optional[HandShakeType] = None + super().__init__() + + self._converter_class: Type[MySQLConverter] = MySQLConverter + + self._client_flags: int = ClientFlag.get_default() + self._charset_id: int = 45 + self._sql_mode: Optional[str] = None + self._time_zone: Optional[str] = None + self._autocommit: bool = False + + self._user: str = "" + self._password: str = "" + self._database: str = "" + self._host: str = "127.0.0.1" + self._port: int = 3306 + self._unix_socket: Optional[str] = None + self._client_host: str = "" + self._client_port: int = 0 + self._ssl: Dict[str, Optional[Union[str, bool, List[str]]]] = {} + self._force_ipv6: bool = False + + self._use_unicode: bool = True + self._get_warnings: bool = False + self._raise_on_warnings: bool = False + self._buffered: bool = False + self._unread_result: bool = False + self._have_next_result: bool = False + self._raw: bool = False + self._in_transaction: bool = False + + self._prepared_statements: Any = None + + self._ssl_active: bool = False + self._auth_plugin: Optional[str] = None + self._krb_service_principal: Optional[str] = None + self._pool_config_version: Any = None + self._query_attrs_supported: int = False + + self._columns_desc: List[DescriptionType] = [] + self._mfa_nfactor: int = 1 + + self._authenticator: MySQLAuthenticator = MySQLAuthenticator() + + if kwargs: + try: + self.connect(**kwargs) + except Exception: + # Tidy-up underlying socket on failure + self.close() + self._socket = None + raise + + def _add_default_conn_attrs(self) -> None: + """Add the default connection attributes.""" + platform = get_platform() + license_chunks = version.LICENSE.split(" ") + if license_chunks[0] == "GPLv2": + client_license = "GPL-2.0" + else: + client_license = "Commercial" + default_conn_attrs = { + "_pid": str(os.getpid()), + "_platform": platform["arch"], + "_source_host": 
socket.gethostname(), + "_client_name": "mysql-connector-python", + "_client_license": client_license, + "_client_version": ".".join([str(x) for x in version.VERSION[0:3]]), + "_os": platform["version"], + } + + self._conn_attrs.update((default_conn_attrs)) + + def _do_handshake(self) -> None: + """Get the handshake from the MySQL server""" + packet = bytes(self._socket.recv()) + if packet[4] == 255: + raise get_exception(packet) + + self._handshake = None + handshake = self._protocol.parse_handshake(packet) + + server_version = handshake["server_version_original"] + + self._server_version = self._check_server_version( + server_version + if isinstance(server_version, (str, bytes, bytearray)) + else "Unknown" + ) + self._character_set.set_mysql_version(self._server_version) + + if not handshake["capabilities"] & ClientFlag.SSL: + if self._auth_plugin == "mysql_clear_password" and not self.is_secure: + raise InterfaceError( + "Clear password authentication is not supported over " + "insecure channels" + ) + if self._ssl.get("verify_cert"): + raise InterfaceError( + "SSL is required but the server doesn't support it", + errno=2026, + ) + self._client_flags &= ~ClientFlag.SSL + elif not self._ssl_disabled: + self._client_flags |= ClientFlag.SSL + + if handshake["capabilities"] & ClientFlag.PLUGIN_AUTH: + self.set_client_flags([ClientFlag.PLUGIN_AUTH]) + + if handshake["capabilities"] & ClientFlag.CLIENT_QUERY_ATTRIBUTES: + self._query_attrs_supported = True + self.set_client_flags([ClientFlag.CLIENT_QUERY_ATTRIBUTES]) + + if handshake["capabilities"] & ClientFlag.MULTI_FACTOR_AUTHENTICATION: + self.set_client_flags([ClientFlag.MULTI_FACTOR_AUTHENTICATION]) + + self._handshake = handshake + + def _do_auth( + self, + username: Optional[str] = None, + password: Optional[str] = None, + database: Optional[str] = None, + client_flags: int = 0, + charset: int = 45, + ssl_options: Optional[Dict[str, Optional[Union[str, bool, List[str]]]]] = None, + conn_attrs: Optional[Dict[str, str]] = None, + ) -> bool: + """Authenticate with the MySQL server + + Authentication happens in two parts. We first send a response to the + handshake. The MySQL server will then send either an AuthSwitchRequest + or an error packet. + + Raises NotSupportedError when we get the old, insecure password + reply back. Raises any error coming from MySQL. 
+ """ + if ( + self._auth_plugin.startswith("authentication_oci") + or ( + self._auth_plugin.startswith("authentication_kerberos") + and os.name == "nt" + ) + ) and not username: + username = getpass.getuser() + logger.debug( + "MySQL user is empty, OS user: %s will be used for %s", + username, + self._auth_plugin, + ) + + if self._password1 and password != self._password1: + password = self._password1 + + self._ssl_active = False + if not self._ssl_disabled and (client_flags & ClientFlag.SSL): + self._authenticator.setup_ssl( + self._socket, + self.server_host, + ssl_options, + charset=charset, + client_flags=client_flags, + ) + self._ssl_active = True + + ok_pkt = self._authenticator.authenticate( + sock=self._socket, + handshake=self._handshake, + username=username, + password1=password, + password2=self._password2, + password3=self._password3, + database=database, + charset=charset, + client_flags=client_flags, + auth_plugin=self._auth_plugin, + auth_plugin_class=self._auth_plugin_class, + conn_attrs=conn_attrs, + krb_service_principal=self._krb_service_principal, + oci_config_file=self._oci_config_file, + oci_config_profile=self._oci_config_profile, + webauthn_callback=self._webauthn_callback, + fido_callback=self._fido_callback, + ) + self._handle_ok(ok_pkt) + + if not (client_flags & ClientFlag.CONNECT_WITH_DB) and database: + self.cmd_init_db(database) + + return True + + def _get_connection(self) -> MySQLSocket: + """Get connection based on configuration + + This method will return the appropriated connection object using + the connection parameters. + + Returns subclass of MySQLBaseSocket. + """ + conn = None + if self._unix_socket and os.name == "posix": + conn = MySQLUnixSocket(unix_socket=self.unix_socket) + else: + conn = MySQLTCPSocket( + host=self.server_host, + port=self.server_port, + force_ipv6=self._force_ipv6, + ) + + conn.set_connection_timeout(self._connection_timeout) + return conn + + def _open_connection(self) -> None: + """Open the connection to the MySQL server + + This method sets up and opens the connection to the MySQL server. + + Raises on errors. + """ + if self._auth_plugin == "authentication_kerberos_client" and not self._user: + cls = get_auth_plugin(self._auth_plugin, self._auth_plugin_class) + self._user = cls.get_user_from_credentials() + + self._protocol = MySQLProtocol() + self._socket = self._get_connection() + try: + self._socket.open_connection() + + # do initial handshake + self._do_handshake() + + # start authentication negotiation + self._do_auth( + self._user, + self._password, + self._database, + self._client_flags, + self._charset_id, + self._ssl, + self._conn_attrs, + ) + self.set_converter_class(self._converter_class) + + if self._client_flags & ClientFlag.COMPRESS: + # update the network layer accordingly + self._socket.switch_to_compressed_mode() + + self._socket.set_connection_timeout(None) + except Exception: + # close socket + self._socket.close_connection() + raise + + if ( + not self._ssl_disabled + and hasattr(self._socket.sock, "version") + and callable(self._socket.sock.version) + ): + # Raise a deprecation warning if TLSv1 or TLSv1.1 is being used + tls_version = self._socket.sock.version() + if tls_version in ("TLSv1", "TLSv1.1"): + warn_msg = ( + f"This connection is using {tls_version} which is now " + "deprecated and will be removed in a future release of " + "MySQL Connector/Python" + ) + warnings.warn(warn_msg, DeprecationWarning) + + def shutdown(self) -> None: + """Shut down connection to MySQL Server. 
+ + This method closes the socket. It raises no exceptions. + + Unlike `disconnect()`, `shutdown()` closes the client connection without + attempting to send a QUIT command to the server first. Thus, it will not + block if the connection is disrupted for some reason such as network failure. + """ + if not self._socket: + return + + try: + self._socket.shutdown() + except (AttributeError, Error): + pass # Getting an exception would mean we are disconnected. + + def close(self) -> None: + """Disconnect from the MySQL server""" + if self._span and self._span.is_recording(): + record_exception_event(self._span, sys.exc_info()[1]) + + if not self._socket: + return + + try: + self.cmd_quit() + except (AttributeError, Error): + pass # Getting an exception would mean we are disconnected. + + try: + self._socket.close_connection() + except Exception as err: + if OTEL_ENABLED: + record_exception_event(self._span, err) + raise + finally: + if OTEL_ENABLED: + end_span(self._span) + + self._handshake = None + + disconnect = close + + def _send_cmd( + self, + command: int, + argument: Optional[bytes] = None, + packet_number: int = 0, + packet: Optional[bytes] = None, + expect_response: bool = True, + compressed_packet_number: int = 0, + ) -> Optional[bytearray]: + """Send a command to the MySQL server + + This method sends a command with an optional argument. + If packet is not None, it will be sent and the argument will be + ignored. + + The packet_number is optional and should usually not be used. + + Some commands might not result in the MySQL server returning + a response. If a command does not return anything, you should + set expect_response to False. The _send_cmd method will then + return None instead of a MySQL packet. + + Returns a MySQL packet or None. + """ + self.handle_unread_result() + + try: + self._socket.send( + self._protocol.make_command(command, packet or argument), + packet_number, + compressed_packet_number, + ) + except AttributeError as err: + raise OperationalError("MySQL Connection not available") from err + + return self._socket.recv() if expect_response else None + + def _send_data( + self, data_file: BinaryIO, send_empty_packet: bool = False + ) -> bytearray: + """Send data to the MySQL server + + This method accepts a file-like object and sends its data + as is to the MySQL server. If the send_empty_packet is + True, it will send an extra empty package (for example + when using LOAD LOCAL DATA INFILE). + + Returns a MySQL packet. + """ + self.handle_unread_result() + + if not hasattr(data_file, "read"): + raise ValueError("expecting a file-like object") + + chunk_size = 131072 # 128 KB + try: + buf = data_file.read(chunk_size - 16) + while buf: + self._socket.send(buf) + buf = data_file.read(chunk_size - 16) + except AttributeError as err: + raise OperationalError("MySQL Connection not available") from err + + if send_empty_packet: + try: + self._socket.send(b"") + except AttributeError as err: + raise OperationalError("MySQL Connection not available") from err + + return self._socket.recv() + + def _handle_server_status(self, flags: int) -> None: + """Handle the server flags found in MySQL packets + + This method handles the server flags send by MySQL OK and EOF + packets. It, for example, checks whether there exists more result + sets or whether there is an ongoing transaction. 
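# Illustrative sketch (annotation, not part of the vendored module): the STATUS_IN_TRANS
# flag tracked here is what backs the public `in_transaction` property; connection
# details below are placeholders.
import mysql.connector

cnx = mysql.connector.connect(host="db.example.com", user="app_user", password="secret")
cnx.start_transaction()
assert cnx.in_transaction        # True until the transaction ends
cnx.commit()
assert not cnx.in_transaction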
+ """ + self._have_next_result = flag_is_set(ServerFlag.MORE_RESULTS_EXISTS, flags) + self._in_transaction = flag_is_set(ServerFlag.STATUS_IN_TRANS, flags) + + @property + def in_transaction(self) -> bool: + """MySQL session has started a transaction""" + return self._in_transaction + + def _handle_ok(self, packet: bytes) -> OkPacketType: + """Handle a MySQL OK packet + + This method handles a MySQL OK packet. When the packet is found to + be an Error packet, an error will be raised. If the packet is neither + an OK or an Error packet, InterfaceError will be raised. + + Returns a dict() + """ + if packet[4] == 0: + ok_pkt = self._protocol.parse_ok(packet) + self._handle_server_status(ok_pkt["status_flag"]) + return ok_pkt + if packet[4] == 255: + raise get_exception(packet) + raise InterfaceError("Expected OK packet") + + def _handle_eof(self, packet: bytes) -> EofPacketType: + """Handle a MySQL EOF packet + + This method handles a MySQL EOF packet. When the packet is found to + be an Error packet, an error will be raised. If the packet is neither + and OK or an Error packet, InterfaceError will be raised. + + Returns a dict() + """ + if packet[4] == 254: + eof = self._protocol.parse_eof(packet) + self._handle_server_status(eof["status_flag"]) + return eof + if packet[4] == 255: + raise get_exception(packet) + raise InterfaceError("Expected EOF packet") + + def _handle_load_data_infile(self, filename: str) -> OkPacketType: + """Handle a LOAD DATA INFILE LOCAL request""" + file_name = os.path.abspath(filename) + if os.path.islink(file_name): + raise OperationalError("Use of symbolic link is not allowed") + if not self._allow_local_infile and not self._allow_local_infile_in_path: + raise DatabaseError( + "LOAD DATA LOCAL INFILE file request rejected due to " + "restrictions on access." + ) + if not self._allow_local_infile and self._allow_local_infile_in_path: + # validate filename is inside of allow_local_infile_in_path path. + infile_path = os.path.abspath(self._allow_local_infile_in_path) + c_path = None + try: + c_path = os.path.commonpath([infile_path, file_name]) + except ValueError as err: + err_msg = ( + "{} while loading file `{}` and path `{}` given" + " in allow_local_infile_in_path" + ) + raise InterfaceError( + err_msg.format(str(err), file_name, infile_path) + ) from err + + if c_path != infile_path: + err_msg = ( + "The file `{}` is not found in the given " + "allow_local_infile_in_path {}" + ) + raise DatabaseError(err_msg.format(file_name, infile_path)) + + try: + data_file = open(file_name, "rb") # pylint: disable=consider-using-with + return self._handle_ok(self._send_data(data_file, send_empty_packet=True)) + except IOError: + # Send a empty packet to cancel the operation + try: + self._socket.send(b"") + except AttributeError as err: + raise OperationalError("MySQL Connection not available") from err + raise InterfaceError(f"File '{file_name}' could not be read") from None + finally: + try: + data_file.close() + except (IOError, NameError): + pass + + def _handle_result(self, packet: bytes) -> ResultType: + """Handle a MySQL Result + + This method handles a MySQL result, for example, after sending the + query command. OK and EOF packets will be handled and returned. If + the packet is an Error packet, an Error-exception will be + raised. 
+ + The dictionary returned of: + - columns: column information + - eof: the EOF-packet information + + Returns a dict() + """ + if not packet or len(packet) < 4: + raise InterfaceError("Empty response") + if packet[4] == 0: + return self._handle_ok(packet) + if packet[4] == 251: + filename = packet[5:].decode() + return self._handle_load_data_infile(filename) + if packet[4] == 254: + return self._handle_eof(packet) + if packet[4] == 255: + raise get_exception(packet) + + # We have a text result set + column_count = self._protocol.parse_column_count(packet) + if not column_count or not isinstance(column_count, int): + raise InterfaceError("Illegal result set") + + self._columns_desc = [ + None, + ] * column_count + for i in range(0, column_count): + self._columns_desc[i] = self._protocol.parse_column( + self._socket.recv(), self.python_charset + ) + + eof = self._handle_eof(self._socket.recv()) + self.unread_result = True + return {"columns": self._columns_desc, "eof": eof} + + def get_row( + self, + binary: bool = False, + columns: Optional[List[DescriptionType]] = None, + raw: Optional[bool] = None, + prep_stmt: Optional[CMySQLPrepStmt] = None, + ) -> Tuple[Optional[RowType], Optional[EofPacketType]]: + """Get the next rows returned by the MySQL server + + This method gets one row from the result set after sending, for + example, the query command. The result is a tuple consisting of the + row and the EOF packet. + If no row was available in the result set, the row data will be None. + + Returns a tuple. + """ + (rows, eof) = self.get_rows(count=1, binary=binary, columns=columns, raw=raw) + if rows: + return (rows[0], eof) + return (None, eof) + + def get_rows( + self, + count: Optional[int] = None, + binary: bool = False, + columns: Optional[List[DescriptionType]] = None, + raw: Optional[bool] = None, + prep_stmt: Optional[CMySQLPrepStmt] = None, + ) -> Tuple[List[RowType], Optional[EofPacketType]]: + """Get all rows returned by the MySQL server + + This method gets all rows returned by the MySQL server after sending, + for example, the query command. The result is a tuple consisting of + a list of rows and the EOF packet. + + Returns a tuple() + """ + if raw is None: + raw = self._raw + + if not self.unread_result: + raise InternalError("No result set available") + + rows: Tuple[List[Tuple], Optional[EofPacketType]] = ([], None) + try: + if binary: + charset = self.charset + if charset == "utf8mb4": + charset = "utf8" + rows = self._protocol.read_binary_result( + self._socket, columns, count, charset + ) + else: + rows = self._protocol.read_text_result( + self._socket, self._server_version, count=count + ) + except Error as err: + self.unread_result = False + raise err + + rows, eof_p = rows + if ( + not (binary or raw) + and self._columns_desc is not None + and rows + and hasattr(self, "converter") + ): + row_to_python = self.converter.row_to_python + rows = [row_to_python(row, self._columns_desc) for row in rows] + + if eof_p is not None: + self._handle_server_status( + eof_p["status_flag"] + if "status_flag" in eof_p + else eof_p["server_status"] + ) + self.unread_result = False + + return rows, eof_p + + def consume_results(self) -> None: + """Consume results""" + if self.unread_result: + self.get_rows() + + def cmd_init_db(self, database: str) -> OkPacketType: + """Change the current database + + This method changes the current (default) database by sending the + INIT_DB command. The result is a dictionary containing the OK packet + information. 
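As a usage sketch of the low-level pair described above, assuming a reachable server and placeholder credentials: `cmd_query()` returns either column metadata or an OK packet, and `get_rows()` then drains the pending result set.

```
import mysql.connector

cnx = mysql.connector.connect(
    host="db.example.test", user="app", password="secret", database="test"
)

result = cnx.cmd_query("SELECT 1 AS one, 'two' AS two")
if "columns" in result:
    rows, eof = cnx.get_rows()   # must be drained before the next command
    print(result["columns"])
    print(rows, eof)
else:
    print("OK packet:", result)  # e.g. for DML statements

cnx.close()
```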
+ + Returns a dict() + """ + return self._handle_ok( + self._send_cmd(ServerCmd.INIT_DB, database.encode("utf-8")) + ) + + @with_context_propagation + def cmd_query( + self, + query: StrOrBytes, + raw: bool = False, + buffered: bool = False, + raw_as_string: bool = False, + ) -> ResultType: + """Send a query to the MySQL server + + This method send the query to the MySQL server and returns the result. + + If there was a text result, a tuple will be returned consisting of + the number of columns and a list containing information about these + columns. + + When the query doesn't return a text result, the OK or EOF packet + information as dictionary will be returned. In case the result was + an error, exception Error will be raised. + + Returns a tuple() + """ + if not isinstance(query, bytearray): + if isinstance(query, str): + query = query.encode("utf-8") + query = bytearray(query) + # Prepare query attrs + charset = self.charset if self.charset != "utf8mb4" else "utf8" + packet = bytearray() + if not self._query_attrs_supported and self._query_attrs: + warnings.warn( + "This version of the server does not support Query Attributes", + category=Warning, + ) + if self._client_flags & ClientFlag.CLIENT_QUERY_ATTRIBUTES: + names = [] + types = [] + values: List[bytes] = [] + null_bitmap = [0] * ((len(self._query_attrs) + 7) // 8) + for pos, attr_tuple in enumerate(self._query_attrs.items()): + value = attr_tuple[1] + flags = 0 + if value is None: + null_bitmap[(pos // 8)] |= 1 << (pos % 8) + types.append(int1store(FieldType.NULL) + int1store(flags)) + continue + if isinstance(value, int): + ( + packed, + field_type, + flags, + ) = self._protocol.prepare_binary_integer(value) + values.append(packed) + elif isinstance(value, str): + value = value.encode(charset) + values.append(lc_int(len(value)) + value) + field_type = FieldType.STRING + elif isinstance(value, bytes): + values.append(lc_int(len(value)) + value) + field_type = FieldType.STRING + elif isinstance(value, Decimal): + values.append( + lc_int(len(str(value).encode(charset))) + + str(value).encode(charset) + ) + field_type = FieldType.DECIMAL + elif isinstance(value, float): + values.append(struct.pack(" parameter_count Number of parameters + packet.extend(lc_int(len(self._query_attrs))) + # int parameter_set_count Number of parameter sets. + # Currently always 1 + packet.extend(lc_int(1)) + if values: + packet.extend( + b"".join([struct.pack("B", bit) for bit in null_bitmap]) + + int1store(1) + ) + for _type, name in zip(types, names): + packet.extend(_type) + packet.extend(name) + + for value in values: + packet.extend(value) + + packet.extend(query) + query = bytes(packet) + try: + result = self._handle_result(self._send_cmd(ServerCmd.QUERY, query)) + except ProgrammingError as err: + if err.errno == 3948 and "Loading local data is disabled" in err.msg: + err_msg = ( + "LOAD DATA LOCAL INFILE file request rejected due " + "to restrictions on access." + ) + raise DatabaseError(err_msg) from err + raise + if self._have_next_result: + raise InterfaceError( + "Use cmd_query_iter for statements with multiple queries." + ) + + return result + + def cmd_query_iter( + self, statements: StrOrBytes + ) -> Generator[ResultType, None, None]: + """Send one or more statements to the MySQL server + + Similar to the cmd_query method, but instead returns a generator + object to iterate through results. It sends the statements to the + MySQL server and through the iterator you can get the results. 
+ + statement = 'SELECT 1; INSERT INTO t1 VALUES (); SELECT 2' + for result in cnx.cmd_query(statement, iterate=True): + if 'columns' in result: + columns = result['columns'] + rows = cnx.get_rows() + else: + # do something useful with INSERT result + + Returns a generator. + """ + packet = bytearray() + if not isinstance(statements, bytearray): + if isinstance(statements, str): + statements = statements.encode("utf8") + statements = bytearray(statements) + + if self._client_flags & ClientFlag.CLIENT_QUERY_ATTRIBUTES: + # int parameter_count Number of parameters + packet.extend(lc_int(0)) + # int parameter_set_count Number of parameter sets. + # Currently always 1 + packet.extend(lc_int(1)) + + packet.extend(statements) + query = bytes(packet) + # Handle the first query result + yield self._handle_result(self._send_cmd(ServerCmd.QUERY, query)) + + # Handle next results, if any + while self._have_next_result: + self.handle_unread_result() + yield self._handle_result(self._socket.recv()) + + def cmd_refresh(self, options: int) -> OkPacketType: + """Send the Refresh command to the MySQL server + + This method sends the Refresh command to the MySQL server. The options + argument should be a bitwise value using constants.RefreshOption. + Usage example: + RefreshOption = mysql.connector.RefreshOption + refresh = RefreshOption.LOG | RefreshOption.THREADS + cnx.cmd_refresh(refresh) + """ + if not options & ( + RefreshOption.GRANT + | RefreshOption.LOG + | RefreshOption.TABLES + | RefreshOption.HOST + | RefreshOption.STATUS + | RefreshOption.REPLICA + ): + raise ValueError("Invalid command REFRESH option") + + if options & RefreshOption.GRANT: + res = self.cmd_query("FLUSH PRIVILEGES") + if options & RefreshOption.LOG: + res = self.cmd_query("FLUSH LOGS") + if options & RefreshOption.TABLES: + res = self.cmd_query("FLUSH TABLES") + if options & RefreshOption.HOST: + res = self.cmd_query("TRUNCATE TABLE performance_schema.host_cache") + if options & RefreshOption.STATUS: + res = self.cmd_query("FLUSH STATUS") + if options & RefreshOption.REPLICA: + res = self.cmd_query( + "RESET SLAVE" if self._server_version < (8, 0, 22) else "RESET REPLICA" + ) + + return res + + def cmd_quit(self) -> bytes: + """Close the current connection with the server + + This method sends the `QUIT` command to the MySQL server, closing the + current connection. Since there is no response from the MySQL server, + the packet that was sent is returned. + + Returns a str() + """ + self.handle_unread_result() + + packet = self._protocol.make_command(ServerCmd.QUIT) + self._socket.send(packet, 0, 0) + return packet + + def cmd_shutdown(self, shutdown_type: Optional[int] = None) -> None: + """Shut down the MySQL Server + + This method sends the SHUTDOWN command to the MySQL server. + The `shutdown_type` is not used, and it's kept for backward compatibility. + """ + self.cmd_query("SHUTDOWN") + + def cmd_statistics(self) -> StatsPacketType: + """Send the statistics command to the MySQL Server + + This method sends the STATISTICS command to the MySQL server. The + result is a dictionary with various statistical information. + + Returns a dict() + """ + self.handle_unread_result() + + packet = self._protocol.make_command(ServerCmd.STATISTICS) + self._socket.send(packet, 0, 0) + return self._protocol.parse_statistics(self._socket.recv()) + + def cmd_process_kill(self, mysql_pid: int) -> OkPacketType: + """Kill a MySQL process + + This method send the PROCESS_KILL command to the server along with + the process ID. 
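A hedged usage sketch of `cmd_query_iter()`; it is only implemented on the pure-Python connection class (the C extension raises NotImplementedError for it, as seen later), so `use_pure=True` is forced here and the credentials are placeholders. Each text result set must be drained with `get_rows()` before advancing the generator.

```
import mysql.connector

cnx = mysql.connector.connect(
    host="db.example.test", user="app", password="secret",
    database="test", use_pure=True,
)

statements = "SELECT 1; SELECT 'two'; DO SLEEP(0)"
for result in cnx.cmd_query_iter(statements):
    if "columns" in result:
        rows, _ = cnx.get_rows()   # drain this result set
        print(rows)
    else:
        print("OK packet:", result)

cnx.close()
```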
The result is a dictionary with the OK packet + information. + """ + if not isinstance(mysql_pid, int): + raise ValueError("MySQL PID must be int") + return self.cmd_query(f"KILL {mysql_pid}") + + def cmd_debug(self) -> EofPacketType: + """Send the DEBUG command + + This method sends the DEBUG command to the MySQL server, which + requires the MySQL user to have SUPER privilege. The output will go + to the MySQL server error log and the result of this method is a + dictionary with EOF packet information. + + Returns a dict() + """ + return self._handle_eof(self._send_cmd(ServerCmd.DEBUG)) + + def cmd_ping(self) -> OkPacketType: + """Send the PING command + + This method sends the PING command to the MySQL server. It is used to + check if the the connection is still valid. The result of this + method is dictionary with OK packet information. + + Returns a dict() + """ + return self._handle_ok(self._send_cmd(ServerCmd.PING)) + + def cmd_change_user( + self, + username: str = "", + password: str = "", + database: str = "", + charset: int = 45, + password1: str = "", + password2: str = "", + password3: str = "", + oci_config_file: str = "", + oci_config_profile: str = "", + ) -> Optional[OkPacketType]: + """Change the current logged in user + + This method allows to change the current logged in user information. + The result is a dictionary with OK packet information. + + Returns a dict() + """ + if not isinstance(charset, int): + raise ValueError("charset must be an integer") + if charset < 0: + raise ValueError("charset should be either zero or a postive integer") + + self._mfa_nfactor = 1 + self._user = username + self._password = password + self._password1 = password1 + self._password2 = password2 + self._password3 = password3 + + if self._password1 and password != self._password1: + password = self._password1 + + self.handle_unread_result() + + if self._compress: + raise NotSupportedError("Change user is not supported with compression") + + if oci_config_file: + self._oci_config_file = oci_config_file + + self._oci_config_profile = oci_config_profile + + ok_pkt = self._authenticator.authenticate( + sock=self._socket, + handshake=self._handshake, + username=self._user, + password1=password, + password2=self._password2, + password3=self._password3, + database=database, + charset=charset, + client_flags=self._client_flags, + auth_plugin=self._auth_plugin, + auth_plugin_class=self._auth_plugin_class, + conn_attrs=self._conn_attrs, + is_change_user_request=True, + krb_service_principal=self._krb_service_principal, + oci_config_file=self._oci_config_file, + oci_config_profile=self._oci_config_profile, + webauthn_callback=self._webauthn_callback, + fido_callback=self._fido_callback, + ) + + if not (self._client_flags & ClientFlag.CONNECT_WITH_DB) and database: + self.cmd_init_db(database) + + self._charset_id = charset + self._post_connection() + + # return ok_pkt + return self._handle_ok(ok_pkt) + + @property + def database(self) -> str: + """Get the current database""" + return self.info_query("SELECT DATABASE()")[0] # type: ignore[return-value] + + @database.setter + def database(self, value: str) -> None: + """Set the current database""" + self.cmd_init_db(value) + + def is_connected(self) -> bool: + """Reports whether the connection to MySQL Server is available + + This method checks whether the connection to MySQL is available. + It is similar to ping(), but unlike the ping()-method, either True + or False is returned and no exception is raised. + + Returns True or False. 
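`cmd_process_kill()` above is a thin wrapper around `KILL <id>`. A hedged sketch that terminates one session from another; hosts and credentials are placeholders, and KILL requires the appropriate privilege:

```
import mysql.connector

admin = mysql.connector.connect(host="db.example.test", user="admin", password="secret")
worker = mysql.connector.connect(host="db.example.test", user="app", password="secret")

print("worker thread id:", worker.connection_id)
admin.cmd_process_kill(worker.connection_id)

# The killed session now fails its liveness check.
print(worker.is_connected())   # False
admin.close()
```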
+ """ + try: + self.cmd_ping() + except Error: + return False # This method does not raise + return True + + def set_allow_local_infile_in_path(self, path: str) -> None: + """Set the path that user can upload files. + + Args: + path (str): Path that user can upload files. + """ + self._allow_local_infile_in_path = path + + def reset_session( + self, + user_variables: Optional[Dict[str, Any]] = None, + session_variables: Optional[Dict[str, Any]] = None, + ) -> None: + """Clears the current active session + + This method resets the session state, if the MySQL server is 5.7.3 + or later active session will be reset without re-authenticating. + For other server versions session will be reset by re-authenticating. + + It is possible to provide a sequence of variables and their values to + be set after clearing the session. This is possible for both user + defined variables and session variables. + This method takes two arguments user_variables and session_variables + which are dictionaries. + + Raises OperationalError if not connected, InternalError if there are + unread results and InterfaceError on errors. + """ + if not self.is_connected(): + raise OperationalError("MySQL Connection not available.") + + if not self.cmd_reset_connection(): + try: + self.cmd_change_user( + self._user, + self._password, + self._database, + self._charset_id, + self._password1, + self._password2, + self._password3, + self._oci_config_file, + self._oci_config_profile, + ) + except ProgrammingError: + self.reconnect() + + cur = self.cursor() + if user_variables: + for key, value in user_variables.items(): + cur.execute(f"SET @`{key}` = %s", (value,)) + if session_variables: + for key, value in session_variables.items(): + cur.execute(f"SET SESSION `{key}` = %s", (value,)) + + def ping(self, reconnect: bool = False, attempts: int = 1, delay: int = 0) -> None: + """Check availability of the MySQL server + + When reconnect is set to True, one or more attempts are made to try + to reconnect to the MySQL server using the reconnect()-method. + + delay is the number of seconds to wait between each retry. + + When the connection is not available, an InterfaceError is raised. Use + the is_connected()-method if you just want to check the connection + without raising an error. + + Raises InterfaceError on errors. + """ + try: + self.cmd_ping() + except Error as err: + if reconnect: + self.reconnect(attempts=attempts, delay=delay) + else: + raise InterfaceError("Connection to MySQL is not available") from err + + @property + def connection_id(self) -> Optional[int]: + """MySQL connection ID""" + if self._handshake: + return self._handshake.get("server_threadid") # type: ignore[return-value] + return None + + def cursor( + self, + buffered: Optional[bool] = None, + raw: Optional[bool] = None, + prepared: Optional[bool] = None, + cursor_class: Optional[Type[MySQLCursor]] = None, # type: ignore[override] + dictionary: Optional[bool] = None, + named_tuple: Optional[bool] = None, + ) -> MySQLCursor: + """Instantiates and returns a cursor + + By default, MySQLCursor is returned. Depending on the options + while connecting, a buffered and/or raw cursor is instantiated + instead. Also depending upon the cursor options, rows can be + returned as dictionary or named tuple. + + Dictionary and namedtuple based cursors are available with buffered + output but not raw. + + It is possible to also give a custom cursor through the + cursor_class parameter, but it needs to be a subclass of + mysql.connector.cursor.CursorBase. 
+ + Raises ProgrammingError when cursor_class is not a subclass of + CursorBase. Raises ValueError when cursor is not available. + + Returns a cursor-object + """ + self.handle_unread_result() + + if not self.is_connected(): + raise OperationalError("MySQL Connection not available") + if cursor_class is not None: + if not issubclass(cursor_class, CursorBase): + raise ProgrammingError( + "Cursor class needs be to subclass of cursor.CursorBase" + ) + return (cursor_class)(self) + + buffered = buffered if buffered is not None else self._buffered + raw = raw if raw is not None else self._raw + + cursor_type = 0 + if buffered is True: + cursor_type |= 1 + if raw is True: + cursor_type |= 2 + if dictionary is True: + cursor_type |= 4 + if named_tuple is True: + cursor_type |= 8 + if prepared is True: + cursor_type |= 16 + + types = { + 0: MySQLCursor, # 0 + 1: MySQLCursorBuffered, + 2: MySQLCursorRaw, + 3: MySQLCursorBufferedRaw, + 4: MySQLCursorDict, + 5: MySQLCursorBufferedDict, + 8: MySQLCursorNamedTuple, + 9: MySQLCursorBufferedNamedTuple, + 16: MySQLCursorPrepared, + 18: MySQLCursorPreparedRaw, + 20: MySQLCursorPreparedDict, + 24: MySQLCursorPreparedNamedTuple, + } + try: + return (types[cursor_type])(self) + except KeyError: + args = ("buffered", "raw", "dictionary", "named_tuple", "prepared") + raise ValueError( + "Cursor not available with given criteria: " + + ", ".join([args[i] for i in range(5) if cursor_type & (1 << i) != 0]) + ) from None + + def commit(self) -> None: + """Commit current transaction""" + self._execute_query("COMMIT") + + def rollback(self) -> None: + """Rollback current transaction""" + if self.unread_result: + self.get_rows() + + self._execute_query("ROLLBACK") + + def _execute_query(self, query: str) -> None: + """Execute a query + + This method simply calls cmd_query() after checking for unread + result. If there are still unread result, an InterfaceError + is raised. Otherwise whatever cmd_query() returns is returned. + + Returns a dict() + """ + self.handle_unread_result() + self.cmd_query(query) + + def info_query(self, query: str) -> Optional[RowType]: + """Send a query which only returns 1 row""" + cursor = self.cursor(buffered=True) + cursor.execute(query) + return cursor.fetchone() + + def _handle_binary_ok(self, packet: bytes) -> Dict[str, int]: + """Handle a MySQL Binary Protocol OK packet + + This method handles a MySQL Binary Protocol OK packet. When the + packet is found to be an Error packet, an error will be raised. If + the packet is neither an OK or an Error packet, InterfaceError + will be raised. + + Returns a dict() + """ + if packet[4] == 0: + return self._protocol.parse_binary_prepare_ok(packet) + if packet[4] == 255: + raise get_exception(packet) + raise InterfaceError("Expected Binary OK packet") + + def _handle_binary_result( + self, packet: bytes + ) -> Union[OkPacketType, Tuple[int, List[DescriptionType], EofPacketType]]: + """Handle a MySQL Result + + This method handles a MySQL result, for example, after sending the + query command. OK and EOF packets will be handled and returned. If + the packet is an Error packet, an Error exception will be raised. + + The tuple returned by this method consist of: + - the number of columns in the result, + - a list of tuples with information about the columns, + - the EOF packet information as a dictionary. 
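The bitmask above maps cursor options onto concrete cursor classes; combinations missing from the table (such as raw dictionary cursors) raise `ValueError`. A hedged sketch with placeholder credentials:

```
import mysql.connector

cnx = mysql.connector.connect(
    host="db.example.test", user="app", password="secret", database="test"
)

cur = cnx.cursor(buffered=True, dictionary=True)   # 1 | 4 -> MySQLCursorBufferedDict
cur.execute("SELECT 1 AS answer")
print(cur.fetchall())                              # [{'answer': 1}]
cur.close()

try:
    cnx.cursor(raw=True, dictionary=True)          # 2 | 4 = 6: not in the table
except ValueError as err:
    print(err)

cnx.close()
```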
+ + Returns tuple() or dict() + """ + if not packet or len(packet) < 4: + raise InterfaceError("Empty response") + if packet[4] == 0: + return self._handle_ok(packet) + if packet[4] == 254: + return self._handle_eof(packet) + if packet[4] == 255: + raise get_exception(packet) + + # We have a binary result set + column_count = self._protocol.parse_column_count(packet) + if not column_count or not isinstance(column_count, int): + raise InterfaceError("Illegal result set.") + + columns: List[DescriptionType] = [None] * column_count + for i in range(0, column_count): + columns[i] = self._protocol.parse_column( + self._socket.recv(), self.python_charset + ) + + eof = self._handle_eof(self._socket.recv()) + return (column_count, columns, eof) + + def cmd_stmt_fetch(self, statement_id: int, rows: int = 1) -> None: + """Fetch a MySQL statement Result Set + + This method will send the FETCH command to MySQL together with the + given statement id and the number of rows to fetch. + """ + packet = self._protocol.make_stmt_fetch(statement_id, rows) + self.unread_result = False + self._send_cmd(ServerCmd.STMT_FETCH, packet, expect_response=False) + self.unread_result = True + + def cmd_stmt_prepare( + self, statement: bytes + ) -> Mapping[str, Union[int, List[DescriptionType]]]: + """Prepare a MySQL statement + + This method will send the PREPARE command to MySQL together with the + given statement. + + Returns a dict() + """ + packet = self._send_cmd(ServerCmd.STMT_PREPARE, statement) + result = self._handle_binary_ok(packet) + + result["columns"] = [] + result["parameters"] = [] + if result["num_params"] > 0: + for _ in range(0, result["num_params"]): + result["parameters"].append( + self._protocol.parse_column( + self._socket.recv(), self.python_charset + ) + ) + self._handle_eof(self._socket.recv()) + if result["num_columns"] > 0: + for _ in range(0, result["num_columns"]): + result["columns"].append( + self._protocol.parse_column( + self._socket.recv(), self.python_charset + ) + ) + self._handle_eof(self._socket.recv()) + + return result + + @with_context_propagation + def cmd_stmt_execute( + self, + statement_id: int, + data: Sequence[BinaryProtocolType] = (), + parameters: Sequence = (), + flags: int = 0, + ) -> Union[OkPacketType, Tuple[int, List[DescriptionType], EofPacketType]]: + """Execute a prepared MySQL statement""" + parameters = list(parameters) + long_data_used = {} + + if data: + for param_id, _ in enumerate(parameters): + if isinstance(data[param_id], IOBase): + binary = True + try: + binary = "b" not in data[param_id].mode # type: ignore[union-attr] + except AttributeError: + pass + self.cmd_stmt_send_long_data(statement_id, param_id, data[param_id]) + long_data_used[param_id] = (binary,) + if not self._query_attrs_supported and self._query_attrs: + warnings.warn( + "This version of the server does not support Query Attributes", + category=Warning, + ) + if self._client_flags & ClientFlag.CLIENT_QUERY_ATTRIBUTES: + execute_packet = self._protocol.make_stmt_execute( + statement_id, + data, + tuple(parameters), + flags, + long_data_used, + self.charset, + self.query_attrs, + self._converter_str_fallback, + ) + else: + execute_packet = self._protocol.make_stmt_execute( + statement_id, + data, + tuple(parameters), + flags, + long_data_used, + self.charset, + converter_str_fallback=self._converter_str_fallback, + ) + packet = self._send_cmd(ServerCmd.STMT_EXECUTE, packet=execute_packet) + result = self._handle_binary_result(packet) + return result + + def cmd_stmt_close(self, 
statement_id: int) -> None: # type: ignore[override] + """Deallocate a prepared MySQL statement + + This method deallocates the prepared statement using the + statement_id. Note that the MySQL server does not return + anything. + """ + self._send_cmd( + ServerCmd.STMT_CLOSE, + int4store(statement_id), + expect_response=False, + ) + + def cmd_stmt_send_long_data( + self, statement_id: int, param_id: int, data: BinaryIO # type: ignore[override] + ) -> int: + """Send data for a column + + This methods send data for a column (for example BLOB) for statement + identified by statement_id. The param_id indicate which parameter + the data belongs too. + The data argument should be a file-like object. + + Since MySQL does not send anything back, no error is raised. When + the MySQL server is not reachable, an OperationalError is raised. + + cmd_stmt_send_long_data should be called before cmd_stmt_execute. + + The total bytes send is returned. + + Returns int. + """ + chunk_size = 131072 # 128 KB + total_sent = 0 + try: + buf = data.read(chunk_size) + while buf: + packet = self._protocol.prepare_stmt_send_long_data( + statement_id, param_id, buf + ) + self._send_cmd( + ServerCmd.STMT_SEND_LONG_DATA, + packet=packet, + expect_response=False, + ) + total_sent += len(buf) + buf = data.read(chunk_size) + except AttributeError as err: + raise OperationalError("MySQL Connection not available") from err + + return total_sent + + def cmd_stmt_reset(self, statement_id: int) -> None: # type: ignore[override] + """Reset data for prepared statement sent as long data + + The result is a dictionary with OK packet information. + + Returns a dict() + """ + self._handle_ok(self._send_cmd(ServerCmd.STMT_RESET, int4store(statement_id))) + + def cmd_reset_connection(self) -> bool: + """Resets the session state without re-authenticating + + Reset command only works on MySQL server 5.7.3 or later. + The result is True for a successful reset otherwise False. + + Returns bool + """ + try: + self._handle_ok(self._send_cmd(ServerCmd.RESET_CONNECTION)) + self._post_connection() + return True + except (NotSupportedError, OperationalError): + return False + + def handle_unread_result(self) -> None: + """Check whether there is an unread result""" + if self.can_consume_results: + self.consume_results() + elif self.unread_result: + raise InternalError("Unread result found") diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/connection_cext.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/connection_cext.py new file mode 100644 index 0000000..3fdd0ac --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/connection_cext.py @@ -0,0 +1,1039 @@ +# Copyright (c) 2014, 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. 
+# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +# mypy: disable-error-code="arg-type" + +"""Connection class using the C Extension.""" + +import os +import platform +import socket +import sys + +from typing import ( + Any, + BinaryIO, + Dict, + List, + NoReturn, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +from . import version +from .abstracts import CMySQLPrepStmt, MySQLConnectionAbstract +from .constants import ClientFlag, FieldFlag, ServerFlag, ShutdownType +from .conversion import MySQLConverter +from .errors import ( + InterfaceError, + InternalError, + OperationalError, + ProgrammingError, + get_mysql_exception, +) +from .protocol import MySQLProtocol +from .types import ( + CextEofPacketType, + CextResultType, + DescriptionType, + ParamsSequenceOrDictType, + RowType, + StatsPacketType, + StrOrBytes, +) +from .utils import import_object + +HAVE_CMYSQL = False + +try: + import _mysql_connector + + from _mysql_connector import MySQLInterfaceError + + from .cursor_cext import ( + CMySQLCursor, + CMySQLCursorBuffered, + CMySQLCursorBufferedDict, + CMySQLCursorBufferedNamedTuple, + CMySQLCursorBufferedRaw, + CMySQLCursorDict, + CMySQLCursorNamedTuple, + CMySQLCursorPrepared, + CMySQLCursorPreparedDict, + CMySQLCursorPreparedNamedTuple, + CMySQLCursorPreparedRaw, + CMySQLCursorRaw, + ) + + HAVE_CMYSQL = True +except ImportError as exc: + raise ImportError( + f"MySQL Connector/Python C Extension not available ({exc})" + ) from exc + +from .opentelemetry.constants import OTEL_ENABLED +from .opentelemetry.context_propagation import with_context_propagation + +if OTEL_ENABLED: + from .opentelemetry.instrumentation import end_span, record_exception_event + + +class CMySQLConnection(MySQLConnectionAbstract): + """Class initiating a MySQL Connection using Connector/C.""" + + def __init__(self, **kwargs: Any) -> None: + """Initialization""" + if not HAVE_CMYSQL: + raise RuntimeError("MySQL Connector/Python C Extension not available") + self._cmysql: Optional[ + _mysql_connector.MySQL # pylint: disable=c-extension-no-member + ] = None + self._columns: List[DescriptionType] = [] + self._plugin_dir: str = os.path.join( + os.path.dirname(os.path.abspath(_mysql_connector.__file__)), + "mysql", + "vendor", + "plugin", + ) + if platform.system() == "Linux": + # Use the authentication plugins from system if they aren't bundled + if not os.path.exists(self._plugin_dir): + self._plugin_dir = ( + "/usr/lib64/mysql/plugin" + if os.path.exists("/usr/lib64/mysql/plugin") + else "/usr/lib/mysql/plugin" + ) + + self.converter: Optional[MySQLConverter] = None + super().__init__() + + if kwargs: + try: + self.connect(**kwargs) + except Exception: + self.close() + raise + + def _add_default_conn_attrs(self) -> None: + """Add default connection attributes""" + license_chunks = version.LICENSE.split(" ") + if 
license_chunks[0] == "GPLv2": + client_license = "GPL-2.0" + else: + client_license = "Commercial" + + self._conn_attrs.update( + { + "_connector_name": "mysql-connector-python", + "_connector_license": client_license, + "_connector_version": ".".join([str(x) for x in version.VERSION[0:3]]), + "_source_host": socket.gethostname(), + } + ) + + def _do_handshake(self) -> None: + """Gather information of the MySQL server before authentication""" + self._handshake = { + "protocol": self._cmysql.get_proto_info(), + "server_version_original": self._cmysql.get_server_info(), + "server_threadid": self._cmysql.thread_id(), + "charset": None, + "server_status": None, + "auth_plugin": None, + "auth_data": None, + "capabilities": self._cmysql.st_server_capabilities(), + } + + self._server_version = self._check_server_version( + self._handshake["server_version_original"] + ) + self._character_set.set_mysql_version(self._server_version) + + @property + def _server_status(self) -> int: + """Returns the server status attribute of MYSQL structure""" + return self._cmysql.st_server_status() + + def set_allow_local_infile_in_path(self, path: str) -> None: + """set local_infile_in_path + + Set allow_local_infile_in_path. + """ + + if self._cmysql: + self._cmysql.set_load_data_local_infile_option(path) + + def set_unicode(self, value: bool = True) -> None: + """Toggle unicode mode + + Set whether we return string fields as unicode or not. + Default is True. + """ + self._use_unicode = value + if self._cmysql: + self._cmysql.use_unicode(value) + if self.converter: + self.converter.set_unicode(value) + + @property + def autocommit(self) -> bool: + """Get whether autocommit is on or off""" + value = self.info_query("SELECT @@session.autocommit")[0] + return value == 1 + + @autocommit.setter + def autocommit(self, value: bool) -> None: + """Toggle autocommit""" + try: + self._cmysql.autocommit(value) + self._autocommit = value + except MySQLInterfaceError as err: + raise get_mysql_exception( + msg=err.msg, errno=err.errno, sqlstate=err.sqlstate + ) from err + + @property + def database(self) -> str: + """Get the current database""" + return self.info_query("SELECT DATABASE()")[0] # type: ignore[return-value] + + @database.setter + def database(self, value: str) -> None: + """Set the current database""" + try: + self._cmysql.select_db(value) + except MySQLInterfaceError as err: + raise get_mysql_exception( + msg=err.msg, errno=err.errno, sqlstate=err.sqlstate + ) from err + + @property + def in_transaction(self) -> bool: + """MySQL session has started a transaction""" + return bool(self._server_status & ServerFlag.STATUS_IN_TRANS) + + def _open_connection(self) -> None: + charset_name = self._character_set.get_info(self._charset_id)[0] + # pylint: disable=c-extension-no-member + self._cmysql = _mysql_connector.MySQL( + buffered=self._buffered, + raw=self._raw, + charset_name=charset_name, + connection_timeout=(self._connection_timeout or 0), + use_unicode=self._use_unicode, + auth_plugin=self._auth_plugin, + plugin_dir=self._plugin_dir, + ) + # pylint: enable=c-extension-no-member + if not self.isset_client_flag(ClientFlag.CONNECT_ARGS): + self._conn_attrs = {} + fido_callback = self._webauthn_callback or self._fido_callback + cnx_kwargs = { + "host": self._host, + "user": self._user, + "password": self._password, + "password1": self._password1, + "password2": self._password2, + "password3": self._password3, + "database": self._database, + "port": self._port, + "client_flags": self._client_flags, + "unix_socket": 
self._unix_socket, + "compress": self._compress, + "ssl_disabled": True, + "conn_attrs": self._conn_attrs, + "local_infile": self._allow_local_infile, + "load_data_local_dir": self._allow_local_infile_in_path, + "oci_config_file": self._oci_config_file, + "oci_config_profile": self._oci_config_profile, + "fido_callback": ( + import_object(fido_callback) + if isinstance(fido_callback, str) + else fido_callback + ), + } + + tls_versions = self._ssl.get("tls_versions") + if tls_versions is not None: + tls_versions.sort(reverse=True) # type: ignore[union-attr] + tls_versions = ",".join(tls_versions) + if self._ssl.get("tls_ciphersuites") is not None: + ssl_ciphersuites = self._ssl.get("tls_ciphersuites")[ # type: ignore[index] + 0 + ] + tls_ciphersuites = self._ssl.get("tls_ciphersuites")[ # type: ignore[index] + 1 + ] + else: + ssl_ciphersuites = None + tls_ciphersuites = None + if ( + tls_versions is not None + and "TLSv1.3" in tls_versions + and not tls_ciphersuites + ): + tls_ciphersuites = "TLS_AES_256_GCM_SHA384" + if not self._ssl_disabled: + cnx_kwargs.update( + { + "ssl_ca": self._ssl.get("ca"), + "ssl_cert": self._ssl.get("cert"), + "ssl_key": self._ssl.get("key"), + "ssl_cipher_suites": ssl_ciphersuites, + "tls_versions": tls_versions, + "tls_cipher_suites": tls_ciphersuites, + "ssl_verify_cert": self._ssl.get("verify_cert") or False, + "ssl_verify_identity": self._ssl.get("verify_identity") or False, + "ssl_disabled": self._ssl_disabled, + } + ) + + if os.name == "nt" and self._auth_plugin_class == "MySQLKerberosAuthPlugin": + cnx_kwargs["use_kerberos_gssapi"] = True + + try: + self._cmysql.connect(**cnx_kwargs) + self._cmysql.converter_str_fallback = self._converter_str_fallback + if self.converter: + self.converter.str_fallback = self._converter_str_fallback + except MySQLInterfaceError as err: + raise get_mysql_exception( + msg=err.msg, errno=err.errno, sqlstate=err.sqlstate + ) from err + + self._do_handshake() + + def close(self) -> None: + """Disconnect from the MySQL server""" + if self._span and self._span.is_recording(): + record_exception_event(self._span, sys.exc_info()[1]) + + if not self._cmysql: + return + + try: + self.free_result() + self._cmysql.close() + except MySQLInterfaceError as err: + if OTEL_ENABLED: + record_exception_event(self._span, err) + raise get_mysql_exception( + msg=err.msg, errno=err.errno, sqlstate=err.sqlstate + ) from err + finally: + if OTEL_ENABLED: + end_span(self._span) + + disconnect = close + + def is_closed(self) -> bool: + """Return True if the connection to MySQL Server is closed.""" + return not self._cmysql.connected() + + def is_connected(self) -> bool: + """Reports whether the connection to MySQL Server is available""" + if self._cmysql: + self.handle_unread_result() + return self._cmysql.ping() + + return False + + def ping(self, reconnect: bool = False, attempts: int = 1, delay: int = 0) -> None: + """Check availability of the MySQL server + + When reconnect is set to True, one or more attempts are made to try + to reconnect to the MySQL server using the reconnect()-method. + + delay is the number of seconds to wait between each retry. + + When the connection is not available, an InterfaceError is raised. Use + the is_connected()-method if you just want to check the connection + without raising an error. + + Raises InterfaceError on errors. 
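The TLS handling above is driven entirely by `connect()` arguments. A hedged sketch with a placeholder host and certificate paths, pinning the accepted protocol versions and enabling full certificate verification:

```
import mysql.connector

cnx = mysql.connector.connect(
    host="db.example.test",
    user="app",
    password="secret",
    ssl_ca="/etc/ssl/certs/ca.pem",
    ssl_cert="/etc/ssl/certs/client-cert.pem",
    ssl_key="/etc/ssl/private/client-key.pem",
    ssl_verify_cert=True,
    ssl_verify_identity=True,
    tls_versions=["TLSv1.3", "TLSv1.2"],
)
print(cnx.is_connected())
cnx.close()
```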
+ """ + self.handle_unread_result() + + try: + connected = self._cmysql.ping() + except AttributeError: + pass # Raise or reconnect later + else: + if connected: + return + + if reconnect: + self.reconnect(attempts=attempts, delay=delay) + else: + raise InterfaceError("Connection to MySQL is not available") + + def set_character_set_name(self, charset: str) -> None: + """Sets the default character set name for current connection.""" + self._cmysql.set_character_set(charset) + + def info_query(self, query: StrOrBytes) -> Optional[RowType]: + """Send a query which only returns 1 row""" + first_row = () + try: + self._cmysql.query(query) + if self._cmysql.have_result_set: + first_row = self._cmysql.fetch_row() + if self._cmysql.fetch_row(): + self._cmysql.free_result() + raise InterfaceError("Query should not return more than 1 row") + self._cmysql.free_result() + except MySQLInterfaceError as err: + raise get_mysql_exception( + msg=err.msg, errno=err.errno, sqlstate=err.sqlstate + ) from err + + return first_row + + @property + def connection_id(self) -> Optional[int]: + """MySQL connection ID""" + try: + return self._cmysql.thread_id() + except MySQLInterfaceError: + pass # Just return None + + return None + + def get_rows( + self, + count: Optional[int] = None, + binary: bool = False, + columns: Optional[List[DescriptionType]] = None, + raw: Optional[bool] = None, + prep_stmt: Optional[CMySQLPrepStmt] = None, + ) -> Tuple[List[RowType], Optional[CextEofPacketType]]: + """Get all or a subset of rows returned by the MySQL server""" + unread_result = prep_stmt.have_result_set if prep_stmt else self.unread_result + if not (self._cmysql and unread_result): + raise InternalError("No result set available") + + if raw is None: + raw = self._raw + + rows: List[Tuple] = [] + if count is not None and count <= 0: + raise AttributeError("count should be 1 or higher, or None") + + counter = 0 + try: + fetch_row = prep_stmt.fetch_row if prep_stmt else self._cmysql.fetch_row + if self.converter: + # When using a converter class, the C extension should not + # convert the values. This can be accomplished by setting + # the raw option to True. 
+ self._cmysql.raw(True) + row = fetch_row() + while row: + if not self._raw and self.converter: + row = list(row) + for i, _ in enumerate(row): + if not raw: + row[i] = self.converter.to_python(self._columns[i], row[i]) + row = tuple(row) + rows.append(row) + counter += 1 + if count and counter == count: + break + row = fetch_row() + if not row: + _eof: Optional[CextEofPacketType] = self.fetch_eof_columns(prep_stmt)[ + "eof" + ] # type: ignore[assignment] + if prep_stmt: + prep_stmt.free_result() + self._unread_result = False + else: + self.free_result() + else: + _eof = None + except MySQLInterfaceError as err: + if prep_stmt: + prep_stmt.free_result() + raise InterfaceError(str(err)) from err + self.free_result() + raise get_mysql_exception( + msg=err.msg, errno=err.errno, sqlstate=err.sqlstate + ) from err + + return rows, _eof + + def get_row( + self, + binary: bool = False, + columns: Optional[List[DescriptionType]] = None, + raw: Optional[bool] = None, + prep_stmt: Optional[CMySQLPrepStmt] = None, + ) -> Tuple[Optional[RowType], Optional[CextEofPacketType]]: + """Get the next rows returned by the MySQL server""" + try: + rows, eof = self.get_rows( + count=1, + binary=binary, + columns=columns, + raw=raw, + prep_stmt=prep_stmt, + ) + if rows: + return (rows[0], eof) + return (None, eof) + except IndexError: + # No row available + return (None, None) + + def next_result(self) -> Optional[bool]: + """Reads the next result""" + if self._cmysql: + self._cmysql.consume_result() + return self._cmysql.next_result() + return None + + def free_result(self) -> None: + """Frees the result""" + if self._cmysql: + self._cmysql.free_result() + + def commit(self) -> None: + """Commit current transaction""" + if self._cmysql: + self.handle_unread_result() + self._cmysql.commit() + + def rollback(self) -> None: + """Rollback current transaction""" + if self._cmysql: + self._cmysql.consume_result() + self._cmysql.rollback() + + def cmd_init_db(self, database: str) -> None: + """Change the current database""" + try: + self._cmysql.select_db(database) + except MySQLInterfaceError as err: + raise get_mysql_exception( + msg=err.msg, errno=err.errno, sqlstate=err.sqlstate + ) from err + + def fetch_eof_columns( + self, prep_stmt: Optional[CMySQLPrepStmt] = None + ) -> CextResultType: + """Fetch EOF and column information""" + have_result_set = ( + prep_stmt.have_result_set if prep_stmt else self._cmysql.have_result_set + ) + if not have_result_set: + raise InterfaceError("No result set") + + fields = prep_stmt.fetch_fields() if prep_stmt else self._cmysql.fetch_fields() + self._columns = [] + for col in fields: + self._columns.append( + ( + col[4], + int(col[8]), + None, + None, + None, + None, + ~int(col[9]) & FieldFlag.NOT_NULL, + int(col[9]), + int(col[6]), + ) + ) + + return { + "eof": { + "status_flag": self._server_status, + "warning_count": self._cmysql.st_warning_count(), + }, + "columns": self._columns, + } + + def fetch_eof_status(self) -> Optional[CextEofPacketType]: + """Fetch EOF and status information""" + if self._cmysql: + return { + "warning_count": self._cmysql.st_warning_count(), + "field_count": self._cmysql.st_field_count(), + "insert_id": self._cmysql.insert_id(), + "affected_rows": self._cmysql.affected_rows(), + "server_status": self._server_status, + } + + return None + + def cmd_stmt_prepare(self, statement: bytes) -> CMySQLPrepStmt: + """Prepares the SQL statement""" + if not self._cmysql: + raise OperationalError("MySQL Connection not available") + + try: + stmt = 
self._cmysql.stmt_prepare(statement) + stmt.converter_str_fallback = self._converter_str_fallback + return CMySQLPrepStmt(stmt) + except MySQLInterfaceError as err: + raise InterfaceError(str(err)) from err + + @with_context_propagation + def cmd_stmt_execute( + self, statement_id: CMySQLPrepStmt, *args: Any + ) -> Optional[Union[CextEofPacketType, CextResultType]]: + """Executes the prepared statement""" + try: + statement_id.stmt_execute(*args, query_attrs=self.query_attrs) + except MySQLInterfaceError as err: + raise InterfaceError(str(err)) from err + + self._columns = [] + if not statement_id.have_result_set: + # No result + self._unread_result = False + return self.fetch_eof_status() + + self._unread_result = True + return self.fetch_eof_columns(statement_id) + + def cmd_stmt_close( + self, + statement_id: CMySQLPrepStmt, # type: ignore[override] + ) -> None: + """Closes the prepared statement""" + if self._unread_result: + raise InternalError("Unread result found") + statement_id.stmt_close() + + def cmd_stmt_reset( + self, + statement_id: CMySQLPrepStmt, # type: ignore[override] + ) -> None: + """Resets the prepared statement""" + if self._unread_result: + raise InternalError("Unread result found") + statement_id.stmt_reset() + + @with_context_propagation + def cmd_query( + self, + query: StrOrBytes, + raw: Optional[bool] = None, + buffered: bool = False, + raw_as_string: bool = False, + ) -> Optional[Union[CextEofPacketType, CextResultType]]: + """Send a query to the MySQL server""" + self.handle_unread_result() + if raw is None: + raw = self._raw + try: + if not isinstance(query, bytes): + query = query.encode("utf-8") + self._cmysql.query( + query, + raw=raw, + buffered=buffered, + raw_as_string=raw_as_string, + query_attrs=self.query_attrs, + ) + except MySQLInterfaceError as err: + raise get_mysql_exception( + err.errno, msg=err.msg, sqlstate=err.sqlstate + ) from err + except AttributeError as err: + addr = ( + self._unix_socket if self._unix_socket else f"{self._host}:{self._port}" + ) + raise OperationalError( + errno=2055, values=(addr, "Connection not available.") + ) from err + + self._columns = [] + if not self._cmysql.have_result_set: + # No result + return self.fetch_eof_status() + + return self.fetch_eof_columns() + + _execute_query = cmd_query + + def cursor( + self, + buffered: Optional[bool] = None, + raw: Optional[bool] = None, + prepared: Optional[bool] = None, + cursor_class: Optional[Type[CMySQLCursor]] = None, # type: ignore[override] + dictionary: Optional[bool] = None, + named_tuple: Optional[bool] = None, + ) -> CMySQLCursor: + """Instantiates and returns a cursor using C Extension + + By default, CMySQLCursor is returned. Depending on the options + while connecting, a buffered and/or raw cursor is instantiated + instead. Also depending upon the cursor options, rows can be + returned as dictionary or named tuple. + + Dictionary and namedtuple based cursors are available with buffered + output but not raw. + + It is possible to also give a custom cursor through the + cursor_class parameter, but it needs to be a subclass of + mysql.connector.cursor_cext.CMySQLCursor. + + Raises ProgrammingError when cursor_class is not a subclass of + CursorBase. Raises ValueError when cursor is not available. + + Returns instance of CMySQLCursor or subclass. 
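The `cmd_stmt_prepare()`/`cmd_stmt_execute()` plumbing above is normally reached through a prepared cursor. A hedged usage sketch with placeholder credentials; the statement is prepared on the first `execute()` and re-executed with fresh parameters afterwards:

```
import mysql.connector

cnx = mysql.connector.connect(
    host="db.example.test", user="app", password="secret", database="test"
)
cur = cnx.cursor(prepared=True)

cur.execute("SELECT %s + %s", (2, 3))
print(cur.fetchone())    # e.g. (5,)

cur.execute("SELECT %s + %s", (10, 20))   # same statement, new parameters
print(cur.fetchone())    # e.g. (30,)

cur.close()
cnx.close()
```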
+ + :param buffered: Return a buffering cursor + :param raw: Return a raw cursor + :param prepared: Return a cursor which uses prepared statements + :param cursor_class: Use a custom cursor class + :param dictionary: Rows are returned as dictionary + :param named_tuple: Rows are returned as named tuple + :return: Subclass of CMySQLCursor + :rtype: CMySQLCursor or subclass + """ + self.handle_unread_result(prepared) + if not self.is_connected(): + raise OperationalError("MySQL Connection not available.") + if cursor_class is not None: + if not issubclass(cursor_class, CMySQLCursor): + raise ProgrammingError( + "Cursor class needs be to subclass of cursor_cext.CMySQLCursor" + ) + return (cursor_class)(self) + + buffered = buffered or self._buffered + raw = raw or self._raw + + cursor_type = 0 + if buffered is True: + cursor_type |= 1 + if raw is True: + cursor_type |= 2 + if dictionary is True: + cursor_type |= 4 + if named_tuple is True: + cursor_type |= 8 + if prepared is True: + cursor_type |= 16 + + types = { + 0: CMySQLCursor, # 0 + 1: CMySQLCursorBuffered, + 2: CMySQLCursorRaw, + 3: CMySQLCursorBufferedRaw, + 4: CMySQLCursorDict, + 5: CMySQLCursorBufferedDict, + 8: CMySQLCursorNamedTuple, + 9: CMySQLCursorBufferedNamedTuple, + 16: CMySQLCursorPrepared, + 18: CMySQLCursorPreparedRaw, + 20: CMySQLCursorPreparedDict, + 24: CMySQLCursorPreparedNamedTuple, + } + try: + return (types[cursor_type])(self) + except KeyError: + args = ("buffered", "raw", "dictionary", "named_tuple", "prepared") + raise ValueError( + "Cursor not available with given criteria: " + + ", ".join([args[i] for i in range(5) if cursor_type & (1 << i) != 0]) + ) from None + + @property + def num_rows(self) -> int: + """Returns number of rows of current result set""" + if not self._cmysql.have_result_set: + raise InterfaceError("No result set") + + return self._cmysql.num_rows() + + @property + def warning_count(self) -> int: + """Returns number of warnings""" + if not self._cmysql: + return 0 + + return self._cmysql.warning_count() + + @property + def result_set_available(self) -> bool: + """Check if a result set is available""" + if not self._cmysql: + return False + + return self._cmysql.have_result_set + + @property # type: ignore[misc] + def unread_result(self) -> bool: + """Check if there are unread results or rows""" + return self.result_set_available + + @property + def more_results(self) -> bool: + """Check if there are more results""" + return self._cmysql.more_results() + + def prepare_for_mysql( + self, params: ParamsSequenceOrDictType + ) -> Union[Sequence[bytes], Dict[str, bytes]]: + """Prepare parameters for statements + + This method is use by cursors to prepared parameters found in the + list (or tuple) params. + + Returns dict. 
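`prepare_for_mysql()` above accepts either a sequence or a mapping of parameters, which corresponds to the two placeholder styles available through the cursor API. A hedged sketch with placeholder credentials:

```
import mysql.connector

cnx = mysql.connector.connect(
    host="db.example.test", user="app", password="secret", database="test"
)
cur = cnx.cursor()

# Positional parameters: a tuple or list with %s placeholders.
cur.execute("SELECT %s, %s", ("O'Reilly", 42))
print(cur.fetchone())

# Named parameters: a dict with %(name)s placeholders.
cur.execute("SELECT %(title)s, %(qty)s", {"title": "O'Reilly", "qty": 42})
print(cur.fetchone())

cur.close()
cnx.close()
```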
+ """ + result: Union[List[Any], Dict[str, Any]] = [] + if isinstance(params, (list, tuple)): + if self.converter: + result = [ + self.converter.quote( + self.converter.escape( + self.converter.to_mysql(value), self._sql_mode + ) + ) + for value in params + ] + else: + result = self._cmysql.convert_to_mysql(*params) + elif isinstance(params, dict): + result = {} + if self.converter: + for key, value in params.items(): + result[key] = self.converter.quote( + self.converter.escape( + self.converter.to_mysql(value), self._sql_mode + ) + ) + else: + for key, value in params.items(): + result[key] = self._cmysql.convert_to_mysql(value)[0] + else: + raise ProgrammingError( + f"Could not process parameters: {type(params).__name__}({params})," + " it must be of type list, tuple or dict" + ) + + return result + + def consume_results(self) -> None: + """Consume the current result + + This method consume the result by reading (consuming) all rows. + """ + self._cmysql.consume_result() + + def cmd_change_user( + self, + username: str = "", + password: str = "", + database: str = "", + charset: int = 45, + password1: str = "", + password2: str = "", + password3: str = "", + oci_config_file: Optional[str] = None, + oci_config_profile: Optional[str] = None, + ) -> None: + """Change the current logged in user""" + try: + self._cmysql.change_user( + username, + password, + database, + password1, + password2, + password3, + oci_config_file, + oci_config_profile, + ) + + except MySQLInterfaceError as err: + raise get_mysql_exception( + msg=err.msg, errno=err.errno, sqlstate=err.sqlstate + ) from err + + self._charset_id = charset + self._user = username # updating user accordingly + self._post_connection() + + def cmd_reset_connection(self) -> bool: + """Resets the session state without re-authenticating + + Reset command only works on MySQL server 5.7.3 or later. + The result is True for a successful reset otherwise False. + + Returns bool + """ + res = self._cmysql.reset_connection() + if res: + self._post_connection() + return res + + def cmd_refresh(self, options: int) -> Optional[CextEofPacketType]: + """Send the Refresh command to the MySQL server""" + try: + self.handle_unread_result() + self._cmysql.refresh(options) + except MySQLInterfaceError as err: + raise get_mysql_exception( + msg=err.msg, errno=err.errno, sqlstate=err.sqlstate + ) from err + + return self.fetch_eof_status() + + def cmd_quit(self) -> None: + """Close the current connection with the server""" + self.close() + + def cmd_shutdown(self, shutdown_type: Optional[int] = None) -> None: + """Shut down the MySQL Server + + This method sends the SHUTDOWN command to the MySQL server. + The `shutdown_type` is not used, and it's kept for backward compatibility. 
+ """ + if not self._cmysql: + raise OperationalError("MySQL Connection not available") + + if shutdown_type: + if not ShutdownType.get_info(shutdown_type): + raise InterfaceError("Invalid shutdown type") + level = shutdown_type + else: + level = ShutdownType.SHUTDOWN_DEFAULT + + try: + self._cmysql.shutdown(level) + except MySQLInterfaceError as err: + raise get_mysql_exception( + msg=err.msg, errno=err.errno, sqlstate=err.sqlstate + ) from err + self.close() + + def cmd_statistics(self) -> StatsPacketType: + """Return statistics from the MySQL server""" + self.handle_unread_result() + + try: + stat = self._cmysql.stat() + return MySQLProtocol().parse_statistics(stat, with_header=False) + except (MySQLInterfaceError, InterfaceError) as err: + raise get_mysql_exception( + msg=err.msg, errno=err.errno, sqlstate=err.sqlstate + ) from err + + def cmd_process_kill(self, mysql_pid: int) -> None: + """Kill a MySQL process""" + if not isinstance(mysql_pid, int): + raise ValueError("MySQL PID must be int") + self.cmd_query(f"KILL {mysql_pid}") + + def cmd_debug(self) -> NoReturn: + """Send the DEBUG command""" + raise NotImplementedError + + def cmd_ping(self) -> NoReturn: + """Send the PING command""" + raise NotImplementedError + + def cmd_query_iter(self, statements: str) -> NoReturn: + """Send one or more statements to the MySQL server""" + raise NotImplementedError + + def cmd_stmt_send_long_data( + self, + statement_id: CMySQLPrepStmt, # type: ignore[override] + param_id: int, + data: BinaryIO, + ) -> NoReturn: + """Send data for a column""" + raise NotImplementedError + + def handle_unread_result(self, prepared: bool = False) -> None: + """Check whether there is an unread result""" + unread_result = self._unread_result if prepared is True else self.unread_result + if self.can_consume_results: + self.consume_results() + elif unread_result: + raise InternalError("Unread result found") + + def reset_session( + self, + user_variables: Optional[Dict[str, Any]] = None, + session_variables: Optional[Dict[str, Any]] = None, + ) -> None: + """Clears the current active session + + This method resets the session state, if the MySQL server is 5.7.3 + or later active session will be reset without re-authenticating. + For other server versions session will be reset by re-authenticating. + + It is possible to provide a sequence of variables and their values to + be set after clearing the session. This is possible for both user + defined variables and session variables. + This method takes two arguments user_variables and session_variables + which are dictionaries. + + Raises OperationalError if not connected, InternalError if there are + unread results and InterfaceError on errors. 
+ """ + if not self.is_connected(): + raise OperationalError("MySQL Connection not available.") + + if not self.cmd_reset_connection(): + try: + self.cmd_change_user( + self._user, + self._password, + self._database, + self._charset_id, + self._password1, + self._password2, + self._password3, + self._oci_config_file, + self._oci_config_profile, + ) + except ProgrammingError: + self.reconnect() + + if user_variables or session_variables: + cur = self.cursor() + if user_variables: + for key, value in user_variables.items(): + cur.execute(f"SET @`{key}` = %s", (value,)) + if session_variables: + for key, value in session_variables.items(): + cur.execute(f"SET SESSION `{key}` = %s", (value,)) + cur.close() diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/constants.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/constants.py new file mode 100644 index 0000000..7a3feb0 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/constants.py @@ -0,0 +1,1189 @@ +# Copyright (c) 2009, 2023, Oracle and/or its affiliates. All rights reserved. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""Various MySQL constants and character sets.""" + +import warnings + +from abc import ABC, ABCMeta +from typing import Any, Dict, List, Optional, Sequence, Tuple, Union, ValuesView + +from .charsets import MYSQL_CHARACTER_SETS, MYSQL_CHARACTER_SETS_57 +from .errors import ProgrammingError + +NET_BUFFER_LENGTH: int = 8192 +MAX_MYSQL_TABLE_COLUMNS: int = 4096 +# Flag used to send the Query Attributes with 0 (or more) parameters. 
+PARAMETER_COUNT_AVAILABLE: int = 8 + +DEFAULT_CONFIGURATION: Dict[str, Optional[Union[str, bool, int]]] = { + "database": None, + "user": "", + "password": "", + "password1": "", + "password2": "", + "password3": "", + "host": "127.0.0.1", + "port": 3306, + "unix_socket": None, + "use_unicode": True, + "charset": "utf8mb4", + "collation": None, + "converter_class": None, + "converter_str_fallback": False, + "autocommit": False, + "time_zone": None, + "sql_mode": None, + "get_warnings": False, + "raise_on_warnings": False, + "connection_timeout": None, + "client_flags": 0, + "compress": False, + "buffered": False, + "raw": False, + "ssl_ca": None, + "ssl_cert": None, + "ssl_key": None, + "ssl_verify_cert": False, + "ssl_verify_identity": False, + "ssl_cipher": None, + "tls_ciphersuites": None, + "ssl_disabled": False, + "tls_versions": None, + "passwd": None, + "db": None, + "connect_timeout": None, + "dsn": None, + "force_ipv6": False, + "auth_plugin": None, + "allow_local_infile": False, + "allow_local_infile_in_path": None, + "consume_results": False, + "conn_attrs": None, + "dns_srv": False, + "use_pure": False, + "krb_service_principal": None, + "oci_config_file": None, + "oci_config_profile": None, + "fido_callback": None, + "webauthn_callback": None, + "kerberos_auth_mode": None, + "init_command": None, +} + +CNX_POOL_ARGS: Tuple[str, str, str] = ("pool_name", "pool_size", "pool_reset_session") + +TLS_VERSIONS: List[str] = ["TLSv1.2", "TLSv1.3"] + +DEPRECATED_TLS_VERSIONS: List[str] = ["TLSv1", "TLSv1.1"] + + +def flag_is_set(flag: int, flags: int) -> bool: + """Checks if the flag is set + + Returns boolean""" + if (flags & flag) > 0: + return True + return False + + +def _obsolete_option(name: str, new_name: str, value: int) -> int: + """Raise a deprecation warning and advise a new option name. + + Args: + name (str): The name of the option. + new_name (str): The new option name. + value (int): The value of the option. + + Returns: + int: The value of the option. + """ + warnings.warn( + f"The option '{name}' has been deprecated, use '{new_name}' instead.", + category=DeprecationWarning, + ) + return value + + +class _Constants(ABC): + """Base class for constants.""" + + prefix: str = "" + desc: Dict[str, Tuple[int, str]] = {} + + @classmethod + def get_desc(cls, name: str) -> Optional[str]: + """Get description of given constant""" + try: + return cls.desc[name][1] + except (IndexError, KeyError): + return None + + @classmethod + def get_info(cls, setid: int) -> Union[Optional[str], Tuple[str, str]]: + """Get information about given constant""" + for name, info in cls.desc.items(): + if info[0] == setid: + return name + return None + + @classmethod + def get_full_info(cls) -> Union[str, Sequence[str]]: + """get full information about given constant""" + res: Union[str, List[str]] = [] + try: + res = [f"{k} : {v[1]}" for k, v in cls.desc.items()] + except (AttributeError, IndexError) as err: + res = f"No information found in constant class. {err}" + + return res + + +class _Flags(_Constants): + """Base class for classes describing flags""" + + @classmethod + def get_bit_info(cls, value: int) -> List[str]: + """Get the name of all bits set + + Returns a list of strings.""" + res = [] + for name, info in cls.desc.items(): + if value & info[0]: + res.append(name) + return res + + +class FieldType(_Constants): + """MySQL Field Types. + + This class provides all supported MySQL field or data types. They can be useful + when dealing with raw data or defining your own converters. 
The field type is + stored with every cursor in the description for each column. + + The `FieldType` class shouldn't be instantiated. + + Examples: + The following example shows how to print the name of the data type for + each column in a result set. + + ``` + from __future__ import print_function + import mysql.connector + from mysql.connector import FieldType + + cnx = mysql.connector.connect(user='scott', database='test') + cursor = cnx.cursor() + + cursor.execute( + "SELECT DATE(NOW()) AS `c1`, TIME(NOW()) AS `c2`, " + "NOW() AS `c3`, 'a string' AS `c4`, 42 AS `c5`") + rows = cursor.fetchall() + + for desc in cursor.description: + colname = desc[0] + coltype = desc[1] + print("Column {} has type {}".format( + colname, FieldType.get_info(coltype))) + + cursor.close() + cnx.close() + ``` + """ + + prefix: str = "FIELD_TYPE_" + DECIMAL: int = 0x00 + TINY: int = 0x01 + SHORT: int = 0x02 + LONG: int = 0x03 + FLOAT: int = 0x04 + DOUBLE: int = 0x05 + NULL: int = 0x06 + TIMESTAMP: int = 0x07 + LONGLONG: int = 0x08 + INT24: int = 0x09 + DATE: int = 0x0A + TIME: int = 0x0B + DATETIME: int = 0x0C + YEAR: int = 0x0D + NEWDATE: int = 0x0E + VARCHAR: int = 0x0F + BIT: int = 0x10 + JSON: int = 0xF5 + NEWDECIMAL: int = 0xF6 + ENUM: int = 0xF7 + SET: int = 0xF8 + TINY_BLOB: int = 0xF9 + MEDIUM_BLOB: int = 0xFA + LONG_BLOB: int = 0xFB + BLOB: int = 0xFC + VAR_STRING: int = 0xFD + STRING: int = 0xFE + GEOMETRY: int = 0xFF + + desc: Dict[str, Tuple[int, str]] = { + "DECIMAL": (0x00, "DECIMAL"), + "TINY": (0x01, "TINY"), + "SHORT": (0x02, "SHORT"), + "LONG": (0x03, "LONG"), + "FLOAT": (0x04, "FLOAT"), + "DOUBLE": (0x05, "DOUBLE"), + "NULL": (0x06, "NULL"), + "TIMESTAMP": (0x07, "TIMESTAMP"), + "LONGLONG": (0x08, "LONGLONG"), + "INT24": (0x09, "INT24"), + "DATE": (0x0A, "DATE"), + "TIME": (0x0B, "TIME"), + "DATETIME": (0x0C, "DATETIME"), + "YEAR": (0x0D, "YEAR"), + "NEWDATE": (0x0E, "NEWDATE"), + "VARCHAR": (0x0F, "VARCHAR"), + "BIT": (0x10, "BIT"), + "JSON": (0xF5, "JSON"), + "NEWDECIMAL": (0xF6, "NEWDECIMAL"), + "ENUM": (0xF7, "ENUM"), + "SET": (0xF8, "SET"), + "TINY_BLOB": (0xF9, "TINY_BLOB"), + "MEDIUM_BLOB": (0xFA, "MEDIUM_BLOB"), + "LONG_BLOB": (0xFB, "LONG_BLOB"), + "BLOB": (0xFC, "BLOB"), + "VAR_STRING": (0xFD, "VAR_STRING"), + "STRING": (0xFE, "STRING"), + "GEOMETRY": (0xFF, "GEOMETRY"), + } + + @classmethod + def get_string_types(cls) -> List[int]: + """Get the list of all string types""" + return [ + cls.VARCHAR, + cls.ENUM, + cls.VAR_STRING, + cls.STRING, + ] + + @classmethod + def get_binary_types(cls) -> List[int]: + """Get the list of all binary types""" + return [ + cls.TINY_BLOB, + cls.MEDIUM_BLOB, + cls.LONG_BLOB, + cls.BLOB, + ] + + @classmethod + def get_number_types(cls) -> List[int]: + """Get the list of all number types""" + return [ + cls.DECIMAL, + cls.NEWDECIMAL, + cls.TINY, + cls.SHORT, + cls.LONG, + cls.FLOAT, + cls.DOUBLE, + cls.LONGLONG, + cls.INT24, + cls.BIT, + cls.YEAR, + ] + + @classmethod + def get_timestamp_types(cls) -> List[int]: + """Get the list of all timestamp types""" + return [ + cls.DATETIME, + cls.TIMESTAMP, + ] + + +class FieldFlag(_Flags): + """MySQL Field Flags + + Field flags as found in MySQL sources mysql-src/include/mysql_com.h + """ + + _prefix: str = "" + NOT_NULL: int = 1 << 0 + PRI_KEY: int = 1 << 1 + UNIQUE_KEY: int = 1 << 2 + MULTIPLE_KEY: int = 1 << 3 + BLOB: int = 1 << 4 + UNSIGNED: int = 1 << 5 + ZEROFILL: int = 1 << 6 + BINARY: int = 1 << 7 + + ENUM: int = 1 << 8 + AUTO_INCREMENT: int = 1 << 9 + TIMESTAMP: int = 1 << 10 + SET: int = 1 << 11 + + 
NO_DEFAULT_VALUE: int = 1 << 12 + ON_UPDATE_NOW: int = 1 << 13 + NUM: int = 1 << 14 + PART_KEY: int = 1 << 15 + GROUP: int = 1 << 14 # SAME AS NUM !!!!!!!???? + UNIQUE: int = 1 << 16 + BINCMP: int = 1 << 17 + + GET_FIXED_FIELDS: int = 1 << 18 + FIELD_IN_PART_FUNC: int = 1 << 19 + FIELD_IN_ADD_INDEX: int = 1 << 20 + FIELD_IS_RENAMED: int = 1 << 21 + + desc: Dict[str, Tuple[int, str]] = { + "NOT_NULL": (1 << 0, "Field can't be NULL"), + "PRI_KEY": (1 << 1, "Field is part of a primary key"), + "UNIQUE_KEY": (1 << 2, "Field is part of a unique key"), + "MULTIPLE_KEY": (1 << 3, "Field is part of a key"), + "BLOB": (1 << 4, "Field is a blob"), + "UNSIGNED": (1 << 5, "Field is unsigned"), + "ZEROFILL": (1 << 6, "Field is zerofill"), + "BINARY": (1 << 7, "Field is binary "), + "ENUM": (1 << 8, "field is an enum"), + "AUTO_INCREMENT": (1 << 9, "field is a autoincrement field"), + "TIMESTAMP": (1 << 10, "Field is a timestamp"), + "SET": (1 << 11, "field is a set"), + "NO_DEFAULT_VALUE": (1 << 12, "Field doesn't have default value"), + "ON_UPDATE_NOW": (1 << 13, "Field is set to NOW on UPDATE"), + "NUM": (1 << 14, "Field is num (for clients)"), + "PART_KEY": (1 << 15, "Intern; Part of some key"), + "GROUP": (1 << 14, "Intern: Group field"), # Same as NUM + "UNIQUE": (1 << 16, "Intern: Used by sql_yacc"), + "BINCMP": (1 << 17, "Intern: Used by sql_yacc"), + "GET_FIXED_FIELDS": (1 << 18, "Used to get fields in item tree"), + "FIELD_IN_PART_FUNC": (1 << 19, "Field part of partition func"), + "FIELD_IN_ADD_INDEX": (1 << 20, "Intern: Field used in ADD INDEX"), + "FIELD_IS_RENAMED": (1 << 21, "Intern: Field is being renamed"), + } + + +class ServerCmdMeta(ABCMeta): + """ClientFlag Metaclass.""" + + def __getattribute__(cls, name: str) -> Any: + deprecated_options = ( + "FIELD_LIST", + "REFRESH", + "SHUTDOWN", + "PROCESS_INFO", + "PROCESS_KILL", + ) + if name in deprecated_options: + warnings.warn( + f"The option 'ServerCmd.{name}' is deprecated and will be removed in " + "a future release.", + category=DeprecationWarning, + ) + return super().__getattribute__(name) + + +class ServerCmd(_Constants, metaclass=ServerCmdMeta): + """MySQL Server Commands""" + + _prefix: str = "COM_" + SLEEP: int = 0 + QUIT: int = 1 + INIT_DB: int = 2 + QUERY: int = 3 + FIELD_LIST: int = 4 + CREATE_DB: int = 5 + DROP_DB: int = 6 + REFRESH: int = 7 + SHUTDOWN: int = 8 + STATISTICS: int = 9 + PROCESS_INFO: int = 10 + CONNECT: int = 11 + PROCESS_KILL: int = 12 + DEBUG: int = 13 + PING: int = 14 + TIME: int = 15 + DELAYED_INSERT: int = 16 + CHANGE_USER: int = 17 + BINLOG_DUMP: int = 18 + TABLE_DUMP: int = 19 + CONNECT_OUT: int = 20 + REGISTER_REPLICA: int = 21 + STMT_PREPARE: int = 22 + STMT_EXECUTE: int = 23 + STMT_SEND_LONG_DATA: int = 24 + STMT_CLOSE: int = 25 + STMT_RESET: int = 26 + SET_OPTION: int = 27 + STMT_FETCH: int = 28 + DAEMON: int = 29 + BINLOG_DUMP_GTID: int = 30 + RESET_CONNECTION: int = 31 + + desc: Dict[str, Tuple[int, str]] = { + "SLEEP": (0, "SLEEP"), + "QUIT": (1, "QUIT"), + "INIT_DB": (2, "INIT_DB"), + "QUERY": (3, "QUERY"), + "FIELD_LIST": (4, "FIELD_LIST"), + "CREATE_DB": (5, "CREATE_DB"), + "DROP_DB": (6, "DROP_DB"), + "REFRESH": (7, "REFRESH"), + "SHUTDOWN": (8, "SHUTDOWN"), + "STATISTICS": (9, "STATISTICS"), + "PROCESS_INFO": (10, "PROCESS_INFO"), + "CONNECT": (11, "CONNECT"), + "PROCESS_KILL": (12, "PROCESS_KILL"), + "DEBUG": (13, "DEBUG"), + "PING": (14, "PING"), + "TIME": (15, "TIME"), + "DELAYED_INSERT": (16, "DELAYED_INSERT"), + "CHANGE_USER": (17, "CHANGE_USER"), + "BINLOG_DUMP": (18, "BINLOG_DUMP"), 
+ "TABLE_DUMP": (19, "TABLE_DUMP"), + "CONNECT_OUT": (20, "CONNECT_OUT"), + "REGISTER_REPLICA": (21, "REGISTER_REPLICA"), + "STMT_PREPARE": (22, "STMT_PREPARE"), + "STMT_EXECUTE": (23, "STMT_EXECUTE"), + "STMT_SEND_LONG_DATA": (24, "STMT_SEND_LONG_DATA"), + "STMT_CLOSE": (25, "STMT_CLOSE"), + "STMT_RESET": (26, "STMT_RESET"), + "SET_OPTION": (27, "SET_OPTION"), + "STMT_FETCH": (28, "STMT_FETCH"), + "DAEMON": (29, "DAEMON"), + "BINLOG_DUMP_GTID": (30, "BINLOG_DUMP_GTID"), + "RESET_CONNECTION": (31, "RESET_CONNECTION"), + } + + +class ClientFlag(_Flags): + """MySQL Client Flags. + + Client options as found in the MySQL sources mysql-src/include/mysql_com.h. + + This class provides constants defining MySQL client flags that can be used + when the connection is established to configure the session. The `ClientFlag` + class is available when importing mysql.connector. + + The `ClientFlag` class shouldn't be instantiated. + + Examples: + ``` + >>> import mysql.connector + >>> mysql.connector.ClientFlag.FOUND_ROWS + 2 + ``` + """ + + LONG_PASSWD: int = 1 << 0 + FOUND_ROWS: int = 1 << 1 + LONG_FLAG: int = 1 << 2 + CONNECT_WITH_DB: int = 1 << 3 + NO_SCHEMA: int = 1 << 4 + COMPRESS: int = 1 << 5 + ODBC: int = 1 << 6 + LOCAL_FILES: int = 1 << 7 + IGNORE_SPACE: int = 1 << 8 + PROTOCOL_41: int = 1 << 9 + INTERACTIVE: int = 1 << 10 + SSL: int = 1 << 11 + IGNORE_SIGPIPE: int = 1 << 12 + TRANSACTIONS: int = 1 << 13 + RESERVED: int = 1 << 14 + SECURE_CONNECTION: int = 1 << 15 + MULTI_STATEMENTS: int = 1 << 16 + MULTI_RESULTS: int = 1 << 17 + PS_MULTI_RESULTS: int = 1 << 18 + PLUGIN_AUTH: int = 1 << 19 + CONNECT_ARGS: int = 1 << 20 + PLUGIN_AUTH_LENENC_CLIENT_DATA: int = 1 << 21 + CAN_HANDLE_EXPIRED_PASSWORDS: int = 1 << 22 + SESION_TRACK: int = 1 << 23 # deprecated + SESSION_TRACK: int = 1 << 23 + DEPRECATE_EOF: int = 1 << 24 + CLIENT_QUERY_ATTRIBUTES: int = 1 << 27 + SSL_VERIFY_SERVER_CERT: int = 1 << 30 + REMEMBER_OPTIONS: int = 1 << 31 + MULTI_FACTOR_AUTHENTICATION: int = 1 << 28 + + desc: Dict[str, Tuple[int, str]] = { + "LONG_PASSWD": (1 << 0, "New more secure passwords"), + "FOUND_ROWS": (1 << 1, "Found instead of affected rows"), + "LONG_FLAG": (1 << 2, "Get all column flags"), + "CONNECT_WITH_DB": (1 << 3, "One can specify db on connect"), + "NO_SCHEMA": (1 << 4, "Don't allow database.table.column"), + "COMPRESS": (1 << 5, "Can use compression protocol"), + "ODBC": (1 << 6, "ODBC client"), + "LOCAL_FILES": (1 << 7, "Can use LOAD DATA LOCAL"), + "IGNORE_SPACE": (1 << 8, "Ignore spaces before ''"), + "PROTOCOL_41": (1 << 9, "New 4.1 protocol"), + "INTERACTIVE": (1 << 10, "This is an interactive client"), + "SSL": (1 << 11, "Switch to SSL after handshake"), + "IGNORE_SIGPIPE": (1 << 12, "IGNORE sigpipes"), + "TRANSACTIONS": (1 << 13, "Client knows about transactions"), + "RESERVED": (1 << 14, "Old flag for 4.1 protocol"), + "SECURE_CONNECTION": (1 << 15, "New 4.1 authentication"), + "MULTI_STATEMENTS": (1 << 16, "Enable/disable multi-stmt support"), + "MULTI_RESULTS": (1 << 17, "Enable/disable multi-results"), + "PS_MULTI_RESULTS": (1 << 18, "Multi-results in PS-protocol"), + "PLUGIN_AUTH": (1 << 19, "Client supports plugin authentication"), + "CONNECT_ARGS": (1 << 20, "Client supports connection attributes"), + "PLUGIN_AUTH_LENENC_CLIENT_DATA": ( + 1 << 21, + "Enable authentication response packet to be larger than 255 bytes", + ), + "CAN_HANDLE_EXPIRED_PASSWORDS": ( + 1 << 22, + "Don't close the connection for a connection with expired password", + ), + "SESION_TRACK": ( # deprecated + 1 << 23, + 
"Capable of handling server state change information", + ), + "SESSION_TRACK": ( + 1 << 23, + "Capable of handling server state change information", + ), + "DEPRECATE_EOF": (1 << 24, "Client no longer needs EOF packet"), + "CLIENT_QUERY_ATTRIBUTES": ( + 1 << 27, + "Support optional extension for query parameters", + ), + "SSL_VERIFY_SERVER_CERT": (1 << 30, ""), + "REMEMBER_OPTIONS": (1 << 31, ""), + } + + default: List[int] = [ + LONG_PASSWD, + LONG_FLAG, + CONNECT_WITH_DB, + PROTOCOL_41, + TRANSACTIONS, + SECURE_CONNECTION, + MULTI_STATEMENTS, + MULTI_RESULTS, + CONNECT_ARGS, + ] + + @classmethod + def get_default(cls) -> int: + """Get the default client options set + + Returns a flag with all the default client options set""" + flags = 0 + for option in cls.default: + flags |= option + return flags + + +class ServerFlag(_Flags): + """MySQL Server Flags + + Server flags as found in the MySQL sources mysql-src/include/mysql_com.h + """ + + _prefix: str = "SERVER_" + STATUS_IN_TRANS: int = 1 << 0 + STATUS_AUTOCOMMIT: int = 1 << 1 + MORE_RESULTS_EXISTS: int = 1 << 3 + QUERY_NO_GOOD_INDEX_USED: int = 1 << 4 + QUERY_NO_INDEX_USED: int = 1 << 5 + STATUS_CURSOR_EXISTS: int = 1 << 6 + STATUS_LAST_ROW_SENT: int = 1 << 7 + STATUS_DB_DROPPED: int = 1 << 8 + STATUS_NO_BACKSLASH_ESCAPES: int = 1 << 9 + SERVER_STATUS_METADATA_CHANGED: int = 1 << 10 + SERVER_QUERY_WAS_SLOW: int = 1 << 11 + SERVER_PS_OUT_PARAMS: int = 1 << 12 + SERVER_STATUS_IN_TRANS_READONLY: int = 1 << 13 + SERVER_SESSION_STATE_CHANGED: int = 1 << 14 + + desc: Dict[str, Tuple[int, str]] = { + "SERVER_STATUS_IN_TRANS": (1 << 0, "Transaction has started"), + "SERVER_STATUS_AUTOCOMMIT": (1 << 1, "Server in auto_commit mode"), + "SERVER_MORE_RESULTS_EXISTS": ( + 1 << 3, + "Multi query - next query exists", + ), + "SERVER_QUERY_NO_GOOD_INDEX_USED": (1 << 4, ""), + "SERVER_QUERY_NO_INDEX_USED": (1 << 5, ""), + "SERVER_STATUS_CURSOR_EXISTS": ( + 1 << 6, + "Set when server opened a read-only non-scrollable cursor for a query.", + ), + "SERVER_STATUS_LAST_ROW_SENT": ( + 1 << 7, + "Set when a read-only cursor is exhausted", + ), + "SERVER_STATUS_DB_DROPPED": (1 << 8, "A database was dropped"), + "SERVER_STATUS_NO_BACKSLASH_ESCAPES": (1 << 9, ""), + "SERVER_STATUS_METADATA_CHANGED": ( + 1024, + "Set if after a prepared statement " + "reprepare we discovered that the " + "new statement returns a different " + "number of result set columns.", + ), + "SERVER_QUERY_WAS_SLOW": (2048, ""), + "SERVER_PS_OUT_PARAMS": ( + 4096, + "To mark ResultSet containing output parameter values.", + ), + "SERVER_STATUS_IN_TRANS_READONLY": ( + 8192, + "Set if multi-statement transaction is a read-only transaction.", + ), + "SERVER_SESSION_STATE_CHANGED": ( + 1 << 14, + "Session state has changed on the " + "server because of the execution of " + "the last statement", + ), + } + + +class RefreshOptionMeta(ABCMeta): + """RefreshOption Metaclass.""" + + @property + def SLAVE(self) -> int: # pylint: disable=bad-mcs-method-argument,invalid-name + """Return the deprecated alias of RefreshOption.REPLICA. + + Raises a warning about this attribute deprecation. + """ + return _obsolete_option( + "RefreshOption.SLAVE", + "RefreshOption.REPLICA", + RefreshOption.REPLICA, + ) + + +class RefreshOption(_Constants, metaclass=RefreshOptionMeta): + """MySQL Refresh command options. + + Options used when sending the COM_REFRESH server command. 
+ """ + + _prefix: str = "REFRESH_" + GRANT: int = 1 << 0 + LOG: int = 1 << 1 + TABLES: int = 1 << 2 + HOST: int = 1 << 3 + STATUS: int = 1 << 4 + REPLICA: int = 1 << 6 + + desc: Dict[str, Tuple[int, str]] = { + "GRANT": (1 << 0, "Refresh grant tables"), + "LOG": (1 << 1, "Start on new log file"), + "TABLES": (1 << 2, "close all tables"), + "HOST": (1 << 3, "Flush host cache"), + "STATUS": (1 << 4, "Flush status variables"), + "REPLICA": (1 << 6, "Reset source info and restart replica thread"), + "SLAVE": (1 << 6, "Deprecated option; use REPLICA instead."), + } + + +class ShutdownType(_Constants): + """MySQL Shutdown types + + Shutdown types used by the COM_SHUTDOWN server command. + """ + + _prefix: str = "" + SHUTDOWN_DEFAULT: int = 0 + SHUTDOWN_WAIT_CONNECTIONS: int = 1 + SHUTDOWN_WAIT_TRANSACTIONS: int = 2 + SHUTDOWN_WAIT_UPDATES: int = 8 + SHUTDOWN_WAIT_ALL_BUFFERS: int = 16 + SHUTDOWN_WAIT_CRITICAL_BUFFERS: int = 17 + KILL_QUERY: int = 254 + KILL_CONNECTION: int = 255 + + desc: Dict[str, Tuple[int, str]] = { + "SHUTDOWN_DEFAULT": ( + SHUTDOWN_DEFAULT, + "defaults to SHUTDOWN_WAIT_ALL_BUFFERS", + ), + "SHUTDOWN_WAIT_CONNECTIONS": ( + SHUTDOWN_WAIT_CONNECTIONS, + "wait for existing connections to finish", + ), + "SHUTDOWN_WAIT_TRANSACTIONS": ( + SHUTDOWN_WAIT_TRANSACTIONS, + "wait for existing trans to finish", + ), + "SHUTDOWN_WAIT_UPDATES": ( + SHUTDOWN_WAIT_UPDATES, + "wait for existing updates to finish", + ), + "SHUTDOWN_WAIT_ALL_BUFFERS": ( + SHUTDOWN_WAIT_ALL_BUFFERS, + "flush InnoDB and other storage engine buffers", + ), + "SHUTDOWN_WAIT_CRITICAL_BUFFERS": ( + SHUTDOWN_WAIT_CRITICAL_BUFFERS, + "don't flush InnoDB buffers, flush other storage engines' buffers", + ), + "KILL_QUERY": (KILL_QUERY, "(no description)"), + "KILL_CONNECTION": (KILL_CONNECTION, "(no description)"), + } + + +class CharacterSet: + """MySQL supported character sets and collations + + List of character sets with their collations supported by MySQL. This + maps to the character set we get from the server within the handshake + packet. + + The list is hardcode so we avoid a database query when getting the + name of the used character set or collation. + """ + + # Multi-byte character sets which use 5c (backslash) in characters + slash_charsets: Tuple[int, ...] = (1, 13, 28, 84, 87, 88) + + def __init__(self) -> None: + # Use LTS character set as default + self._desc: List[Optional[Tuple[str, str, bool]]] = MYSQL_CHARACTER_SETS_57 + self._mysql_version: Tuple[int, ...] = (5, 7) + + def set_mysql_version(self, version: Tuple[int, ...]) -> None: + """Set the MySQL major version and change the charset mapping if is 5.7. + + Args: + version (tuple): MySQL version tuple. + """ + self._mysql_version = version[:2] + if self._mysql_version >= (8, 0): + self._desc = MYSQL_CHARACTER_SETS + + def get_info(self, setid: int) -> Tuple[str, str]: + """Retrieves character set information as tuple using an ID + + Retrieves character set and collation information based on the + given MySQL ID. + + Raises ProgrammingError when character set is not supported. + + Returns a tuple. + """ + try: + return self._desc[setid][0:2] + except IndexError: + raise ProgrammingError(f"Character set '{setid}' unsupported") from None + + def get_desc(self, name: int) -> str: + """Retrieves character set information as string using an ID + + Retrieves character set and collation information based on the + given MySQL ID. + + Returns a tuple. 
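A rough sketch of how the `CharacterSet` lookups above behave; the printed names are only illustrative, since they come from the hardcoded 5.7/8.0 mapping tables and depend on the connector version:

```
from mysql.connector.constants import CharacterSet

charsets = CharacterSet()
charsets.set_mysql_version((8, 0, 33))  # switch from the default 5.7 mapping to 8.0
print(charsets.get_info(255))           # e.g. ('utf8mb4', 'utf8mb4_0900_ai_ci')
print(charsets.get_desc(255))           # e.g. 'utf8mb4/utf8mb4_0900_ai_ci'
```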
+ """ + charset, collation = self.get_info(name) + return f"{charset}/{collation}" + + def get_default_collation(self, charset: Union[int, str]) -> Tuple[str, str, int]: + """Retrieves the default collation for given character set + + Raises ProgrammingError when character set is not supported. + + Returns list (collation, charset, index) + """ + if isinstance(charset, int): + try: + info = self._desc[charset] + return info[1], info[0], charset + except (IndexError, KeyError) as err: + raise ProgrammingError( + f"Character set ID '{charset}' unsupported" + ) from err + + for cid, info in enumerate(self._desc): + if info is None: + continue + if info[0] == charset and info[2] is True: + return info[1], info[0], cid + + raise ProgrammingError(f"Character set '{charset}' unsupported") + + def get_charset_info( + self, charset: Optional[Union[int, str]] = None, collation: Optional[str] = None + ) -> Tuple[int, str, str]: + """Get character set information using charset name and/or collation + + Retrieves character set and collation information given character + set name and/or a collation name. + If charset is an integer, it will look up the character set based + on the MySQL's ID. + For example: + get_charset_info('utf8',None) + get_charset_info(collation='utf8_general_ci') + get_charset_info(47) + + Raises ProgrammingError when character set is not supported. + + Returns a tuple with (id, characterset name, collation) + """ + info: Optional[Union[Tuple[str, str, bool], Tuple[str, str, int]]] = None + if isinstance(charset, int): + try: + info = self._desc[charset] + return (charset, info[0], info[1]) + except IndexError as err: + raise ProgrammingError(f"Character set ID {charset} unknown") from err + + if charset in ("utf8", "utf-8") and self._mysql_version >= (8, 0): + charset = "utf8mb4" + if charset is not None and collation is None: + info = self.get_default_collation(charset) + return (info[2], info[1], info[0]) + if charset is None and collation is not None: + for cid, info in enumerate(self._desc): + if info is None: + continue + if collation == info[1]: + return (cid, info[0], info[1]) + raise ProgrammingError(f"Collation '{collation}' unknown") + for cid, info in enumerate(self._desc): + if info is None: + continue + if info[0] == charset and info[1] == collation: + return (cid, info[0], info[1]) + _ = self.get_default_collation(charset) + raise ProgrammingError(f"Collation '{collation}' unknown") + + def get_supported(self) -> Tuple[str, ...]: + """Retrieves a list with names of all supproted character sets + + Returns a tuple. + """ + res = [] + for info in self._desc: + if info and info[0] not in res: + res.append(info[0]) + return tuple(res) + + +class SQLMode(_Constants): + """MySQL SQL Modes + + The numeric values of SQL Modes are not interesting, only the names + are used when setting the SQL_MODE system variable using the MySQL + SET command. + + The `SQLMode` class shouldn't be instantiated. 
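A hedged sketch of the intended use of these mode names: assigning them to a connection's `sql_mode` property, which accepts a sequence of names on `MySQLConnection`. The credentials are placeholders:

```
import mysql.connector
from mysql.connector.constants import SQLMode

cnx = mysql.connector.connect(user="scott", password="secret", database="test")
# Assigning a sequence sets the server's SQL_MODE for this session.
cnx.sql_mode = [SQLMode.TRADITIONAL, SQLMode.NO_ENGINE_SUBSTITUTION]
print(cnx.sql_mode)
cnx.close()
```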
+ + See http://dev.mysql.com/doc/refman/5.6/en/server-sql-mode.html + """ + + _prefix: str = "MODE_" + REAL_AS_FLOAT: str = "REAL_AS_FLOAT" + PIPES_AS_CONCAT: str = "PIPES_AS_CONCAT" + ANSI_QUOTES: str = "ANSI_QUOTES" + IGNORE_SPACE: str = "IGNORE_SPACE" + NOT_USED: str = "NOT_USED" + ONLY_FULL_GROUP_BY: str = "ONLY_FULL_GROUP_BY" + NO_UNSIGNED_SUBTRACTION: str = "NO_UNSIGNED_SUBTRACTION" + NO_DIR_IN_CREATE: str = "NO_DIR_IN_CREATE" + POSTGRESQL: str = "POSTGRESQL" + ORACLE: str = "ORACLE" + MSSQL: str = "MSSQL" + DB2: str = "DB2" + MAXDB: str = "MAXDB" + NO_KEY_OPTIONS: str = "NO_KEY_OPTIONS" + NO_TABLE_OPTIONS: str = "NO_TABLE_OPTIONS" + NO_FIELD_OPTIONS: str = "NO_FIELD_OPTIONS" + MYSQL323: str = "MYSQL323" + MYSQL40: str = "MYSQL40" + ANSI: str = "ANSI" + NO_AUTO_VALUE_ON_ZERO: str = "NO_AUTO_VALUE_ON_ZERO" + NO_BACKSLASH_ESCAPES: str = "NO_BACKSLASH_ESCAPES" + STRICT_TRANS_TABLES: str = "STRICT_TRANS_TABLES" + STRICT_ALL_TABLES: str = "STRICT_ALL_TABLES" + NO_ZERO_IN_DATE: str = "NO_ZERO_IN_DATE" + NO_ZERO_DATE: str = "NO_ZERO_DATE" + INVALID_DATES: str = "INVALID_DATES" + ERROR_FOR_DIVISION_BY_ZERO: str = "ERROR_FOR_DIVISION_BY_ZERO" + TRADITIONAL: str = "TRADITIONAL" + NO_AUTO_CREATE_USER: str = "NO_AUTO_CREATE_USER" + HIGH_NOT_PRECEDENCE: str = "HIGH_NOT_PRECEDENCE" + NO_ENGINE_SUBSTITUTION: str = "NO_ENGINE_SUBSTITUTION" + PAD_CHAR_TO_FULL_LENGTH: str = "PAD_CHAR_TO_FULL_LENGTH" + + @classmethod + def get_desc(cls, name: str) -> Optional[str]: + raise NotImplementedError + + @classmethod + def get_info(cls, setid: int) -> Optional[str]: + raise NotImplementedError + + @classmethod + def get_full_info(cls) -> Tuple[str, ...]: + """Returns a sequence of all available SQL Modes + + This class method returns a tuple containing all SQL Mode names. The + names will be alphabetically sorted. + + Returns a tuple. 
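For reference, `get_full_info` can be used to enumerate the names above; the exact tuple contents depend on the connector version, so the printed slice is only illustrative:

```
from mysql.connector.constants import SQLMode

modes = SQLMode.get_full_info()
print(modes[:3])  # e.g. ('ANSI', 'ANSI_QUOTES', 'DB2') -- alphabetically sorted names
```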
+ """ + res = [] + for key in vars(cls).keys(): + if not key.startswith("_") and not hasattr(getattr(cls, key), "__call__"): + res.append(key) + return tuple(sorted(res)) + + +CONN_ATTRS_DN: List[str] = [ + "_pid", + "_platform", + "_source_host", + "_client_name", + "_client_license", + "_client_version", + "_os", + "_connector_name", + "_connector_license", + "_connector_version", +] + +# TLS v1.0 cipher suites IANI to OpenSSL name translation +TLSV1_CIPHER_SUITES: Dict[str, str] = { + "TLS_RSA_WITH_NULL_MD5": "NULL-MD5", + "TLS_RSA_WITH_NULL_SHA": "NULL-SHA", + "TLS_RSA_WITH_RC4_128_MD5": "RC4-MD5", + "TLS_RSA_WITH_RC4_128_SHA": "RC4-SHA", + "TLS_RSA_WITH_IDEA_CBC_SHA": "IDEA-CBC-SHA", + "TLS_RSA_WITH_3DES_EDE_CBC_SHA": "DES-CBC3-SHA", + "TLS_DH_DSS_WITH_3DES_EDE_CBC_SHA": "Not implemented.", + "TLS_DH_RSA_WITH_3DES_EDE_CBC_SHA": "Not implemented.", + "TLS_DHE_DSS_WITH_3DES_EDE_CBC_SHA": "DHE-DSS-DES-CBC3-SHA", + "TLS_DHE_RSA_WITH_3DES_EDE_CBC_SHA": "DHE-RSA-DES-CBC3-SHA", + "TLS_DH_anon_WITH_RC4_128_MD5": "ADH-RC4-MD5", + "TLS_DH_anon_WITH_3DES_EDE_CBC_SHA": "ADH-DES-CBC3-SHA", + # AES cipher suites from RFC3268, extending TLS v1.0 + "TLS_RSA_WITH_AES_128_CBC_SHA": "AES128-SHA", + "TLS_RSA_WITH_AES_256_CBC_SHA": "AES256-SHA", + "TLS_DH_DSS_WITH_AES_128_CBC_SHA": "DH-DSS-AES128-SHA", + "TLS_DH_DSS_WITH_AES_256_CBC_SHA": "DH-DSS-AES256-SHA", + "TLS_DH_RSA_WITH_AES_128_CBC_SHA": "DH-RSA-AES128-SHA", + "TLS_DH_RSA_WITH_AES_256_CBC_SHA": "DH-RSA-AES256-SHA", + "TLS_DHE_DSS_WITH_AES_128_CBC_SHA": "DHE-DSS-AES128-SHA", + "TLS_DHE_DSS_WITH_AES_256_CBC_SHA": "DHE-DSS-AES256-SHA", + "TLS_DHE_RSA_WITH_AES_128_CBC_SHA": "DHE-RSA-AES128-SHA", + "TLS_DHE_RSA_WITH_AES_256_CBC_SHA": "DHE-RSA-AES256-SHA", + "TLS_DH_anon_WITH_AES_128_CBC_SHA": "ADH-AES128-SHA", + "TLS_DH_anon_WITH_AES_256_CBC_SHA": "ADH-AES256-SHA", + # Camellia cipher suites from RFC4132, extending TLS v1.0 + "TLS_RSA_WITH_CAMELLIA_128_CBC_SHA": "CAMELLIA128-SHA", + "TLS_RSA_WITH_CAMELLIA_256_CBC_SHA": "CAMELLIA256-SHA", + "TLS_DH_DSS_WITH_CAMELLIA_128_CBC_SHA": "DH-DSS-CAMELLIA128-SHA", + "TLS_DH_DSS_WITH_CAMELLIA_256_CBC_SHA": "DH-DSS-CAMELLIA256-SHA", + "TLS_DH_RSA_WITH_CAMELLIA_128_CBC_SHA": "DH-RSA-CAMELLIA128-SHA", + "TLS_DH_RSA_WITH_CAMELLIA_256_CBC_SHA": "DH-RSA-CAMELLIA256-SHA", + "TLS_DHE_DSS_WITH_CAMELLIA_128_CBC_SHA": "DHE-DSS-CAMELLIA128-SHA", + "TLS_DHE_DSS_WITH_CAMELLIA_256_CBC_SHA": "DHE-DSS-CAMELLIA256-SHA", + "TLS_DHE_RSA_WITH_CAMELLIA_128_CBC_SHA": "DHE-RSA-CAMELLIA128-SHA", + "TLS_DHE_RSA_WITH_CAMELLIA_256_CBC_SHA": "DHE-RSA-CAMELLIA256-SHA", + "TLS_DH_anon_WITH_CAMELLIA_128_CBC_SHA": "ADH-CAMELLIA128-SHA", + "TLS_DH_anon_WITH_CAMELLIA_256_CBC_SHA": "ADH-CAMELLIA256-SHA", + # SEED cipher suites from RFC4162, extending TLS v1.0 + "TLS_RSA_WITH_SEED_CBC_SHA": "SEED-SHA", + "TLS_DH_DSS_WITH_SEED_CBC_SHA": "DH-DSS-SEED-SHA", + "TLS_DH_RSA_WITH_SEED_CBC_SHA": "DH-RSA-SEED-SHA", + "TLS_DHE_DSS_WITH_SEED_CBC_SHA": "DHE-DSS-SEED-SHA", + "TLS_DHE_RSA_WITH_SEED_CBC_SHA": "DHE-RSA-SEED-SHA", + "TLS_DH_anon_WITH_SEED_CBC_SHA": "ADH-SEED-SHA", + # GOST cipher suites from draft-chudov-cryptopro-cptls, extending TLS v1.0 + "TLS_GOSTR341094_WITH_28147_CNT_IMIT": "GOST94-GOST89-GOST89", + "TLS_GOSTR341001_WITH_28147_CNT_IMIT": "GOST2001-GOST89-GOST89", + "TLS_GOSTR341094_WITH_NULL_GOSTR3411": "GOST94-NULL-GOST94", + "TLS_GOSTR341001_WITH_NULL_GOSTR3411": "GOST2001-NULL-GOST94", +} + +# TLS v1.1 cipher suites IANI to OpenSSL name translation +TLSV1_1_CIPHER_SUITES: Dict[str, str] = TLSV1_CIPHER_SUITES + +# TLS v1.2 cipher suites IANI 
to OpenSSL name translation +TLSV1_2_CIPHER_SUITES: Dict[str, str] = { + "TLS_RSA_WITH_NULL_SHA256": "NULL-SHA256", + "TLS_RSA_WITH_AES_128_CBC_SHA256": "AES128-SHA256", + "TLS_RSA_WITH_AES_256_CBC_SHA256": "AES256-SHA256", + "TLS_RSA_WITH_AES_128_GCM_SHA256": "AES128-GCM-SHA256", + "TLS_RSA_WITH_AES_256_GCM_SHA384": "AES256-GCM-SHA384", + "TLS_DH_RSA_WITH_AES_128_CBC_SHA256": "DH-RSA-AES128-SHA256", + "TLS_DH_RSA_WITH_AES_256_CBC_SHA256": "DH-RSA-AES256-SHA256", + "TLS_DH_RSA_WITH_AES_128_GCM_SHA256": "DH-RSA-AES128-GCM-SHA256", + "TLS_DH_RSA_WITH_AES_256_GCM_SHA384": "DH-RSA-AES256-GCM-SHA384", + "TLS_DH_DSS_WITH_AES_128_CBC_SHA256": "DH-DSS-AES128-SHA256", + "TLS_DH_DSS_WITH_AES_256_CBC_SHA256": "DH-DSS-AES256-SHA256", + "TLS_DH_DSS_WITH_AES_128_GCM_SHA256": "DH-DSS-AES128-GCM-SHA256", + "TLS_DH_DSS_WITH_AES_256_GCM_SHA384": "DH-DSS-AES256-GCM-SHA384", + "TLS_DHE_RSA_WITH_AES_128_CBC_SHA256": "DHE-RSA-AES128-SHA256", + "TLS_DHE_RSA_WITH_AES_256_CBC_SHA256": "DHE-RSA-AES256-SHA256", + "TLS_DHE_RSA_WITH_AES_128_GCM_SHA256": "DHE-RSA-AES128-GCM-SHA256", + "TLS_DHE_RSA_WITH_AES_256_GCM_SHA384": "DHE-RSA-AES256-GCM-SHA384", + "TLS_DHE_DSS_WITH_AES_128_CBC_SHA256": "DHE-DSS-AES128-SHA256", + "TLS_DHE_DSS_WITH_AES_256_CBC_SHA256": "DHE-DSS-AES256-SHA256", + "TLS_DHE_DSS_WITH_AES_128_GCM_SHA256": "DHE-DSS-AES128-GCM-SHA256", + "TLS_DHE_DSS_WITH_AES_256_GCM_SHA384": "DHE-DSS-AES256-GCM-SHA384", + "TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256": "ECDHE-RSA-AES128-SHA256", + "TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384": "ECDHE-RSA-AES256-SHA384", + "TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256": "ECDHE-RSA-AES128-GCM-SHA256", + "TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384": "ECDHE-RSA-AES256-GCM-SHA384", + "TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256": "ECDHE-ECDSA-AES128-SHA256", + "TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384": "ECDHE-ECDSA-AES256-SHA384", + "TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256": "ECDHE-ECDSA-AES128-GCM-SHA256", + "TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384": "ECDHE-ECDSA-AES256-GCM-SHA384", + "TLS_DH_anon_WITH_AES_128_CBC_SHA256": "ADH-AES128-SHA256", + "TLS_DH_anon_WITH_AES_256_CBC_SHA256": "ADH-AES256-SHA256", + "TLS_DH_anon_WITH_AES_128_GCM_SHA256": "ADH-AES128-GCM-SHA256", + "TLS_DH_anon_WITH_AES_256_GCM_SHA384": "ADH-AES256-GCM-SHA384", + "RSA_WITH_AES_128_CCM": "AES128-CCM", + "RSA_WITH_AES_256_CCM": "AES256-CCM", + "DHE_RSA_WITH_AES_128_CCM": "DHE-RSA-AES128-CCM", + "DHE_RSA_WITH_AES_256_CCM": "DHE-RSA-AES256-CCM", + "RSA_WITH_AES_128_CCM_8": "AES128-CCM8", + "RSA_WITH_AES_256_CCM_8": "AES256-CCM8", + "DHE_RSA_WITH_AES_128_CCM_8": "DHE-RSA-AES128-CCM8", + "DHE_RSA_WITH_AES_256_CCM_8": "DHE-RSA-AES256-CCM8", + "ECDHE_ECDSA_WITH_AES_128_CCM": "ECDHE-ECDSA-AES128-CCM", + "ECDHE_ECDSA_WITH_AES_256_CCM": "ECDHE-ECDSA-AES256-CCM", + "ECDHE_ECDSA_WITH_AES_128_CCM_8": "ECDHE-ECDSA-AES128-CCM8", + "ECDHE_ECDSA_WITH_AES_256_CCM_8": "ECDHE-ECDSA-AES256-CCM8", + # ARIA cipher suites from RFC6209, extending TLS v1.2 + "TLS_RSA_WITH_ARIA_128_GCM_SHA256": "ARIA128-GCM-SHA256", + "TLS_RSA_WITH_ARIA_256_GCM_SHA384": "ARIA256-GCM-SHA384", + "TLS_DHE_RSA_WITH_ARIA_128_GCM_SHA256": "DHE-RSA-ARIA128-GCM-SHA256", + "TLS_DHE_RSA_WITH_ARIA_256_GCM_SHA384": "DHE-RSA-ARIA256-GCM-SHA384", + "TLS_DHE_DSS_WITH_ARIA_128_GCM_SHA256": "DHE-DSS-ARIA128-GCM-SHA256", + "TLS_DHE_DSS_WITH_ARIA_256_GCM_SHA384": "DHE-DSS-ARIA256-GCM-SHA384", + "TLS_ECDHE_ECDSA_WITH_ARIA_128_GCM_SHA256": "ECDHE-ECDSA-ARIA128-GCM-SHA256", + "TLS_ECDHE_ECDSA_WITH_ARIA_256_GCM_SHA384": "ECDHE-ECDSA-ARIA256-GCM-SHA384", + "TLS_ECDHE_RSA_WITH_ARIA_128_GCM_SHA256": 
"ECDHE-ARIA128-GCM-SHA256", + "TLS_ECDHE_RSA_WITH_ARIA_256_GCM_SHA384": "ECDHE-ARIA256-GCM-SHA384", + "TLS_PSK_WITH_ARIA_128_GCM_SHA256": "PSK-ARIA128-GCM-SHA256", + "TLS_PSK_WITH_ARIA_256_GCM_SHA384": "PSK-ARIA256-GCM-SHA384", + "TLS_DHE_PSK_WITH_ARIA_128_GCM_SHA256": "DHE-PSK-ARIA128-GCM-SHA256", + "TLS_DHE_PSK_WITH_ARIA_256_GCM_SHA384": "DHE-PSK-ARIA256-GCM-SHA384", + "TLS_RSA_PSK_WITH_ARIA_128_GCM_SHA256": "RSA-PSK-ARIA128-GCM-SHA256", + "TLS_RSA_PSK_WITH_ARIA_256_GCM_SHA384": "RSA-PSK-ARIA256-GCM-SHA384", + # Camellia HMAC-Based cipher suites from RFC6367, extending TLS v1.2 + "TLS_ECDHE_ECDSA_WITH_CAMELLIA_128_CBC_SHA256": "ECDHE-ECDSA-CAMELLIA128-SHA256", + "TLS_ECDHE_ECDSA_WITH_CAMELLIA_256_CBC_SHA384": "ECDHE-ECDSA-CAMELLIA256-SHA384", + "TLS_ECDHE_RSA_WITH_CAMELLIA_128_CBC_SHA256": "ECDHE-RSA-CAMELLIA128-SHA256", + "TLS_ECDHE_RSA_WITH_CAMELLIA_256_CBC_SHA384": "ECDHE-RSA-CAMELLIA256-SHA384", + # Pre-shared keying (PSK) cipher suites", + "PSK_WITH_NULL_SHA": "PSK-NULL-SHA", + "DHE_PSK_WITH_NULL_SHA": "DHE-PSK-NULL-SHA", + "RSA_PSK_WITH_NULL_SHA": "RSA-PSK-NULL-SHA", + "PSK_WITH_RC4_128_SHA": "PSK-RC4-SHA", + "PSK_WITH_3DES_EDE_CBC_SHA": "PSK-3DES-EDE-CBC-SHA", + "PSK_WITH_AES_128_CBC_SHA": "PSK-AES128-CBC-SHA", + "PSK_WITH_AES_256_CBC_SHA": "PSK-AES256-CBC-SHA", + "DHE_PSK_WITH_RC4_128_SHA": "DHE-PSK-RC4-SHA", + "DHE_PSK_WITH_3DES_EDE_CBC_SHA": "DHE-PSK-3DES-EDE-CBC-SHA", + "DHE_PSK_WITH_AES_128_CBC_SHA": "DHE-PSK-AES128-CBC-SHA", + "DHE_PSK_WITH_AES_256_CBC_SHA": "DHE-PSK-AES256-CBC-SHA", + "RSA_PSK_WITH_RC4_128_SHA": "RSA-PSK-RC4-SHA", + "RSA_PSK_WITH_3DES_EDE_CBC_SHA": "RSA-PSK-3DES-EDE-CBC-SHA", + "RSA_PSK_WITH_AES_128_CBC_SHA": "RSA-PSK-AES128-CBC-SHA", + "RSA_PSK_WITH_AES_256_CBC_SHA": "RSA-PSK-AES256-CBC-SHA", + "PSK_WITH_AES_128_GCM_SHA256": "PSK-AES128-GCM-SHA256", + "PSK_WITH_AES_256_GCM_SHA384": "PSK-AES256-GCM-SHA384", + "DHE_PSK_WITH_AES_128_GCM_SHA256": "DHE-PSK-AES128-GCM-SHA256", + "DHE_PSK_WITH_AES_256_GCM_SHA384": "DHE-PSK-AES256-GCM-SHA384", + "RSA_PSK_WITH_AES_128_GCM_SHA256": "RSA-PSK-AES128-GCM-SHA256", + "RSA_PSK_WITH_AES_256_GCM_SHA384": "RSA-PSK-AES256-GCM-SHA384", + "PSK_WITH_AES_128_CBC_SHA256": "PSK-AES128-CBC-SHA256", + "PSK_WITH_AES_256_CBC_SHA384": "PSK-AES256-CBC-SHA384", + "PSK_WITH_NULL_SHA256": "PSK-NULL-SHA256", + "PSK_WITH_NULL_SHA384": "PSK-NULL-SHA384", + "DHE_PSK_WITH_AES_128_CBC_SHA256": "DHE-PSK-AES128-CBC-SHA256", + "DHE_PSK_WITH_AES_256_CBC_SHA384": "DHE-PSK-AES256-CBC-SHA384", + "DHE_PSK_WITH_NULL_SHA256": "DHE-PSK-NULL-SHA256", + "DHE_PSK_WITH_NULL_SHA384": "DHE-PSK-NULL-SHA384", + "RSA_PSK_WITH_AES_128_CBC_SHA256": "RSA-PSK-AES128-CBC-SHA256", + "RSA_PSK_WITH_AES_256_CBC_SHA384": "RSA-PSK-AES256-CBC-SHA384", + "RSA_PSK_WITH_NULL_SHA256": "RSA-PSK-NULL-SHA256", + "RSA_PSK_WITH_NULL_SHA384": "RSA-PSK-NULL-SHA384", + "ECDHE_PSK_WITH_RC4_128_SHA": "ECDHE-PSK-RC4-SHA", + "ECDHE_PSK_WITH_3DES_EDE_CBC_SHA": "ECDHE-PSK-3DES-EDE-CBC-SHA", + "ECDHE_PSK_WITH_AES_128_CBC_SHA": "ECDHE-PSK-AES128-CBC-SHA", + "ECDHE_PSK_WITH_AES_256_CBC_SHA": "ECDHE-PSK-AES256-CBC-SHA", + "ECDHE_PSK_WITH_AES_128_CBC_SHA256": "ECDHE-PSK-AES128-CBC-SHA256", + "ECDHE_PSK_WITH_AES_256_CBC_SHA384": "ECDHE-PSK-AES256-CBC-SHA384", + "ECDHE_PSK_WITH_NULL_SHA": "ECDHE-PSK-NULL-SHA", + "ECDHE_PSK_WITH_NULL_SHA256": "ECDHE-PSK-NULL-SHA256", + "ECDHE_PSK_WITH_NULL_SHA384": "ECDHE-PSK-NULL-SHA384", + "PSK_WITH_CAMELLIA_128_CBC_SHA256": "PSK-CAMELLIA128-SHA256", + "PSK_WITH_CAMELLIA_256_CBC_SHA384": "PSK-CAMELLIA256-SHA384", + "DHE_PSK_WITH_CAMELLIA_128_CBC_SHA256": 
"DHE-PSK-CAMELLIA128-SHA256", + "DHE_PSK_WITH_CAMELLIA_256_CBC_SHA384": "DHE-PSK-CAMELLIA256-SHA384", + "RSA_PSK_WITH_CAMELLIA_128_CBC_SHA256": "RSA-PSK-CAMELLIA128-SHA256", + "RSA_PSK_WITH_CAMELLIA_256_CBC_SHA384": "RSA-PSK-CAMELLIA256-SHA384", + "ECDHE_PSK_WITH_CAMELLIA_128_CBC_SHA256": "ECDHE-PSK-CAMELLIA128-SHA256", + "ECDHE_PSK_WITH_CAMELLIA_256_CBC_SHA384": "ECDHE-PSK-CAMELLIA256-SHA384", + "PSK_WITH_AES_128_CCM": "PSK-AES128-CCM", + "PSK_WITH_AES_256_CCM": "PSK-AES256-CCM", + "DHE_PSK_WITH_AES_128_CCM": "DHE-PSK-AES128-CCM", + "DHE_PSK_WITH_AES_256_CCM": "DHE-PSK-AES256-CCM", + "PSK_WITH_AES_128_CCM_8": "PSK-AES128-CCM8", + "PSK_WITH_AES_256_CCM_8": "PSK-AES256-CCM8", + "DHE_PSK_WITH_AES_128_CCM_8": "DHE-PSK-AES128-CCM8", + "DHE_PSK_WITH_AES_256_CCM_8": "DHE-PSK-AES256-CCM8", + # ChaCha20-Poly1305 cipher suites, extending TLS v1.2 + "TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256": "ECDHE-RSA-CHACHA20-POLY1305", + "TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256": "ECDHE-ECDSA-CHACHA20-POLY1305", + "TLS_DHE_RSA_WITH_CHACHA20_POLY1305_SHA256": "DHE-RSA-CHACHA20-POLY1305", + "TLS_PSK_WITH_CHACHA20_POLY1305_SHA256": "PSK-CHACHA20-POLY1305", + "TLS_ECDHE_PSK_WITH_CHACHA20_POLY1305_SHA256": "ECDHE-PSK-CHACHA20-POLY1305", + "TLS_DHE_PSK_WITH_CHACHA20_POLY1305_SHA256": "DHE-PSK-CHACHA20-POLY1305", + "TLS_RSA_PSK_WITH_CHACHA20_POLY1305_SHA256": "RSA-PSK-CHACHA20-POLY1305", +} + +# TLS v1.3 cipher suites IANI to OpenSSL name translation +TLSV1_3_CIPHER_SUITES: Dict[str, str] = { + "TLS_AES_128_GCM_SHA256": "TLS_AES_128_GCM_SHA256", + "TLS_AES_256_GCM_SHA384": "TLS_AES_256_GCM_SHA384", + "TLS_CHACHA20_POLY1305_SHA256": "TLS_CHACHA20_POLY1305_SHA256", + "TLS_AES_128_CCM_SHA256": "TLS_AES_128_CCM_SHA256", + "TLS_AES_128_CCM_8_SHA256": "TLS_AES_128_CCM_8_SHA256", +} + +TLS_CIPHER_SUITES: Dict[str, Dict[str, str]] = { + "TLSv1": TLSV1_CIPHER_SUITES, + "TLSv1.1": TLSV1_1_CIPHER_SUITES, + "TLSv1.2": TLSV1_2_CIPHER_SUITES, + "TLSv1.3": TLSV1_3_CIPHER_SUITES, +} + +OPENSSL_CS_NAMES: Dict[str, ValuesView[str]] = { + "TLSv1": TLSV1_CIPHER_SUITES.values(), + "TLSv1.1": TLSV1_1_CIPHER_SUITES.values(), + "TLSv1.2": TLSV1_2_CIPHER_SUITES.values(), + "TLSv1.3": TLSV1_3_CIPHER_SUITES.values(), +} diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/conversion.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/conversion.py new file mode 100644 index 0000000..e229ae8 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/conversion.py @@ -0,0 +1,746 @@ +# Copyright (c) 2009, 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. 
+# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""Converting MySQL and Python types +""" + +import datetime +import math +import struct +import time + +from decimal import Decimal +from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union + +from .constants import CharacterSet, FieldFlag, FieldType +from .custom_types import HexLiteral +from .types import ( + DescriptionType, + MySQLConvertibleType, + MySQLProducedType, + PythonProducedType, + StrOrBytes, +) +from .utils import NUMERIC_TYPES + +CONVERT_ERROR = "Could not convert '{value}' to python {pytype}" + + +class MySQLConverterBase: + """Base class for conversion classes + + All class dealing with converting to and from MySQL data types must + be a subclass of this class. + """ + + def __init__( + self, + charset: Optional[str] = "utf8", + use_unicode: bool = True, + str_fallback: bool = False, + ) -> None: + self._character_set: CharacterSet = CharacterSet() + self.python_types: Optional[Tuple] = None + self.mysql_types: Optional[Tuple] = None + self.charset: Optional[str] = None + self.charset_id: int = 0 + self.set_charset(charset) + self.use_unicode: bool = use_unicode + self.str_fallback: bool = str_fallback + self._cache_field_types: Dict[ + int, + Callable[[bytes, DescriptionType], PythonProducedType], + ] = {} + + def set_charset( + self, charset: Optional[str], character_set: Optional[CharacterSet] = None + ) -> None: + """Set character set""" + if charset in ("utf8mb4", "utf8mb3"): + charset = "utf8" + if charset is not None: + self.charset = charset + else: + # default to utf8 + self.charset = "utf8" + + if character_set: + self._character_set = character_set + + self.charset_id = self._character_set.get_charset_info(self.charset)[0] + + def set_unicode(self, value: bool = True) -> None: + """Set whether to use Unicode""" + self.use_unicode = value + + def to_mysql( + self, value: MySQLConvertibleType + ) -> Union[MySQLConvertibleType, HexLiteral]: + """Convert Python data type to MySQL""" + type_name = value.__class__.__name__.lower() + try: + converted: MySQLConvertibleType = getattr(self, f"_{type_name}_to_mysql")( + value + ) + return converted + except AttributeError: + return value + + def to_python( + self, vtype: DescriptionType, value: Optional[bytes] + ) -> PythonProducedType: + """Convert MySQL data type to Python""" + + if (value == b"\x00" or value is None) and vtype[1] != FieldType.BIT: + # Don't go further when we hit a NULL value + return None + + if not self._cache_field_types: + self._cache_field_types = {} + for name, info in FieldType.desc.items(): + try: + self._cache_field_types[info[0]] = getattr( + self, f"_{name.lower()}_to_python" + ) + except AttributeError: + # We ignore field types which has no method + pass + if value is None: + return None + try: + return self._cache_field_types[vtype[1]](value, vtype) + except KeyError: + return value + + @staticmethod + def escape( + value: Any, + sql_mode: Optional[str] = None, # pylint: disable=unused-argument + ) -> Any: + """Escape buffer for sending to MySQL""" + return value + + @staticmethod + def quote(buf: Any) 
-> StrOrBytes: + """Quote buffer for sending to MySQL""" + return str(buf) + + +class MySQLConverter(MySQLConverterBase): + """Default conversion class for MySQL Connector/Python. + + o escape method: for escaping values send to MySQL + o quoting method: for quoting values send to MySQL in statements + o conversion mapping: maps Python and MySQL data types to + function for converting them. + + Whenever one needs to convert values differently, a converter_class + argument can be given while instantiating a new connection like + cnx.connect(converter_class=CustomMySQLConverterClass). + + """ + + def __init__( + self, + charset: Optional[str] = None, + use_unicode: bool = True, + str_fallback: bool = False, + ) -> None: + super().__init__(charset, use_unicode, str_fallback) + self._cache_field_types: Dict[ + int, + Callable[[bytes, DescriptionType], PythonProducedType], + ] = {} + + @staticmethod + def escape(value: Any, sql_mode: Optional[str] = None) -> Any: + """ + Escapes special characters as they are expected to by when MySQL + receives them. + As found in MySQL source mysys/charset.c + + Returns the value if not a string, or the escaped string. + """ + if isinstance(value, (bytes, bytearray)): + if sql_mode == "NO_BACKSLASH_ESCAPES": + return value.replace(b"'", b"''") + value = value.replace(b"\\", b"\\\\") + value = value.replace(b"\n", b"\\n") + value = value.replace(b"\r", b"\\r") + value = value.replace(b"\047", b"\134\047") # single quotes + value = value.replace(b"\042", b"\134\042") # double quotes + value = value.replace(b"\032", b"\134\032") # for Win32 + elif isinstance(value, str) and not isinstance(value, HexLiteral): + if sql_mode == "NO_BACKSLASH_ESCAPES": + return value.replace("'", "''") + value = value.replace("\\", "\\\\") + value = value.replace("\n", "\\n") + value = value.replace("\r", "\\r") + value = value.replace("\047", "\134\047") # single quotes + value = value.replace("\042", "\134\042") # double quotes + value = value.replace("\032", "\134\032") # for Win32 + return value + + @staticmethod + def quote(buf: Optional[Union[float, int, Decimal, HexLiteral, bytes]]) -> bytes: + """ + Quote the parameters for commands. General rules: + o numbers are returns as bytes using ascii codec + o None is returned as bytearray(b'NULL') + o Everything else is single quoted '' + + Returns a bytearray object. 
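A small sketch of how `escape` and `quote` combine when building a statement by hand; the printed values are illustrative, and parameterized queries through the cursor remain the normal path:

```
from mysql.connector.conversion import MySQLConverter

conv = MySQLConverter()
print(MySQLConverter.escape("It's a 'test'\n"))    # It\'s a \'test\'\n
print(MySQLConverter.quote(42))                    # b'42'
print(MySQLConverter.quote(None))                  # bytearray(b'NULL')
print(MySQLConverter.quote(conv.to_mysql("abc")))  # bytearray(b"'abc'")
```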
+ """ + if isinstance(buf, NUMERIC_TYPES): + return str(buf).encode("ascii") + if isinstance(buf, type(None)): + return bytearray(b"NULL") + return bytearray(b"'" + buf + b"'") # type: ignore[operator] + + def to_mysql(self, value: MySQLConvertibleType) -> MySQLProducedType: + """Convert Python data type to MySQL""" + type_name = value.__class__.__name__.lower() + try: + converted: MySQLProducedType = getattr(self, f"_{type_name}_to_mysql")( + value + ) + return converted + except AttributeError: + if self.str_fallback: + return str(value).encode() + raise TypeError( + f"Python '{type_name}' cannot be converted to a MySQL type" + ) from None + + def to_python( + self, + vtype: DescriptionType, + value: Optional[bytes], + ) -> PythonProducedType: + """Convert MySQL data type to Python""" + # \x00 + if value == 0 and vtype[1] != FieldType.BIT: + # Don't go further when we hit a NULL value + return None + if value is None: + return None + + if not self._cache_field_types: + self._cache_field_types = {} + for name, info in FieldType.desc.items(): + try: + self._cache_field_types[info[0]] = getattr( + self, f"_{name.lower()}_to_python" + ) + except AttributeError: + # We ignore field types which has no method + pass + + try: + return self._cache_field_types[vtype[1]](value, vtype) + except KeyError: + # If one type is not defined, we just return the value as str + try: + return value.decode("utf-8") + except UnicodeDecodeError: + return value + except ValueError as err: + raise ValueError(f"{err} (field {vtype[0]})") from err + except TypeError as err: + raise TypeError(f"{err} (field {vtype[0]})") from err + + @staticmethod + def _int_to_mysql(value: int) -> int: + """Convert value to int""" + return int(value) + + @staticmethod + def _long_to_mysql(value: int) -> int: + """Convert value to int + + Note: There is no type "long" in Python 3 since integers are of unlimited size. + Since Python 2 is no longer supported, this method should be deprecated. + """ + return int(value) + + @staticmethod + def _float_to_mysql(value: float) -> Optional[float]: + """Convert value to float""" + if math.isnan(value): + return None + return float(value) + + def _str_to_mysql(self, value: str) -> Union[bytes, HexLiteral]: + """Convert value to string""" + return self._unicode_to_mysql(value) + + def _unicode_to_mysql(self, value: str) -> Union[bytes, HexLiteral]: + """Convert unicode""" + charset = self.charset + charset_id = self.charset_id + if charset == "binary": + charset = "utf8" + charset_id = self._character_set.get_charset_info(charset)[0] + encoded = value.encode(charset) + if charset_id in self._character_set.slash_charsets: + if b"\x5c" in encoded: + return HexLiteral(value, charset) + return encoded + + @staticmethod + def _bytes_to_mysql(value: bytes) -> bytes: + """Convert value to bytes""" + return value + + @staticmethod + def _bytearray_to_mysql(value: bytearray) -> bytes: + """Convert value to bytes""" + return bytes(value) + + @staticmethod + def _bool_to_mysql(value: bool) -> int: + """Convert value to boolean""" + return 1 if value else 0 + + @staticmethod + def _nonetype_to_mysql(value: None) -> None: # pylint: disable=unused-argument + """ + This would return what None would be in MySQL, but instead we + leave it None and return it right away. The actual conversion + from None to NULL happens in the quoting functionality. + + Return None. 
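A short sketch of the Python-to-MySQL direction using the converters above; the commented outputs are what these methods produce with the default utf8 charset:

```
from datetime import datetime
from mysql.connector.conversion import MySQLConverter

conv = MySQLConverter()
print(conv.to_mysql(True))                           # 1
print(conv.to_mysql(3.14))                           # 3.14
print(conv.to_mysql("abc"))                          # b'abc'
print(conv.to_mysql(datetime(2024, 1, 2, 3, 4, 5)))  # b'2024-01-02 03:04:05'
```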
+ """ + return None + + @staticmethod + def _datetime_to_mysql(value: datetime.datetime) -> bytes: + """ + Converts a datetime instance to a string suitable for MySQL. + The returned string has format: %Y-%m-%d %H:%M:%S[.%f] + + If the instance isn't a datetime.datetime type, it return None. + + Returns a bytes. + """ + if value.microsecond: + fmt = "{0:04d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}.{6:06d}" + return fmt.format( + value.year, + value.month, + value.day, + value.hour, + value.minute, + value.second, + value.microsecond, + ).encode("ascii") + + fmt = "{0:04d}-{1:02d}-{2:02d} {3:02d}:{4:02d}:{5:02d}" + return fmt.format( + value.year, + value.month, + value.day, + value.hour, + value.minute, + value.second, + ).encode("ascii") + + @staticmethod + def _date_to_mysql(value: datetime.date) -> bytes: + """ + Converts a date instance to a string suitable for MySQL. + The returned string has format: %Y-%m-%d + + If the instance isn't a datetime.date type, it return None. + + Returns a bytes. + """ + return f"{value.year:04d}-{value.month:02d}-{value.day:02d}".encode("ascii") + + @staticmethod + def _time_to_mysql(value: datetime.time) -> bytes: + """ + Converts a time instance to a string suitable for MySQL. + The returned string has format: %H:%M:%S[.%f] + + If the instance isn't a datetime.time type, it return None. + + Returns a bytes. + """ + if value.microsecond: + return value.strftime("%H:%M:%S.%f").encode("ascii") + return value.strftime("%H:%M:%S").encode("ascii") + + @staticmethod + def _struct_time_to_mysql(value: time.struct_time) -> bytes: + """ + Converts a time.struct_time sequence to a string suitable + for MySQL. + The returned string has format: %Y-%m-%d %H:%M:%S + + Returns a bytes or None when not valid. + """ + return time.strftime("%Y-%m-%d %H:%M:%S", value).encode("ascii") + + @staticmethod + def _timedelta_to_mysql(value: datetime.timedelta) -> bytes: + """ + Converts a timedelta instance to a string suitable for MySQL. + The returned string has format: %H:%M:%S + + Returns a bytes. + """ + seconds = abs(value.days * 86400 + value.seconds) + + if value.microseconds: + fmt = "{0:02d}:{1:02d}:{2:02d}.{3:06d}" + if value.days < 0: + mcs = 1000000 - value.microseconds + seconds -= 1 + else: + mcs = value.microseconds + else: + fmt = "{0:02d}:{1:02d}:{2:02d}" + + if value.days < 0: + fmt = "-" + fmt + + (hours, remainder) = divmod(seconds, 3600) + (mins, secs) = divmod(remainder, 60) + + if value.microseconds: + result = fmt.format(hours, mins, secs, mcs) + else: + result = fmt.format(hours, mins, secs) + + return result.encode("ascii") + + @staticmethod + def _decimal_to_mysql(value: Decimal) -> Optional[bytes]: + """ + Converts a decimal.Decimal instance to a string suitable for + MySQL. + + Returns a bytes or None when not valid. + """ + if isinstance(value, Decimal): + return str(value).encode("ascii") + + return None + + def row_to_python( + self, row: Tuple[bytes, ...], fields: List[DescriptionType] + ) -> Tuple[PythonProducedType, ...]: + """Convert a MySQL text result row to Python types + + The row argument is a sequence containing text result returned + by a MySQL server. Each value of the row is converted to the + using the field type information in the fields argument. + + Returns a tuple. 
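A minimal sketch of `row_to_python`: for these field types only indexes 0 (name) and 1 (type) of each description tuple are consulted, so abbreviated tuples suffice for illustration:

```
from mysql.connector.constants import FieldType
from mysql.connector.conversion import MySQLConverter

conv = MySQLConverter()
fields = [("id", FieldType.LONGLONG), ("price", FieldType.NEWDECIMAL)]
row = (b"42", b"9.99")
print(conv.row_to_python(row, fields))  # (42, Decimal('9.99'))
```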
+ """ + i = 0 + result: List[PythonProducedType] = [None] * len(fields) + + if not self._cache_field_types: + self._cache_field_types = {} + for name, info in FieldType.desc.items(): + try: + self._cache_field_types[info[0]] = getattr( + self, f"_{name.lower()}_to_python" + ) + except AttributeError: + # We ignore field types which has no method + pass + + for field in fields: + field_type = field[1] + + if (row[i] == 0 and field_type != FieldType.BIT) or row[i] is None: + # Don't convert NULL value + i += 1 + continue + + try: + result[i] = self._cache_field_types[field_type](row[i], field) + except KeyError: + # If one type is not defined, we just return the value as str + try: + result[i] = row[i].decode("utf-8") + except UnicodeDecodeError: + result[i] = row[i] + except (ValueError, TypeError) as err: + # Item "ValueError" of "Union[ValueError, TypeError]" has no attribute "message" + err.message = f"{err} (field {field[0]})" # type: ignore[union-attr] + raise + + i += 1 + + return tuple(result) + + # pylint: disable=unused-argument + @staticmethod + def _float_to_python(value: bytes, desc: Optional[DescriptionType] = None) -> float: + """ + Returns value as float type. + """ + return float(value) + + _double_to_python = _float_to_python + + @staticmethod + def _int_to_python(value: bytes, desc: Optional[DescriptionType] = None) -> int: + """ + Returns value as int type. + """ + return int(value) + + _tiny_to_python = _int_to_python + _short_to_python = _int_to_python + _int24_to_python = _int_to_python + _long_to_python = _int_to_python + _longlong_to_python = _int_to_python + + def _decimal_to_python( + self, value: bytes, desc: Optional[DescriptionType] = None + ) -> Decimal: + """ + Returns value as a decimal.Decimal. + """ + val = value.decode(self.charset) + return Decimal(val) + + _newdecimal_to_python = _decimal_to_python + + @staticmethod + def _str(value: bytes, desc: Optional[DescriptionType] = None) -> str: + """ + Returns value as str type. + """ + return str(value) + + @staticmethod + def _bit_to_python(value: bytes, dsc: Optional[DescriptionType] = None) -> int: + """Returns BIT columntype as integer""" + int_val = value + if len(int_val) < 8: + int_val = b"\x00" * (8 - len(int_val)) + int_val + return int(struct.unpack(">Q", int_val)[0]) + + @staticmethod + def _date_to_python( + value: bytes, dsc: Optional[DescriptionType] = None + ) -> Optional[datetime.date]: + """Converts TIME column MySQL to a python datetime.datetime type. + + Raises ValueError if the value can not be converted. + + Returns DATE column type as datetime.date type. + """ + if isinstance(value, datetime.date): + return value + try: + parts = value.split(b"-") + if len(parts) != 3: + raise ValueError(f"invalid datetime format: {parts} len: {len(parts)}") + try: + return datetime.date(int(parts[0]), int(parts[1]), int(parts[2])) + except ValueError: + return None + except (IndexError, ValueError): + raise ValueError( + f"Could not convert {repr(value)} to python datetime.timedelta" + ) from None + + _NEWDATE_to_python = _date_to_python + + @staticmethod + def _time_to_python( + value: bytes, dsc: Optional[DescriptionType] = None + ) -> datetime.timedelta: + """Converts TIME column value to python datetime.time value type. + + Converts the TIME column MySQL type passed as bytes to a python + datetime.datetime type. + + Raises ValueError if the value can not be converted. + + Returns datetime.timedelta type. 
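The same text-protocol parsing is reachable through `to_python`; a sketch with two-element description tuples (name, type), which is all these handlers read:

```
from mysql.connector.constants import FieldType
from mysql.connector.conversion import MySQLConverter

conv = MySQLConverter()
print(conv.to_python(("d", FieldType.DATE), b"2024-02-29"))  # 2024-02-29
print(conv.to_python(("t", FieldType.TIME), b"-01:30:00"))   # -1 day, 22:30:00 (timedelta)
```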
+ """ + mcs: Optional[Union[int, bytes]] = None + try: + (hms, mcs) = value.split(b".") + mcs = int(mcs.ljust(6, b"0")) + except (TypeError, ValueError): + hms = value + mcs = 0 + try: + (hours, mins, secs) = [int(d) for d in hms.split(b":")] + if value[0] == 45 or value[0] == "-": + mins, secs, mcs = ( + -mins, + -secs, + -mcs, # pylint: disable=invalid-unary-operand-type + ) + return datetime.timedelta( + hours=hours, minutes=mins, seconds=secs, microseconds=mcs + ) + except (IndexError, TypeError, ValueError): + raise ValueError( + CONVERT_ERROR.format(value=value, pytype="datetime.timedelta") + ) from None + + @staticmethod + def _datetime_to_python( + value: bytes, dsc: Optional[DescriptionType] = None + ) -> Optional[datetime.datetime]: + """Converts DATETIME column value to python datetime.time value type. + + Converts the DATETIME column MySQL type passed as bytes to a python + datetime.datetime type. + + Returns: datetime.datetime type. + """ + if isinstance(value, datetime.datetime): + return value + datetime_val = None + mcs: Optional[Union[int, bytes]] = None + try: + (date_, time_) = value.split(b" ") + if len(time_) > 8: + (hms, mcs) = time_.split(b".") + mcs = int(mcs.ljust(6, b"0")) + else: + hms = time_ + mcs = 0 + dtval = ( + [int(i) for i in date_.split(b"-")] + + [int(i) for i in hms.split(b":")] + + [ + mcs, + ] + ) + if len(dtval) < 6: + raise ValueError(f"invalid datetime format: {dtval} len: {len(dtval)}") + # Note that by default MySQL accepts invalid timestamps + # (this is also backward compatibility). + # Traditionaly C/py returns None for this well formed but + # invalid datetime for python like '0000-00-00 HH:MM:SS'. + try: + datetime_val = datetime.datetime(*dtval) # type: ignore[arg-type] + except ValueError: + return None + except (IndexError, TypeError): + raise ValueError( + CONVERT_ERROR.format(value=value, pytype="datetime.timedelta") + ) from None + + return datetime_val + + _timestamp_to_python = _datetime_to_python + + @staticmethod + def _year_to_python(value: bytes, dsc: Optional[DescriptionType] = None) -> int: + """Returns YEAR column type as integer""" + try: + year = int(value) + except ValueError as err: + raise ValueError(f"Failed converting YEAR to int ({repr(value)})") from err + + return year + + def _set_to_python( + self, value: bytes, dsc: Optional[DescriptionType] = None + ) -> Set[str]: + """Returns SET column type as set + + Actually, MySQL protocol sees a SET as a string type field. So this + code isn't called directly, but used by STRING_to_python() method. + + Returns SET column type as a set. + """ + set_type = None + val = value.decode(self.charset) + if not val: + return set() + try: + set_type = set(val.split(",")) + except ValueError as err: + raise ValueError( + f"Could not convert set {repr(value)} to a sequence" + ) from err + return set_type + + def _string_to_python( + self, value: bytes, dsc: Optional[DescriptionType] = None + ) -> Union[StrOrBytes, Set[str]]: + """ + Note that a SET is a string too, but using the FieldFlag we can see + whether we have to split it. + + Returns string typed columns as string type. 
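A sketch of the SET handling described above; the description tuple mimics `cursor.description` with the field flags at index 7 and the collation id at index 8 (63 would mean the binary charset), matching the checks in `_string_to_python`:

```
from mysql.connector.constants import FieldFlag, FieldType
from mysql.connector.conversion import MySQLConverter

conv = MySQLConverter(charset="utf8", use_unicode=True)
desc = ("tags", FieldType.STRING, None, None, None, None, 0, FieldFlag.SET, 33)
print(conv._string_to_python(b"red,green", desc))  # {'red', 'green'} (set, order may vary)
```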
+ """ + if self.charset == "binary": + return value + if dsc is not None: + if dsc[1] == FieldType.JSON and self.use_unicode: + return value.decode(self.charset) + if dsc[7] & FieldFlag.SET: + return self._set_to_python(value, dsc) + # 'binary' charset + if dsc[8] == 63: + return value + if isinstance(value, (bytes, bytearray)) and self.use_unicode: + try: + return value.decode(self.charset) + except UnicodeDecodeError: + return value + + return value + + _var_string_to_python = _string_to_python + _json_to_python = _string_to_python + + def _blob_to_python( + self, value: bytes, dsc: Optional[DescriptionType] = None + ) -> Union[StrOrBytes, Set[str]]: + """Convert BLOB data type to Python.""" + if dsc is not None: + if ( + dsc[7] & FieldFlag.BLOB + and dsc[7] & FieldFlag.BINARY + # 'binary' charset + and dsc[8] == 63 + ): + return bytes(value) + return self._string_to_python(value, dsc) + + _long_blob_to_python = _blob_to_python + _medium_blob_to_python = _blob_to_python + _tiny_blob_to_python = _blob_to_python + # pylint: enable=unused-argument diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/cursor.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/cursor.py new file mode 100644 index 0000000..a835540 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/cursor.py @@ -0,0 +1,1883 @@ +# Copyright (c) 2009, 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +# mypy: disable-error-code="assignment,arg-type,attr-defined,index,override,call-overload" + +"""Cursor classes.""" +from __future__ import annotations + +import re +import unicodedata +import warnings +import weakref + +from collections import deque, namedtuple +from decimal import Decimal +from typing import ( + TYPE_CHECKING, + Any, + Deque, + Dict, + Generator, + Iterator, + List, + NoReturn, + Optional, + Sequence, + Tuple, + Union, +) + +from .abstracts import NAMED_TUPLE_CACHE, MySQLCursorAbstract +from .constants import ServerFlag +from .errors import ( + Error, + InterfaceError, + NotSupportedError, + ProgrammingError, + get_mysql_exception, +) +from .types import ( + DescriptionType, + EofPacketType, + MySQLConvertibleType, + ParamsDictType, + ParamsSequenceOrDictType, + ParamsSequenceType, + ResultType, + RowItemType, + RowType, + StrOrBytes, + WarningType, +) + +if TYPE_CHECKING: + from .connection import MySQLConnection + + +SQL_COMMENT = r"\/\*.*?\*\/" +RE_SQL_COMMENT = re.compile( + rf"""({SQL_COMMENT})|(["'`][^"'`]*?({SQL_COMMENT})[^"'`]*?["'`])""", + re.I | re.M | re.S, +) +RE_SQL_ON_DUPLICATE = re.compile( + r"""\s*ON\s+DUPLICATE\s+KEY(?:[^"'`]*["'`][^"'`]*["'`])*[^"'`]*$""", + re.I | re.M | re.S, +) +RE_SQL_INSERT_STMT = re.compile( + rf"({SQL_COMMENT}|\s)*INSERT({SQL_COMMENT}|\s)" + r"*(?:IGNORE\s+)?INTO\s+[`'\"]?.+[`'\"]?(?:\.[`'\"]?.+[`'\"]?)" + r"{0,2}\s+VALUES\s*(\(.+\)).*", + re.I | re.M | re.S, +) +RE_SQL_INSERT_VALUES = re.compile(r".*VALUES\s*(\(.+\)).*", re.I | re.M | re.S) +RE_PY_PARAM = re.compile(b"(%s)") +RE_PY_MAPPING_PARAM = re.compile( + rb""" + % + \((?P[^)]+)\) + (?P[diouxXeEfFgGcrs%]) + """, + re.X, +) +RE_SQL_SPLIT_STMTS = re.compile( + b""";(?=(?:[^"'`]*(?:"[^"]*"|'[^']*'|`[^`]*`))*[^"'`]*$)""" +) +RE_SQL_FIND_PARAM = re.compile(b"""%s(?=(?:[^"'`]*["'`][^"'`]*["'`])*[^"'`]*$)""") +RE_SQL_PYTHON_REPLACE_PARAM = re.compile(r"%\(.*?\)s") +RE_SQL_PYTHON_CAPTURE_PARAM_NAME = re.compile(r"%\((.*?)\)s") + +ERR_NO_RESULT_TO_FETCH = "No result set to fetch from" + +MAX_RESULTS = 4294967295 + + +def is_eol_comment(stmt: bytes) -> bool: + """Checks if statement is an end-of-line comment. + + Double-dash comment style requires the second dash to be + followed by at least one whitespace (Z) or control character (C) such + as a space, tab, newline, and so on. + + Hash comment simply requires start from `#` and nothing else. + + Args: + stmt: MySQL statement. + + Returns: + Whether or not the statement is an end-of-line comment. + + References: + [1]: https://dev.mysql.com/doc/refman/8.0/en/comments.html + """ + is_double_dash_comment = ( + len(stmt) >= 3 + and stmt.startswith(b"--") + and unicodedata.category(chr(stmt[2]))[0] in {"Z", "C"} + ) + is_hash_comment = len(stmt) >= 2 and stmt.startswith(b"#") + + return is_double_dash_comment or is_hash_comment + + +def parse_multi_statement_query(multi_stmt: bytes) -> Deque[bytes]: + """Parses a multi-statement query/operation. + + Parsing consists of removing empty (which includes just whitespaces and/or control + characters) statements and EOL (end-of-line) comments. + + However, there's a caveat, by rule, the last EOL comment found in the stream isn't + removed if and only if it's the last statement. + + Why? 
EOL comments do not generate results, however, when the last statement is an + EOL comment the server returns an empty result. So, in other to match statements + and results correctly we need to keep the last EOL comment statement. + + Args: + multi_stmt: Query representing multi-statement operations separated by semicolons. + + Returns: + A list of statements that aren't empty and don't contain leading + ASCII whitespaces. Also, they aren't EOL comments except + perhaps for the last one. + """ + executed_list: Deque[bytes] = deque(RE_SQL_SPLIT_STMTS.split(multi_stmt)) + stmt, num_stms = b"", len(executed_list) + while num_stms > 0: + num_stms -= 1 + stmt_next = executed_list.popleft().lstrip() + if stmt_next: + stmt = stmt_next + if not is_eol_comment(stmt): + executed_list.append(stmt) + + if is_eol_comment(stmt): + executed_list.append(stmt) + + return executed_list + + +class _ParamSubstitutor: + """ + Substitutes parameters into SQL statement. + """ + + def __init__(self, params: Sequence[bytes]) -> None: + self.params: Sequence[bytes] = params + self.index: int = 0 + + def __call__(self, matchobj: re.Match) -> bytes: + index = self.index + self.index += 1 + try: + return bytes(self.params[index]) + except IndexError: + raise ProgrammingError( + "Not enough parameters for the SQL statement" + ) from None + + @property + def remaining(self) -> int: + """Returns number of parameters remaining to be substituted""" + return len(self.params) - self.index + + +def _bytestr_format_dict(bytestr: bytes, value_dict: Dict[bytes, bytes]) -> bytes: + """ + >>> _bytestr_format_dict(b'%(a)s', {b'a': b'foobar'}) + b'foobar + >>> _bytestr_format_dict(b'%%(a)s', {b'a': b'foobar'}) + b'%%(a)s' + >>> _bytestr_format_dict(b'%%%(a)s', {b'a': b'foobar'}) + b'%%foobar' + >>> _bytestr_format_dict(b'%(x)s %(y)s', + ... {b'x': b'x=%(y)s', b'y': b'y=%(x)s'}) + b'x=%(y)s y=%(x)s' + """ + + def replace(matchobj: re.Match) -> bytes: + """Replace pattern.""" + value: Optional[bytes] = None + groups = matchobj.groupdict() + if groups["conversion_type"] == b"%": + value = b"%" + if groups["conversion_type"] == b"s": + key = groups["mapping_key"] + value = value_dict[key] + if value is None: + raise ValueError( + f"Unsupported conversion_type: {groups['conversion_type']}" + ) + return value + + stmt = RE_PY_MAPPING_PARAM.sub(replace, bytestr) + return stmt + + +class CursorBase(MySQLCursorAbstract): + """ + Base for defining MySQLCursor. This class is a skeleton and defines + methods and members as required for the Python Database API + Specification v2.0. + + It's better to inherite from MySQLCursor. + """ + + _raw: bool = False + + def __init__(self) -> None: + self._description: Optional[List[DescriptionType]] = None + self._rowcount: int = -1 + self.arraysize: int = 1 + super().__init__() + + def callproc( + self, procname: str, args: Sequence = () + ) -> Optional[Union[Dict[str, RowItemType], RowType]]: + """Calls a stored procedue with the given arguments + + The arguments will be set during this session, meaning + they will be called like ___arg where + is an enumeration (+1) of the arguments. + + Coding Example: + 1) Definining the Stored Routine in MySQL: + CREATE PROCEDURE multiply(IN pFac1 INT, IN pFac2 INT, OUT pProd INT) + BEGIN + SET pProd := pFac1 * pFac2; + END + + 2) Executing in Python: + args = (5,5,0) # 0 is to hold pprod + cursor.callproc('multiply', args) + print(cursor.fetchone()) + + Does not return a value, but a result set will be + available when the CALL-statement execute successfully. 
+ Raises exceptions when something is wrong. + """ + + def close(self) -> None: + """Close the cursor.""" + + def execute( + self, + operation: str, + params: Union[ + Sequence[MySQLConvertibleType], Dict[str, MySQLConvertibleType] + ] = (), + multi: bool = False, + ) -> Optional[Generator[MySQLCursorAbstract, None, None]]: + """Executes the given operation + + Executes the given operation substituting any markers with + the given parameters. + + For example, getting all rows where id is 5: + cursor.execute("SELECT * FROM t1 WHERE id = %s", (5,)) + + The multi argument should be set to True when executing multiple + statements in one operation. If not set and multiple results are + found, an InterfaceError will be raised. + + If warnings where generated, and connection.get_warnings is True, then + self._warnings will be a list containing these warnings. + + Returns an iterator when multi is True, otherwise None. + """ + + def executemany( + self, + operation: str, + seq_params: Sequence[ + Union[Sequence[MySQLConvertibleType], Dict[str, MySQLConvertibleType]] + ], + ) -> Optional[Generator[MySQLCursorAbstract, None, None]]: + """Execute the given operation multiple times + + The executemany() method will execute the operation iterating + over the list of parameters in seq_params. + + Example: Inserting 3 new employees and their phone number + + data = [ + ('Jane','555-001'), + ('Joe', '555-001'), + ('John', '555-003') + ] + stmt = "INSERT INTO employees (name, phone) VALUES ('%s','%s')" + cursor.executemany(stmt, data) + + INSERT statements are optimized by batching the data, that is + using the MySQL multiple rows syntax. + + Results are discarded. If they are needed, consider looping over + data using the execute() method. + """ + + def fetchone(self) -> Optional[Union[RowType, Dict[str, RowItemType]]]: + """Return next row of a query result set. + + Returns: + tuple or None: A row from query result set. + """ + + def fetchmany(self, size: int = 1) -> List[Union[RowType, Dict[str, RowItemType]]]: + """Return the next set of rows of a query result set. + + When no more rows are available, it returns an empty list. + The number of rows returned can be specified using the size argument, + which defaults to one. + + Returns: + list: The next set of rows of a query result set. + """ + + def fetchall(self) -> List[Union[RowType, Dict[str, RowItemType]]]: + """Return all rows of a query result set. + + Returns: + list: A list of tuples with all rows of a query result set. + """ + + def stored_results(self) -> Iterator[MySQLCursorAbstract]: + """Returns an iterator for stored results. + + This method returns an iterator over results which are stored when + callproc() is called. The iterator will provide `MySQLCursorBuffered` + instances. + + Examples: + ``` + >>> cursor.callproc('myproc') + () + >>> for result in cursor.stored_results(): + ... print result.fetchall() + ... + [(1,)] + [(2,)] + ``` + """ + + def nextset(self) -> NoReturn: + """Not Implemented.""" + + def setinputsizes(self, sizes: Any) -> NoReturn: + """Not Implemented.""" + + def setoutputsize(self, size: Any, column: Any = None) -> NoReturn: + """Not Implemented.""" + + def reset(self, free: bool = True) -> None: + """Resets the cursor to default""" + + @property + def description(self) -> Optional[List[DescriptionType]]: + """Returns description of columns in a result + + This property returns a list of tuples describing the columns in + in a result set. 
A tuple is described as follows:: + + (column_name, + type, + None, + None, + None, + None, + null_ok, + column_flags) # Addition to PEP-249 specs + + Returns a list of tuples. + """ + return self._description + + @property + def rowcount(self) -> int: + """Returns the number of rows produced or affected + + This property returns the number of rows produced by queries + such as a SELECT, or affected rows when executing DML statements + like INSERT or UPDATE. + + Note that for non-buffered cursors it is impossible to know the + number of rows produced before having fetched them all. For those, + the number of rows will be -1 right after execution, and + incremented when fetching rows. + + Returns an integer. + """ + return self._rowcount + + +class MySQLCursor(CursorBase): + """Default cursor for interacting with MySQL + + This cursor will execute statements and handle the result. It will + not automatically fetch all rows. + + MySQLCursor should be inherited whenever other functionallity is + required. An example would to change the fetch* member functions + to return dictionaries instead of lists of values. + + Implements the Python Database API Specification v2.0 (PEP-249) + """ + + def __init__(self, connection: Optional[MySQLConnection] = None) -> None: + CursorBase.__init__(self) + self._connection: Optional[MySQLConnection] = None + self._nextrow: Tuple[Optional[RowType], Optional[EofPacketType]] = ( + None, + None, + ) + self._binary: bool = False + + if connection is not None: + self._set_connection(connection) + + def __iter__(self) -> Iterator[RowType]: + """ + Iteration over the result set which calls self.fetchone() + and returns the next row. + """ + return iter(self.fetchone, None) + + def _set_connection(self, connection: MySQLConnection) -> None: + """Set the connection""" + try: + self._connection = weakref.proxy(connection) + self._connection.is_connected() + except (AttributeError, TypeError): + raise InterfaceError(errno=2048) from None + + def _reset_result(self) -> None: + """Reset the cursor to default""" + self._rowcount: int = -1 + self._nextrow = (None, None) + self._stored_results: List[MySQLCursor] = [] + self._warnings: Optional[List[WarningType]] = None + self._warning_count: int = 0 + self._description: Optional[List[DescriptionType]] = None + self._executed: Optional[bytes] = None + self._executed_list: List[bytes] = [] + self.reset() + + def _have_unread_result(self) -> bool: + """Check whether there is an unread result""" + try: + return self._connection.unread_result + except AttributeError: + return False + + def _check_executed(self) -> None: + """Check if the statement has been executed. + + Raises an error if the statement has not been executed. + """ + if self._executed is None: + raise InterfaceError(ERR_NO_RESULT_TO_FETCH) + + def __next__(self) -> RowType: + """ + Used for iterating over the result set. Calles self.fetchone() + to get the next row. + """ + try: + row = self.fetchone() + except InterfaceError: + raise StopIteration from None + if not row: + raise StopIteration + return row + + def close(self) -> bool: + """Close the cursor + + Returns True when successful, otherwise False. 
+ """ + if self._connection is None: + return False + + self._connection.handle_unread_result() + self._reset_result() + self._connection = None + + return True + + def _process_params_dict( + self, params: ParamsDictType + ) -> Dict[bytes, Union[bytes, Decimal]]: + """Process query parameters given as dictionary""" + res: Dict[bytes, Any] = {} + try: + sql_mode = self._connection.sql_mode + to_mysql = self._connection.converter.to_mysql + escape = self._connection.converter.escape + quote = self._connection.converter.quote + for key, value in params.items(): + conv = value + conv = to_mysql(conv) + conv = escape(conv, sql_mode) + if not isinstance(value, Decimal): + conv = quote(conv) + res[key.encode()] = conv + except Exception as err: + raise ProgrammingError( + f"Failed processing pyformat-parameters; {err}" + ) from err + return res + + def _process_params( + self, params: ParamsSequenceType + ) -> Tuple[Union[bytes, Decimal], ...]: + """Process query parameters.""" + res = params[:] + try: + sql_mode = self._connection.sql_mode + to_mysql = self._connection.converter.to_mysql + escape = self._connection.converter.escape + quote = self._connection.converter.quote + res = [to_mysql(value) for value in res] + res = [escape(value, sql_mode) for value in res] + res = [ + quote(value) if not isinstance(params[i], Decimal) else value + for i, value in enumerate(res) + ] + except Exception as err: + raise ProgrammingError( + f"Failed processing format-parameters; {err}" + ) from err + return tuple(res) + + def _handle_noresultset(self, res: ResultType) -> None: + """Handles result of execute() when there is no result set""" + try: + self._rowcount = res["affected_rows"] + self._last_insert_id = res["insert_id"] + self._warning_count = res["warning_count"] + except (KeyError, TypeError) as err: + raise ProgrammingError(f"Failed handling non-resultset; {err}") from None + + self._handle_warnings() + + def _handle_resultset(self) -> None: + """Handles result set + + This method handles the result set and is called after reading + and storing column information in _handle_result(). For non-buffering + cursors, this method is usually doing nothing. + """ + + def _handle_result(self, result: ResultType) -> None: + """ + Handle the result after a command was send. The result can be either + an OK-packet or a dictionary containing column/eof information. + + Raises InterfaceError when result is not a dict() or result is + invalid. + """ + if not isinstance(result, dict): + raise InterfaceError("Result was not a dict()") + + if "columns" in result: + # Weak test, must be column/eof information + self._description = result["columns"] + self._connection.unread_result = True + self._handle_resultset() + elif "affected_rows" in result: + # Weak test, must be an OK-packet + self._connection.unread_result = False + self._handle_noresultset(result) + else: + raise InterfaceError("Invalid result") + + def _execute_iter( + self, query_iter: Generator[ResultType, None, None] + ) -> Generator[MySQLCursor, None, None]: + """Generator returns MySQLCursor objects for multiple statements + + This method is only used when multiple statements are executed + by the `cursor.execute(multi_stmt_query, multi=True)` method. + + It matches the given `query_iter` (result of `MySQLConnection.cmd_query_iter()`) + and the list of statements that were executed. + + How does this method work? To properly map each statement (stmt) to a result, + the following facts must be considered: + + 1. 
Read operations such as `SELECT` produce a non-empty result + (calling `next(query_iter)` gets a result that includes at least one column). + 2. Write operatios such as `INSERT` produce an empty result + (calling `next(query_iter)` gets a result with no columns - aka empty). + 3. End-of-line (EOL) comments do not produce a result, unless is the last stmt + in which case produces an empty result. + 4. Calling procedures such as `CALL my_proc` produce a sequence `(1)*0` which + means it may produce zero or more non-empty results followed by just one + empty result. In other words, a callproc stmt always terminates with an + empty result. E.g., `my_proc` includes an update + select + select + update, + then the result sequence will be `110` - note how the write ops results get + annulated, just the read ops results are produced. Other examples: + * insert + insert -> 0 + * select + select + insert + select -> 1110 + * select -> 10 + Observe how 0 indicates the end of the result sequence. This property is + vital to know what result corresponds to what callproc stmt. + + In this regard, the implementation is composed of: + 1. Parsing: the multi-statement is broken down into single statements, and then + for each of these, leading white spaces are removed (including + jumping line, vertical line, tab, etc.). Also, EOL comments are removed from + the stream, except when the comment is the last statement of the + multi-statement string. + 2. Mapping: the facts described above as used as "game rules" to properly match + statements and results. In case, if we run out of statements before running out + of results we use a sentinel named "stmt_overflow!" to indicate that the mapping + went wrong. + + Acronyms + 1: a non-empty result + 2: an empty result + """ + executed_list = parse_multi_statement_query(multi_stmt=self._executed) + self._executed = None + stmt = executed_list.popleft() if executed_list else b"stmt_overflow!" + for result in query_iter: + self._reset_result() + self._handle_result(result) + + if is_eol_comment(stmt): + continue + + self._executed = stmt.rstrip() + yield self + + if not stmt.upper().startswith(b"CALL") or "columns" not in result: + stmt = executed_list.popleft() if executed_list else b"stmt_overflow!" + + def execute( + self, + operation: StrOrBytes, + params: Optional[ParamsSequenceOrDictType] = None, + multi: bool = False, + ) -> Optional[Generator[MySQLCursor, None, None]]: + """Executes the given operation + + Executes the given operation substituting any markers with + the given parameters. + + For example, getting all rows where id is 5: + cursor.execute("SELECT * FROM t1 WHERE id = %s", (5,)) + + The multi argument should be set to True when executing multiple + statements in one operation. If not set and multiple results are + found, an InterfaceError will be raised. + + If warnings where generated, and connection.get_warnings is True, then + self._warnings will be a list containing these warnings. + + Returns an iterator when multi is True, otherwise None. 
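+
+        Named (pyformat) parameters are supported as well; for example,
+        reusing the illustrative table t1 from above::
+
+            cursor.execute("SELECT * FROM t1 WHERE id = %(id)s", {"id": 5})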
+ """ + if not operation: + return None + + try: + if not self._connection: + raise ProgrammingError + except (ProgrammingError, ReferenceError) as err: + raise ProgrammingError("Cursor is not connected") from err + + self._connection.handle_unread_result() + + self._reset_result() + stmt: StrOrBytes = "" + + try: + if not isinstance(operation, (bytes, bytearray)): + stmt = operation.encode(self._connection.python_charset) + else: + stmt = operation + except (UnicodeDecodeError, UnicodeEncodeError) as err: + raise ProgrammingError(str(err)) from err + + if params: + if isinstance(params, dict): + stmt = _bytestr_format_dict(stmt, self._process_params_dict(params)) + elif isinstance(params, (list, tuple)): + psub = _ParamSubstitutor(self._process_params(params)) + stmt = RE_PY_PARAM.sub(psub, stmt) + if psub.remaining != 0: + raise ProgrammingError( + "Not all parameters were used in the SQL statement" + ) + else: + raise ProgrammingError( + f"Could not process parameters: {type(params).__name__}({params})," + " it must be of type list, tuple or dict" + ) + + self._executed = stmt + if multi: + self._executed_list = [] + return self._execute_iter(self._connection.cmd_query_iter(stmt)) + + try: + self._handle_result(self._connection.cmd_query(stmt)) + except InterfaceError as err: + if self._connection.have_next_result: + raise InterfaceError( + "Use multi=True when executing multiple statements" + ) from err + raise + return None + + def _batch_insert( + self, operation: str, seq_params: Sequence[ParamsSequenceOrDictType] + ) -> Optional[bytes]: + """Implements multi row insert""" + + def remove_comments(match: re.Match) -> str: + """Remove comments from INSERT statements. + + This function is used while removing comments from INSERT + statements. If the matched string is a comment not enclosed + by quotes, it returns an empty string, else the string itself. + """ + if match.group(1): + return "" + return match.group(2) + + tmp = re.sub( + RE_SQL_ON_DUPLICATE, + "", + re.sub(RE_SQL_COMMENT, remove_comments, operation), + ) + + matches = re.search(RE_SQL_INSERT_VALUES, tmp) + if not matches: + raise InterfaceError( + "Failed rewriting statement for multi-row INSERT. Check SQL syntax" + ) + fmt = matches.group(1).encode(self._connection.python_charset) + values = [] + + try: + stmt = operation.encode(self._connection.python_charset) + for params in seq_params: + tmp = fmt + if isinstance(params, dict): + tmp = _bytestr_format_dict(tmp, self._process_params_dict(params)) + else: + psub = _ParamSubstitutor(self._process_params(params)) + tmp = RE_PY_PARAM.sub(psub, tmp) + if psub.remaining != 0: + raise ProgrammingError( + "Not all parameters were used in the SQL statement" + ) + values.append(tmp) + if fmt in stmt: + stmt = stmt.replace(fmt, b",".join(values), 1) + self._executed = stmt + return stmt + return None + except (UnicodeDecodeError, UnicodeEncodeError) as err: + raise ProgrammingError(str(err)) from err + except Error: + raise + except Exception as err: + raise InterfaceError(f"Failed executing the operation; {err}") from None + + def executemany( + self, operation: str, seq_params: Sequence[ParamsSequenceOrDictType] + ) -> Optional[Generator[MySQLCursor, None, None]]: + """Execute the given operation multiple times + + The executemany() method will execute the operation iterating + over the list of parameters in seq_params. 
+ + Example: Inserting 3 new employees and their phone number + + data = [ + ('Jane','555-001'), + ('Joe', '555-001'), + ('John', '555-003') + ] + stmt = "INSERT INTO employees (name, phone) VALUES ('%s','%s)" + cursor.executemany(stmt, data) + + INSERT statements are optimized by batching the data, that is + using the MySQL multiple rows syntax. + + Results are discarded. If they are needed, consider looping over + data using the execute() method. + """ + if not operation or not seq_params: + return None + self._connection.handle_unread_result() + + try: + _ = iter(seq_params) + except TypeError as err: + raise ProgrammingError("Parameters for query must be an Iterable") from err + + # Optimize INSERTs by batching them + if re.match(RE_SQL_INSERT_STMT, operation): + if not seq_params: + self._rowcount = 0 + return None + stmt = self._batch_insert(operation, seq_params) + if stmt is not None: + self._executed = stmt + return self.execute(stmt) + + rowcnt = 0 + try: + for params in seq_params: + self.execute(operation, params) + if self.with_rows and self._have_unread_result(): + self.fetchall() + rowcnt += self._rowcount + except (ValueError, TypeError) as err: + raise InterfaceError(f"Failed executing the operation; {err}") from None + self._rowcount = rowcnt + return None + + def stored_results(self) -> Iterator[MySQLCursor]: + """Returns an iterator for stored results + + This method returns an iterator over results which are stored when + callproc() is called. The iterator will provide MySQLCursorBuffered + instances. + + Returns a iterator. + """ + return iter(self._stored_results) + + def callproc( + self, + procname: str, + args: Sequence = (), + ) -> Optional[Union[Dict[str, RowItemType], RowType]]: + """Calls a stored procedure with the given arguments + + The arguments will be set during this session, meaning + they will be called like ___arg where + is an enumeration (+1) of the arguments. + + Coding Example: + 1) Defining the Stored Routine in MySQL: + CREATE PROCEDURE multiply(IN pFac1 INT, IN pFac2 INT, OUT pProd INT) + BEGIN + SET pProd := pFac1 * pFac2; + END + + 2) Executing in Python: + args = (5, 5, 0) # 0 is to hold pprod + cursor.callproc('multiply', args) + print(cursor.fetchone()) + + For OUT and INOUT parameters the user should provide the + type of the parameter as well. The argument should be a + tuple with first item as the value of the parameter to pass + and second argument the type of the argument. + + In the above example, one can call callproc method like: + args = (5, 5, (0, 'INT')) + cursor.callproc('multiply', args) + + The type of the argument given in the tuple will be used by + the MySQL CAST function to convert the values in the corresponding + MySQL type (See CAST in MySQL Reference for more information) + + Does not return a value, but a result set will be + available when the CALL-statement execute successfully. + Raises exceptions when something is wrong. + """ + if not procname or not isinstance(procname, str): + raise ValueError("procname must be a string") + + if not isinstance(args, (tuple, list)): + raise ValueError("args must be a sequence") + + argfmt = "@_{name}_arg{index}" + self._stored_results = [] + + results = [] + try: + argnames = [] + argtypes = [] + + # MySQL itself does support calling procedures with their full + # name .. It's necessary to split + # by '.' and grab the procedure name from procname. 
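+            # For example (illustrative), procname 'mydb.multiply' yields
+            # session variables named @_multiply_arg1, @_multiply_arg2, ...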
+ procname_abs = procname.split(".")[-1] + if args: + argvalues = [] + for idx, arg in enumerate(args): + argname = argfmt.format(name=procname_abs, index=idx + 1) + argnames.append(argname) + if isinstance(arg, tuple): + argtypes.append(f" CAST({argname} AS {arg[1]})") + argvalues.append(arg[0]) + else: + argtypes.append(argname) + argvalues.append(arg) + + placeholders = ",".join(f"{arg}=%s" for arg in argnames) + self.execute(f"SET {placeholders}", argvalues) + + call = f"CALL {procname}({','.join(argnames)})" + + # We disable consuming results temporary to make sure we + # getting all results + can_consume_results = self._connection.can_consume_results + for result in self._connection.cmd_query_iter(call): + self._connection.can_consume_results = False + if isinstance(self, (MySQLCursorDict, MySQLCursorBufferedDict)): + cursor_class = MySQLCursorBufferedDict + elif isinstance( + self, + (MySQLCursorNamedTuple, MySQLCursorBufferedNamedTuple), + ): + cursor_class = MySQLCursorBufferedNamedTuple + elif self._raw: + cursor_class = MySQLCursorBufferedRaw + else: + cursor_class = MySQLCursorBuffered + # pylint: disable=protected-access + cur = cursor_class(self._connection.get_self()) + cur._executed = f"(a result of {call})" + cur._handle_result(result) + # pylint: enable=protected-access + if cur.warnings is not None: + self._warnings = cur.warnings + if "columns" in result: + results.append(cur) + self._connection.can_consume_results = can_consume_results + + if argnames: + # Create names aliases to be compatible with namedtuples + args = [ + f"{name} AS {alias}" + for name, alias in zip( + argtypes, [arg.lstrip("@_") for arg in argnames] + ) + ] + select = f"SELECT {','.join(args)}" + self.execute(select) + self._stored_results = results + return self.fetchone() + + self._stored_results = results + return tuple() + + except Error: + raise + except Exception as err: + raise InterfaceError(f"Failed calling stored routine; {err}") from None + + def getlastrowid(self) -> Optional[int]: + """Returns the value generated for an AUTO_INCREMENT column + + Returns the value generated for an AUTO_INCREMENT column by + the previous INSERT or UPDATE statement. + + Returns a long value or None. + """ + return self._last_insert_id + + def _fetch_warnings(self) -> Optional[List[WarningType]]: + """ + Fetch warnings doing a SHOW WARNINGS. Can be called after getting + the result. + + Returns a result set or None when there were no warnings. + """ + res = [] + try: + cur = self._connection.cursor(raw=False) + cur.execute("SHOW WARNINGS") + res = cur.fetchall() + cur.close() + except Exception as err: + raise InterfaceError(f"Failed getting warnings; {err}") from None + + if res: + return res # type: ignore[return-value] + + return None + + def _handle_warnings(self) -> None: + """Handle possible warnings after all results are consumed. + + Raises: + Error: Also raises exceptions if raise_on_warnings is set. 
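+
+        Example (illustrative; cnx is an already opened connection, and both
+        flags are regular connection attributes)::
+
+            cnx.get_warnings = True        # fetch warnings after each statement
+            cnx.raise_on_warnings = True   # additionally turn them into errors
+            cursor.execute("SELECT 1/0")   # typically warns: Division by 0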
+ """ + if self._connection.get_warnings and self._warning_count: + self._warnings = self._fetch_warnings() + + if not self._warnings: + return + + err = get_mysql_exception( + self._warnings[0][1], + self._warnings[0][2], + warning=not self._connection.raise_on_warnings, + ) + + if self._connection.raise_on_warnings: + raise err + + warnings.warn(err, stacklevel=4) + + def _handle_eof(self, eof: EofPacketType) -> None: + """Handle EOF packet""" + self._connection.unread_result = False + self._nextrow = (None, None) + self._warning_count = eof["warning_count"] + self._handle_warnings() + + def _fetch_row(self, raw: bool = False) -> Optional[RowType]: + """Returns the next row in the result set + + Returns a tuple or None. + """ + if not self._have_unread_result(): + return None + row = None + + if self._nextrow == (None, None): + (row, eof) = self._connection.get_row( + binary=self._binary, columns=self.description, raw=raw + ) + else: + (row, eof) = self._nextrow + + if row: + self._nextrow = self._connection.get_row( + binary=self._binary, columns=self.description, raw=raw + ) + eof = self._nextrow[1] + if eof is not None: + self._handle_eof(eof) + if self._rowcount == -1: + self._rowcount = 1 + else: + self._rowcount += 1 + if eof: + self._handle_eof(eof) + + return row + + def fetchone(self) -> Optional[RowType]: + """Return next row of a query result set. + + Returns: + tuple or None: A row from query result set. + """ + self._check_executed() + return self._fetch_row() + + def fetchmany(self, size: Optional[int] = None) -> List[RowType]: + """Return the next set of rows of a query result set. + + When no more rows are available, it returns an empty list. + The number of rows returned can be specified using the size argument, + which defaults to one. + + Returns: + list: The next set of rows of a query result set. + """ + self._check_executed() + res = [] + cnt = size or self.arraysize + while cnt > 0 and self._have_unread_result(): + cnt -= 1 + row = self.fetchone() + if row: + res.append(row) + return res + + def fetchall(self) -> List[RowType]: + """Return all rows of a query result set. + + Returns: + list: A list of tuples with all rows of a query result set. + """ + self._check_executed() + if not self._have_unread_result(): + return [] + + (rows, eof) = self._connection.get_rows() + if self._nextrow[0]: + rows.insert(0, self._nextrow[0]) + + self._handle_eof(eof) + rowcount = len(rows) + if rowcount >= 0 and self._rowcount == -1: + self._rowcount = 0 + self._rowcount += rowcount + return rows + + @property + def column_names(self) -> Tuple[str, ...]: + """Returns column names + + This property returns the columns names as a tuple. + + Returns a tuple. + """ + if not self.description: + return tuple() + return tuple(d[0] for d in self.description) + + @property + def statement(self) -> Optional[str]: + """Returns the executed statement + + This property returns the executed statement. When multiple + statements were executed, the current statement in the iterator + will be returned. + """ + if self._executed is None: + return None + try: + return self._executed.strip().decode("utf-8") + except (AttributeError, UnicodeDecodeError): + return self._executed.strip() # type: ignore[return-value] + + @property + def with_rows(self) -> bool: + """Returns whether the cursor could have rows returned + + This property returns True when column descriptions are available + and possibly also rows, which will need to be fetched. + + Returns True or False. 
+ """ + if not self.description: + return False + return True + + def __str__(self) -> str: + fmt = "{class_name}: {stmt}" + if self._executed: + try: + executed = self._executed.decode("utf-8") + except AttributeError: + executed = self._executed + if len(executed) > 40: + executed = executed[:40] + ".." + else: + executed = "(Nothing executed yet)" + return fmt.format(class_name=self.__class__.__name__, stmt=executed) + + +class MySQLCursorBuffered(MySQLCursor): + """Cursor which fetches rows within execute()""" + + def __init__(self, connection: Optional[MySQLConnection] = None) -> None: + super().__init__(connection) + self._rows: Optional[List[RowType]] = None + self._next_row: int = 0 + + def _handle_resultset(self) -> None: + (self._rows, eof) = self._connection.get_rows() + self._rowcount = len(self._rows) + self._handle_eof(eof) + self._next_row = 0 + try: + self._connection.unread_result = False + except AttributeError: + pass + + def reset(self, free: bool = True) -> None: + self._rows = None + + def _fetch_row(self, raw: bool = False) -> Optional[RowType]: + row = None + try: + row = self._rows[self._next_row] + except (IndexError, TypeError): + return None + self._next_row += 1 + return row + + def fetchone(self) -> Optional[RowType]: + """Return next row of a query result set. + + Returns: + tuple or None: A row from query result set. + """ + self._check_executed() + return self._fetch_row() + + def fetchall(self) -> List[RowType]: + """Return all rows of a query result set. + + Returns: + list: A list of tuples with all rows of a query result set. + """ + if self._executed is None or self._rows is None: + raise InterfaceError(ERR_NO_RESULT_TO_FETCH) + res = [] + res = self._rows[self._next_row :] + self._next_row = len(self._rows) + return res + + def fetchmany(self, size: Optional[int] = None) -> List[RowType]: + """Return the next set of rows of a query result set. + + When no more rows are available, it returns an empty list. + The number of rows returned can be specified using the size argument, + which defaults to one. + + Returns: + list: The next set of rows of a query result set. + """ + self._check_executed() + res = [] + cnt = size or self.arraysize + while cnt > 0: + cnt -= 1 + row = self.fetchone() + if row: + res.append(row) + + return res + + @property + def with_rows(self) -> bool: + return self._rows is not None + + +class MySQLCursorRaw(MySQLCursor): + """ + Skips conversion from MySQL datatypes to Python types when fetching rows. + """ + + _raw: bool = True + + def fetchone(self) -> Optional[RowType]: + """Return next row of a query result set. + + Returns: + tuple or None: A row from query result set. + """ + self._check_executed() + return self._fetch_row(raw=True) + + def fetchall(self) -> List[RowType]: + """Return all rows of a query result set. + + Returns: + list: A list of tuples with all rows of a query result set. + """ + self._check_executed() + if not self._have_unread_result(): + return [] + (rows, eof) = self._connection.get_rows(raw=True) + if self._nextrow[0]: + rows.insert(0, self._nextrow[0]) + self._handle_eof(eof) + rowcount = len(rows) + if rowcount >= 0 and self._rowcount == -1: + self._rowcount = 0 + self._rowcount += rowcount + return rows + + +class MySQLCursorBufferedRaw(MySQLCursorBuffered): + """ + Cursor which skips conversion from MySQL datatypes to Python types when + fetching rows and fetches rows within execute(). 
+ """ + + _raw: bool = True + + def _handle_resultset(self) -> None: + (self._rows, eof) = self._connection.get_rows(raw=self._raw) + self._rowcount = len(self._rows) + self._handle_eof(eof) + self._next_row = 0 + try: + self._connection.unread_result = False + except AttributeError: + pass + + def fetchone(self) -> Optional[RowType]: + """Return next row of a query result set. + + Returns: + tuple or None: A row from query result set. + """ + self._check_executed() + return self._fetch_row() + + def fetchall(self) -> List[RowType]: + """Return all rows of a query result set. + + Returns: + list: A list of tuples with all rows of a query result set. + """ + self._check_executed() + return list(self._rows[self._next_row :]) + + @property + def with_rows(self) -> bool: + return self._rows is not None + + +class MySQLCursorPrepared(MySQLCursor): + """Cursor using MySQL Prepared Statements""" + + def __init__(self, connection: Optional[MySQLConnection] = None): + super().__init__(connection) + self._rows: Optional[List[RowType]] = None + self._next_row: int = 0 + self._prepared: Optional[Dict[str, Union[int, List[DescriptionType]]]] = None + self._binary: bool = True + self._have_result: Optional[bool] = None + self._last_row_sent: bool = False + self._cursor_exists: bool = False + + def reset(self, free: bool = True) -> None: + if self._prepared: + try: + self._connection.cmd_stmt_close(self._prepared["statement_id"]) + except Error: + # We tried to deallocate, but it's OK when we fail. + pass + self._prepared = None + self._last_row_sent = False + self._cursor_exists = False + + def _handle_noresultset(self, res: ResultType) -> None: + self._handle_server_status(res.get("status_flag", res.get("server_status", 0))) + super()._handle_noresultset(res) + + def _handle_server_status(self, flags: int) -> None: + """Check for SERVER_STATUS_CURSOR_EXISTS and + SERVER_STATUS_LAST_ROW_SENT flags set by the server. + """ + self._cursor_exists = flags & ServerFlag.STATUS_CURSOR_EXISTS != 0 + self._last_row_sent = flags & ServerFlag.STATUS_LAST_ROW_SENT != 0 + + def _handle_eof(self, eof: EofPacketType) -> None: + self._handle_server_status(eof.get("status_flag", eof.get("server_status", 0))) + super()._handle_eof(eof) + + def callproc(self, procname: Any, args: Any = ()) -> NoReturn: + """Calls a stored procedue + + Not supported with MySQLCursorPrepared. + """ + raise NotSupportedError() + + def close(self) -> None: + """Close the cursor + + This method will try to deallocate the prepared statement and close + the cursor. + """ + self.reset() + super().close() + + def _row_to_python(self, rowdata: Any, desc: Any = None) -> Any: + """Convert row data from MySQL to Python types + + The conversion is done while reading binary data in the + protocol module. 
+ """ + + def _handle_result(self, result: ResultType) -> None: + """Handle result after execution""" + if isinstance(result, dict): + self._connection.unread_result = False + self._have_result = False + self._handle_noresultset(result) + else: + self._description = result[1] + self._connection.unread_result = True + self._have_result = True + + if "status_flag" in result[2]: # type: ignore[operator] + self._handle_server_status(result[2]["status_flag"]) + elif "server_status" in result[2]: # type: ignore[operator] + self._handle_server_status(result[2]["server_status"]) + + def execute( + self, + operation: StrOrBytes, + params: Optional[ParamsSequenceOrDictType] = None, + multi: bool = False, + ) -> None: # multi is unused + """Prepare and execute a MySQL Prepared Statement + + This method will prepare the given operation and execute it using + the optionally given parameters. + + If the cursor instance already had a prepared statement, it is + first closed. + + Note: argument "multi" is unused. + """ + charset = self._connection.charset + if charset == "utf8mb4": + charset = "utf8" + + if not isinstance(operation, str): + try: + operation = operation.decode(charset) + except UnicodeDecodeError as err: + raise ProgrammingError(str(err)) from err + + if isinstance(params, dict): + replacement_keys = re.findall(RE_SQL_PYTHON_CAPTURE_PARAM_NAME, operation) + try: + # Replace params dict with params tuple in correct order. + params = tuple(params[key] for key in replacement_keys) + except KeyError as err: + raise ProgrammingError( + "Not all placeholders were found in the parameters dict" + ) from err + # Convert %(name)s to ? before sending it to MySQL + operation = re.sub(RE_SQL_PYTHON_REPLACE_PARAM, "?", operation) + + if operation is not self._executed: + if self._prepared: + self._connection.cmd_stmt_close(self._prepared["statement_id"]) + self._executed = operation + + try: + operation = operation.encode(charset) + except UnicodeEncodeError as err: + raise ProgrammingError(str(err)) from err + + if b"%s" in operation: + # Convert %s to ? before sending it to MySQL + operation = re.sub(RE_SQL_FIND_PARAM, b"?", operation) + + try: + self._prepared = self._connection.cmd_stmt_prepare(operation) + except Error: + self._executed = None + raise + + self._connection.cmd_stmt_reset(self._prepared["statement_id"]) + + if self._prepared["parameters"] and not params: + return + if params: + if not isinstance(params, (tuple, list)): + raise ProgrammingError( + errno=1210, + msg=f"Incorrect type of argument: {type(params).__name__}({params})" + ", it must be of type tuple or list the argument given to " + "the prepared statement", + ) + if len(self._prepared["parameters"]) != len(params): + raise ProgrammingError( + errno=1210, + msg="Incorrect number of arguments executing prepared statement", + ) + + if params is None: + params = () + res = self._connection.cmd_stmt_execute( + self._prepared["statement_id"], + data=params, + parameters=self._prepared["parameters"], + ) + self._handle_result(res) + + def executemany( + self, + operation: str, + seq_params: Sequence[ParamsSequenceType], + ) -> None: + """Prepare and execute a MySQL Prepared Statement many times + + This method will prepare the given operation and execute with each + tuple found the list seq_params. + + If the cursor instance already had a prepared statement, it is + first closed. + + executemany() simply calls execute(). 
+ """ + rowcnt = 0 + try: + for params in seq_params: + self.execute(operation, params) + if self.with_rows and self._have_unread_result(): + self.fetchall() + rowcnt += self._rowcount + except (ValueError, TypeError) as err: + raise InterfaceError(f"Failed executing the operation; {err}") from None + self._rowcount = rowcnt + + def fetchone(self) -> Optional[RowType]: + """Return next row of a query result set. + + Returns: + tuple or None: A row from query result set. + """ + self._check_executed() + if self._cursor_exists: + self._connection.cmd_stmt_fetch(self._prepared["statement_id"]) + return self._fetch_row() or None + + def fetchmany(self, size: Optional[int] = None) -> List[RowType]: + """Return the next set of rows of a query result set. + + When no more rows are available, it returns an empty list. + The number of rows returned can be specified using the size argument, + which defaults to one. + + Returns: + list: The next set of rows of a query result set. + """ + self._check_executed() + res = [] + cnt = size or self.arraysize + while cnt > 0 and self._have_unread_result(): + cnt -= 1 + row = self._fetch_row() + if row: + res.append(row) + return res + + def fetchall(self) -> List[RowType]: + """Return all rows of a query result set. + + Returns: + list: A list of tuples with all rows of a query result set. + """ + self._check_executed() + rows = [] + if self._nextrow[0]: + rows.append(self._nextrow[0]) + while self._have_unread_result(): + if self._cursor_exists: + self._connection.cmd_stmt_fetch( + self._prepared["statement_id"], MAX_RESULTS + ) + (tmp, eof) = self._connection.get_rows( + binary=self._binary, columns=self.description + ) + rows.extend(tmp) + self._handle_eof(eof) + self._rowcount = len(rows) + return rows + + +class MySQLCursorDict(MySQLCursor): + """ + Cursor fetching rows as dictionaries. + + The fetch methods of this class will return dictionaries instead of tuples. + Each row is a dictionary that looks like: + row = { + "col1": value1, + "col2": value2 + } + """ + + def _row_to_python( + self, + rowdata: RowType, + desc: Optional[List[DescriptionType]] = None, # pylint: disable=unused-argument + ) -> Optional[Dict[str, RowItemType]]: + """Convert a MySQL text result row to Python types + + Returns a dictionary. + """ + return dict(zip(self.column_names, rowdata)) if rowdata else None + + def fetchone(self) -> Optional[Dict[str, RowItemType]]: + """Return next row of a query result set. + + Returns: + dict or None: A dict from query result set. + """ + return self._row_to_python(super().fetchone(), self.description) + + def fetchall(self) -> List[Optional[Dict[str, RowItemType]]]: + """Return all rows of a query result set. + + Returns: + list: A list of dictionaries with all rows of a query + result set where column names are used as keys. + """ + return [ + self._row_to_python(row, self.description) + for row in super().fetchall() + if row + ] + + +class MySQLCursorNamedTuple(MySQLCursor): + """ + Cursor fetching rows as named tuple. + + The fetch methods of this class will return namedtuples instead of tuples. + Each row is returned as a namedtuple and the values can be accessed as: + row.col1, row.col2 + """ + + def _row_to_python( + self, + rowdata: RowType, + desc: Optional[List[DescriptionType]] = None, # pylint: disable=unused-argument + ) -> Optional[RowType]: + """Convert a MySQL text result row to Python types + + Returns a named tuple. 
+ """ + row = rowdata + + if row: + columns = tuple(self.column_names) + try: + named_tuple = NAMED_TUPLE_CACHE[columns] + except KeyError: + named_tuple = namedtuple("Row", columns) # type:ignore[no-redef, misc] + NAMED_TUPLE_CACHE[columns] = named_tuple + return named_tuple(*row) + return None + + def fetchone(self) -> Optional[RowType]: + """Return next row of a query result set. + + Returns: + tuple or None: A row from query result set. + """ + row = super().fetchone() + if not row: + return None + return ( + self._row_to_python(row, self.description) + if hasattr(self._connection, "converter") + else row + ) + + def fetchall(self) -> List[Optional[RowType]]: + """Return all rows of a query result set. + + Returns: + list: A list of tuples with all rows of a query result set. + """ + return [ + self._row_to_python(row, self.description) + for row in super().fetchall() + if row + ] + + +class MySQLCursorBufferedDict(MySQLCursorDict, MySQLCursorBuffered): + """ + Buffered Cursor fetching rows as dictionaries. + """ + + def fetchone(self) -> Optional[Dict[str, RowItemType]]: + """Return next row of a query result set. + + Returns: + tuple or None: A row from query result set. + """ + self._check_executed() + row = self._fetch_row() + if row: + return self._row_to_python(row, self.description) + return None + + def fetchall(self) -> List[Optional[Dict[str, RowItemType]]]: + """Return all rows of a query result set. + + Returns: + list: A list of tuples with all rows of a query result set. + """ + if self._executed is None or self._rows is None: + raise InterfaceError(ERR_NO_RESULT_TO_FETCH) + res = [] + for row in self._rows[self._next_row :]: + res.append(self._row_to_python(row, self.description)) + self._next_row = len(self._rows) + return res + + +class MySQLCursorBufferedNamedTuple(MySQLCursorNamedTuple, MySQLCursorBuffered): + """ + Buffered Cursor fetching rows as named tuple. + """ + + def fetchone(self) -> Optional[RowType]: + """Return next row of a query result set. + + Returns: + tuple or None: A row from query result set. + """ + self._check_executed() + row = self._fetch_row() + if row: + return self._row_to_python(row, self.description) + return None + + def fetchall(self) -> List[Optional[RowType]]: + """Return all rows of a query result set. + + Returns: + list: A list of tuples with all rows of a query result set. + """ + if self._executed is None or self._rows is None: + raise InterfaceError(ERR_NO_RESULT_TO_FETCH) + res = [] + for row in self._rows[self._next_row :]: + res.append(self._row_to_python(row, self.description)) + self._next_row = len(self._rows) + return res + + +class MySQLCursorPreparedDict(MySQLCursorDict, MySQLCursorPrepared): # type: ignore[misc] + """ + This class is a blend of features from MySQLCursorDict and MySQLCursorPrepared + + Multiple inheritance in python is allowed but care must be taken + when assuming methods resolution. In the case of multiple + inheritance, a given attribute is first searched in the current + class if it's not found then it's searched in the parent classes. + The parent classes are searched in a left-right fashion and each + class is searched once. + Based on python's attribute resolution, in this case, attributes + are searched as follows: + 1. MySQLCursorPreparedDict (current class) + 2. MySQLCursorDict (left parent class) + 3. MySQLCursorPrepared (right parent class) + 4. 
MySQLCursor (base class) + """ + + def fetchmany(self, size: Optional[int] = None) -> List[Dict[str, RowItemType]]: + """Return the next set of rows of a query result set. + + When no more rows are available, it returns an empty list. + The number of rows returned can be specified using the size argument, + which defaults to one. + + Returns: + list: The next set of rows of a query result set represented + as a list of dictionaries where column names are used as keys. + """ + return [ + self._row_to_python(row, self.description) + for row in super().fetchmany(size=size) + if row + ] + + +class MySQLCursorPreparedNamedTuple(MySQLCursorNamedTuple, MySQLCursorPrepared): + """ + This class is a blend of features from MySQLCursorNamedTuple and MySQLCursorPrepared + """ + + def fetchmany(self, size: Optional[int] = None) -> List[RowType]: + """Return the next set of rows of a query result set. + + When no more rows are available, it returns an empty list. + The number of rows returned can be specified using the size argument, + which defaults to one. + + Returns: + list: The next set of rows of a query result set represented + as a list of named tuples where column names are used as names. + """ + return [ + self._row_to_python(row, self.description) + for row in super().fetchmany(size=size) + if row + ] + + +class MySQLCursorPreparedRaw(MySQLCursorPrepared): + """ + This class is a blend of features from MySQLCursorRaw and MySQLCursorPrepared + """ + + _raw: bool = True + + def fetchone(self) -> Optional[RowType]: + """Return next row of a query result set. + + Returns: + tuple or None: A row from query result set. + """ + self._check_executed() + if self._cursor_exists: + self._connection.cmd_stmt_fetch(self._prepared["statement_id"]) + return self._fetch_row(raw=self._raw) or None + + def fetchmany(self, size: Optional[int] = None) -> List[RowType]: + """Return the next set of rows of a query result set. + + When no more rows are available, it returns an empty list. + The number of rows returned can be specified using the size argument, + which defaults to one. + + Returns: + list: The next set of rows of a query result set. + """ + self._check_executed() + res = [] + cnt = size or self.arraysize + while cnt > 0 and self._have_unread_result(): + cnt -= 1 + row = self._fetch_row(raw=self._raw) + if row: + res.append(row) + return res + + def fetchall(self) -> List[RowType]: + """Return all rows of a query result set. + + Returns: + list: A list of tuples with all rows of a query result set. + """ + self._check_executed() + rows = [] + if self._nextrow[0]: + rows.append(self._nextrow[0]) + while self._have_unread_result(): + if self._cursor_exists: + self._connection.cmd_stmt_fetch( + self._prepared["statement_id"], MAX_RESULTS + ) + (tmp, eof) = self._connection.get_rows( + raw=self._raw, binary=self._binary, columns=self.description + ) + rows.extend(tmp) + self._handle_eof(eof) + self._rowcount = len(rows) + return rows diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/cursor_cext.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/cursor_cext.py new file mode 100644 index 0000000..734fa5b --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/cursor_cext.py @@ -0,0 +1,1334 @@ +# Copyright (c) 2014, 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. 
+# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +# mypy: disable-error-code="assignment,override,union-attr" + +"""Cursor classes using the C Extension.""" +from __future__ import annotations + +import re +import warnings +import weakref + +from collections import namedtuple +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Generator, + Iterator, + List, + NoReturn, + Optional, + Sequence, + Tuple, + Union, +) + +# pylint: disable=import-error,no-name-in-module +from _mysql_connector import MySQLInterfaceError + +from .types import ( + CextEofPacketType, + CextResultType, + DescriptionType, + ParamsSequenceOrDictType, + ParamsSequenceType, + RowItemType, + RowType, + StrOrBytes, + WarningType, +) + +# pylint: enable=import-error,no-name-in-module +# isort: split + +from .abstracts import ( + NAMED_TUPLE_CACHE, + CMySQLPrepStmt, + MySQLConnectionAbstract, + MySQLCursorAbstract, +) +from .cursor import ( + RE_PY_PARAM, + RE_SQL_COMMENT, + RE_SQL_FIND_PARAM, + RE_SQL_INSERT_STMT, + RE_SQL_INSERT_VALUES, + RE_SQL_ON_DUPLICATE, + RE_SQL_PYTHON_CAPTURE_PARAM_NAME, + RE_SQL_PYTHON_REPLACE_PARAM, + is_eol_comment, + parse_multi_statement_query, +) +from .errorcode import CR_NO_RESULT_SET +from .errors import ( + Error, + InterfaceError, + NotSupportedError, + ProgrammingError, + get_mysql_exception, +) + +if TYPE_CHECKING: + from .connection_cext import CMySQLConnection + +ERR_NO_RESULT_TO_FETCH = "No result set to fetch from" + + +class _ParamSubstitutor: + + """ + Substitutes parameters into SQL statement. 
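+
+    For example (illustrative; parameters are expected to be already
+    escaped and quoted byte strings)::
+
+        psub = _ParamSubstitutor([b"'Jane'", b"5"])
+        RE_PY_PARAM.sub(psub, b"SELECT %s, %s")  # -> b"SELECT 'Jane', 5"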
+ """ + + def __init__(self, params: Sequence[bytes]) -> None: + self.params: Sequence[bytes] = params + self.index: int = 0 + + def __call__(self, matchobj: object) -> bytes: + index = self.index + self.index += 1 + try: + return self.params[index] + except IndexError: + raise ProgrammingError( + "Not enough parameters for the SQL statement" + ) from None + + @property + def remaining(self) -> int: + """Returns number of parameters remaining to be substituted""" + return len(self.params) - self.index + + +class CMySQLCursor(MySQLCursorAbstract): + + """Default cursor for interacting with MySQL using C Extension""" + + _raw: bool = False + _buffered: bool = False + _raw_as_string: bool = False + + def __init__(self, connection: CMySQLConnection) -> None: + """Initialize""" + MySQLCursorAbstract.__init__(self) + + self._affected_rows: int = -1 + self._rowcount: int = -1 + self._nextrow: Tuple[Optional[RowType], Optional[CextEofPacketType]] = ( + None, + None, + ) + + if not isinstance(connection, MySQLConnectionAbstract): + raise InterfaceError(errno=2048) + self._cnx: CMySQLConnection = weakref.proxy(connection) + + def reset(self, free: bool = True) -> None: + """Reset the cursor + + When free is True (default) the result will be freed. + """ + self._rowcount = -1 + self._nextrow = None + self._affected_rows = -1 + self._last_insert_id: int = 0 + self._warning_count: int = 0 + self._warnings: Optional[List[WarningType]] = None + self._warnings = None + self._warning_count = 0 + self._description: Optional[List[DescriptionType]] = None + self._executed_list: List[StrOrBytes] = [] + if free and self._cnx: + self._cnx.free_result() + super().reset() + + def _check_executed(self) -> None: + """Check if the statement has been executed. + + Raises an error if the statement has not been executed. + """ + if self._executed is None: + raise InterfaceError(ERR_NO_RESULT_TO_FETCH) + + def _fetch_warnings(self) -> Optional[List[WarningType]]: + """Fetch warnings + + Fetch warnings doing a SHOW WARNINGS. Can be called after getting + the result. + + Returns a result set or None when there were no warnings. + + Raises Error (or subclass) on errors. + + Returns list of tuples or None. + """ + warns = [] + try: + # force freeing result + self._cnx.consume_results() + _ = self._cnx.cmd_query("SHOW WARNINGS") + warns = self._cnx.get_rows()[0] + self._cnx.consume_results() + except MySQLInterfaceError as err: + raise get_mysql_exception( + msg=err.msg, errno=err.errno, sqlstate=err.sqlstate + ) from err + except Exception as err: + raise InterfaceError(f"Failed getting warnings; {err}") from None + + if warns: + return warns # type: ignore[return-value] + + return None + + def _handle_warnings(self) -> None: + """Handle possible warnings after all results are consumed. + + Raises: + Error: Also raises exceptions if raise_on_warnings is set. 
+ """ + if self._cnx.get_warnings and self._warning_count: + self._warnings = self._fetch_warnings() + + if not self._warnings: + return + + err = get_mysql_exception( + *self._warnings[0][1:3], warning=not self._cnx.raise_on_warnings + ) + if self._cnx.raise_on_warnings: + raise err + + warnings.warn(str(err), stacklevel=4) + + def _handle_result(self, result: Union[CextEofPacketType, CextResultType]) -> None: + """Handles the result after statement execution""" + if "columns" in result: + self._description = result["columns"] + self._rowcount = 0 + self._handle_resultset() + else: + self._last_insert_id = result["insert_id"] + self._warning_count = result["warning_count"] + self._affected_rows = result["affected_rows"] + self._rowcount = -1 + self._handle_warnings() + + def _handle_resultset(self) -> None: + """Handle a result set""" + + def _handle_eof(self) -> None: + """Handle end of reading the result + + Raises an Error on errors. + """ + self._warning_count = self._cnx.warning_count + self._handle_warnings() + if not self._cnx.more_results: + self._cnx.free_result() + + def _execute_iter(self) -> Generator[CMySQLCursor, None, None]: + """Generator returns MySQLCursor objects for multiple statements. + + This method is only used when multiple statements are executed + by the `cursor.execute(multi_stmt_query, multi=True)` method. + + How does this method work? To properly map each statement (stmt) to a result, + the following facts must be considered: + + 1. Read operations such as `SELECT` produce a non-empty result + (calling `next(query_iter)` gets a result that includes at least one column). + 2. Write operatios such as `INSERT` produce an empty result + (calling `next(query_iter)` gets a result with no columns - aka empty). + 3. End-of-line (EOL) comments do not produce a result, unless is the last stmt + in which case produces an empty result. + 4. Calling procedures such as `CALL my_proc` produce a sequence `(1)*0` which + means it may produce zero or more non-empty results followed by just one + empty result. In other words, a callproc stmt always terminates with an + empty result. E.g., `my_proc` includes an update + select + select + update, + then the result sequence will be `110` - note how the write ops results get + annulated, just the read ops results are produced. Other examples: + * insert + insert -> 0 + * select + select + insert + select -> 1110 + * select -> 10 + Observe how 0 indicates the end of the result sequence. This property is + vital to know what result corresponds to what callproc stmt. + + In this regard, the implementation is composed of: + 1. Parsing: the multi-statement is broken down into single statements, and then + for each of these, leading white spaces are removed (including + jumping line, vertical line, tab, etc.). Also, EOL comments are removed from + the stream, except when the comment is the last statement of the + multi-statement string. + 2. Mapping: the facts described above as used as "game rules" to properly match + statements and results. In case, if we run out of statements before running out + of results we use a sentinel named "stmt_overflow!" to indicate that the mapping + went wrong. 
+ + Acronyms + 1: a non-empty result + 2: an empty result + """ + executed_list = parse_multi_statement_query(multi_stmt=self._executed) + self._executed = None + stmt = b"" + result: bool = False + while True: + try: + if not stmt.upper().startswith(b"CALL") or not result: + stmt = ( + executed_list.popleft() if executed_list else b"stmt_overflow!" + ) + + # at this point the result has been fetched already + result = self._cnx.result_set_available + + if is_eol_comment(stmt): + continue + + self._executed = stmt.rstrip() + yield self + + if not self.nextset(): + raise StopIteration + except InterfaceError as err: + # Result without result set + if err.errno != CR_NO_RESULT_SET: + raise + except StopIteration: + return + + def execute( + self, + operation: StrOrBytes, + params: ParamsSequenceOrDictType = (), + multi: bool = False, + ) -> Optional[Generator[CMySQLCursor, None, None]]: + """Execute given statement using given parameters + + Deprecated: The multi argument is not needed and nextset() should + be used to handle multiple result sets. + """ + if not operation: + return None + + try: + if not self._cnx or self._cnx.is_closed(): + raise ProgrammingError + except (ProgrammingError, ReferenceError) as err: + raise ProgrammingError("Cursor is not connected", 2055) from err + self._cnx.handle_unread_result() + + stmt = b"" + self.reset() + + try: + if isinstance(operation, str): + stmt = operation.encode(self._cnx.python_charset) + else: + stmt = operation + except (UnicodeDecodeError, UnicodeEncodeError) as err: + raise ProgrammingError(str(err)) from err + + if params: + prepared = self._cnx.prepare_for_mysql(params) + if isinstance(prepared, dict): + for key, value in prepared.items(): + stmt = stmt.replace(f"%({key})s".encode(), value) + elif isinstance(prepared, (list, tuple)): + psub = _ParamSubstitutor(prepared) + stmt = RE_PY_PARAM.sub(psub, stmt) + if psub.remaining != 0: + raise ProgrammingError( + "Not all parameters were used in the SQL statement" + ) + + try: + result = self._cnx.cmd_query( + stmt, + raw=self._raw, + buffered=self._buffered, + raw_as_string=self._raw_as_string, + ) + except MySQLInterfaceError as err: + raise get_mysql_exception( + msg=err.msg, errno=err.errno, sqlstate=err.sqlstate + ) from err + + self._executed = stmt + self._handle_result(result) + + if multi: + return self._execute_iter() + + return None + + def _batch_insert( + self, + operation: str, + seq_params: Sequence[ParamsSequenceOrDictType], + ) -> Optional[bytes]: + """Implements multi row insert""" + + def remove_comments(match: re.Match) -> str: + """Remove comments from INSERT statements. + + This function is used while removing comments from INSERT + statements. If the matched string is a comment not enclosed + by quotes, it returns an empty string, else the string itself. + """ + if match.group(1): + return "" + return match.group(2) + + tmp = re.sub( + RE_SQL_ON_DUPLICATE, + "", + re.sub(RE_SQL_COMMENT, remove_comments, operation), + ) + + matches = re.search(RE_SQL_INSERT_VALUES, tmp) + if not matches: + raise InterfaceError( + "Failed rewriting statement for multi-row INSERT. 
Check SQL syntax" + ) + fmt = matches.group(1).encode(self._cnx.python_charset) + values = [] + + try: + stmt = operation.encode(self._cnx.python_charset) + for params in seq_params: + tmp = fmt + prepared = self._cnx.prepare_for_mysql(params) + if isinstance(prepared, dict): + for key, value in prepared.items(): + tmp = tmp.replace( + f"%({key})s".encode(), value # type: ignore[arg-type] + ) + elif isinstance(prepared, (list, tuple)): + psub = _ParamSubstitutor(prepared) + tmp = RE_PY_PARAM.sub(psub, tmp) # type: ignore[call-overload] + if psub.remaining != 0: + raise ProgrammingError( + "Not all parameters were used in the SQL statement" + ) + values.append(tmp) + + if fmt in stmt: + stmt = stmt.replace(fmt, b",".join(values), 1) # type: ignore[arg-type] + self._executed = stmt + return stmt + return None + except (UnicodeDecodeError, UnicodeEncodeError) as err: + raise ProgrammingError(str(err)) from err + except Exception as err: + raise InterfaceError(f"Failed executing the operation; {err}") from None + + def executemany( + self, + operation: str, + seq_params: Sequence[ParamsSequenceOrDictType], + ) -> Optional[Generator[CMySQLCursor, None, None]]: + """Execute the given operation multiple times + + The executemany() method will execute the operation iterating + over the list of parameters in seq_params. + + Example: Inserting 3 new employees and their phone number + + data = [ + ('Jane','555-001'), + ('Joe', '555-001'), + ('John', '555-003') + ] + stmt = "INSERT INTO employees (name, phone) VALUES ('%s','%s)" + cursor.executemany(stmt, data) + + INSERT statements are optimized by batching the data, that is + using the MySQL multiple rows syntax. + + Results are discarded! If they are needed, consider looping over + data using the execute() method. + """ + if not operation or not seq_params: + return None + + try: + if not self._cnx: + raise ProgrammingError + except (ProgrammingError, ReferenceError) as err: + raise ProgrammingError("Cursor is not connected") from err + self._cnx.handle_unread_result() + + if not isinstance(seq_params, (list, tuple)): + raise ProgrammingError("Parameters for query must be list or tuple.") + + # Optimize INSERTs by batching them + if re.match(RE_SQL_INSERT_STMT, operation): + if not seq_params: + self._rowcount = 0 + return None + stmt = self._batch_insert(operation, seq_params) + if stmt is not None: + self._executed = stmt + return self.execute(stmt) + + rowcnt = 0 + try: + # When processing read ops (e.g., SELECT), rowcnt is updated + # based on self._rowcount. For write ops (e.g., INSERT) is + # updated based on self._affected_rows. + # The variable self._description is None for write ops, that's + # why we use it as indicator for updating rowcnt. + for params in seq_params: + self.execute(operation, params) + if self.with_rows and self._cnx.unread_result: + self.fetchall() + rowcnt += self._rowcount if self.description else self._affected_rows + except (ValueError, TypeError) as err: + raise InterfaceError(f"Failed executing the operation; {err}") from None + + self._rowcount = rowcnt + return None + + @property + def description(self) -> Optional[List[DescriptionType]]: + """Returns description of columns in a result""" + return self._description + + @property + def rowcount(self) -> int: + """Returns the number of rows produced or affected""" + if self._rowcount == -1: + return self._affected_rows + return self._rowcount + + def close(self) -> bool: + """Close the cursor + + The result will be freed. 
+ """ + if not self._cnx: + return False + + self._cnx.handle_unread_result() + self._warnings = None + self._cnx = None + return True + + def callproc( + self, + procname: str, + args: Sequence = (), + ) -> Optional[Union[Dict[str, RowItemType], RowType]]: + """Calls a stored procedure with the given arguments""" + if not procname or not isinstance(procname, str): + raise ValueError("procname must be a string") + + if not isinstance(args, (tuple, list)): + raise ValueError("args must be a sequence") + + argfmt = "@_{name}_arg{index}" + self._stored_results = [] + + try: + argnames = [] + argtypes = [] + + # MySQL itself does support calling procedures with their full + # name .. It's necessary to split + # by '.' and grab the procedure name from procname. + procname_abs = procname.split(".")[-1] + if args: + argvalues = [] + for idx, arg in enumerate(args): + argname = argfmt.format(name=procname_abs, index=idx + 1) + argnames.append(argname) + if isinstance(arg, tuple): + argtypes.append(f" CAST({argname} AS {arg[1]})") + argvalues.append(arg[0]) + else: + argtypes.append(argname) + argvalues.append(arg) + + placeholders = ",".join(f"{arg}=%s" for arg in argnames) + self.execute(f"SET {placeholders}", argvalues) + + call = f"CALL {procname}({','.join(argnames)})" + + result = self._cnx.cmd_query( + call, raw=self._raw, raw_as_string=self._raw_as_string + ) + + results = [] + while self._cnx.result_set_available: + result = self._cnx.fetch_eof_columns() + if isinstance(self, (CMySQLCursorDict, CMySQLCursorBufferedDict)): + cursor_class = CMySQLCursorBufferedDict + elif isinstance( + self, + (CMySQLCursorNamedTuple, CMySQLCursorBufferedNamedTuple), + ): + cursor_class = CMySQLCursorBufferedNamedTuple + elif self._raw: + cursor_class = CMySQLCursorBufferedRaw + else: + cursor_class = CMySQLCursorBuffered + # pylint: disable=protected-access + cur = cursor_class(self._cnx.get_self()) # type: ignore[arg-type] + cur._executed = f"(a result of {call})" + cur._handle_result(result) + # pylint: enable=protected-access + results.append(cur) + self._cnx.next_result() + self._stored_results = results + self._handle_eof() + + if argnames: + self.reset() + # Create names aliases to be compatible with namedtuples + args = [ + f"{name} AS {alias}" + for name, alias in zip( + argtypes, [arg.lstrip("@_") for arg in argnames] + ) + ] + select = f"SELECT {','.join(args)}" + self.execute(select) + + return self.fetchone() + return tuple() + + except Error: + raise + except Exception as err: + raise InterfaceError(f"Failed calling stored routine; {err}") from None + + def nextset(self) -> Optional[bool]: + """Skip to the next available result set""" + if not self._cnx.next_result(): + self.reset(free=True) + return None + self.reset(free=False) + + if not self._cnx.result_set_available: + eof = self._cnx.fetch_eof_status() + self._handle_result(eof) + raise InterfaceError(errno=CR_NO_RESULT_SET) + + self._handle_result(self._cnx.fetch_eof_columns()) + return True + + def fetchall(self) -> List[RowType]: + """Return all rows of a query result set. + + Returns: + list: A list of tuples with all rows of a query result set. 
+ """ + self._check_executed() + if not self._cnx.unread_result: + return [] + + rows = self._cnx.get_rows() + if self._nextrow and self._nextrow[0]: + rows[0].insert(0, self._nextrow[0]) + + if not rows[0]: + self._handle_eof() + return [] + + self._rowcount += len(rows[0]) + self._handle_eof() + # self._cnx.handle_unread_result() + return rows[0] + + def fetchmany(self, size: int = 1) -> List[RowType]: + """Return the next set of rows of a query result set. + + When no more rows are available, it returns an empty list. + The number of rows returned can be specified using the size argument, + which defaults to one. + + Returns: + list: The next set of rows of a query result set. + """ + self._check_executed() + if self._nextrow and self._nextrow[0]: + rows = [self._nextrow[0]] + size -= 1 + else: + rows = [] + + if size and self._cnx.unread_result: + rows.extend(self._cnx.get_rows(size)[0]) + + if size: + if self._cnx.unread_result: + self._nextrow = self._cnx.get_row() + if ( + self._nextrow + and not self._nextrow[0] + and not self._cnx.more_results + ): + self._cnx.free_result() + else: + self._nextrow = (None, None) + + if not rows: + self._handle_eof() + return [] + + self._rowcount += len(rows) + return rows + + def fetchone(self) -> Optional[RowType]: + """Return next row of a query result set. + + Returns: + tuple or None: A row from query result set. + """ + self._check_executed() + row = self._nextrow + if not row and self._cnx.unread_result: + row = self._cnx.get_row() + + if row and row[0]: + self._nextrow = self._cnx.get_row() + if not self._nextrow[0] and not self._cnx.more_results: + self._cnx.free_result() + else: + self._handle_eof() + return None + self._rowcount += 1 + return row[0] + + def __iter__(self) -> Iterator[RowType]: + """Iteration over the result set + + Iteration over the result set which calls self.fetchone() + and returns the next row. + """ + return iter(self.fetchone, None) + + def stored_results(self) -> Generator[CMySQLCursor, None, None]: + """Returns an iterator for stored results + + This method returns an iterator over results which are stored when + callproc() is called. The iterator will provide MySQLCursorBuffered + instances. + + Returns a iterator. + """ + for result in self._stored_results: + yield result # type: ignore[misc] + self._stored_results = [] + + def __next__(self) -> RowType: + """Iteration over the result set + Used for iterating over the result set. Calls self.fetchone() + to get the next row. + + Raises StopIteration when no more rows are available. + """ + try: + row = self.fetchone() + except InterfaceError: + raise StopIteration from None + if not row: + raise StopIteration from None + return row + + @property + def column_names(self) -> Tuple[str, ...]: + """Returns column names + + This property returns the columns names as a tuple. + + Returns a tuple. + """ + if not self.description: + return () + return tuple(d[0] for d in self.description) + + @property + def statement(self) -> str: + """Returns the executed statement + + This property returns the executed statement. When multiple + statements were executed, the current statement in the iterator + will be returned. 
+ """ + try: + return self._executed.strip().decode("utf8") + except AttributeError: + return self._executed.strip() # type: ignore[return-value] + + @property + def with_rows(self) -> bool: + """Returns whether the cursor could have rows returned + + This property returns True when column descriptions are available + and possibly also rows, which will need to be fetched. + + Returns True or False. + """ + if self.description: + return True + return False + + def __str__(self) -> str: + fmt = "{class_name}: {stmt}" + if self._executed: + try: + executed = self._executed.decode("utf-8") + except AttributeError: + executed = self._executed + if len(executed) > 40: + executed = executed[:40] + ".." + else: + executed = "(Nothing executed yet)" + + return fmt.format(class_name=self.__class__.__name__, stmt=executed) + + +class CMySQLCursorBuffered(CMySQLCursor): + + """Cursor using C Extension buffering results""" + + def __init__(self, connection: CMySQLConnection): + """Initialize""" + super().__init__(connection) + + self._rows: Optional[List[RowType]] = None + self._next_row: int = 0 + + def _handle_resultset(self) -> None: + """Handle a result set""" + self._rows = self._cnx.get_rows()[0] + self._next_row = 0 + self._rowcount: int = len(self._rows) + self._handle_eof() + + def reset(self, free: bool = True) -> None: + """Reset the cursor to default""" + self._rows = None + self._next_row = 0 + super().reset(free=free) + + def _fetch_row(self) -> Optional[RowType]: + """Returns the next row in the result set + + Returns a tuple or None. + """ + row = None + try: + row = self._rows[self._next_row] + except IndexError: + return None + self._next_row += 1 + return row + + def fetchall(self) -> List[RowType]: + """Return all rows of a query result set. + + Returns: + list: A list of tuples with all rows of a query result set. + """ + self._check_executed() + res = self._rows[self._next_row :] + self._next_row = len(self._rows) + return res + + def fetchmany(self, size: int = 1) -> List[RowType]: + """Return the next set of rows of a query result set. + + When no more rows are available, it returns an empty list. + The number of rows returned can be specified using the size argument, + which defaults to one. + + Returns: + list: The next set of rows of a query result set. + """ + self._check_executed() + res = [] + cnt = size or self.arraysize + while cnt > 0: + cnt -= 1 + row = self._fetch_row() + if row: + res.append(row) + else: + break + return res + + def fetchone(self) -> Optional[RowType]: + """Return next row of a query result set. + + Returns: + tuple or None: A row from query result set. + """ + self._check_executed() + return self._fetch_row() + + @property + def with_rows(self) -> bool: + """Returns whether the cursor could have rows returned + + This property returns True when rows are available, + which will need to be fetched. + + Returns True or False. + """ + return self._rows is not None + + +class CMySQLCursorRaw(CMySQLCursor): + """Cursor using C Extension return raw results""" + + _raw: bool = True + + +class CMySQLCursorBufferedRaw(CMySQLCursorBuffered): + """Cursor using C Extension buffering raw results""" + + _raw: bool = True + + +class CMySQLCursorDict(CMySQLCursor): + """Cursor using C Extension returning rows as dictionaries""" + + _raw: bool = False + + def fetchone(self) -> Optional[Dict[str, RowItemType]]: + """Return next row of a query result set. + + Returns: + dict or None: A dict from query result set. 
+ """ + row = super().fetchone() + return dict(zip(self.column_names, row)) if row else None + + def fetchmany(self, size: int = 1) -> List[Dict[str, RowItemType]]: + """Return the next set of rows of a query result set. + + When no more rows are available, it returns an empty list. + The number of rows returned can be specified using the size argument, + which defaults to one. + + Returns: + list: The next set of rows of a query result set represented + as a list of dictionaries where column names are used as keys. + """ + res = super().fetchmany(size=size) + return [dict(zip(self.column_names, row)) for row in res] + + def fetchall(self) -> List[Dict[str, RowItemType]]: + """Return all rows of a query result set. + + Returns: + list: A list of dictionaries with all rows of a query + result set where column names are used as keys. + """ + res = super().fetchall() + return [dict(zip(self.column_names, row)) for row in res] + + +class CMySQLCursorBufferedDict(CMySQLCursorBuffered): + """Cursor using C Extension buffering and returning rows as dictionaries""" + + _raw = False + + def _fetch_row(self) -> Optional[Dict[str, RowItemType]]: + row = super()._fetch_row() + if row: + return dict(zip(self.column_names, row)) + return None + + def fetchall(self) -> List[Dict[str, RowItemType]]: + """Return all rows of a query result set. + + Returns: + list: A list of tuples with all rows of a query result set. + """ + res = super().fetchall() + return [dict(zip(self.column_names, row)) for row in res] + + +class CMySQLCursorNamedTuple(CMySQLCursor): + """Cursor using C Extension returning rows as named tuples""" + + named_tuple: Any = None + + def _handle_resultset(self) -> None: + """Handle a result set""" + super()._handle_resultset() + columns = tuple(self.column_names) + try: + self.named_tuple = NAMED_TUPLE_CACHE[columns] + except KeyError: + self.named_tuple = namedtuple("Row", columns) # type: ignore[misc] + NAMED_TUPLE_CACHE[columns] = self.named_tuple + + def fetchone(self) -> Optional[RowType]: + """Return next row of a query result set. + + Returns: + tuple or None: A row from query result set. + """ + row = super().fetchone() + if row: + return self.named_tuple(*row) + return None + + def fetchmany(self, size: int = 1) -> List[RowType]: + """Return the next set of rows of a query result set. + + When no more rows are available, it returns an empty list. + The number of rows returned can be specified using the size argument, + which defaults to one. + + Returns: + list: The next set of rows of a query result set. + """ + res = super().fetchmany(size=size) + if not res: + return [] + return [self.named_tuple(*row) for row in res] + + def fetchall(self) -> List[RowType]: + """Return all rows of a query result set. + + Returns: + list: A list of tuples with all rows of a query result set. + """ + res = super().fetchall() + return [self.named_tuple(*row) for row in res] + + +class CMySQLCursorBufferedNamedTuple(CMySQLCursorBuffered): + """Cursor using C Extension buffering and returning rows as named tuples""" + + named_tuple: Any = None + + def _handle_resultset(self) -> None: + super()._handle_resultset() + self.named_tuple = namedtuple("Row", self.column_names) # type: ignore[misc] + + def _fetch_row(self) -> Optional[RowType]: + row = super()._fetch_row() + if row: + return self.named_tuple(*row) + return None + + def fetchall(self) -> List[RowType]: + """Return all rows of a query result set. + + Returns: + list: A list of tuples with all rows of a query result set. 
+ """ + res = super().fetchall() + return [self.named_tuple(*row) for row in res] + + +class CMySQLCursorPrepared(CMySQLCursor): + """Cursor using MySQL Prepared Statements""" + + def __init__(self, connection: CMySQLConnection): + super().__init__(connection) + self._rows: Optional[List[RowType]] = None + self._rowcount: int = 0 + self._next_row: int = 0 + self._binary: bool = True + self._stmt: Optional[CMySQLPrepStmt] = None + + def _handle_eof(self) -> None: + """Handle EOF packet""" + self._nextrow = (None, None) + self._handle_warnings() + + def _fetch_row(self, raw: bool = False) -> Optional[RowType]: + """Returns the next row in the result set + + Returns a tuple or None. + """ + if not self._stmt or not self._stmt.have_result_set: + return None + row = None + + if self._nextrow == (None, None): + (row, eof) = self._cnx.get_row( + binary=self._binary, + columns=self.description, + raw=raw, + prep_stmt=self._stmt, + ) + else: + (row, eof) = self._nextrow + + if row: + self._nextrow = self._cnx.get_row( + binary=self._binary, + columns=self.description, + raw=raw, + prep_stmt=self._stmt, + ) + eof = self._nextrow[1] + if eof is not None: + self._warning_count = eof["warning_count"] + self._handle_eof() + if self._rowcount == -1: + self._rowcount = 1 + else: + self._rowcount += 1 + if eof: + self._warning_count = eof["warning_count"] + self._handle_eof() + + return row + + def callproc(self, procname: Any, args: Any = None) -> NoReturn: + """Calls a stored procedue + + Not supported with CMySQLCursorPrepared. + """ + raise NotSupportedError() + + def close(self) -> None: + """Close the cursor + + This method will try to deallocate the prepared statement and close + the cursor. + """ + if self._stmt: + self.reset() + self._cnx.cmd_stmt_close(self._stmt) + self._stmt = None + super().close() + + def reset(self, free: bool = True) -> None: + """Resets the prepared statement.""" + if self._stmt: + self._cnx.cmd_stmt_reset(self._stmt) + super().reset(free=free) + + def execute( + self, + operation: StrOrBytes, + params: Optional[ParamsSequenceOrDictType] = None, + multi: bool = False, + ) -> None: # multi is unused + """Prepare and execute a MySQL Prepared Statement + + This method will prepare the given operation and execute it using + the given parameters. + + If the cursor instance already had a prepared statement, it is + first closed. + + Note: argument "multi" is unused. + """ + if not operation: + return + + try: + if not self._cnx or self._cnx.is_closed(): + raise ProgrammingError + except (ProgrammingError, ReferenceError) as err: + raise ProgrammingError("Cursor is not connected", 2055) from err + + self._cnx.handle_unread_result(prepared=True) + + charset = self._cnx.charset + if charset == "utf8mb4": + charset = "utf8" + + if not isinstance(operation, str): + try: + operation = operation.decode(charset) + except UnicodeDecodeError as err: + raise ProgrammingError(str(err)) from err + + if isinstance(params, dict): + replacement_keys = re.findall(RE_SQL_PYTHON_CAPTURE_PARAM_NAME, operation) + try: + # Replace params dict with params tuple in correct order. + params = tuple(params[key] for key in replacement_keys) + except KeyError as err: + raise ProgrammingError( + "Not all placeholders were found in the parameters dict" + ) from err + # Convert %(name)s to ? 
before sending it to MySQL + operation = re.sub(RE_SQL_PYTHON_REPLACE_PARAM, "?", operation) + + if operation is not self._executed: + if self._stmt: + self._cnx.cmd_stmt_close(self._stmt) + self._executed = operation + + try: + operation = operation.encode(charset) + except UnicodeEncodeError as err: + raise ProgrammingError(str(err)) from err + + if b"%s" in operation: + # Convert %s to ? before sending it to MySQL + operation = re.sub(RE_SQL_FIND_PARAM, b"?", operation) + + try: + self._stmt = self._cnx.cmd_stmt_prepare(operation) + except Error: + self._executed = None + self._stmt = None + raise + + self._cnx.cmd_stmt_reset(self._stmt) + + if self._stmt.param_count > 0 and not params: + return + if params: + if not isinstance(params, (tuple, list)): + raise ProgrammingError( + errno=1210, + msg=f"Incorrect type of argument: {type(params).__name__}({params})" + ", it must be of type tuple or list the argument given to " + "the prepared statement", + ) + if self._stmt.param_count != len(params): + raise ProgrammingError( + errno=1210, + msg="Incorrect number of arguments executing prepared statement", + ) + + if params is None: + params = () + res = self._cnx.cmd_stmt_execute(self._stmt, *params) + if res: + self._handle_result(res) + + def executemany( + self, operation: str, seq_params: Sequence[ParamsSequenceType] + ) -> None: + """Prepare and execute a MySQL Prepared Statement many times + + This method will prepare the given operation and execute with each + tuple found the list seq_params. + + If the cursor instance already had a prepared statement, it is + first closed. + """ + rowcnt = 0 + try: + for params in seq_params: + self.execute(operation, params) + if self.with_rows: + self.fetchall() + rowcnt += self._rowcount + except (ValueError, TypeError) as err: + raise InterfaceError(f"Failed executing the operation; {err}") from err + self._rowcount = rowcnt + + def fetchone(self) -> Optional[RowType]: + """Return next row of a query result set. + + Returns: + tuple or None: A row from query result set. + """ + self._check_executed() + return self._fetch_row() or None + + def fetchmany(self, size: Optional[int] = None) -> List[RowType]: + """Return the next set of rows of a query result set. + + When no more rows are available, it returns an empty list. + The number of rows returned can be specified using the size argument, + which defaults to one. + + Returns: + list: The next set of rows of a query result set. + """ + self._check_executed() + res = [] + cnt = size or self.arraysize + while cnt > 0 and self._stmt.have_result_set: + cnt -= 1 + row = self._fetch_row() + if row: + res.append(row) + return res + + def fetchall(self) -> List[RowType]: + """Return all rows of a query result set. + + Returns: + list: A list of tuples with all rows of a query result set. + """ + self._check_executed() + if not self._stmt.have_result_set: + return [] + + rows = self._cnx.get_rows(prep_stmt=self._stmt) + if self._nextrow and self._nextrow[0]: + rows[0].insert(0, self._nextrow[0]) + + if not rows[0]: + self._handle_eof() + return [] + + self._rowcount += len(rows[0]) + self._handle_eof() + return rows[0] + + +class CMySQLCursorPreparedDict(CMySQLCursorDict, CMySQLCursorPrepared): # type: ignore[misc] + """This class is a blend of features from CMySQLCursorDict and CMySQLCursorPrepared + + Multiple inheritance in python is allowed but care must be taken + when assuming methods resolution. 
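A sketch of typical prepared-cursor usage, again reusing cnx and the hypothetical employees table; the statement is prepared on first execution and only the parameters travel on later executions, with %s and %(name)s placeholders rewritten to ? as described above:

cur = cnx.cursor(prepared=True)
stmt = "SELECT name, phone FROM employees WHERE name = %s"
cur.execute(stmt, ("Jane",))    # prepares the statement, then executes it
print(cur.fetchall())
cur.execute(stmt, ("Joe",))     # reuses the prepared statement with new parameters
print(cur.fetchall())
cur.close()                     # deallocates the prepared statement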
In the case of multiple + inheritance, a given attribute is first searched in the current + class if it's not found then it's searched in the parent classes. + The parent classes are searched in a left-right fashion and each + class is searched once. + Based on python's attribute resolution, in this case, attributes + are searched as follows: + 1. CMySQLCursorPreparedDict (current class) + 2. CMySQLCursorDict (left parent class) + 3. CMySQLCursorPrepared (right parent class) + 4. CMySQLCursor (base class) + """ + + +class CMySQLCursorPreparedNamedTuple(CMySQLCursorNamedTuple, CMySQLCursorPrepared): + """This class is a blend of features from CMySQLCursorNamedTuple and CMySQLCursorPrepared""" + + +class CMySQLCursorPreparedRaw(CMySQLCursorPrepared): + """This class is a blend of features from CMySQLCursorRaw and CMySQLCursorPrepared""" + + _raw: bool = True diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/custom_types.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/custom_types.py new file mode 100644 index 0000000..7fcacbd --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/custom_types.py @@ -0,0 +1,50 @@ +# Copyright (c) 2014, 2022, Oracle and/or its affiliates. All rights reserved. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""Custom Python types used by MySQL Connector/Python""" +from __future__ import annotations + +from typing import Type + + +class HexLiteral(str): + + """Class holding MySQL hex literals""" + + charset: str = "" + original: str = "" + + def __new__(cls: Type[HexLiteral], str_: str, charset: str = "utf8") -> HexLiteral: + hexed = [f"{i:02x}" for i in str_.encode(charset)] + obj = str.__new__(cls, "".join(hexed)) + obj.charset = charset + obj.original = str_ + return obj + + def __str__(self) -> str: + return "0x" + self diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/dbapi.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/dbapi.py new file mode 100644 index 0000000..398db39 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/dbapi.py @@ -0,0 +1,92 @@ +# Copyright (c) 2009, 2023, Oracle and/or its affiliates. 
All rights reserved. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +""" +This module implements some constructors and singletons as required by the +DB API v2.0 (PEP-249). +""" + +# Python Db API v2 +# pylint: disable=invalid-name +apilevel: str = "2.0" +"""This attribute is a string that indicates the supported DB API level.""" + +threadsafety: int = 1 +"""This attribute is an integer that indicates the supported level of thread safety +provided by Connector/Python.""" + +paramstyle: str = "pyformat" +"""This attribute is a string that indicates the Connector/Python default +parameter style.""" + +import datetime +import time + +from typing import Tuple + +from . import constants + + +class _DBAPITypeObject: + def __init__(self, *values: int) -> None: + self.values: Tuple[int, ...] 
= values + + def __eq__(self, other: object) -> bool: + return other in self.values + + def __ne__(self, other: object) -> bool: + return other not in self.values + + +Date = datetime.date +Time = datetime.time +Timestamp = datetime.datetime + + +def DateFromTicks(ticks: int) -> datetime.date: + """Construct an object holding a date value from the given ticks value.""" + return Date(*time.localtime(ticks)[:3]) + + +def TimeFromTicks(ticks: int) -> datetime.time: + """Construct an object holding a time value from the given ticks value.""" + return Time(*time.localtime(ticks)[3:6]) + + +def TimestampFromTicks(ticks: int) -> datetime.datetime: + """Construct an object holding a time stamp from the given ticks value.""" + return Timestamp(*time.localtime(ticks)[:6]) + + +Binary = bytes + +STRING = _DBAPITypeObject(*constants.FieldType.get_string_types()) +BINARY = _DBAPITypeObject(*constants.FieldType.get_binary_types()) +NUMBER = _DBAPITypeObject(*constants.FieldType.get_number_types()) +DATETIME = _DBAPITypeObject(*constants.FieldType.get_timestamp_types()) +ROWID = _DBAPITypeObject() diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__init__.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/__init__.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..188fd46 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/__init__.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/base.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/base.cpython-310.pyc new file mode 100644 index 0000000..a9b3104 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/base.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/client.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/client.cpython-310.pyc new file mode 100644 index 0000000..da967c4 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/client.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/compiler.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/compiler.cpython-310.pyc new file mode 100644 index 0000000..8631c40 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/compiler.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/creation.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/creation.cpython-310.pyc new file mode 100644 index 0000000..da75ba2 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/creation.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/features.cpython-310.pyc 
b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/features.cpython-310.pyc new file mode 100644 index 0000000..b9b5e6e Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/features.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/introspection.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/introspection.cpython-310.pyc new file mode 100644 index 0000000..1d55527 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/introspection.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/operations.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/operations.cpython-310.pyc new file mode 100644 index 0000000..8ad1ef5 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/operations.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/schema.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/schema.cpython-310.pyc new file mode 100644 index 0000000..03876ec Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/schema.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/validation.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/validation.cpython-310.pyc new file mode 100644 index 0000000..2bf23e2 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/__pycache__/validation.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/base.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/base.py new file mode 100644 index 0000000..65cd4c5 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/base.py @@ -0,0 +1,652 @@ +# Copyright (c) 2020, 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +# mypy: disable-error-code="override" + +"""Django database Backend using MySQL Connector/Python. + +This Django database backend is heavily based on the MySQL backend from Django. + +Changes include: +* Support for microseconds (MySQL 5.6.3 and later) +* Using INFORMATION_SCHEMA where possible +* Using new defaults for, for example SQL_AUTO_IS_NULL + +Requires and comes with MySQL Connector/Python v8.0.22 and later: + http://dev.mysql.com/downloads/connector/python/ +""" + +import warnings + +from datetime import datetime, time +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Generator, + Iterator, + List, + Optional, + Sequence, + Set, + Tuple, + Union, +) + +from django.conf import settings +from django.core.exceptions import ImproperlyConfigured +from django.db import IntegrityError +from django.db.backends.base.base import BaseDatabaseWrapper +from django.utils import dateparse, timezone +from django.utils.functional import cached_property + +from mysql.connector.types import MySQLConvertibleType + +try: + import mysql.connector + + from mysql.connector.conversion import MySQLConverter + from mysql.connector.custom_types import HexLiteral + from mysql.connector.pooling import PooledMySQLConnection + from mysql.connector.types import ParamsSequenceOrDictType, RowType, StrOrBytes +except ImportError as err: + raise ImproperlyConfigured(f"Error loading mysql.connector module: {err}") from err + +try: + from _mysql_connector import datetime_to_mysql +except ImportError: + HAVE_CEXT = False +else: + HAVE_CEXT = True + +from .client import DatabaseClient +from .creation import DatabaseCreation +from .features import DatabaseFeatures +from .introspection import DatabaseIntrospection +from .operations import DatabaseOperations +from .schema import DatabaseSchemaEditor +from .validation import DatabaseValidation + +Error = mysql.connector.Error +DatabaseError = mysql.connector.DatabaseError +NotSupportedError = mysql.connector.NotSupportedError +OperationalError = mysql.connector.OperationalError +ProgrammingError = mysql.connector.ProgrammingError + +if TYPE_CHECKING: + from mysql.connector.abstracts import MySQLConnectionAbstract, MySQLCursorAbstract + + +def adapt_datetime_with_timezone_support(value: datetime) -> StrOrBytes: + """Equivalent to DateTimeField.get_db_prep_value. Used only by raw SQL.""" + if settings.USE_TZ: + if timezone.is_naive(value): + warnings.warn( + f"MySQL received a naive datetime ({value})" + " while time zone support is active.", + RuntimeWarning, + ) + default_timezone = timezone.get_default_timezone() + value = timezone.make_aware(value, default_timezone) + value = value.astimezone(timezone.utc).replace( # pylint: disable=no-member + tzinfo=None + ) + if HAVE_CEXT: + mysql_datetime: bytes = datetime_to_mysql(value) + return mysql_datetime + return value.strftime("%Y-%m-%d %H:%M:%S.%f") + + +class CursorWrapper: + """Wrapper around MySQL Connector/Python's cursor class. + + The cursor class is defined by the options passed to MySQL + Connector/Python. If buffered option is True in those options, + MySQLCursorBuffered will be used. 
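A sketch of the Django settings that select this backend; the database name, credentials and option values are placeholders, and the OPTIONS keys map onto the handling in __init__() and get_connection_params() below:

DATABASES = {
    "default": {
        "ENGINE": "mysql.connector.django",
        "NAME": "mydb",
        "USER": "app",
        "PASSWORD": "***",
        "HOST": "127.0.0.1",
        "PORT": "3306",
        "OPTIONS": {
            "use_pure": False,                    # prefer the C extension when available
            "isolation_level": "read committed",  # one of the levels listed below
        },
    }
}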
+ """ + + codes_for_integrityerror = ( + 1048, # Column cannot be null + 1690, # BIGINT UNSIGNED value is out of range + 3819, # CHECK constraint is violated + 4025, # CHECK constraint failed + ) + + def __init__(self, cursor: "MySQLCursorAbstract") -> None: + self.cursor: "MySQLCursorAbstract" = cursor + + @staticmethod + def _adapt_execute_args_dict( + args: Dict[str, MySQLConvertibleType] + ) -> Dict[str, MySQLConvertibleType]: + if not args: + return args + new_args = dict(args) + for key, value in args.items(): + if isinstance(value, datetime): + new_args[key] = adapt_datetime_with_timezone_support(value) + + return new_args + + @staticmethod + def _adapt_execute_args( + args: Optional[Sequence[MySQLConvertibleType]], + ) -> Optional[Sequence[MySQLConvertibleType]]: + if not args: + return args + new_args = list(args) + for i, arg in enumerate(args): + if isinstance(arg, datetime): + new_args[i] = adapt_datetime_with_timezone_support(arg) + + return tuple(new_args) + + def execute( + self, + query: str, + args: Optional[ + Union[Sequence[MySQLConvertibleType], Dict[str, MySQLConvertibleType]] + ] = None, + ) -> Optional[Generator["MySQLCursorAbstract", None, None]]: + """Executes the given operation + + This wrapper method around the execute()-method of the cursor is + mainly needed to re-raise using different exceptions. + """ + new_args: Optional[ParamsSequenceOrDictType] = None + if isinstance(args, dict): + new_args = self._adapt_execute_args_dict(args) + else: + new_args = self._adapt_execute_args(args) + try: + return self.cursor.execute(query, new_args) + except mysql.connector.OperationalError as exc: + if exc.args[0] in self.codes_for_integrityerror: + raise IntegrityError(*tuple(exc.args)) from None + raise + + def executemany( + self, + query: str, + args: Sequence[ + Union[Sequence[MySQLConvertibleType], Dict[str, MySQLConvertibleType]] + ], + ) -> Optional[Generator["MySQLCursorAbstract", None, None]]: + """Executes the given operation + + This wrapper method around the executemany()-method of the cursor is + mainly needed to re-raise using different exceptions. + """ + try: + return self.cursor.executemany(query, args) + except mysql.connector.OperationalError as exc: + if exc.args[0] in self.codes_for_integrityerror: + raise IntegrityError(*tuple(exc.args)) from None + raise + + def __getattr__(self, attr: Any) -> Any: + """Return an attribute of wrapped cursor""" + return getattr(self.cursor, attr) + + def __iter__(self) -> Iterator[RowType]: + """Return an iterator over wrapped cursor""" + return iter(self.cursor) + + +class DatabaseWrapper(BaseDatabaseWrapper): # pylint: disable=abstract-method + """Represent a database connection.""" + + vendor = "mysql" + # This dictionary maps Field objects to their associated MySQL column + # types, as strings. Column-type strings can contain format strings; they'll + # be interpolated against the values of Field.__dict__ before being output. + # If a column type is set to None, it won't be included in the output. 
+ data_types = { + "AutoField": "integer AUTO_INCREMENT", + "BigAutoField": "bigint AUTO_INCREMENT", + "BinaryField": "longblob", + "BooleanField": "bool", + "CharField": "varchar(%(max_length)s)", + "DateField": "date", + "DateTimeField": "datetime(6)", + "DecimalField": "numeric(%(max_digits)s, %(decimal_places)s)", + "DurationField": "bigint", + "FileField": "varchar(%(max_length)s)", + "FilePathField": "varchar(%(max_length)s)", + "FloatField": "double precision", + "IntegerField": "integer", + "BigIntegerField": "bigint", + "IPAddressField": "char(15)", + "GenericIPAddressField": "char(39)", + "JSONField": "json", + "NullBooleanField": "bool", + "OneToOneField": "integer", + "PositiveBigIntegerField": "bigint UNSIGNED", + "PositiveIntegerField": "integer UNSIGNED", + "PositiveSmallIntegerField": "smallint UNSIGNED", + "SlugField": "varchar(%(max_length)s)", + "SmallAutoField": "smallint AUTO_INCREMENT", + "SmallIntegerField": "smallint", + "TextField": "longtext", + "TimeField": "time(6)", + "UUIDField": "char(32)", + } + + # For these data types: + # - MySQL < 8.0.13 doesn't accept default values and + # implicitly treat them as nullable + # - all versions of MySQL doesn't support full width database + # indexes + _limited_data_types = ( + "tinyblob", + "blob", + "mediumblob", + "longblob", + "tinytext", + "text", + "mediumtext", + "longtext", + "json", + ) + + operators = { + "exact": "= %s", + "iexact": "LIKE %s", + "contains": "LIKE BINARY %s", + "icontains": "LIKE %s", + "regex": "REGEXP BINARY %s", + "iregex": "REGEXP %s", + "gt": "> %s", + "gte": ">= %s", + "lt": "< %s", + "lte": "<= %s", + "startswith": "LIKE BINARY %s", + "endswith": "LIKE BINARY %s", + "istartswith": "LIKE %s", + "iendswith": "LIKE %s", + } + + # The patterns below are used to generate SQL pattern lookup clauses when + # the right-hand side of the lookup isn't a raw string (it might be an expression + # or the result of a bilateral transformation). + # In those cases, special characters for LIKE operators (e.g. \, *, _) should be + # escaped on database side. + # + # Note: we use str.format() here for readability as '%' is used as a wildcard for + # the LIKE operator. + pattern_esc = r"REPLACE(REPLACE(REPLACE({}, '\\', '\\\\'), '%%', '\%%'), '_', '\_')" + pattern_ops = { + "contains": "LIKE BINARY CONCAT('%%', {}, '%%')", + "icontains": "LIKE CONCAT('%%', {}, '%%')", + "startswith": "LIKE BINARY CONCAT({}, '%%')", + "istartswith": "LIKE CONCAT({}, '%%')", + "endswith": "LIKE BINARY CONCAT('%%', {})", + "iendswith": "LIKE CONCAT('%%', {})", + } + + isolation_level: Optional[str] = None + isolation_levels = { + "read uncommitted", + "read committed", + "repeatable read", + "serializable", + } + + Database = mysql.connector + SchemaEditorClass = DatabaseSchemaEditor + # Classes instantiated in __init__(). 
+ client_class = DatabaseClient + creation_class = DatabaseCreation + features_class = DatabaseFeatures + introspection_class = DatabaseIntrospection + ops_class = DatabaseOperations + validation_class = DatabaseValidation + + def __init__(self, *args: Any, **kwargs: Any) -> None: + super().__init__(*args, **kwargs) + + options = self.settings_dict.get("OPTIONS") + if options: + self._use_pure = options.get("use_pure", not HAVE_CEXT) + converter_class = options.get( + "converter_class", + DjangoMySQLConverter, + ) + if not issubclass(converter_class, DjangoMySQLConverter): + raise ProgrammingError( + "Converter class should be a subclass of " + "mysql.connector.django.base.DjangoMySQLConverter" + ) + self.converter = converter_class() + else: + self.converter = DjangoMySQLConverter() + self._use_pure = not HAVE_CEXT + + def __getattr__(self, attr: str) -> bool: + if attr.startswith("mysql_is"): + return False + raise AttributeError + + def get_connection_params(self) -> Dict[str, Any]: + kwargs = { + "charset": "utf8", + "use_unicode": True, + "buffered": False, + "consume_results": True, + } + + settings_dict = self.settings_dict + + if settings_dict["USER"]: + kwargs["user"] = settings_dict["USER"] + if settings_dict["NAME"]: + kwargs["database"] = settings_dict["NAME"] + if settings_dict["PASSWORD"]: + kwargs["passwd"] = settings_dict["PASSWORD"] + if settings_dict["HOST"].startswith("/"): + kwargs["unix_socket"] = settings_dict["HOST"] + elif settings_dict["HOST"]: + kwargs["host"] = settings_dict["HOST"] + if settings_dict["PORT"]: + kwargs["port"] = int(settings_dict["PORT"]) + if settings_dict.get("OPTIONS", {}).get("init_command"): + kwargs["init_command"] = settings_dict["OPTIONS"]["init_command"] + + # Raise exceptions for database warnings if DEBUG is on + kwargs["raise_on_warnings"] = settings.DEBUG + + kwargs["client_flags"] = [ + # Need potentially affected rows on UPDATE + mysql.connector.constants.ClientFlag.FOUND_ROWS, + ] + + try: + options = settings_dict["OPTIONS"].copy() + isolation_level = options.pop("isolation_level", None) + if isolation_level: + isolation_level = isolation_level.lower() + if isolation_level not in self.isolation_levels: + valid_levels = ", ".join( + f"'{level}'" for level in sorted(self.isolation_levels) + ) + raise ImproperlyConfigured( + f"Invalid transaction isolation level '{isolation_level}' " + f"specified.\nUse one of {valid_levels}, or None." + ) + self.isolation_level = isolation_level + kwargs.update(options) + except KeyError: + # OPTIONS missing is OK + pass + return kwargs + + def get_new_connection( + self, conn_params: Dict[str, Any] + ) -> Union[PooledMySQLConnection, "MySQLConnectionAbstract"]: + if "converter_class" not in conn_params: + conn_params["converter_class"] = DjangoMySQLConverter + cnx = mysql.connector.connect(**conn_params) + + return cnx + + def init_connection_state(self) -> None: + assignments = [] + if self.features.is_sql_auto_is_null_enabled: # type: ignore[attr-defined] + # SQL_AUTO_IS_NULL controls whether an AUTO_INCREMENT column on + # a recently inserted row will return when the field is tested + # for NULL. Disabling this brings this aspect of MySQL in line + # with SQL standards. 
+ assignments.append("SET SQL_AUTO_IS_NULL = 0") + + if self.isolation_level: + assignments.append( + "SET SESSION TRANSACTION ISOLATION LEVEL " + f"{self.isolation_level.upper()}" + ) + + if assignments: + with self.cursor() as cursor: + cursor.execute("; ".join(assignments)) + + if "AUTOCOMMIT" in self.settings_dict: + try: + self.set_autocommit(self.settings_dict["AUTOCOMMIT"]) + except AttributeError: + self._set_autocommit(self.settings_dict["AUTOCOMMIT"]) + + def create_cursor(self, name: Any = None) -> CursorWrapper: + cursor = self.connection.cursor() + return CursorWrapper(cursor) + + def _rollback(self) -> None: + try: + BaseDatabaseWrapper._rollback(self) # type: ignore[attr-defined] + except NotSupportedError: + pass + + def _set_autocommit(self, autocommit: bool) -> None: + with self.wrap_database_errors: + self.connection.autocommit = autocommit + + def disable_constraint_checking(self) -> bool: + """ + Disable foreign key checks, primarily for use in adding rows with + forward references. Always return True to indicate constraint checks + need to be re-enabled. + """ + with self.cursor() as cursor: + cursor.execute("SET foreign_key_checks=0") + return True + + def enable_constraint_checking(self) -> None: + """ + Re-enable foreign key checks after they have been disabled. + """ + # Override needs_rollback in case constraint_checks_disabled is + # nested inside transaction.atomic. + self.needs_rollback, needs_rollback = False, self.needs_rollback + try: + with self.cursor() as cursor: + cursor.execute("SET foreign_key_checks=1") + finally: + self.needs_rollback = needs_rollback + + def check_constraints(self, table_names: Optional[List[str]] = None) -> None: + """ + Check each table name in `table_names` for rows with invalid foreign + key references. This method is intended to be used in conjunction with + `disable_constraint_checking()` and `enable_constraint_checking()`, to + determine if rows with invalid references were entered while constraint + checks were off. + """ + with self.cursor() as cursor: + if table_names is None: + table_names = self.introspection.table_names(cursor) + for table_name in table_names: + primary_key_column_name = self.introspection.get_primary_key_column( + cursor, table_name + ) + if not primary_key_column_name: + continue + key_columns = self.introspection.get_key_columns( # type: ignore[attr-defined] + cursor, table_name + ) + for ( + column_name, + referenced_table_name, + referenced_column_name, + ) in key_columns: + cursor.execute( + f""" + SELECT REFERRING.`{primary_key_column_name}`, + REFERRING.`{column_name}` + FROM `{table_name}` as REFERRING + LEFT JOIN `{referenced_table_name}` as REFERRED + ON ( + REFERRING.`{column_name}` = + REFERRED.`{referenced_column_name}` + ) + WHERE REFERRING.`{column_name}` IS NOT NULL + AND REFERRED.`{referenced_column_name}` IS NULL + """ + ) + for bad_row in cursor.fetchall(): + raise IntegrityError( + f"The row in table '{table_name}' with primary " + f"key '{bad_row[0]}' has an invalid foreign key: " + f"{table_name}.{column_name} contains a value " + f"'{bad_row[1]}' that does not have a " + f"corresponding value in " + f"{referenced_table_name}." + f"{referenced_column_name}." 
+ ) + + def is_usable(self) -> bool: + try: + self.connection.ping() + except Error: + return False + return True + + @cached_property # type: ignore[misc] + @staticmethod + def display_name() -> str: + """Display name.""" + return "MySQL" + + @cached_property + def data_type_check_constraints(self) -> Dict[str, str]: + """Mapping of Field objects to their SQL for CHECK constraints.""" + if self.features.supports_column_check_constraints: + check_constraints = { + "PositiveBigIntegerField": "`%(column)s` >= 0", + "PositiveIntegerField": "`%(column)s` >= 0", + "PositiveSmallIntegerField": "`%(column)s` >= 0", + } + return check_constraints + return {} + + @cached_property + def mysql_server_data(self) -> Dict[str, Any]: + """Return MySQL server data.""" + with self.temporary_connection() as cursor: + # Select some server variables and test if the time zone + # definitions are installed. CONVERT_TZ returns NULL if 'UTC' + # timezone isn't loaded into the mysql.time_zone table. + cursor.execute( + """ + SELECT VERSION(), + @@sql_mode, + @@default_storage_engine, + @@sql_auto_is_null, + @@lower_case_table_names, + CONVERT_TZ('2001-01-01 01:00:00', 'UTC', 'UTC') IS NOT NULL + """ + ) + row = cursor.fetchone() + return { + "version": row[0], + "sql_mode": row[1], + "default_storage_engine": row[2], + "sql_auto_is_null": bool(row[3]), + "lower_case_table_names": bool(row[4]), + "has_zoneinfo_database": bool(row[5]), + } + + @cached_property + def mysql_server_info(self) -> Any: + """Return MySQL version.""" + with self.temporary_connection() as cursor: + cursor.execute("SELECT VERSION()") + return cursor.fetchone()[0] + + @cached_property + def mysql_version(self) -> Tuple[int, ...]: + """Return MySQL version.""" + config = self.get_connection_params() + with mysql.connector.connect(**config) as conn: + server_version: Tuple[int, ...] = conn.get_server_version() + return server_version + + @cached_property + def sql_mode(self) -> Set[str]: + """Return SQL mode.""" + with self.cursor() as cursor: + cursor.execute("SELECT @@sql_mode") + sql_mode = cursor.fetchone() + return set(sql_mode[0].split(",") if sql_mode else ()) + + @property + def use_pure(self) -> bool: + """Return True if pure Python version is being used.""" + ans: bool = self._use_pure + return ans + + +class DjangoMySQLConverter(MySQLConverter): + """Custom converter for Django.""" + + # pylint: disable=unused-argument + + @staticmethod + def _time_to_python(value: bytes, dsc: Any = None) -> Optional[time]: + """Return MySQL TIME data type as datetime.time() + + Returns datetime.time() + """ + return dateparse.parse_time(value.decode("utf-8")) + + @staticmethod + def _datetime_to_python(value: bytes, dsc: Any = None) -> Optional[datetime]: + """Connector/Python always returns naive datetime.datetime + + Connector/Python always returns naive timestamps since MySQL has + no time zone support. + + - A naive datetime is a datetime that doesn't know its own timezone. + + Django needs a non-naive datetime, but in this method we don't need + to make a datetime value time zone aware since Django itself at some + point will make it aware (at least in versions 3.2.16 and 4.1.2) when + USE_TZ=True. This may change in a future release, we need to keep an + eye on this behaviour. 
+ + Returns datetime.datetime() + """ + return MySQLConverter._datetime_to_python(value) if value else None + + # pylint: enable=unused-argument + + def _safestring_to_mysql(self, value: str) -> Union[bytes, HexLiteral]: + return self._str_to_mysql(value) + + def _safetext_to_mysql(self, value: str) -> Union[bytes, HexLiteral]: + return self._str_to_mysql(value) + + def _safebytes_to_mysql(self, value: bytes) -> bytes: + return self._bytes_to_mysql(value) diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/client.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/client.py new file mode 100644 index 0000000..090ccd5 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/client.py @@ -0,0 +1,106 @@ +# Copyright (c) 2020, 2022, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""Database Client.""" + +import os +import subprocess + +from typing import Any, Dict, Iterable, List, Optional, Tuple + +from django.db.backends.base.client import BaseDatabaseClient + + +class DatabaseClient(BaseDatabaseClient): + """Encapsulate backend-specific methods for opening a client shell.""" + + executable_name = "mysql" + + @classmethod + def settings_to_cmd_args_env( + cls, settings_dict: Dict[str, Any], parameters: Optional[Iterable[str]] = None + ) -> Tuple[List[str], Optional[Dict[str, Any]]]: + args = [cls.executable_name] + + db = settings_dict["OPTIONS"].get("database", settings_dict["NAME"]) + user = settings_dict["OPTIONS"].get("user", settings_dict["USER"]) + passwd = settings_dict["OPTIONS"].get("password", settings_dict["PASSWORD"]) + host = settings_dict["OPTIONS"].get("host", settings_dict["HOST"]) + port = settings_dict["OPTIONS"].get("port", settings_dict["PORT"]) + ssl_ca = settings_dict["OPTIONS"].get("ssl_ca") + ssl_cert = settings_dict["OPTIONS"].get("ssl_cert") + ssl_key = settings_dict["OPTIONS"].get("ssl_key") + defaults_file = settings_dict["OPTIONS"].get("read_default_file") + charset = settings_dict["OPTIONS"].get("charset") + + # --defaults-file should always be the first option + if defaults_file: + args.append(f"--defaults-file={defaults_file}") + + # Load any custom init_commands. We always force SQL_MODE to TRADITIONAL + init_command = settings_dict["OPTIONS"].get("init_command", "") + args.append(f"--init-command=SET @@session.SQL_MODE=TRADITIONAL;{init_command}") + + if user: + args.append(f"--user={user}") + if passwd: + args.append(f"--password={passwd}") + + if host: + if "/" in host: + args.append(f"--socket={host}") + else: + args.append(f"--host={host}") + + if port: + args.append(f"--port={port}") + + if db: + args.append(f"--database={db}") + + if ssl_ca: + args.append(f"--ssl-ca={ssl_ca}") + if ssl_cert: + args.append(f"--ssl-cert={ssl_cert}") + if ssl_key: + args.append(f"--ssl-key={ssl_key}") + + if charset: + args.append(f"--default-character-set={charset}") + + if parameters: + args.extend(parameters) + + return args, None + + def runshell(self, parameters: Optional[Iterable[str]] = None) -> None: + args, env = self.settings_to_cmd_args_env( + self.connection.settings_dict, parameters + ) + env = {**os.environ, **env} if env else None + subprocess.run(args, env=env, check=True) diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/compiler.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/compiler.py new file mode 100644 index 0000000..1ee7871 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/compiler.py @@ -0,0 +1,45 @@ +# Copyright (c) 2020, 2022, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. 
The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""SQL Compiler classes.""" + +from django.db.backends.mysql.compiler import ( + SQLAggregateCompiler, + SQLCompiler, + SQLDeleteCompiler, + SQLInsertCompiler, + SQLUpdateCompiler, +) + +__all__ = [ + "SQLAggregateCompiler", + "SQLCompiler", + "SQLDeleteCompiler", + "SQLInsertCompiler", + "SQLUpdateCompiler", +] diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/creation.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/creation.py new file mode 100644 index 0000000..82f0853 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/creation.py @@ -0,0 +1,33 @@ +# Copyright (c) 2020, 2022, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""Backend specific database creation.""" + +from django.db.backends.mysql.creation import DatabaseCreation + +__all__ = ["DatabaseCreation"] diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/features.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/features.py new file mode 100644 index 0000000..e8debb8 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/features.py @@ -0,0 +1,50 @@ +# Copyright (c) 2020, 2022, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""Database Features.""" + +from typing import Any, List + +from django.db.backends.mysql.features import DatabaseFeatures as MySQLDatabaseFeatures +from django.utils.functional import cached_property + + +class DatabaseFeatures(MySQLDatabaseFeatures): + """Database Features Specification class.""" + + empty_fetchmany_value: List[Any] = [] + + @cached_property + def can_introspect_check_constraints(self) -> bool: # type: ignore[override] + """Check if backend support introspection CHECK of constraints.""" + return self.connection.mysql_version >= (8, 0, 16) + + @cached_property + def supports_microsecond_precision(self) -> bool: + """Check if backend support microsecond precision.""" + return self.connection.mysql_version >= (5, 6, 3) diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/introspection.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/introspection.py new file mode 100644 index 0000000..304a0ec --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/introspection.py @@ -0,0 +1,461 @@ +# Copyright (c) 2020, 2022, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. 
+# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +# mypy: disable-error-code="override,attr-defined,call-arg" + +"""Database Introspection.""" + +from collections import namedtuple +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Tuple + +import sqlparse + +from django import VERSION as DJANGO_VERSION +from django.db.backends.base.introspection import ( + BaseDatabaseIntrospection, + FieldInfo as BaseFieldInfo, + TableInfo, +) +from django.db.models import Index +from django.utils.datastructures import OrderedSet + +from mysql.connector.constants import FieldType + +# from .base import CursorWrapper produces a circular import error, +# avoiding importing CursorWrapper explicitly, using a documented +# trick; write the imports inside if TYPE_CHECKING: so that they +# are not executed at runtime. +# Ref: https://buildmedia.readthedocs.org/media/pdf/mypy/stable/mypy.pdf [page 42] +if TYPE_CHECKING: + # CursorWraper is used exclusively for type hinting + from mysql.connector.django.base import CursorWrapper + +# Based on my investigation, named tuples to +# comply with mypy need to define a static list or tuple +# for field_names (second argument). In this case, the field +# names are created dynamically for FieldInfo which triggers +# a mypy error. The solution is not straightforward since +# FieldInfo attributes are Django version dependent. Code +# refactory is needed to fix this issue. 
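+# Illustrative note (assumption, not part of the module): the FieldInfo tuple
+# defined below simply appends three MySQL-specific members to Django's base
+# FieldInfo, so rows built by get_table_description() can be inspected like
+#
+#   if "auto_increment" in info.extra: ...        # drives get_field_type()
+#   if info.is_unsigned: ...                      # Positive*Field detection
+#   if info.has_json_constraint: ...              # JSONField detection
+#
+# where ``info`` is one element of the list returned by get_table_description().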
+FieldInfo = namedtuple( # type: ignore[misc] + "FieldInfo", + BaseFieldInfo._fields + ("extra", "is_unsigned", "has_json_constraint"), +) +if DJANGO_VERSION < (3, 2, 0): + InfoLine = namedtuple( + "InfoLine", + "col_name data_type max_len num_prec num_scale extra column_default " + "is_unsigned", + ) +else: + InfoLine = namedtuple( # type: ignore[no-redef] + "InfoLine", + "col_name data_type max_len num_prec num_scale extra column_default " + "collation is_unsigned", + ) + + +class DatabaseIntrospection(BaseDatabaseIntrospection): + """Encapsulate backend-specific introspection utilities.""" + + data_types_reverse = { + FieldType.BLOB: "TextField", + FieldType.DECIMAL: "DecimalField", + FieldType.NEWDECIMAL: "DecimalField", + FieldType.DATE: "DateField", + FieldType.DATETIME: "DateTimeField", + FieldType.DOUBLE: "FloatField", + FieldType.FLOAT: "FloatField", + FieldType.INT24: "IntegerField", + FieldType.LONG: "IntegerField", + FieldType.LONGLONG: "BigIntegerField", + FieldType.SHORT: "SmallIntegerField", + FieldType.STRING: "CharField", + FieldType.TIME: "TimeField", + FieldType.TIMESTAMP: "DateTimeField", + FieldType.TINY: "IntegerField", + FieldType.TINY_BLOB: "TextField", + FieldType.MEDIUM_BLOB: "TextField", + FieldType.LONG_BLOB: "TextField", + FieldType.VAR_STRING: "CharField", + } + + def get_field_type(self, data_type: str, description: FieldInfo) -> str: + field_type = super().get_field_type(data_type, description) # type: ignore[arg-type] + if "auto_increment" in description.extra: + if field_type == "IntegerField": + return "AutoField" + if field_type == "BigIntegerField": + return "BigAutoField" + if field_type == "SmallIntegerField": + return "SmallAutoField" + if description.is_unsigned: + if field_type == "BigIntegerField": + return "PositiveBigIntegerField" + if field_type == "IntegerField": + return "PositiveIntegerField" + if field_type == "SmallIntegerField": + return "PositiveSmallIntegerField" + # JSON data type is an alias for LONGTEXT in MariaDB, use check + # constraints clauses to introspect JSONField. + if description.has_json_constraint: + return "JSONField" + return field_type + + def get_table_list(self, cursor: "CursorWrapper") -> List[TableInfo]: + """Return a list of table and view names in the current database.""" + cursor.execute("SHOW FULL TABLES") + return [ + TableInfo(row[0], {"BASE TABLE": "t", "VIEW": "v"}.get(row[1])) + for row in cursor.fetchall() + ] + + def get_table_description( + self, cursor: "CursorWrapper", table_name: str + ) -> List[FieldInfo]: + """ + Return a description of the table with the DB-API cursor.description + interface." + """ + json_constraints: Dict[Any, Any] = {} + # A default collation for the given table. 
+ cursor.execute( + """ + SELECT table_collation + FROM information_schema.tables + WHERE table_schema = DATABASE() + AND table_name = %s + """, + [table_name], + ) + row = cursor.fetchone() + default_column_collation = row[0] if row else "" + # information_schema database gives more accurate results for some figures: + # - varchar length returned by cursor.description is an internal length, + # not visible length (#5725) + # - precision and scale (for decimal fields) (#5014) + # - auto_increment is not available in cursor.description + if DJANGO_VERSION < (3, 2, 0): + cursor.execute( + """ + SELECT + column_name, data_type, character_maximum_length, + numeric_precision, numeric_scale, extra, column_default, + CASE + WHEN column_type LIKE '%% unsigned' THEN 1 + ELSE 0 + END AS is_unsigned + FROM information_schema.columns + WHERE table_name = %s AND table_schema = DATABASE() + """, + [table_name], + ) + else: + cursor.execute( + """ + SELECT + column_name, data_type, character_maximum_length, + numeric_precision, numeric_scale, extra, column_default, + CASE + WHEN collation_name = %s THEN NULL + ELSE collation_name + END AS collation_name, + CASE + WHEN column_type LIKE '%% unsigned' THEN 1 + ELSE 0 + END AS is_unsigned + FROM information_schema.columns + WHERE table_name = %s AND table_schema = DATABASE() + """, + [default_column_collation, table_name], + ) + field_info = {line[0]: InfoLine(*line) for line in cursor.fetchall()} + + cursor.execute( + f"SELECT * FROM {self.connection.ops.quote_name(table_name)} LIMIT 1" + ) + + def to_int(i: Any) -> Optional[int]: + return int(i) if i is not None else i + + fields = [] + for line in cursor.description: + info = field_info[line[0]] + if DJANGO_VERSION < (3, 2, 0): + fields.append( + FieldInfo( + *line[:3], + to_int(info.max_len) or line[3], + to_int(info.num_prec) or line[4], + to_int(info.num_scale) or line[5], + line[6], + info.column_default, + info.extra, + info.is_unsigned, + line[0] in json_constraints, + ) + ) + else: + fields.append( + FieldInfo( + *line[:3], + to_int(info.max_len) or line[3], + to_int(info.num_prec) or line[4], + to_int(info.num_scale) or line[5], + line[6], + info.column_default, + info.collation, + info.extra, + info.is_unsigned, + line[0] in json_constraints, + ) + ) + return fields + + def get_indexes( + self, cursor: "CursorWrapper", table_name: str + ) -> Dict[int, Dict[str, bool]]: + """Return indexes from table.""" + cursor.execute(f"SHOW INDEX FROM {self.connection.ops.quote_name(table_name)}") + # Do a two-pass search for indexes: on first pass check which indexes + # are multicolumn, on second pass check which single-column indexes + # are present. + rows = list(cursor.fetchall()) + multicol_indexes = set() + for row in rows: + if row[3] > 1: + multicol_indexes.add(row[2]) + indexes: Dict[int, Dict[str, bool]] = {} + for row in rows: + if row[2] in multicol_indexes: + continue + if row[4] not in indexes: + indexes[row[4]] = {"primary_key": False, "unique": False} + # It's possible to have the unique and PK constraints in + # separate indexes. 
+ if row[2] == "PRIMARY": + indexes[row[4]]["primary_key"] = True + if not row[1]: + indexes[row[4]]["unique"] = True + return indexes + + def get_primary_key_column( + self, cursor: "CursorWrapper", table_name: str + ) -> Optional[int]: + """ + Returns the name of the primary key column for the given table + """ + for column in self.get_indexes(cursor, table_name).items(): + if column[1]["primary_key"]: + return column[0] + return None + + def get_sequences( + self, cursor: "CursorWrapper", table_name: str, table_fields: Any = () + ) -> List[Dict[str, str]]: + for field_info in self.get_table_description(cursor, table_name): + if "auto_increment" in field_info.extra: + # MySQL allows only one auto-increment column per table. + return [{"table": table_name, "column": field_info.name}] + return [] + + def get_relations( + self, cursor: "CursorWrapper", table_name: str + ) -> Dict[str, Tuple[str, str]]: + """ + Return a dictionary of {field_name: (field_name_other_table, other_table)} + representing all relationships to the given table. + """ + constraints = self.get_key_columns(cursor, table_name) + relations = {} + for my_fieldname, other_table, other_field in constraints: + relations[my_fieldname] = (other_field, other_table) + return relations + + def get_key_columns( + self, cursor: "CursorWrapper", table_name: str + ) -> List[Tuple[str, str, str]]: + """ + Return a list of (column_name, referenced_table_name, referenced_column_name) + for all key columns in the given table. + """ + key_columns: List[Any] = [] + cursor.execute( + """ + SELECT column_name, referenced_table_name, referenced_column_name + FROM information_schema.key_column_usage + WHERE table_name = %s + AND table_schema = DATABASE() + AND referenced_table_name IS NOT NULL + AND referenced_column_name IS NOT NULL""", + [table_name], + ) + key_columns.extend(cursor.fetchall()) + return key_columns + + def get_storage_engine(self, cursor: "CursorWrapper", table_name: str) -> str: + """ + Retrieve the storage engine for a given table. Return the default + storage engine if the table doesn't exist. + """ + cursor.execute( + "SELECT engine FROM information_schema.tables WHERE table_name = %s", + [table_name], + ) + result = cursor.fetchone() + # pylint: disable=protected-access + if not result: + return self.connection.features._mysql_storage_engine + # pylint: enable=protected-access + return result[0] + + def _parse_constraint_columns( + self, check_clause: Any, columns: Set[str] + ) -> OrderedSet: + check_columns: OrderedSet = OrderedSet() + statement = sqlparse.parse(check_clause)[0] + tokens = (token for token in statement.flatten() if not token.is_whitespace) + for token in tokens: + if ( + token.ttype == sqlparse.tokens.Name + and self.connection.ops.quote_name(token.value) == token.value + and token.value[1:-1] in columns + ): + check_columns.add(token.value[1:-1]) + return check_columns + + def get_constraints( + self, cursor: "CursorWrapper", table_name: str + ) -> Dict[str, Any]: + """ + Retrieve any constraints or keys (unique, pk, fk, check, index) across + one or more columns. 
+ """ + constraints: Dict[str, Any] = {} + # Get the actual constraint names and columns + name_query = """ + SELECT kc.`constraint_name`, kc.`column_name`, + kc.`referenced_table_name`, kc.`referenced_column_name` + FROM information_schema.key_column_usage AS kc + WHERE + kc.table_schema = DATABASE() AND + kc.table_name = %s + ORDER BY kc.`ordinal_position` + """ + cursor.execute(name_query, [table_name]) + for constraint, column, ref_table, ref_column in cursor.fetchall(): + if constraint not in constraints: + constraints[constraint] = { + "columns": OrderedSet(), + "primary_key": False, + "unique": False, + "index": False, + "check": False, + "foreign_key": (ref_table, ref_column) if ref_column else None, + } + if self.connection.features.supports_index_column_ordering: + constraints[constraint]["orders"] = [] + constraints[constraint]["columns"].add(column) + # Now get the constraint types + type_query = """ + SELECT c.constraint_name, c.constraint_type + FROM information_schema.table_constraints AS c + WHERE + c.table_schema = DATABASE() AND + c.table_name = %s + """ + cursor.execute(type_query, [table_name]) + for constraint, kind in cursor.fetchall(): + if kind.lower() == "primary key": + constraints[constraint]["primary_key"] = True + constraints[constraint]["unique"] = True + elif kind.lower() == "unique": + constraints[constraint]["unique"] = True + # Add check constraints. + if self.connection.features.can_introspect_check_constraints: + unnamed_constraints_index = 0 + columns = { + info.name for info in self.get_table_description(cursor, table_name) + } + type_query = """ + SELECT cc.constraint_name, cc.check_clause + FROM + information_schema.check_constraints AS cc, + information_schema.table_constraints AS tc + WHERE + cc.constraint_schema = DATABASE() AND + tc.table_schema = cc.constraint_schema AND + cc.constraint_name = tc.constraint_name AND + tc.constraint_type = 'CHECK' AND + tc.table_name = %s + """ + cursor.execute(type_query, [table_name]) + for constraint, check_clause in cursor.fetchall(): + constraint_columns = self._parse_constraint_columns( + check_clause, columns + ) + # Ensure uniqueness of unnamed constraints. Unnamed unique + # and check columns constraints have the same name as + # a column. 
+ if set(constraint_columns) == {constraint}: + unnamed_constraints_index += 1 + constraint = f"__unnamed_constraint_{unnamed_constraints_index}__" + constraints[constraint] = { + "columns": constraint_columns, + "primary_key": False, + "unique": False, + "index": False, + "check": True, + "foreign_key": None, + } + # Now add in the indexes + cursor.execute(f"SHOW INDEX FROM {self.connection.ops.quote_name(table_name)}") + for _, _, index, _, column, order, type_ in [ + x[:6] + (x[10],) for x in cursor.fetchall() + ]: + if index not in constraints: + constraints[index] = { + "columns": OrderedSet(), + "primary_key": False, + "unique": False, + "check": False, + "foreign_key": None, + } + if self.connection.features.supports_index_column_ordering: + constraints[index]["orders"] = [] + constraints[index]["index"] = True + constraints[index]["type"] = ( + Index.suffix if type_ == "BTREE" else type_.lower() + ) + constraints[index]["columns"].add(column) + if self.connection.features.supports_index_column_ordering: + constraints[index]["orders"].append("DESC" if order == "D" else "ASC") + # Convert the sorted sets to lists + for constraint in constraints.values(): + constraint["columns"] = list(constraint["columns"]) + return constraints diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/operations.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/operations.py new file mode 100644 index 0000000..02d296c --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/operations.py @@ -0,0 +1,104 @@ +# Copyright (c) 2020, 2022, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +# mypy: disable-error-code="override,attr-defined" + +"""Database Operations.""" + +from datetime import datetime, time, timezone +from typing import Optional + +from django.conf import settings +from django.db.backends.mysql.operations import ( + DatabaseOperations as MySQLDatabaseOperations, +) +from django.utils import timezone as django_timezone + +try: + from _mysql_connector import datetime_to_mysql, time_to_mysql +except ImportError: + HAVE_CEXT = False +else: + HAVE_CEXT = True + + +class DatabaseOperations(MySQLDatabaseOperations): + """Database Operations class.""" + + compiler_module = "mysql.connector.django.compiler" + + def regex_lookup(self, lookup_type: str) -> str: + """Return the string to use in a query when performing regular + expression lookup.""" + if self.connection.mysql_version < (8, 0, 0): + if lookup_type == "regex": + return "%s REGEXP BINARY %s" + return "%s REGEXP %s" + + match_option = "c" if lookup_type == "regex" else "i" + return f"REGEXP_LIKE(%s, %s, '{match_option}')" + + def adapt_datetimefield_value(self, value: Optional[datetime]) -> Optional[bytes]: + """Transform a datetime value to an object compatible with what is + expected by the backend driver for datetime columns.""" + return self.value_to_db_datetime(value) + + def value_to_db_datetime(self, value: Optional[datetime]) -> Optional[bytes]: + """Convert value to MySQL DATETIME.""" + ans: Optional[bytes] = None + if value is None: + return ans + # MySQL doesn't support tz-aware times + if django_timezone.is_aware(value): + if settings.USE_TZ: + value = value.astimezone(timezone.utc).replace(tzinfo=None) + else: + raise ValueError("MySQL backend does not support timezone-aware times") + if not self.connection.features.supports_microsecond_precision: + value = value.replace(microsecond=0) + if not self.connection.use_pure: + return datetime_to_mysql(value) + return self.connection.converter.to_mysql(value) + + def adapt_timefield_value(self, value: Optional[time]) -> Optional[bytes]: + """Transform a time value to an object compatible with what is expected + by the backend driver for time columns.""" + return self.value_to_db_time(value) + + def value_to_db_time(self, value: Optional[time]) -> Optional[bytes]: + """Convert value to MySQL TIME.""" + if value is None: + return None + + # MySQL doesn't support tz-aware times + if django_timezone.is_aware(value): + raise ValueError("MySQL backend does not support timezone-aware times") + + if not self.connection.use_pure: + return time_to_mysql(value) + return self.connection.converter.to_mysql(value) diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/schema.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/schema.py new file mode 100644 index 0000000..4d4f454 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/schema.py @@ -0,0 +1,59 @@ +# Copyright (c) 2020, 2022, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. 
+# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +# mypy: disable-error-code="override" + +"""Database schema editor.""" +from typing import Any + +from django.db.backends.mysql.schema import ( + DatabaseSchemaEditor as MySQLDatabaseSchemaEditor, +) + + +class DatabaseSchemaEditor(MySQLDatabaseSchemaEditor): + """This class is responsible for emitting schema-changing statements to the + databases. + """ + + def quote_value(self, value: Any) -> Any: + """Quote value.""" + self.connection.ensure_connection() + if isinstance(value, str): + value = value.replace("%", "%%") + quoted = self.connection.connection.converter.escape(value) + if isinstance(value, str) and isinstance(quoted, bytes): + quoted = quoted.decode() + return quoted + + def prepare_default(self, value: Any) -> Any: + """Implement the required abstract method. + + MySQL has requires_literal_defaults=False, therefore return the value. + """ + return value diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/validation.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/validation.py new file mode 100644 index 0000000..9096e1c --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/django/validation.py @@ -0,0 +1,33 @@ +# Copyright (c) 2020, 2022, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
+# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""Backend specific database validation.""" + +from django.db.backends.mysql.validation import DatabaseValidation + +__all__ = ["DatabaseValidation"] diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/errorcode.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/errorcode.py new file mode 100644 index 0000000..39fdb1b --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/errorcode.py @@ -0,0 +1,1877 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 2013, 2022, Oracle and/or its affiliates. All rights reserved. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""This module contains the MySQL Server and Client error codes.""" + +# This file was auto-generated. 
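+# Illustrative usage (not part of this generated module): client code normally
+# matches the numeric ``errno`` carried by Connector/Python exceptions against
+# these symbolic names rather than hard-coding the numbers, roughly:
+#
+#   import mysql.connector
+#   from mysql.connector import errorcode
+#
+#   try:
+#       cnx = mysql.connector.connect(user="appuser", database="mydb")
+#   except mysql.connector.Error as err:
+#       if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
+#           print("access denied: check user name and password")
+#       elif err.errno == errorcode.ER_BAD_DB_ERROR:
+#           print("database does not exist")
+#       else:
+#           raise
+#   else:
+#       cnx.close()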
+_GENERATED_ON = "2021-08-11" +_MYSQL_VERSION = (8, 0, 27) + +# Start MySQL Errors +OBSOLETE_ER_HASHCHK = 1000 +OBSOLETE_ER_NISAMCHK = 1001 +ER_NO = 1002 +ER_YES = 1003 +ER_CANT_CREATE_FILE = 1004 +ER_CANT_CREATE_TABLE = 1005 +ER_CANT_CREATE_DB = 1006 +ER_DB_CREATE_EXISTS = 1007 +ER_DB_DROP_EXISTS = 1008 +OBSOLETE_ER_DB_DROP_DELETE = 1009 +ER_DB_DROP_RMDIR = 1010 +OBSOLETE_ER_CANT_DELETE_FILE = 1011 +ER_CANT_FIND_SYSTEM_REC = 1012 +ER_CANT_GET_STAT = 1013 +OBSOLETE_ER_CANT_GET_WD = 1014 +ER_CANT_LOCK = 1015 +ER_CANT_OPEN_FILE = 1016 +ER_FILE_NOT_FOUND = 1017 +ER_CANT_READ_DIR = 1018 +OBSOLETE_ER_CANT_SET_WD = 1019 +ER_CHECKREAD = 1020 +OBSOLETE_ER_DISK_FULL = 1021 +ER_DUP_KEY = 1022 +OBSOLETE_ER_ERROR_ON_CLOSE = 1023 +ER_ERROR_ON_READ = 1024 +ER_ERROR_ON_RENAME = 1025 +ER_ERROR_ON_WRITE = 1026 +ER_FILE_USED = 1027 +OBSOLETE_ER_FILSORT_ABORT = 1028 +OBSOLETE_ER_FORM_NOT_FOUND = 1029 +ER_GET_ERRNO = 1030 +ER_ILLEGAL_HA = 1031 +ER_KEY_NOT_FOUND = 1032 +ER_NOT_FORM_FILE = 1033 +ER_NOT_KEYFILE = 1034 +ER_OLD_KEYFILE = 1035 +ER_OPEN_AS_READONLY = 1036 +ER_OUTOFMEMORY = 1037 +ER_OUT_OF_SORTMEMORY = 1038 +OBSOLETE_ER_UNEXPECTED_EOF = 1039 +ER_CON_COUNT_ERROR = 1040 +ER_OUT_OF_RESOURCES = 1041 +ER_BAD_HOST_ERROR = 1042 +ER_HANDSHAKE_ERROR = 1043 +ER_DBACCESS_DENIED_ERROR = 1044 +ER_ACCESS_DENIED_ERROR = 1045 +ER_NO_DB_ERROR = 1046 +ER_UNKNOWN_COM_ERROR = 1047 +ER_BAD_NULL_ERROR = 1048 +ER_BAD_DB_ERROR = 1049 +ER_TABLE_EXISTS_ERROR = 1050 +ER_BAD_TABLE_ERROR = 1051 +ER_NON_UNIQ_ERROR = 1052 +ER_SERVER_SHUTDOWN = 1053 +ER_BAD_FIELD_ERROR = 1054 +ER_WRONG_FIELD_WITH_GROUP = 1055 +ER_WRONG_GROUP_FIELD = 1056 +ER_WRONG_SUM_SELECT = 1057 +ER_WRONG_VALUE_COUNT = 1058 +ER_TOO_LONG_IDENT = 1059 +ER_DUP_FIELDNAME = 1060 +ER_DUP_KEYNAME = 1061 +ER_DUP_ENTRY = 1062 +ER_WRONG_FIELD_SPEC = 1063 +ER_PARSE_ERROR = 1064 +ER_EMPTY_QUERY = 1065 +ER_NONUNIQ_TABLE = 1066 +ER_INVALID_DEFAULT = 1067 +ER_MULTIPLE_PRI_KEY = 1068 +ER_TOO_MANY_KEYS = 1069 +ER_TOO_MANY_KEY_PARTS = 1070 +ER_TOO_LONG_KEY = 1071 +ER_KEY_COLUMN_DOES_NOT_EXITS = 1072 +ER_BLOB_USED_AS_KEY = 1073 +ER_TOO_BIG_FIELDLENGTH = 1074 +ER_WRONG_AUTO_KEY = 1075 +ER_READY = 1076 +OBSOLETE_ER_NORMAL_SHUTDOWN = 1077 +OBSOLETE_ER_GOT_SIGNAL = 1078 +ER_SHUTDOWN_COMPLETE = 1079 +ER_FORCING_CLOSE = 1080 +ER_IPSOCK_ERROR = 1081 +ER_NO_SUCH_INDEX = 1082 +ER_WRONG_FIELD_TERMINATORS = 1083 +ER_BLOBS_AND_NO_TERMINATED = 1084 +ER_TEXTFILE_NOT_READABLE = 1085 +ER_FILE_EXISTS_ERROR = 1086 +ER_LOAD_INFO = 1087 +ER_ALTER_INFO = 1088 +ER_WRONG_SUB_KEY = 1089 +ER_CANT_REMOVE_ALL_FIELDS = 1090 +ER_CANT_DROP_FIELD_OR_KEY = 1091 +ER_INSERT_INFO = 1092 +ER_UPDATE_TABLE_USED = 1093 +ER_NO_SUCH_THREAD = 1094 +ER_KILL_DENIED_ERROR = 1095 +ER_NO_TABLES_USED = 1096 +ER_TOO_BIG_SET = 1097 +ER_NO_UNIQUE_LOGFILE = 1098 +ER_TABLE_NOT_LOCKED_FOR_WRITE = 1099 +ER_TABLE_NOT_LOCKED = 1100 +ER_BLOB_CANT_HAVE_DEFAULT = 1101 +ER_WRONG_DB_NAME = 1102 +ER_WRONG_TABLE_NAME = 1103 +ER_TOO_BIG_SELECT = 1104 +ER_UNKNOWN_ERROR = 1105 +ER_UNKNOWN_PROCEDURE = 1106 +ER_WRONG_PARAMCOUNT_TO_PROCEDURE = 1107 +ER_WRONG_PARAMETERS_TO_PROCEDURE = 1108 +ER_UNKNOWN_TABLE = 1109 +ER_FIELD_SPECIFIED_TWICE = 1110 +ER_INVALID_GROUP_FUNC_USE = 1111 +ER_UNSUPPORTED_EXTENSION = 1112 +ER_TABLE_MUST_HAVE_COLUMNS = 1113 +ER_RECORD_FILE_FULL = 1114 +ER_UNKNOWN_CHARACTER_SET = 1115 +ER_TOO_MANY_TABLES = 1116 +ER_TOO_MANY_FIELDS = 1117 +ER_TOO_BIG_ROWSIZE = 1118 +ER_STACK_OVERRUN = 1119 +ER_WRONG_OUTER_JOIN_UNUSED = 1120 +ER_NULL_COLUMN_IN_INDEX = 1121 +ER_CANT_FIND_UDF = 1122 +ER_CANT_INITIALIZE_UDF = 1123 +ER_UDF_NO_PATHS = 
1124 +ER_UDF_EXISTS = 1125 +ER_CANT_OPEN_LIBRARY = 1126 +ER_CANT_FIND_DL_ENTRY = 1127 +ER_FUNCTION_NOT_DEFINED = 1128 +ER_HOST_IS_BLOCKED = 1129 +ER_HOST_NOT_PRIVILEGED = 1130 +ER_PASSWORD_ANONYMOUS_USER = 1131 +ER_PASSWORD_NOT_ALLOWED = 1132 +ER_PASSWORD_NO_MATCH = 1133 +ER_UPDATE_INFO = 1134 +ER_CANT_CREATE_THREAD = 1135 +ER_WRONG_VALUE_COUNT_ON_ROW = 1136 +ER_CANT_REOPEN_TABLE = 1137 +ER_INVALID_USE_OF_NULL = 1138 +ER_REGEXP_ERROR = 1139 +ER_MIX_OF_GROUP_FUNC_AND_FIELDS = 1140 +ER_NONEXISTING_GRANT = 1141 +ER_TABLEACCESS_DENIED_ERROR = 1142 +ER_COLUMNACCESS_DENIED_ERROR = 1143 +ER_ILLEGAL_GRANT_FOR_TABLE = 1144 +ER_GRANT_WRONG_HOST_OR_USER = 1145 +ER_NO_SUCH_TABLE = 1146 +ER_NONEXISTING_TABLE_GRANT = 1147 +ER_NOT_ALLOWED_COMMAND = 1148 +ER_SYNTAX_ERROR = 1149 +OBSOLETE_ER_UNUSED1 = 1150 +OBSOLETE_ER_UNUSED2 = 1151 +ER_ABORTING_CONNECTION = 1152 +ER_NET_PACKET_TOO_LARGE = 1153 +ER_NET_READ_ERROR_FROM_PIPE = 1154 +ER_NET_FCNTL_ERROR = 1155 +ER_NET_PACKETS_OUT_OF_ORDER = 1156 +ER_NET_UNCOMPRESS_ERROR = 1157 +ER_NET_READ_ERROR = 1158 +ER_NET_READ_INTERRUPTED = 1159 +ER_NET_ERROR_ON_WRITE = 1160 +ER_NET_WRITE_INTERRUPTED = 1161 +ER_TOO_LONG_STRING = 1162 +ER_TABLE_CANT_HANDLE_BLOB = 1163 +ER_TABLE_CANT_HANDLE_AUTO_INCREMENT = 1164 +OBSOLETE_ER_UNUSED3 = 1165 +ER_WRONG_COLUMN_NAME = 1166 +ER_WRONG_KEY_COLUMN = 1167 +ER_WRONG_MRG_TABLE = 1168 +ER_DUP_UNIQUE = 1169 +ER_BLOB_KEY_WITHOUT_LENGTH = 1170 +ER_PRIMARY_CANT_HAVE_NULL = 1171 +ER_TOO_MANY_ROWS = 1172 +ER_REQUIRES_PRIMARY_KEY = 1173 +OBSOLETE_ER_NO_RAID_COMPILED = 1174 +ER_UPDATE_WITHOUT_KEY_IN_SAFE_MODE = 1175 +ER_KEY_DOES_NOT_EXITS = 1176 +ER_CHECK_NO_SUCH_TABLE = 1177 +ER_CHECK_NOT_IMPLEMENTED = 1178 +ER_CANT_DO_THIS_DURING_AN_TRANSACTION = 1179 +ER_ERROR_DURING_COMMIT = 1180 +ER_ERROR_DURING_ROLLBACK = 1181 +ER_ERROR_DURING_FLUSH_LOGS = 1182 +OBSOLETE_ER_ERROR_DURING_CHECKPOINT = 1183 +ER_NEW_ABORTING_CONNECTION = 1184 +OBSOLETE_ER_DUMP_NOT_IMPLEMENTED = 1185 +OBSOLETE_ER_FLUSH_MASTER_BINLOG_CLOSED = 1186 +OBSOLETE_ER_INDEX_REBUILD = 1187 +ER_MASTER = 1188 +ER_MASTER_NET_READ = 1189 +ER_MASTER_NET_WRITE = 1190 +ER_FT_MATCHING_KEY_NOT_FOUND = 1191 +ER_LOCK_OR_ACTIVE_TRANSACTION = 1192 +ER_UNKNOWN_SYSTEM_VARIABLE = 1193 +ER_CRASHED_ON_USAGE = 1194 +ER_CRASHED_ON_REPAIR = 1195 +ER_WARNING_NOT_COMPLETE_ROLLBACK = 1196 +ER_TRANS_CACHE_FULL = 1197 +OBSOLETE_ER_SLAVE_MUST_STOP = 1198 +ER_SLAVE_NOT_RUNNING = 1199 +ER_BAD_SLAVE = 1200 +ER_MASTER_INFO = 1201 +ER_SLAVE_THREAD = 1202 +ER_TOO_MANY_USER_CONNECTIONS = 1203 +ER_SET_CONSTANTS_ONLY = 1204 +ER_LOCK_WAIT_TIMEOUT = 1205 +ER_LOCK_TABLE_FULL = 1206 +ER_READ_ONLY_TRANSACTION = 1207 +OBSOLETE_ER_DROP_DB_WITH_READ_LOCK = 1208 +OBSOLETE_ER_CREATE_DB_WITH_READ_LOCK = 1209 +ER_WRONG_ARGUMENTS = 1210 +ER_NO_PERMISSION_TO_CREATE_USER = 1211 +OBSOLETE_ER_UNION_TABLES_IN_DIFFERENT_DIR = 1212 +ER_LOCK_DEADLOCK = 1213 +ER_TABLE_CANT_HANDLE_FT = 1214 +ER_CANNOT_ADD_FOREIGN = 1215 +ER_NO_REFERENCED_ROW = 1216 +ER_ROW_IS_REFERENCED = 1217 +ER_CONNECT_TO_MASTER = 1218 +OBSOLETE_ER_QUERY_ON_MASTER = 1219 +ER_ERROR_WHEN_EXECUTING_COMMAND = 1220 +ER_WRONG_USAGE = 1221 +ER_WRONG_NUMBER_OF_COLUMNS_IN_SELECT = 1222 +ER_CANT_UPDATE_WITH_READLOCK = 1223 +ER_MIXING_NOT_ALLOWED = 1224 +ER_DUP_ARGUMENT = 1225 +ER_USER_LIMIT_REACHED = 1226 +ER_SPECIFIC_ACCESS_DENIED_ERROR = 1227 +ER_LOCAL_VARIABLE = 1228 +ER_GLOBAL_VARIABLE = 1229 +ER_NO_DEFAULT = 1230 +ER_WRONG_VALUE_FOR_VAR = 1231 +ER_WRONG_TYPE_FOR_VAR = 1232 +ER_VAR_CANT_BE_READ = 1233 +ER_CANT_USE_OPTION_HERE = 1234 +ER_NOT_SUPPORTED_YET = 1235 
+ER_MASTER_FATAL_ERROR_READING_BINLOG = 1236 +ER_SLAVE_IGNORED_TABLE = 1237 +ER_INCORRECT_GLOBAL_LOCAL_VAR = 1238 +ER_WRONG_FK_DEF = 1239 +ER_KEY_REF_DO_NOT_MATCH_TABLE_REF = 1240 +ER_OPERAND_COLUMNS = 1241 +ER_SUBQUERY_NO_1_ROW = 1242 +ER_UNKNOWN_STMT_HANDLER = 1243 +ER_CORRUPT_HELP_DB = 1244 +OBSOLETE_ER_CYCLIC_REFERENCE = 1245 +ER_AUTO_CONVERT = 1246 +ER_ILLEGAL_REFERENCE = 1247 +ER_DERIVED_MUST_HAVE_ALIAS = 1248 +ER_SELECT_REDUCED = 1249 +ER_TABLENAME_NOT_ALLOWED_HERE = 1250 +ER_NOT_SUPPORTED_AUTH_MODE = 1251 +ER_SPATIAL_CANT_HAVE_NULL = 1252 +ER_COLLATION_CHARSET_MISMATCH = 1253 +OBSOLETE_ER_SLAVE_WAS_RUNNING = 1254 +OBSOLETE_ER_SLAVE_WAS_NOT_RUNNING = 1255 +ER_TOO_BIG_FOR_UNCOMPRESS = 1256 +ER_ZLIB_Z_MEM_ERROR = 1257 +ER_ZLIB_Z_BUF_ERROR = 1258 +ER_ZLIB_Z_DATA_ERROR = 1259 +ER_CUT_VALUE_GROUP_CONCAT = 1260 +ER_WARN_TOO_FEW_RECORDS = 1261 +ER_WARN_TOO_MANY_RECORDS = 1262 +ER_WARN_NULL_TO_NOTNULL = 1263 +ER_WARN_DATA_OUT_OF_RANGE = 1264 +WARN_DATA_TRUNCATED = 1265 +ER_WARN_USING_OTHER_HANDLER = 1266 +ER_CANT_AGGREGATE_2COLLATIONS = 1267 +OBSOLETE_ER_DROP_USER = 1268 +ER_REVOKE_GRANTS = 1269 +ER_CANT_AGGREGATE_3COLLATIONS = 1270 +ER_CANT_AGGREGATE_NCOLLATIONS = 1271 +ER_VARIABLE_IS_NOT_STRUCT = 1272 +ER_UNKNOWN_COLLATION = 1273 +ER_SLAVE_IGNORED_SSL_PARAMS = 1274 +OBSOLETE_ER_SERVER_IS_IN_SECURE_AUTH_MODE = 1275 +ER_WARN_FIELD_RESOLVED = 1276 +ER_BAD_SLAVE_UNTIL_COND = 1277 +ER_MISSING_SKIP_SLAVE = 1278 +ER_UNTIL_COND_IGNORED = 1279 +ER_WRONG_NAME_FOR_INDEX = 1280 +ER_WRONG_NAME_FOR_CATALOG = 1281 +OBSOLETE_ER_WARN_QC_RESIZE = 1282 +ER_BAD_FT_COLUMN = 1283 +ER_UNKNOWN_KEY_CACHE = 1284 +ER_WARN_HOSTNAME_WONT_WORK = 1285 +ER_UNKNOWN_STORAGE_ENGINE = 1286 +ER_WARN_DEPRECATED_SYNTAX = 1287 +ER_NON_UPDATABLE_TABLE = 1288 +ER_FEATURE_DISABLED = 1289 +ER_OPTION_PREVENTS_STATEMENT = 1290 +ER_DUPLICATED_VALUE_IN_TYPE = 1291 +ER_TRUNCATED_WRONG_VALUE = 1292 +OBSOLETE_ER_TOO_MUCH_AUTO_TIMESTAMP_COLS = 1293 +ER_INVALID_ON_UPDATE = 1294 +ER_UNSUPPORTED_PS = 1295 +ER_GET_ERRMSG = 1296 +ER_GET_TEMPORARY_ERRMSG = 1297 +ER_UNKNOWN_TIME_ZONE = 1298 +ER_WARN_INVALID_TIMESTAMP = 1299 +ER_INVALID_CHARACTER_STRING = 1300 +ER_WARN_ALLOWED_PACKET_OVERFLOWED = 1301 +ER_CONFLICTING_DECLARATIONS = 1302 +ER_SP_NO_RECURSIVE_CREATE = 1303 +ER_SP_ALREADY_EXISTS = 1304 +ER_SP_DOES_NOT_EXIST = 1305 +ER_SP_DROP_FAILED = 1306 +ER_SP_STORE_FAILED = 1307 +ER_SP_LILABEL_MISMATCH = 1308 +ER_SP_LABEL_REDEFINE = 1309 +ER_SP_LABEL_MISMATCH = 1310 +ER_SP_UNINIT_VAR = 1311 +ER_SP_BADSELECT = 1312 +ER_SP_BADRETURN = 1313 +ER_SP_BADSTATEMENT = 1314 +ER_UPDATE_LOG_DEPRECATED_IGNORED = 1315 +ER_UPDATE_LOG_DEPRECATED_TRANSLATED = 1316 +ER_QUERY_INTERRUPTED = 1317 +ER_SP_WRONG_NO_OF_ARGS = 1318 +ER_SP_COND_MISMATCH = 1319 +ER_SP_NORETURN = 1320 +ER_SP_NORETURNEND = 1321 +ER_SP_BAD_CURSOR_QUERY = 1322 +ER_SP_BAD_CURSOR_SELECT = 1323 +ER_SP_CURSOR_MISMATCH = 1324 +ER_SP_CURSOR_ALREADY_OPEN = 1325 +ER_SP_CURSOR_NOT_OPEN = 1326 +ER_SP_UNDECLARED_VAR = 1327 +ER_SP_WRONG_NO_OF_FETCH_ARGS = 1328 +ER_SP_FETCH_NO_DATA = 1329 +ER_SP_DUP_PARAM = 1330 +ER_SP_DUP_VAR = 1331 +ER_SP_DUP_COND = 1332 +ER_SP_DUP_CURS = 1333 +ER_SP_CANT_ALTER = 1334 +ER_SP_SUBSELECT_NYI = 1335 +ER_STMT_NOT_ALLOWED_IN_SF_OR_TRG = 1336 +ER_SP_VARCOND_AFTER_CURSHNDLR = 1337 +ER_SP_CURSOR_AFTER_HANDLER = 1338 +ER_SP_CASE_NOT_FOUND = 1339 +ER_FPARSER_TOO_BIG_FILE = 1340 +ER_FPARSER_BAD_HEADER = 1341 +ER_FPARSER_EOF_IN_COMMENT = 1342 +ER_FPARSER_ERROR_IN_PARAMETER = 1343 +ER_FPARSER_EOF_IN_UNKNOWN_PARAMETER = 1344 +ER_VIEW_NO_EXPLAIN = 1345 +OBSOLETE_ER_FRM_UNKNOWN_TYPE = 1346 
+ER_WRONG_OBJECT = 1347 +ER_NONUPDATEABLE_COLUMN = 1348 +OBSOLETE_ER_VIEW_SELECT_DERIVED_UNUSED = 1349 +ER_VIEW_SELECT_CLAUSE = 1350 +ER_VIEW_SELECT_VARIABLE = 1351 +ER_VIEW_SELECT_TMPTABLE = 1352 +ER_VIEW_WRONG_LIST = 1353 +ER_WARN_VIEW_MERGE = 1354 +ER_WARN_VIEW_WITHOUT_KEY = 1355 +ER_VIEW_INVALID = 1356 +ER_SP_NO_DROP_SP = 1357 +OBSOLETE_ER_SP_GOTO_IN_HNDLR = 1358 +ER_TRG_ALREADY_EXISTS = 1359 +ER_TRG_DOES_NOT_EXIST = 1360 +ER_TRG_ON_VIEW_OR_TEMP_TABLE = 1361 +ER_TRG_CANT_CHANGE_ROW = 1362 +ER_TRG_NO_SUCH_ROW_IN_TRG = 1363 +ER_NO_DEFAULT_FOR_FIELD = 1364 +ER_DIVISION_BY_ZERO = 1365 +ER_TRUNCATED_WRONG_VALUE_FOR_FIELD = 1366 +ER_ILLEGAL_VALUE_FOR_TYPE = 1367 +ER_VIEW_NONUPD_CHECK = 1368 +ER_VIEW_CHECK_FAILED = 1369 +ER_PROCACCESS_DENIED_ERROR = 1370 +ER_RELAY_LOG_FAIL = 1371 +OBSOLETE_ER_PASSWD_LENGTH = 1372 +ER_UNKNOWN_TARGET_BINLOG = 1373 +ER_IO_ERR_LOG_INDEX_READ = 1374 +ER_BINLOG_PURGE_PROHIBITED = 1375 +ER_FSEEK_FAIL = 1376 +ER_BINLOG_PURGE_FATAL_ERR = 1377 +ER_LOG_IN_USE = 1378 +ER_LOG_PURGE_UNKNOWN_ERR = 1379 +ER_RELAY_LOG_INIT = 1380 +ER_NO_BINARY_LOGGING = 1381 +ER_RESERVED_SYNTAX = 1382 +OBSOLETE_ER_WSAS_FAILED = 1383 +OBSOLETE_ER_DIFF_GROUPS_PROC = 1384 +OBSOLETE_ER_NO_GROUP_FOR_PROC = 1385 +OBSOLETE_ER_ORDER_WITH_PROC = 1386 +OBSOLETE_ER_LOGGING_PROHIBIT_CHANGING_OF = 1387 +OBSOLETE_ER_NO_FILE_MAPPING = 1388 +OBSOLETE_ER_WRONG_MAGIC = 1389 +ER_PS_MANY_PARAM = 1390 +ER_KEY_PART_0 = 1391 +ER_VIEW_CHECKSUM = 1392 +ER_VIEW_MULTIUPDATE = 1393 +ER_VIEW_NO_INSERT_FIELD_LIST = 1394 +ER_VIEW_DELETE_MERGE_VIEW = 1395 +ER_CANNOT_USER = 1396 +ER_XAER_NOTA = 1397 +ER_XAER_INVAL = 1398 +ER_XAER_RMFAIL = 1399 +ER_XAER_OUTSIDE = 1400 +ER_XAER_RMERR = 1401 +ER_XA_RBROLLBACK = 1402 +ER_NONEXISTING_PROC_GRANT = 1403 +ER_PROC_AUTO_GRANT_FAIL = 1404 +ER_PROC_AUTO_REVOKE_FAIL = 1405 +ER_DATA_TOO_LONG = 1406 +ER_SP_BAD_SQLSTATE = 1407 +ER_STARTUP = 1408 +ER_LOAD_FROM_FIXED_SIZE_ROWS_TO_VAR = 1409 +ER_CANT_CREATE_USER_WITH_GRANT = 1410 +ER_WRONG_VALUE_FOR_TYPE = 1411 +ER_TABLE_DEF_CHANGED = 1412 +ER_SP_DUP_HANDLER = 1413 +ER_SP_NOT_VAR_ARG = 1414 +ER_SP_NO_RETSET = 1415 +ER_CANT_CREATE_GEOMETRY_OBJECT = 1416 +OBSOLETE_ER_FAILED_ROUTINE_BREAK_BINLOG = 1417 +ER_BINLOG_UNSAFE_ROUTINE = 1418 +ER_BINLOG_CREATE_ROUTINE_NEED_SUPER = 1419 +OBSOLETE_ER_EXEC_STMT_WITH_OPEN_CURSOR = 1420 +ER_STMT_HAS_NO_OPEN_CURSOR = 1421 +ER_COMMIT_NOT_ALLOWED_IN_SF_OR_TRG = 1422 +ER_NO_DEFAULT_FOR_VIEW_FIELD = 1423 +ER_SP_NO_RECURSION = 1424 +ER_TOO_BIG_SCALE = 1425 +ER_TOO_BIG_PRECISION = 1426 +ER_M_BIGGER_THAN_D = 1427 +ER_WRONG_LOCK_OF_SYSTEM_TABLE = 1428 +ER_CONNECT_TO_FOREIGN_DATA_SOURCE = 1429 +ER_QUERY_ON_FOREIGN_DATA_SOURCE = 1430 +ER_FOREIGN_DATA_SOURCE_DOESNT_EXIST = 1431 +ER_FOREIGN_DATA_STRING_INVALID_CANT_CREATE = 1432 +ER_FOREIGN_DATA_STRING_INVALID = 1433 +OBSOLETE_ER_CANT_CREATE_FEDERATED_TABLE = 1434 +ER_TRG_IN_WRONG_SCHEMA = 1435 +ER_STACK_OVERRUN_NEED_MORE = 1436 +ER_TOO_LONG_BODY = 1437 +ER_WARN_CANT_DROP_DEFAULT_KEYCACHE = 1438 +ER_TOO_BIG_DISPLAYWIDTH = 1439 +ER_XAER_DUPID = 1440 +ER_DATETIME_FUNCTION_OVERFLOW = 1441 +ER_CANT_UPDATE_USED_TABLE_IN_SF_OR_TRG = 1442 +ER_VIEW_PREVENT_UPDATE = 1443 +ER_PS_NO_RECURSION = 1444 +ER_SP_CANT_SET_AUTOCOMMIT = 1445 +OBSOLETE_ER_MALFORMED_DEFINER = 1446 +ER_VIEW_FRM_NO_USER = 1447 +ER_VIEW_OTHER_USER = 1448 +ER_NO_SUCH_USER = 1449 +ER_FORBID_SCHEMA_CHANGE = 1450 +ER_ROW_IS_REFERENCED_2 = 1451 +ER_NO_REFERENCED_ROW_2 = 1452 +ER_SP_BAD_VAR_SHADOW = 1453 +ER_TRG_NO_DEFINER = 1454 +ER_OLD_FILE_FORMAT = 1455 +ER_SP_RECURSION_LIMIT = 1456 
+OBSOLETE_ER_SP_PROC_TABLE_CORRUPT = 1457 +ER_SP_WRONG_NAME = 1458 +ER_TABLE_NEEDS_UPGRADE = 1459 +ER_SP_NO_AGGREGATE = 1460 +ER_MAX_PREPARED_STMT_COUNT_REACHED = 1461 +ER_VIEW_RECURSIVE = 1462 +ER_NON_GROUPING_FIELD_USED = 1463 +ER_TABLE_CANT_HANDLE_SPKEYS = 1464 +ER_NO_TRIGGERS_ON_SYSTEM_SCHEMA = 1465 +ER_REMOVED_SPACES = 1466 +ER_AUTOINC_READ_FAILED = 1467 +ER_USERNAME = 1468 +ER_HOSTNAME = 1469 +ER_WRONG_STRING_LENGTH = 1470 +ER_NON_INSERTABLE_TABLE = 1471 +ER_ADMIN_WRONG_MRG_TABLE = 1472 +ER_TOO_HIGH_LEVEL_OF_NESTING_FOR_SELECT = 1473 +ER_NAME_BECOMES_EMPTY = 1474 +ER_AMBIGUOUS_FIELD_TERM = 1475 +ER_FOREIGN_SERVER_EXISTS = 1476 +ER_FOREIGN_SERVER_DOESNT_EXIST = 1477 +ER_ILLEGAL_HA_CREATE_OPTION = 1478 +ER_PARTITION_REQUIRES_VALUES_ERROR = 1479 +ER_PARTITION_WRONG_VALUES_ERROR = 1480 +ER_PARTITION_MAXVALUE_ERROR = 1481 +OBSOLETE_ER_PARTITION_SUBPARTITION_ERROR = 1482 +OBSOLETE_ER_PARTITION_SUBPART_MIX_ERROR = 1483 +ER_PARTITION_WRONG_NO_PART_ERROR = 1484 +ER_PARTITION_WRONG_NO_SUBPART_ERROR = 1485 +ER_WRONG_EXPR_IN_PARTITION_FUNC_ERROR = 1486 +OBSOLETE_ER_NO_CONST_EXPR_IN_RANGE_OR_LIST_ERROR = 1487 +ER_FIELD_NOT_FOUND_PART_ERROR = 1488 +OBSOLETE_ER_LIST_OF_FIELDS_ONLY_IN_HASH_ERROR = 1489 +ER_INCONSISTENT_PARTITION_INFO_ERROR = 1490 +ER_PARTITION_FUNC_NOT_ALLOWED_ERROR = 1491 +ER_PARTITIONS_MUST_BE_DEFINED_ERROR = 1492 +ER_RANGE_NOT_INCREASING_ERROR = 1493 +ER_INCONSISTENT_TYPE_OF_FUNCTIONS_ERROR = 1494 +ER_MULTIPLE_DEF_CONST_IN_LIST_PART_ERROR = 1495 +ER_PARTITION_ENTRY_ERROR = 1496 +ER_MIX_HANDLER_ERROR = 1497 +ER_PARTITION_NOT_DEFINED_ERROR = 1498 +ER_TOO_MANY_PARTITIONS_ERROR = 1499 +ER_SUBPARTITION_ERROR = 1500 +ER_CANT_CREATE_HANDLER_FILE = 1501 +ER_BLOB_FIELD_IN_PART_FUNC_ERROR = 1502 +ER_UNIQUE_KEY_NEED_ALL_FIELDS_IN_PF = 1503 +ER_NO_PARTS_ERROR = 1504 +ER_PARTITION_MGMT_ON_NONPARTITIONED = 1505 +ER_FOREIGN_KEY_ON_PARTITIONED = 1506 +ER_DROP_PARTITION_NON_EXISTENT = 1507 +ER_DROP_LAST_PARTITION = 1508 +ER_COALESCE_ONLY_ON_HASH_PARTITION = 1509 +ER_REORG_HASH_ONLY_ON_SAME_NO = 1510 +ER_REORG_NO_PARAM_ERROR = 1511 +ER_ONLY_ON_RANGE_LIST_PARTITION = 1512 +ER_ADD_PARTITION_SUBPART_ERROR = 1513 +ER_ADD_PARTITION_NO_NEW_PARTITION = 1514 +ER_COALESCE_PARTITION_NO_PARTITION = 1515 +ER_REORG_PARTITION_NOT_EXIST = 1516 +ER_SAME_NAME_PARTITION = 1517 +ER_NO_BINLOG_ERROR = 1518 +ER_CONSECUTIVE_REORG_PARTITIONS = 1519 +ER_REORG_OUTSIDE_RANGE = 1520 +ER_PARTITION_FUNCTION_FAILURE = 1521 +OBSOLETE_ER_PART_STATE_ERROR = 1522 +ER_LIMITED_PART_RANGE = 1523 +ER_PLUGIN_IS_NOT_LOADED = 1524 +ER_WRONG_VALUE = 1525 +ER_NO_PARTITION_FOR_GIVEN_VALUE = 1526 +ER_FILEGROUP_OPTION_ONLY_ONCE = 1527 +ER_CREATE_FILEGROUP_FAILED = 1528 +ER_DROP_FILEGROUP_FAILED = 1529 +ER_TABLESPACE_AUTO_EXTEND_ERROR = 1530 +ER_WRONG_SIZE_NUMBER = 1531 +ER_SIZE_OVERFLOW_ERROR = 1532 +ER_ALTER_FILEGROUP_FAILED = 1533 +ER_BINLOG_ROW_LOGGING_FAILED = 1534 +OBSOLETE_ER_BINLOG_ROW_WRONG_TABLE_DEF = 1535 +OBSOLETE_ER_BINLOG_ROW_RBR_TO_SBR = 1536 +ER_EVENT_ALREADY_EXISTS = 1537 +OBSOLETE_ER_EVENT_STORE_FAILED = 1538 +ER_EVENT_DOES_NOT_EXIST = 1539 +OBSOLETE_ER_EVENT_CANT_ALTER = 1540 +OBSOLETE_ER_EVENT_DROP_FAILED = 1541 +ER_EVENT_INTERVAL_NOT_POSITIVE_OR_TOO_BIG = 1542 +ER_EVENT_ENDS_BEFORE_STARTS = 1543 +ER_EVENT_EXEC_TIME_IN_THE_PAST = 1544 +OBSOLETE_ER_EVENT_OPEN_TABLE_FAILED = 1545 +OBSOLETE_ER_EVENT_NEITHER_M_EXPR_NOR_M_AT = 1546 +OBSOLETE_ER_COL_COUNT_DOESNT_MATCH_CORRUPTED = 1547 +OBSOLETE_ER_CANNOT_LOAD_FROM_TABLE = 1548 +OBSOLETE_ER_EVENT_CANNOT_DELETE = 1549 +OBSOLETE_ER_EVENT_COMPILE_ERROR = 1550 +ER_EVENT_SAME_NAME = 
1551 +OBSOLETE_ER_EVENT_DATA_TOO_LONG = 1552 +ER_DROP_INDEX_FK = 1553 +ER_WARN_DEPRECATED_SYNTAX_WITH_VER = 1554 +OBSOLETE_ER_CANT_WRITE_LOCK_LOG_TABLE = 1555 +ER_CANT_LOCK_LOG_TABLE = 1556 +ER_FOREIGN_DUPLICATE_KEY_OLD_UNUSED = 1557 +ER_COL_COUNT_DOESNT_MATCH_PLEASE_UPDATE = 1558 +OBSOLETE_ER_TEMP_TABLE_PREVENTS_SWITCH_OUT_OF_RBR = 1559 +ER_STORED_FUNCTION_PREVENTS_SWITCH_BINLOG_FORMAT = 1560 +OBSOLETE_ER_NDB_CANT_SWITCH_BINLOG_FORMAT = 1561 +ER_PARTITION_NO_TEMPORARY = 1562 +ER_PARTITION_CONST_DOMAIN_ERROR = 1563 +ER_PARTITION_FUNCTION_IS_NOT_ALLOWED = 1564 +OBSOLETE_ER_DDL_LOG_ERROR_UNUSED = 1565 +ER_NULL_IN_VALUES_LESS_THAN = 1566 +ER_WRONG_PARTITION_NAME = 1567 +ER_CANT_CHANGE_TX_CHARACTERISTICS = 1568 +ER_DUP_ENTRY_AUTOINCREMENT_CASE = 1569 +OBSOLETE_ER_EVENT_MODIFY_QUEUE_ERROR = 1570 +ER_EVENT_SET_VAR_ERROR = 1571 +ER_PARTITION_MERGE_ERROR = 1572 +OBSOLETE_ER_CANT_ACTIVATE_LOG = 1573 +OBSOLETE_ER_RBR_NOT_AVAILABLE = 1574 +ER_BASE64_DECODE_ERROR = 1575 +ER_EVENT_RECURSION_FORBIDDEN = 1576 +OBSOLETE_ER_EVENTS_DB_ERROR = 1577 +ER_ONLY_INTEGERS_ALLOWED = 1578 +ER_UNSUPORTED_LOG_ENGINE = 1579 +ER_BAD_LOG_STATEMENT = 1580 +ER_CANT_RENAME_LOG_TABLE = 1581 +ER_WRONG_PARAMCOUNT_TO_NATIVE_FCT = 1582 +ER_WRONG_PARAMETERS_TO_NATIVE_FCT = 1583 +ER_WRONG_PARAMETERS_TO_STORED_FCT = 1584 +ER_NATIVE_FCT_NAME_COLLISION = 1585 +ER_DUP_ENTRY_WITH_KEY_NAME = 1586 +ER_BINLOG_PURGE_EMFILE = 1587 +ER_EVENT_CANNOT_CREATE_IN_THE_PAST = 1588 +ER_EVENT_CANNOT_ALTER_IN_THE_PAST = 1589 +OBSOLETE_ER_SLAVE_INCIDENT = 1590 +ER_NO_PARTITION_FOR_GIVEN_VALUE_SILENT = 1591 +ER_BINLOG_UNSAFE_STATEMENT = 1592 +ER_BINLOG_FATAL_ERROR = 1593 +OBSOLETE_ER_SLAVE_RELAY_LOG_READ_FAILURE = 1594 +OBSOLETE_ER_SLAVE_RELAY_LOG_WRITE_FAILURE = 1595 +OBSOLETE_ER_SLAVE_CREATE_EVENT_FAILURE = 1596 +OBSOLETE_ER_SLAVE_MASTER_COM_FAILURE = 1597 +ER_BINLOG_LOGGING_IMPOSSIBLE = 1598 +ER_VIEW_NO_CREATION_CTX = 1599 +ER_VIEW_INVALID_CREATION_CTX = 1600 +OBSOLETE_ER_SR_INVALID_CREATION_CTX = 1601 +ER_TRG_CORRUPTED_FILE = 1602 +ER_TRG_NO_CREATION_CTX = 1603 +ER_TRG_INVALID_CREATION_CTX = 1604 +ER_EVENT_INVALID_CREATION_CTX = 1605 +ER_TRG_CANT_OPEN_TABLE = 1606 +OBSOLETE_ER_CANT_CREATE_SROUTINE = 1607 +OBSOLETE_ER_NEVER_USED = 1608 +ER_NO_FORMAT_DESCRIPTION_EVENT_BEFORE_BINLOG_STATEMENT = 1609 +ER_SLAVE_CORRUPT_EVENT = 1610 +OBSOLETE_ER_LOAD_DATA_INVALID_COLUMN_UNUSED = 1611 +ER_LOG_PURGE_NO_FILE = 1612 +ER_XA_RBTIMEOUT = 1613 +ER_XA_RBDEADLOCK = 1614 +ER_NEED_REPREPARE = 1615 +OBSOLETE_ER_DELAYED_NOT_SUPPORTED = 1616 +WARN_NO_MASTER_INFO = 1617 +WARN_OPTION_IGNORED = 1618 +ER_PLUGIN_DELETE_BUILTIN = 1619 +WARN_PLUGIN_BUSY = 1620 +ER_VARIABLE_IS_READONLY = 1621 +ER_WARN_ENGINE_TRANSACTION_ROLLBACK = 1622 +OBSOLETE_ER_SLAVE_HEARTBEAT_FAILURE = 1623 +ER_SLAVE_HEARTBEAT_VALUE_OUT_OF_RANGE = 1624 +ER_NDB_REPLICATION_SCHEMA_ERROR = 1625 +ER_CONFLICT_FN_PARSE_ERROR = 1626 +ER_EXCEPTIONS_WRITE_ERROR = 1627 +ER_TOO_LONG_TABLE_COMMENT = 1628 +ER_TOO_LONG_FIELD_COMMENT = 1629 +ER_FUNC_INEXISTENT_NAME_COLLISION = 1630 +ER_DATABASE_NAME = 1631 +ER_TABLE_NAME = 1632 +ER_PARTITION_NAME = 1633 +ER_SUBPARTITION_NAME = 1634 +ER_TEMPORARY_NAME = 1635 +ER_RENAMED_NAME = 1636 +ER_TOO_MANY_CONCURRENT_TRXS = 1637 +WARN_NON_ASCII_SEPARATOR_NOT_IMPLEMENTED = 1638 +ER_DEBUG_SYNC_TIMEOUT = 1639 +ER_DEBUG_SYNC_HIT_LIMIT = 1640 +ER_DUP_SIGNAL_SET = 1641 +ER_SIGNAL_WARN = 1642 +ER_SIGNAL_NOT_FOUND = 1643 +ER_SIGNAL_EXCEPTION = 1644 +ER_RESIGNAL_WITHOUT_ACTIVE_HANDLER = 1645 +ER_SIGNAL_BAD_CONDITION_TYPE = 1646 +WARN_COND_ITEM_TRUNCATED = 1647 +ER_COND_ITEM_TOO_LONG = 1648 
+ER_UNKNOWN_LOCALE = 1649 +ER_SLAVE_IGNORE_SERVER_IDS = 1650 +OBSOLETE_ER_QUERY_CACHE_DISABLED = 1651 +ER_SAME_NAME_PARTITION_FIELD = 1652 +ER_PARTITION_COLUMN_LIST_ERROR = 1653 +ER_WRONG_TYPE_COLUMN_VALUE_ERROR = 1654 +ER_TOO_MANY_PARTITION_FUNC_FIELDS_ERROR = 1655 +ER_MAXVALUE_IN_VALUES_IN = 1656 +ER_TOO_MANY_VALUES_ERROR = 1657 +ER_ROW_SINGLE_PARTITION_FIELD_ERROR = 1658 +ER_FIELD_TYPE_NOT_ALLOWED_AS_PARTITION_FIELD = 1659 +ER_PARTITION_FIELDS_TOO_LONG = 1660 +ER_BINLOG_ROW_ENGINE_AND_STMT_ENGINE = 1661 +ER_BINLOG_ROW_MODE_AND_STMT_ENGINE = 1662 +ER_BINLOG_UNSAFE_AND_STMT_ENGINE = 1663 +ER_BINLOG_ROW_INJECTION_AND_STMT_ENGINE = 1664 +ER_BINLOG_STMT_MODE_AND_ROW_ENGINE = 1665 +ER_BINLOG_ROW_INJECTION_AND_STMT_MODE = 1666 +ER_BINLOG_MULTIPLE_ENGINES_AND_SELF_LOGGING_ENGINE = 1667 +ER_BINLOG_UNSAFE_LIMIT = 1668 +OBSOLETE_ER_UNUSED4 = 1669 +ER_BINLOG_UNSAFE_SYSTEM_TABLE = 1670 +ER_BINLOG_UNSAFE_AUTOINC_COLUMNS = 1671 +ER_BINLOG_UNSAFE_UDF = 1672 +ER_BINLOG_UNSAFE_SYSTEM_VARIABLE = 1673 +ER_BINLOG_UNSAFE_SYSTEM_FUNCTION = 1674 +ER_BINLOG_UNSAFE_NONTRANS_AFTER_TRANS = 1675 +ER_MESSAGE_AND_STATEMENT = 1676 +OBSOLETE_ER_SLAVE_CONVERSION_FAILED = 1677 +ER_SLAVE_CANT_CREATE_CONVERSION = 1678 +ER_INSIDE_TRANSACTION_PREVENTS_SWITCH_BINLOG_FORMAT = 1679 +ER_PATH_LENGTH = 1680 +ER_WARN_DEPRECATED_SYNTAX_NO_REPLACEMENT = 1681 +ER_WRONG_NATIVE_TABLE_STRUCTURE = 1682 +ER_WRONG_PERFSCHEMA_USAGE = 1683 +ER_WARN_I_S_SKIPPED_TABLE = 1684 +ER_INSIDE_TRANSACTION_PREVENTS_SWITCH_BINLOG_DIRECT = 1685 +ER_STORED_FUNCTION_PREVENTS_SWITCH_BINLOG_DIRECT = 1686 +ER_SPATIAL_MUST_HAVE_GEOM_COL = 1687 +ER_TOO_LONG_INDEX_COMMENT = 1688 +ER_LOCK_ABORTED = 1689 +ER_DATA_OUT_OF_RANGE = 1690 +OBSOLETE_ER_WRONG_SPVAR_TYPE_IN_LIMIT = 1691 +ER_BINLOG_UNSAFE_MULTIPLE_ENGINES_AND_SELF_LOGGING_ENGINE = 1692 +ER_BINLOG_UNSAFE_MIXED_STATEMENT = 1693 +ER_INSIDE_TRANSACTION_PREVENTS_SWITCH_SQL_LOG_BIN = 1694 +ER_STORED_FUNCTION_PREVENTS_SWITCH_SQL_LOG_BIN = 1695 +ER_FAILED_READ_FROM_PAR_FILE = 1696 +ER_VALUES_IS_NOT_INT_TYPE_ERROR = 1697 +ER_ACCESS_DENIED_NO_PASSWORD_ERROR = 1698 +ER_SET_PASSWORD_AUTH_PLUGIN = 1699 +OBSOLETE_ER_GRANT_PLUGIN_USER_EXISTS = 1700 +ER_TRUNCATE_ILLEGAL_FK = 1701 +ER_PLUGIN_IS_PERMANENT = 1702 +ER_SLAVE_HEARTBEAT_VALUE_OUT_OF_RANGE_MIN = 1703 +ER_SLAVE_HEARTBEAT_VALUE_OUT_OF_RANGE_MAX = 1704 +ER_STMT_CACHE_FULL = 1705 +ER_MULTI_UPDATE_KEY_CONFLICT = 1706 +ER_TABLE_NEEDS_REBUILD = 1707 +WARN_OPTION_BELOW_LIMIT = 1708 +ER_INDEX_COLUMN_TOO_LONG = 1709 +ER_ERROR_IN_TRIGGER_BODY = 1710 +ER_ERROR_IN_UNKNOWN_TRIGGER_BODY = 1711 +ER_INDEX_CORRUPT = 1712 +ER_UNDO_RECORD_TOO_BIG = 1713 +ER_BINLOG_UNSAFE_INSERT_IGNORE_SELECT = 1714 +ER_BINLOG_UNSAFE_INSERT_SELECT_UPDATE = 1715 +ER_BINLOG_UNSAFE_REPLACE_SELECT = 1716 +ER_BINLOG_UNSAFE_CREATE_IGNORE_SELECT = 1717 +ER_BINLOG_UNSAFE_CREATE_REPLACE_SELECT = 1718 +ER_BINLOG_UNSAFE_UPDATE_IGNORE = 1719 +ER_PLUGIN_NO_UNINSTALL = 1720 +ER_PLUGIN_NO_INSTALL = 1721 +ER_BINLOG_UNSAFE_WRITE_AUTOINC_SELECT = 1722 +ER_BINLOG_UNSAFE_CREATE_SELECT_AUTOINC = 1723 +ER_BINLOG_UNSAFE_INSERT_TWO_KEYS = 1724 +ER_TABLE_IN_FK_CHECK = 1725 +ER_UNSUPPORTED_ENGINE = 1726 +ER_BINLOG_UNSAFE_AUTOINC_NOT_FIRST = 1727 +ER_CANNOT_LOAD_FROM_TABLE_V2 = 1728 +ER_MASTER_DELAY_VALUE_OUT_OF_RANGE = 1729 +ER_ONLY_FD_AND_RBR_EVENTS_ALLOWED_IN_BINLOG_STATEMENT = 1730 +ER_PARTITION_EXCHANGE_DIFFERENT_OPTION = 1731 +ER_PARTITION_EXCHANGE_PART_TABLE = 1732 +ER_PARTITION_EXCHANGE_TEMP_TABLE = 1733 +ER_PARTITION_INSTEAD_OF_SUBPARTITION = 1734 +ER_UNKNOWN_PARTITION = 1735 +ER_TABLES_DIFFERENT_METADATA = 1736 
+ER_ROW_DOES_NOT_MATCH_PARTITION = 1737 +ER_BINLOG_CACHE_SIZE_GREATER_THAN_MAX = 1738 +ER_WARN_INDEX_NOT_APPLICABLE = 1739 +ER_PARTITION_EXCHANGE_FOREIGN_KEY = 1740 +OBSOLETE_ER_NO_SUCH_KEY_VALUE = 1741 +ER_RPL_INFO_DATA_TOO_LONG = 1742 +OBSOLETE_ER_NETWORK_READ_EVENT_CHECKSUM_FAILURE = 1743 +OBSOLETE_ER_BINLOG_READ_EVENT_CHECKSUM_FAILURE = 1744 +ER_BINLOG_STMT_CACHE_SIZE_GREATER_THAN_MAX = 1745 +ER_CANT_UPDATE_TABLE_IN_CREATE_TABLE_SELECT = 1746 +ER_PARTITION_CLAUSE_ON_NONPARTITIONED = 1747 +ER_ROW_DOES_NOT_MATCH_GIVEN_PARTITION_SET = 1748 +OBSOLETE_ER_NO_SUCH_PARTITION__UNUSED = 1749 +ER_CHANGE_RPL_INFO_REPOSITORY_FAILURE = 1750 +ER_WARNING_NOT_COMPLETE_ROLLBACK_WITH_CREATED_TEMP_TABLE = 1751 +ER_WARNING_NOT_COMPLETE_ROLLBACK_WITH_DROPPED_TEMP_TABLE = 1752 +ER_MTS_FEATURE_IS_NOT_SUPPORTED = 1753 +ER_MTS_UPDATED_DBS_GREATER_MAX = 1754 +ER_MTS_CANT_PARALLEL = 1755 +ER_MTS_INCONSISTENT_DATA = 1756 +ER_FULLTEXT_NOT_SUPPORTED_WITH_PARTITIONING = 1757 +ER_DA_INVALID_CONDITION_NUMBER = 1758 +ER_INSECURE_PLAIN_TEXT = 1759 +ER_INSECURE_CHANGE_MASTER = 1760 +ER_FOREIGN_DUPLICATE_KEY_WITH_CHILD_INFO = 1761 +ER_FOREIGN_DUPLICATE_KEY_WITHOUT_CHILD_INFO = 1762 +ER_SQLTHREAD_WITH_SECURE_SLAVE = 1763 +ER_TABLE_HAS_NO_FT = 1764 +ER_VARIABLE_NOT_SETTABLE_IN_SF_OR_TRIGGER = 1765 +ER_VARIABLE_NOT_SETTABLE_IN_TRANSACTION = 1766 +OBSOLETE_ER_GTID_NEXT_IS_NOT_IN_GTID_NEXT_LIST = 1767 +OBSOLETE_ER_CANT_CHANGE_GTID_NEXT_IN_TRANSACTION = 1768 +ER_SET_STATEMENT_CANNOT_INVOKE_FUNCTION = 1769 +ER_GTID_NEXT_CANT_BE_AUTOMATIC_IF_GTID_NEXT_LIST_IS_NON_NULL = 1770 +OBSOLETE_ER_SKIPPING_LOGGED_TRANSACTION = 1771 +ER_MALFORMED_GTID_SET_SPECIFICATION = 1772 +ER_MALFORMED_GTID_SET_ENCODING = 1773 +ER_MALFORMED_GTID_SPECIFICATION = 1774 +ER_GNO_EXHAUSTED = 1775 +ER_BAD_SLAVE_AUTO_POSITION = 1776 +ER_AUTO_POSITION_REQUIRES_GTID_MODE_NOT_OFF = 1777 +ER_CANT_DO_IMPLICIT_COMMIT_IN_TRX_WHEN_GTID_NEXT_IS_SET = 1778 +ER_GTID_MODE_ON_REQUIRES_ENFORCE_GTID_CONSISTENCY_ON = 1779 +OBSOLETE_ER_GTID_MODE_REQUIRES_BINLOG = 1780 +ER_CANT_SET_GTID_NEXT_TO_GTID_WHEN_GTID_MODE_IS_OFF = 1781 +ER_CANT_SET_GTID_NEXT_TO_ANONYMOUS_WHEN_GTID_MODE_IS_ON = 1782 +ER_CANT_SET_GTID_NEXT_LIST_TO_NON_NULL_WHEN_GTID_MODE_IS_OFF = 1783 +OBSOLETE_ER_FOUND_GTID_EVENT_WHEN_GTID_MODE_IS_OFF__UNUSED = 1784 +ER_GTID_UNSAFE_NON_TRANSACTIONAL_TABLE = 1785 +ER_GTID_UNSAFE_CREATE_SELECT = 1786 +OBSOLETE_ER_GTID_UNSAFE_CREATE_DROP_TEMP_TABLE_IN_TRANSACTION = 1787 +ER_GTID_MODE_CAN_ONLY_CHANGE_ONE_STEP_AT_A_TIME = 1788 +ER_MASTER_HAS_PURGED_REQUIRED_GTIDS = 1789 +ER_CANT_SET_GTID_NEXT_WHEN_OWNING_GTID = 1790 +ER_UNKNOWN_EXPLAIN_FORMAT = 1791 +ER_CANT_EXECUTE_IN_READ_ONLY_TRANSACTION = 1792 +ER_TOO_LONG_TABLE_PARTITION_COMMENT = 1793 +ER_SLAVE_CONFIGURATION = 1794 +ER_INNODB_FT_LIMIT = 1795 +ER_INNODB_NO_FT_TEMP_TABLE = 1796 +ER_INNODB_FT_WRONG_DOCID_COLUMN = 1797 +ER_INNODB_FT_WRONG_DOCID_INDEX = 1798 +ER_INNODB_ONLINE_LOG_TOO_BIG = 1799 +ER_UNKNOWN_ALTER_ALGORITHM = 1800 +ER_UNKNOWN_ALTER_LOCK = 1801 +ER_MTS_CHANGE_MASTER_CANT_RUN_WITH_GAPS = 1802 +ER_MTS_RECOVERY_FAILURE = 1803 +ER_MTS_RESET_WORKERS = 1804 +ER_COL_COUNT_DOESNT_MATCH_CORRUPTED_V2 = 1805 +ER_SLAVE_SILENT_RETRY_TRANSACTION = 1806 +ER_DISCARD_FK_CHECKS_RUNNING = 1807 +ER_TABLE_SCHEMA_MISMATCH = 1808 +ER_TABLE_IN_SYSTEM_TABLESPACE = 1809 +ER_IO_READ_ERROR = 1810 +ER_IO_WRITE_ERROR = 1811 +ER_TABLESPACE_MISSING = 1812 +ER_TABLESPACE_EXISTS = 1813 +ER_TABLESPACE_DISCARDED = 1814 +ER_INTERNAL_ERROR = 1815 +ER_INNODB_IMPORT_ERROR = 1816 +ER_INNODB_INDEX_CORRUPT = 1817 +ER_INVALID_YEAR_COLUMN_LENGTH = 1818 
+ER_NOT_VALID_PASSWORD = 1819 +ER_MUST_CHANGE_PASSWORD = 1820 +ER_FK_NO_INDEX_CHILD = 1821 +ER_FK_NO_INDEX_PARENT = 1822 +ER_FK_FAIL_ADD_SYSTEM = 1823 +ER_FK_CANNOT_OPEN_PARENT = 1824 +ER_FK_INCORRECT_OPTION = 1825 +ER_FK_DUP_NAME = 1826 +ER_PASSWORD_FORMAT = 1827 +ER_FK_COLUMN_CANNOT_DROP = 1828 +ER_FK_COLUMN_CANNOT_DROP_CHILD = 1829 +ER_FK_COLUMN_NOT_NULL = 1830 +ER_DUP_INDEX = 1831 +ER_FK_COLUMN_CANNOT_CHANGE = 1832 +ER_FK_COLUMN_CANNOT_CHANGE_CHILD = 1833 +OBSOLETE_ER_UNUSED5 = 1834 +ER_MALFORMED_PACKET = 1835 +ER_READ_ONLY_MODE = 1836 +ER_GTID_NEXT_TYPE_UNDEFINED_GTID = 1837 +ER_VARIABLE_NOT_SETTABLE_IN_SP = 1838 +OBSOLETE_ER_CANT_SET_GTID_PURGED_WHEN_GTID_MODE_IS_OFF = 1839 +ER_CANT_SET_GTID_PURGED_WHEN_GTID_EXECUTED_IS_NOT_EMPTY = 1840 +ER_CANT_SET_GTID_PURGED_WHEN_OWNED_GTIDS_IS_NOT_EMPTY = 1841 +ER_GTID_PURGED_WAS_CHANGED = 1842 +ER_GTID_EXECUTED_WAS_CHANGED = 1843 +ER_BINLOG_STMT_MODE_AND_NO_REPL_TABLES = 1844 +ER_ALTER_OPERATION_NOT_SUPPORTED = 1845 +ER_ALTER_OPERATION_NOT_SUPPORTED_REASON = 1846 +ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_COPY = 1847 +ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_PARTITION = 1848 +ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_FK_RENAME = 1849 +ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_COLUMN_TYPE = 1850 +ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_FK_CHECK = 1851 +OBSOLETE_ER_UNUSED6 = 1852 +ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_NOPK = 1853 +ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_AUTOINC = 1854 +ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_HIDDEN_FTS = 1855 +ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_CHANGE_FTS = 1856 +ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_FTS = 1857 +OBSOLETE_ER_SQL_REPLICA_SKIP_COUNTER_NOT_SETTABLE_IN_GTID_MODE = 1858 +ER_DUP_UNKNOWN_IN_INDEX = 1859 +ER_IDENT_CAUSES_TOO_LONG_PATH = 1860 +ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_NOT_NULL = 1861 +ER_MUST_CHANGE_PASSWORD_LOGIN = 1862 +ER_ROW_IN_WRONG_PARTITION = 1863 +ER_MTS_EVENT_BIGGER_PENDING_JOBS_SIZE_MAX = 1864 +OBSOLETE_ER_INNODB_NO_FT_USES_PARSER = 1865 +ER_BINLOG_LOGICAL_CORRUPTION = 1866 +ER_WARN_PURGE_LOG_IN_USE = 1867 +ER_WARN_PURGE_LOG_IS_ACTIVE = 1868 +ER_AUTO_INCREMENT_CONFLICT = 1869 +WARN_ON_BLOCKHOLE_IN_RBR = 1870 +ER_SLAVE_MI_INIT_REPOSITORY = 1871 +ER_SLAVE_RLI_INIT_REPOSITORY = 1872 +ER_ACCESS_DENIED_CHANGE_USER_ERROR = 1873 +ER_INNODB_READ_ONLY = 1874 +ER_STOP_SLAVE_SQL_THREAD_TIMEOUT = 1875 +ER_STOP_SLAVE_IO_THREAD_TIMEOUT = 1876 +ER_TABLE_CORRUPT = 1877 +ER_TEMP_FILE_WRITE_FAILURE = 1878 +ER_INNODB_FT_AUX_NOT_HEX_ID = 1879 +ER_OLD_TEMPORALS_UPGRADED = 1880 +ER_INNODB_FORCED_RECOVERY = 1881 +ER_AES_INVALID_IV = 1882 +ER_PLUGIN_CANNOT_BE_UNINSTALLED = 1883 +ER_GTID_UNSAFE_BINLOG_SPLITTABLE_STATEMENT_AND_ASSIGNED_GTID = 1884 +ER_SLAVE_HAS_MORE_GTIDS_THAN_MASTER = 1885 +ER_MISSING_KEY = 1886 +WARN_NAMED_PIPE_ACCESS_EVERYONE = 1887 +ER_FILE_CORRUPT = 3000 +ER_ERROR_ON_MASTER = 3001 +OBSOLETE_ER_INCONSISTENT_ERROR = 3002 +ER_STORAGE_ENGINE_NOT_LOADED = 3003 +ER_GET_STACKED_DA_WITHOUT_ACTIVE_HANDLER = 3004 +ER_WARN_LEGACY_SYNTAX_CONVERTED = 3005 +ER_BINLOG_UNSAFE_FULLTEXT_PLUGIN = 3006 +ER_CANNOT_DISCARD_TEMPORARY_TABLE = 3007 +ER_FK_DEPTH_EXCEEDED = 3008 +ER_COL_COUNT_DOESNT_MATCH_PLEASE_UPDATE_V2 = 3009 +ER_WARN_TRIGGER_DOESNT_HAVE_CREATED = 3010 +ER_REFERENCED_TRG_DOES_NOT_EXIST = 3011 +ER_EXPLAIN_NOT_SUPPORTED = 3012 +ER_INVALID_FIELD_SIZE = 3013 +ER_MISSING_HA_CREATE_OPTION = 3014 +ER_ENGINE_OUT_OF_MEMORY = 3015 +ER_PASSWORD_EXPIRE_ANONYMOUS_USER = 3016 +ER_SLAVE_SQL_THREAD_MUST_STOP = 3017 +ER_NO_FT_MATERIALIZED_SUBQUERY = 3018 +ER_INNODB_UNDO_LOG_FULL = 3019 
+ER_INVALID_ARGUMENT_FOR_LOGARITHM = 3020 +ER_SLAVE_CHANNEL_IO_THREAD_MUST_STOP = 3021 +ER_WARN_OPEN_TEMP_TABLES_MUST_BE_ZERO = 3022 +ER_WARN_ONLY_MASTER_LOG_FILE_NO_POS = 3023 +ER_QUERY_TIMEOUT = 3024 +ER_NON_RO_SELECT_DISABLE_TIMER = 3025 +ER_DUP_LIST_ENTRY = 3026 +OBSOLETE_ER_SQL_MODE_NO_EFFECT = 3027 +ER_AGGREGATE_ORDER_FOR_UNION = 3028 +ER_AGGREGATE_ORDER_NON_AGG_QUERY = 3029 +ER_SLAVE_WORKER_STOPPED_PREVIOUS_THD_ERROR = 3030 +ER_DONT_SUPPORT_REPLICA_PRESERVE_COMMIT_ORDER = 3031 +ER_SERVER_OFFLINE_MODE = 3032 +ER_GIS_DIFFERENT_SRIDS = 3033 +ER_GIS_UNSUPPORTED_ARGUMENT = 3034 +ER_GIS_UNKNOWN_ERROR = 3035 +ER_GIS_UNKNOWN_EXCEPTION = 3036 +ER_GIS_INVALID_DATA = 3037 +ER_BOOST_GEOMETRY_EMPTY_INPUT_EXCEPTION = 3038 +ER_BOOST_GEOMETRY_CENTROID_EXCEPTION = 3039 +ER_BOOST_GEOMETRY_OVERLAY_INVALID_INPUT_EXCEPTION = 3040 +ER_BOOST_GEOMETRY_TURN_INFO_EXCEPTION = 3041 +ER_BOOST_GEOMETRY_SELF_INTERSECTION_POINT_EXCEPTION = 3042 +ER_BOOST_GEOMETRY_UNKNOWN_EXCEPTION = 3043 +ER_STD_BAD_ALLOC_ERROR = 3044 +ER_STD_DOMAIN_ERROR = 3045 +ER_STD_LENGTH_ERROR = 3046 +ER_STD_INVALID_ARGUMENT = 3047 +ER_STD_OUT_OF_RANGE_ERROR = 3048 +ER_STD_OVERFLOW_ERROR = 3049 +ER_STD_RANGE_ERROR = 3050 +ER_STD_UNDERFLOW_ERROR = 3051 +ER_STD_LOGIC_ERROR = 3052 +ER_STD_RUNTIME_ERROR = 3053 +ER_STD_UNKNOWN_EXCEPTION = 3054 +ER_GIS_DATA_WRONG_ENDIANESS = 3055 +ER_CHANGE_MASTER_PASSWORD_LENGTH = 3056 +ER_USER_LOCK_WRONG_NAME = 3057 +ER_USER_LOCK_DEADLOCK = 3058 +ER_REPLACE_INACCESSIBLE_ROWS = 3059 +ER_ALTER_OPERATION_NOT_SUPPORTED_REASON_GIS = 3060 +ER_ILLEGAL_USER_VAR = 3061 +ER_GTID_MODE_OFF = 3062 +OBSOLETE_ER_UNSUPPORTED_BY_REPLICATION_THREAD = 3063 +ER_INCORRECT_TYPE = 3064 +ER_FIELD_IN_ORDER_NOT_SELECT = 3065 +ER_AGGREGATE_IN_ORDER_NOT_SELECT = 3066 +ER_INVALID_RPL_WILD_TABLE_FILTER_PATTERN = 3067 +ER_NET_OK_PACKET_TOO_LARGE = 3068 +ER_INVALID_JSON_DATA = 3069 +ER_INVALID_GEOJSON_MISSING_MEMBER = 3070 +ER_INVALID_GEOJSON_WRONG_TYPE = 3071 +ER_INVALID_GEOJSON_UNSPECIFIED = 3072 +ER_DIMENSION_UNSUPPORTED = 3073 +ER_SLAVE_CHANNEL_DOES_NOT_EXIST = 3074 +OBSOLETE_ER_SLAVE_MULTIPLE_CHANNELS_HOST_PORT = 3075 +ER_SLAVE_CHANNEL_NAME_INVALID_OR_TOO_LONG = 3076 +ER_SLAVE_NEW_CHANNEL_WRONG_REPOSITORY = 3077 +OBSOLETE_ER_SLAVE_CHANNEL_DELETE = 3078 +ER_SLAVE_MULTIPLE_CHANNELS_CMD = 3079 +ER_SLAVE_MAX_CHANNELS_EXCEEDED = 3080 +ER_SLAVE_CHANNEL_MUST_STOP = 3081 +ER_SLAVE_CHANNEL_NOT_RUNNING = 3082 +ER_SLAVE_CHANNEL_WAS_RUNNING = 3083 +ER_SLAVE_CHANNEL_WAS_NOT_RUNNING = 3084 +ER_SLAVE_CHANNEL_SQL_THREAD_MUST_STOP = 3085 +ER_SLAVE_CHANNEL_SQL_SKIP_COUNTER = 3086 +ER_WRONG_FIELD_WITH_GROUP_V2 = 3087 +ER_MIX_OF_GROUP_FUNC_AND_FIELDS_V2 = 3088 +ER_WARN_DEPRECATED_SYSVAR_UPDATE = 3089 +ER_WARN_DEPRECATED_SQLMODE = 3090 +ER_CANNOT_LOG_PARTIAL_DROP_DATABASE_WITH_GTID = 3091 +ER_GROUP_REPLICATION_CONFIGURATION = 3092 +ER_GROUP_REPLICATION_RUNNING = 3093 +ER_GROUP_REPLICATION_APPLIER_INIT_ERROR = 3094 +ER_GROUP_REPLICATION_STOP_APPLIER_THREAD_TIMEOUT = 3095 +ER_GROUP_REPLICATION_COMMUNICATION_LAYER_SESSION_ERROR = 3096 +ER_GROUP_REPLICATION_COMMUNICATION_LAYER_JOIN_ERROR = 3097 +ER_BEFORE_DML_VALIDATION_ERROR = 3098 +ER_PREVENTS_VARIABLE_WITHOUT_RBR = 3099 +ER_RUN_HOOK_ERROR = 3100 +ER_TRANSACTION_ROLLBACK_DURING_COMMIT = 3101 +ER_GENERATED_COLUMN_FUNCTION_IS_NOT_ALLOWED = 3102 +ER_UNSUPPORTED_ALTER_INPLACE_ON_VIRTUAL_COLUMN = 3103 +ER_WRONG_FK_OPTION_FOR_GENERATED_COLUMN = 3104 +ER_NON_DEFAULT_VALUE_FOR_GENERATED_COLUMN = 3105 +ER_UNSUPPORTED_ACTION_ON_GENERATED_COLUMN = 3106 +ER_GENERATED_COLUMN_NON_PRIOR = 3107 
+ER_DEPENDENT_BY_GENERATED_COLUMN = 3108 +ER_GENERATED_COLUMN_REF_AUTO_INC = 3109 +ER_FEATURE_NOT_AVAILABLE = 3110 +ER_CANT_SET_GTID_MODE = 3111 +ER_CANT_USE_AUTO_POSITION_WITH_GTID_MODE_OFF = 3112 +OBSOLETE_ER_CANT_REPLICATE_ANONYMOUS_WITH_AUTO_POSITION = 3113 +OBSOLETE_ER_CANT_REPLICATE_ANONYMOUS_WITH_GTID_MODE_ON = 3114 +OBSOLETE_ER_CANT_REPLICATE_GTID_WITH_GTID_MODE_OFF = 3115 +ER_CANT_ENFORCE_GTID_CONSISTENCY_WITH_ONGOING_GTID_VIOLATING_TX = 3116 +ER_ENFORCE_GTID_CONSISTENCY_WARN_WITH_ONGOING_GTID_VIOLATING_TX = 3117 +ER_ACCOUNT_HAS_BEEN_LOCKED = 3118 +ER_WRONG_TABLESPACE_NAME = 3119 +ER_TABLESPACE_IS_NOT_EMPTY = 3120 +ER_WRONG_FILE_NAME = 3121 +ER_BOOST_GEOMETRY_INCONSISTENT_TURNS_EXCEPTION = 3122 +ER_WARN_OPTIMIZER_HINT_SYNTAX_ERROR = 3123 +ER_WARN_BAD_MAX_EXECUTION_TIME = 3124 +ER_WARN_UNSUPPORTED_MAX_EXECUTION_TIME = 3125 +ER_WARN_CONFLICTING_HINT = 3126 +ER_WARN_UNKNOWN_QB_NAME = 3127 +ER_UNRESOLVED_HINT_NAME = 3128 +ER_WARN_ON_MODIFYING_GTID_EXECUTED_TABLE = 3129 +ER_PLUGGABLE_PROTOCOL_COMMAND_NOT_SUPPORTED = 3130 +ER_LOCKING_SERVICE_WRONG_NAME = 3131 +ER_LOCKING_SERVICE_DEADLOCK = 3132 +ER_LOCKING_SERVICE_TIMEOUT = 3133 +ER_GIS_MAX_POINTS_IN_GEOMETRY_OVERFLOWED = 3134 +ER_SQL_MODE_MERGED = 3135 +ER_VTOKEN_PLUGIN_TOKEN_MISMATCH = 3136 +ER_VTOKEN_PLUGIN_TOKEN_NOT_FOUND = 3137 +ER_CANT_SET_VARIABLE_WHEN_OWNING_GTID = 3138 +ER_SLAVE_CHANNEL_OPERATION_NOT_ALLOWED = 3139 +ER_INVALID_JSON_TEXT = 3140 +ER_INVALID_JSON_TEXT_IN_PARAM = 3141 +ER_INVALID_JSON_BINARY_DATA = 3142 +ER_INVALID_JSON_PATH = 3143 +ER_INVALID_JSON_CHARSET = 3144 +ER_INVALID_JSON_CHARSET_IN_FUNCTION = 3145 +ER_INVALID_TYPE_FOR_JSON = 3146 +ER_INVALID_CAST_TO_JSON = 3147 +ER_INVALID_JSON_PATH_CHARSET = 3148 +ER_INVALID_JSON_PATH_WILDCARD = 3149 +ER_JSON_VALUE_TOO_BIG = 3150 +ER_JSON_KEY_TOO_BIG = 3151 +ER_JSON_USED_AS_KEY = 3152 +ER_JSON_VACUOUS_PATH = 3153 +ER_JSON_BAD_ONE_OR_ALL_ARG = 3154 +ER_NUMERIC_JSON_VALUE_OUT_OF_RANGE = 3155 +ER_INVALID_JSON_VALUE_FOR_CAST = 3156 +ER_JSON_DOCUMENT_TOO_DEEP = 3157 +ER_JSON_DOCUMENT_NULL_KEY = 3158 +ER_SECURE_TRANSPORT_REQUIRED = 3159 +ER_NO_SECURE_TRANSPORTS_CONFIGURED = 3160 +ER_DISABLED_STORAGE_ENGINE = 3161 +ER_USER_DOES_NOT_EXIST = 3162 +ER_USER_ALREADY_EXISTS = 3163 +ER_AUDIT_API_ABORT = 3164 +ER_INVALID_JSON_PATH_ARRAY_CELL = 3165 +ER_BUFPOOL_RESIZE_INPROGRESS = 3166 +ER_FEATURE_DISABLED_SEE_DOC = 3167 +ER_SERVER_ISNT_AVAILABLE = 3168 +ER_SESSION_WAS_KILLED = 3169 +ER_CAPACITY_EXCEEDED = 3170 +ER_CAPACITY_EXCEEDED_IN_RANGE_OPTIMIZER = 3171 +OBSOLETE_ER_TABLE_NEEDS_UPG_PART = 3172 +ER_CANT_WAIT_FOR_EXECUTED_GTID_SET_WHILE_OWNING_A_GTID = 3173 +ER_CANNOT_ADD_FOREIGN_BASE_COL_VIRTUAL = 3174 +ER_CANNOT_CREATE_VIRTUAL_INDEX_CONSTRAINT = 3175 +ER_ERROR_ON_MODIFYING_GTID_EXECUTED_TABLE = 3176 +ER_LOCK_REFUSED_BY_ENGINE = 3177 +ER_UNSUPPORTED_ALTER_ONLINE_ON_VIRTUAL_COLUMN = 3178 +ER_MASTER_KEY_ROTATION_NOT_SUPPORTED_BY_SE = 3179 +OBSOLETE_ER_MASTER_KEY_ROTATION_ERROR_BY_SE = 3180 +ER_MASTER_KEY_ROTATION_BINLOG_FAILED = 3181 +ER_MASTER_KEY_ROTATION_SE_UNAVAILABLE = 3182 +ER_TABLESPACE_CANNOT_ENCRYPT = 3183 +ER_INVALID_ENCRYPTION_OPTION = 3184 +ER_CANNOT_FIND_KEY_IN_KEYRING = 3185 +ER_CAPACITY_EXCEEDED_IN_PARSER = 3186 +ER_UNSUPPORTED_ALTER_ENCRYPTION_INPLACE = 3187 +ER_KEYRING_UDF_KEYRING_SERVICE_ERROR = 3188 +ER_USER_COLUMN_OLD_LENGTH = 3189 +ER_CANT_RESET_MASTER = 3190 +ER_GROUP_REPLICATION_MAX_GROUP_SIZE = 3191 +ER_CANNOT_ADD_FOREIGN_BASE_COL_STORED = 3192 +ER_TABLE_REFERENCED = 3193 +OBSOLETE_ER_PARTITION_ENGINE_DEPRECATED_FOR_TABLE = 3194 
+OBSOLETE_ER_WARN_USING_GEOMFROMWKB_TO_SET_SRID_ZERO = 3195 +OBSOLETE_ER_WARN_USING_GEOMFROMWKB_TO_SET_SRID = 3196 +ER_XA_RETRY = 3197 +ER_KEYRING_AWS_UDF_AWS_KMS_ERROR = 3198 +ER_BINLOG_UNSAFE_XA = 3199 +ER_UDF_ERROR = 3200 +ER_KEYRING_MIGRATION_FAILURE = 3201 +ER_KEYRING_ACCESS_DENIED_ERROR = 3202 +ER_KEYRING_MIGRATION_STATUS = 3203 +OBSOLETE_ER_PLUGIN_FAILED_TO_OPEN_TABLES = 3204 +OBSOLETE_ER_PLUGIN_FAILED_TO_OPEN_TABLE = 3205 +OBSOLETE_ER_AUDIT_LOG_NO_KEYRING_PLUGIN_INSTALLED = 3206 +OBSOLETE_ER_AUDIT_LOG_ENCRYPTION_PASSWORD_HAS_NOT_BEEN_SET = 3207 +OBSOLETE_ER_AUDIT_LOG_COULD_NOT_CREATE_AES_KEY = 3208 +OBSOLETE_ER_AUDIT_LOG_ENCRYPTION_PASSWORD_CANNOT_BE_FETCHED = 3209 +OBSOLETE_ER_AUDIT_LOG_JSON_FILTERING_NOT_ENABLED = 3210 +OBSOLETE_ER_AUDIT_LOG_UDF_INSUFFICIENT_PRIVILEGE = 3211 +OBSOLETE_ER_AUDIT_LOG_SUPER_PRIVILEGE_REQUIRED = 3212 +OBSOLETE_ER_COULD_NOT_REINITIALIZE_AUDIT_LOG_FILTERS = 3213 +OBSOLETE_ER_AUDIT_LOG_UDF_INVALID_ARGUMENT_TYPE = 3214 +OBSOLETE_ER_AUDIT_LOG_UDF_INVALID_ARGUMENT_COUNT = 3215 +OBSOLETE_ER_AUDIT_LOG_HAS_NOT_BEEN_INSTALLED = 3216 +OBSOLETE_ER_AUDIT_LOG_UDF_READ_INVALID_MAX_ARRAY_LENGTH_ARG_TYPE = 3217 +ER_AUDIT_LOG_UDF_READ_INVALID_MAX_ARRAY_LENGTH_ARG_VALUE = 3218 +OBSOLETE_ER_AUDIT_LOG_JSON_FILTER_PARSING_ERROR = 3219 +OBSOLETE_ER_AUDIT_LOG_JSON_FILTER_NAME_CANNOT_BE_EMPTY = 3220 +OBSOLETE_ER_AUDIT_LOG_JSON_USER_NAME_CANNOT_BE_EMPTY = 3221 +OBSOLETE_ER_AUDIT_LOG_JSON_FILTER_DOES_NOT_EXISTS = 3222 +OBSOLETE_ER_AUDIT_LOG_USER_FIRST_CHARACTER_MUST_BE_ALPHANUMERIC = 3223 +OBSOLETE_ER_AUDIT_LOG_USER_NAME_INVALID_CHARACTER = 3224 +OBSOLETE_ER_AUDIT_LOG_HOST_NAME_INVALID_CHARACTER = 3225 +OBSOLETE_ER_XA_REPLICATION_FILTERS = 3226 +OBSOLETE_ER_CANT_OPEN_ERROR_LOG = 3227 +OBSOLETE_ER_GROUPING_ON_TIMESTAMP_IN_DST = 3228 +OBSOLETE_ER_CANT_START_SERVER_NAMED_PIPE = 3229 +ER_WRITE_SET_EXCEEDS_LIMIT = 3230 +ER_UNSUPPORT_COMPRESSED_TEMPORARY_TABLE = 3500 +ER_ACL_OPERATION_FAILED = 3501 +ER_UNSUPPORTED_INDEX_ALGORITHM = 3502 +ER_NO_SUCH_DB = 3503 +ER_TOO_BIG_ENUM = 3504 +ER_TOO_LONG_SET_ENUM_VALUE = 3505 +ER_INVALID_DD_OBJECT = 3506 +ER_UPDATING_DD_TABLE = 3507 +ER_INVALID_DD_OBJECT_ID = 3508 +ER_INVALID_DD_OBJECT_NAME = 3509 +ER_TABLESPACE_MISSING_WITH_NAME = 3510 +ER_TOO_LONG_ROUTINE_COMMENT = 3511 +ER_SP_LOAD_FAILED = 3512 +ER_INVALID_BITWISE_OPERANDS_SIZE = 3513 +ER_INVALID_BITWISE_AGGREGATE_OPERANDS_SIZE = 3514 +ER_WARN_UNSUPPORTED_HINT = 3515 +ER_UNEXPECTED_GEOMETRY_TYPE = 3516 +ER_SRS_PARSE_ERROR = 3517 +ER_SRS_PROJ_PARAMETER_MISSING = 3518 +ER_WARN_SRS_NOT_FOUND = 3519 +ER_SRS_NOT_CARTESIAN = 3520 +ER_SRS_NOT_CARTESIAN_UNDEFINED = 3521 +ER_PK_INDEX_CANT_BE_INVISIBLE = 3522 +ER_UNKNOWN_AUTHID = 3523 +ER_FAILED_ROLE_GRANT = 3524 +ER_OPEN_ROLE_TABLES = 3525 +ER_FAILED_DEFAULT_ROLES = 3526 +ER_COMPONENTS_NO_SCHEME = 3527 +ER_COMPONENTS_NO_SCHEME_SERVICE = 3528 +ER_COMPONENTS_CANT_LOAD = 3529 +ER_ROLE_NOT_GRANTED = 3530 +ER_FAILED_REVOKE_ROLE = 3531 +ER_RENAME_ROLE = 3532 +ER_COMPONENTS_CANT_ACQUIRE_SERVICE_IMPLEMENTATION = 3533 +ER_COMPONENTS_CANT_SATISFY_DEPENDENCY = 3534 +ER_COMPONENTS_LOAD_CANT_REGISTER_SERVICE_IMPLEMENTATION = 3535 +ER_COMPONENTS_LOAD_CANT_INITIALIZE = 3536 +ER_COMPONENTS_UNLOAD_NOT_LOADED = 3537 +ER_COMPONENTS_UNLOAD_CANT_DEINITIALIZE = 3538 +ER_COMPONENTS_CANT_RELEASE_SERVICE = 3539 +ER_COMPONENTS_UNLOAD_CANT_UNREGISTER_SERVICE = 3540 +ER_COMPONENTS_CANT_UNLOAD = 3541 +ER_WARN_UNLOAD_THE_NOT_PERSISTED = 3542 +ER_COMPONENT_TABLE_INCORRECT = 3543 +ER_COMPONENT_MANIPULATE_ROW_FAILED = 3544 +ER_COMPONENTS_UNLOAD_DUPLICATE_IN_GROUP = 3545 
+ER_CANT_SET_GTID_PURGED_DUE_SETS_CONSTRAINTS = 3546 +ER_CANNOT_LOCK_USER_MANAGEMENT_CACHES = 3547 +ER_SRS_NOT_FOUND = 3548 +ER_VARIABLE_NOT_PERSISTED = 3549 +ER_IS_QUERY_INVALID_CLAUSE = 3550 +ER_UNABLE_TO_STORE_STATISTICS = 3551 +ER_NO_SYSTEM_SCHEMA_ACCESS = 3552 +ER_NO_SYSTEM_TABLESPACE_ACCESS = 3553 +ER_NO_SYSTEM_TABLE_ACCESS = 3554 +ER_NO_SYSTEM_TABLE_ACCESS_FOR_DICTIONARY_TABLE = 3555 +ER_NO_SYSTEM_TABLE_ACCESS_FOR_SYSTEM_TABLE = 3556 +ER_NO_SYSTEM_TABLE_ACCESS_FOR_TABLE = 3557 +ER_INVALID_OPTION_KEY = 3558 +ER_INVALID_OPTION_VALUE = 3559 +ER_INVALID_OPTION_KEY_VALUE_PAIR = 3560 +ER_INVALID_OPTION_START_CHARACTER = 3561 +ER_INVALID_OPTION_END_CHARACTER = 3562 +ER_INVALID_OPTION_CHARACTERS = 3563 +ER_DUPLICATE_OPTION_KEY = 3564 +ER_WARN_SRS_NOT_FOUND_AXIS_ORDER = 3565 +ER_NO_ACCESS_TO_NATIVE_FCT = 3566 +ER_RESET_MASTER_TO_VALUE_OUT_OF_RANGE = 3567 +ER_UNRESOLVED_TABLE_LOCK = 3568 +ER_DUPLICATE_TABLE_LOCK = 3569 +ER_BINLOG_UNSAFE_SKIP_LOCKED = 3570 +ER_BINLOG_UNSAFE_NOWAIT = 3571 +ER_LOCK_NOWAIT = 3572 +ER_CTE_RECURSIVE_REQUIRES_UNION = 3573 +ER_CTE_RECURSIVE_REQUIRES_NONRECURSIVE_FIRST = 3574 +ER_CTE_RECURSIVE_FORBIDS_AGGREGATION = 3575 +ER_CTE_RECURSIVE_FORBIDDEN_JOIN_ORDER = 3576 +ER_CTE_RECURSIVE_REQUIRES_SINGLE_REFERENCE = 3577 +ER_SWITCH_TMP_ENGINE = 3578 +ER_WINDOW_NO_SUCH_WINDOW = 3579 +ER_WINDOW_CIRCULARITY_IN_WINDOW_GRAPH = 3580 +ER_WINDOW_NO_CHILD_PARTITIONING = 3581 +ER_WINDOW_NO_INHERIT_FRAME = 3582 +ER_WINDOW_NO_REDEFINE_ORDER_BY = 3583 +ER_WINDOW_FRAME_START_ILLEGAL = 3584 +ER_WINDOW_FRAME_END_ILLEGAL = 3585 +ER_WINDOW_FRAME_ILLEGAL = 3586 +ER_WINDOW_RANGE_FRAME_ORDER_TYPE = 3587 +ER_WINDOW_RANGE_FRAME_TEMPORAL_TYPE = 3588 +ER_WINDOW_RANGE_FRAME_NUMERIC_TYPE = 3589 +ER_WINDOW_RANGE_BOUND_NOT_CONSTANT = 3590 +ER_WINDOW_DUPLICATE_NAME = 3591 +ER_WINDOW_ILLEGAL_ORDER_BY = 3592 +ER_WINDOW_INVALID_WINDOW_FUNC_USE = 3593 +ER_WINDOW_INVALID_WINDOW_FUNC_ALIAS_USE = 3594 +ER_WINDOW_NESTED_WINDOW_FUNC_USE_IN_WINDOW_SPEC = 3595 +ER_WINDOW_ROWS_INTERVAL_USE = 3596 +ER_WINDOW_NO_GROUP_ORDER_UNUSED = 3597 +ER_WINDOW_EXPLAIN_JSON = 3598 +ER_WINDOW_FUNCTION_IGNORES_FRAME = 3599 +ER_WL9236_NOW_UNUSED = 3600 +ER_INVALID_NO_OF_ARGS = 3601 +ER_FIELD_IN_GROUPING_NOT_GROUP_BY = 3602 +ER_TOO_LONG_TABLESPACE_COMMENT = 3603 +ER_ENGINE_CANT_DROP_TABLE = 3604 +ER_ENGINE_CANT_DROP_MISSING_TABLE = 3605 +ER_TABLESPACE_DUP_FILENAME = 3606 +ER_DB_DROP_RMDIR2 = 3607 +ER_IMP_NO_FILES_MATCHED = 3608 +ER_IMP_SCHEMA_DOES_NOT_EXIST = 3609 +ER_IMP_TABLE_ALREADY_EXISTS = 3610 +ER_IMP_INCOMPATIBLE_MYSQLD_VERSION = 3611 +ER_IMP_INCOMPATIBLE_DD_VERSION = 3612 +ER_IMP_INCOMPATIBLE_SDI_VERSION = 3613 +ER_WARN_INVALID_HINT = 3614 +ER_VAR_DOES_NOT_EXIST = 3615 +ER_LONGITUDE_OUT_OF_RANGE = 3616 +ER_LATITUDE_OUT_OF_RANGE = 3617 +ER_NOT_IMPLEMENTED_FOR_GEOGRAPHIC_SRS = 3618 +ER_ILLEGAL_PRIVILEGE_LEVEL = 3619 +ER_NO_SYSTEM_VIEW_ACCESS = 3620 +ER_COMPONENT_FILTER_FLABBERGASTED = 3621 +ER_PART_EXPR_TOO_LONG = 3622 +ER_UDF_DROP_DYNAMICALLY_REGISTERED = 3623 +ER_UNABLE_TO_STORE_COLUMN_STATISTICS = 3624 +ER_UNABLE_TO_UPDATE_COLUMN_STATISTICS = 3625 +ER_UNABLE_TO_DROP_COLUMN_STATISTICS = 3626 +ER_UNABLE_TO_BUILD_HISTOGRAM = 3627 +ER_MANDATORY_ROLE = 3628 +ER_MISSING_TABLESPACE_FILE = 3629 +ER_PERSIST_ONLY_ACCESS_DENIED_ERROR = 3630 +ER_CMD_NEED_SUPER = 3631 +ER_PATH_IN_DATADIR = 3632 +ER_CLONE_DDL_IN_PROGRESS = 3633 +ER_CLONE_TOO_MANY_CONCURRENT_CLONES = 3634 +ER_APPLIER_LOG_EVENT_VALIDATION_ERROR = 3635 +ER_CTE_MAX_RECURSION_DEPTH = 3636 +ER_NOT_HINT_UPDATABLE_VARIABLE = 3637 +ER_CREDENTIALS_CONTRADICT_TO_HISTORY = 3638 
+ER_WARNING_PASSWORD_HISTORY_CLAUSES_VOID = 3639 +ER_CLIENT_DOES_NOT_SUPPORT = 3640 +ER_I_S_SKIPPED_TABLESPACE = 3641 +ER_TABLESPACE_ENGINE_MISMATCH = 3642 +ER_WRONG_SRID_FOR_COLUMN = 3643 +ER_CANNOT_ALTER_SRID_DUE_TO_INDEX = 3644 +ER_WARN_BINLOG_PARTIAL_UPDATES_DISABLED = 3645 +ER_WARN_BINLOG_V1_ROW_EVENTS_DISABLED = 3646 +ER_WARN_BINLOG_PARTIAL_UPDATES_SUGGESTS_PARTIAL_IMAGES = 3647 +ER_COULD_NOT_APPLY_JSON_DIFF = 3648 +ER_CORRUPTED_JSON_DIFF = 3649 +ER_RESOURCE_GROUP_EXISTS = 3650 +ER_RESOURCE_GROUP_NOT_EXISTS = 3651 +ER_INVALID_VCPU_ID = 3652 +ER_INVALID_VCPU_RANGE = 3653 +ER_INVALID_THREAD_PRIORITY = 3654 +ER_DISALLOWED_OPERATION = 3655 +ER_RESOURCE_GROUP_BUSY = 3656 +ER_RESOURCE_GROUP_DISABLED = 3657 +ER_FEATURE_UNSUPPORTED = 3658 +ER_ATTRIBUTE_IGNORED = 3659 +ER_INVALID_THREAD_ID = 3660 +ER_RESOURCE_GROUP_BIND_FAILED = 3661 +ER_INVALID_USE_OF_FORCE_OPTION = 3662 +ER_GROUP_REPLICATION_COMMAND_FAILURE = 3663 +ER_SDI_OPERATION_FAILED = 3664 +ER_MISSING_JSON_TABLE_VALUE = 3665 +ER_WRONG_JSON_TABLE_VALUE = 3666 +ER_TF_MUST_HAVE_ALIAS = 3667 +ER_TF_FORBIDDEN_JOIN_TYPE = 3668 +ER_JT_VALUE_OUT_OF_RANGE = 3669 +ER_JT_MAX_NESTED_PATH = 3670 +ER_PASSWORD_EXPIRATION_NOT_SUPPORTED_BY_AUTH_METHOD = 3671 +ER_INVALID_GEOJSON_CRS_NOT_TOP_LEVEL = 3672 +ER_BAD_NULL_ERROR_NOT_IGNORED = 3673 +WARN_USELESS_SPATIAL_INDEX = 3674 +ER_DISK_FULL_NOWAIT = 3675 +ER_PARSE_ERROR_IN_DIGEST_FN = 3676 +ER_UNDISCLOSED_PARSE_ERROR_IN_DIGEST_FN = 3677 +ER_SCHEMA_DIR_EXISTS = 3678 +ER_SCHEMA_DIR_MISSING = 3679 +ER_SCHEMA_DIR_CREATE_FAILED = 3680 +ER_SCHEMA_DIR_UNKNOWN = 3681 +ER_ONLY_IMPLEMENTED_FOR_SRID_0_AND_4326 = 3682 +ER_BINLOG_EXPIRE_LOG_DAYS_AND_SECS_USED_TOGETHER = 3683 +ER_REGEXP_BUFFER_OVERFLOW = 3684 +ER_REGEXP_ILLEGAL_ARGUMENT = 3685 +ER_REGEXP_INDEX_OUTOFBOUNDS_ERROR = 3686 +ER_REGEXP_INTERNAL_ERROR = 3687 +ER_REGEXP_RULE_SYNTAX = 3688 +ER_REGEXP_BAD_ESCAPE_SEQUENCE = 3689 +ER_REGEXP_UNIMPLEMENTED = 3690 +ER_REGEXP_MISMATCHED_PAREN = 3691 +ER_REGEXP_BAD_INTERVAL = 3692 +ER_REGEXP_MAX_LT_MIN = 3693 +ER_REGEXP_INVALID_BACK_REF = 3694 +ER_REGEXP_LOOK_BEHIND_LIMIT = 3695 +ER_REGEXP_MISSING_CLOSE_BRACKET = 3696 +ER_REGEXP_INVALID_RANGE = 3697 +ER_REGEXP_STACK_OVERFLOW = 3698 +ER_REGEXP_TIME_OUT = 3699 +ER_REGEXP_PATTERN_TOO_BIG = 3700 +ER_CANT_SET_ERROR_LOG_SERVICE = 3701 +ER_EMPTY_PIPELINE_FOR_ERROR_LOG_SERVICE = 3702 +ER_COMPONENT_FILTER_DIAGNOSTICS = 3703 +ER_NOT_IMPLEMENTED_FOR_CARTESIAN_SRS = 3704 +ER_NOT_IMPLEMENTED_FOR_PROJECTED_SRS = 3705 +ER_NONPOSITIVE_RADIUS = 3706 +ER_RESTART_SERVER_FAILED = 3707 +ER_SRS_MISSING_MANDATORY_ATTRIBUTE = 3708 +ER_SRS_MULTIPLE_ATTRIBUTE_DEFINITIONS = 3709 +ER_SRS_NAME_CANT_BE_EMPTY_OR_WHITESPACE = 3710 +ER_SRS_ORGANIZATION_CANT_BE_EMPTY_OR_WHITESPACE = 3711 +ER_SRS_ID_ALREADY_EXISTS = 3712 +ER_WARN_SRS_ID_ALREADY_EXISTS = 3713 +ER_CANT_MODIFY_SRID_0 = 3714 +ER_WARN_RESERVED_SRID_RANGE = 3715 +ER_CANT_MODIFY_SRS_USED_BY_COLUMN = 3716 +ER_SRS_INVALID_CHARACTER_IN_ATTRIBUTE = 3717 +ER_SRS_ATTRIBUTE_STRING_TOO_LONG = 3718 +ER_DEPRECATED_UTF8_ALIAS = 3719 +ER_DEPRECATED_NATIONAL = 3720 +ER_INVALID_DEFAULT_UTF8MB4_COLLATION = 3721 +ER_UNABLE_TO_COLLECT_LOG_STATUS = 3722 +ER_RESERVED_TABLESPACE_NAME = 3723 +ER_UNABLE_TO_SET_OPTION = 3724 +ER_SLAVE_POSSIBLY_DIVERGED_AFTER_DDL = 3725 +ER_SRS_NOT_GEOGRAPHIC = 3726 +ER_POLYGON_TOO_LARGE = 3727 +ER_SPATIAL_UNIQUE_INDEX = 3728 +ER_INDEX_TYPE_NOT_SUPPORTED_FOR_SPATIAL_INDEX = 3729 +ER_FK_CANNOT_DROP_PARENT = 3730 +ER_GEOMETRY_PARAM_LONGITUDE_OUT_OF_RANGE = 3731 +ER_GEOMETRY_PARAM_LATITUDE_OUT_OF_RANGE = 3732 
+ER_FK_CANNOT_USE_VIRTUAL_COLUMN = 3733 +ER_FK_NO_COLUMN_PARENT = 3734 +ER_CANT_SET_ERROR_SUPPRESSION_LIST = 3735 +ER_SRS_GEOGCS_INVALID_AXES = 3736 +ER_SRS_INVALID_SEMI_MAJOR_AXIS = 3737 +ER_SRS_INVALID_INVERSE_FLATTENING = 3738 +ER_SRS_INVALID_ANGULAR_UNIT = 3739 +ER_SRS_INVALID_PRIME_MERIDIAN = 3740 +ER_TRANSFORM_SOURCE_SRS_NOT_SUPPORTED = 3741 +ER_TRANSFORM_TARGET_SRS_NOT_SUPPORTED = 3742 +ER_TRANSFORM_SOURCE_SRS_MISSING_TOWGS84 = 3743 +ER_TRANSFORM_TARGET_SRS_MISSING_TOWGS84 = 3744 +ER_TEMP_TABLE_PREVENTS_SWITCH_SESSION_BINLOG_FORMAT = 3745 +ER_TEMP_TABLE_PREVENTS_SWITCH_GLOBAL_BINLOG_FORMAT = 3746 +ER_RUNNING_APPLIER_PREVENTS_SWITCH_GLOBAL_BINLOG_FORMAT = 3747 +ER_CLIENT_GTID_UNSAFE_CREATE_DROP_TEMP_TABLE_IN_TRX_IN_SBR = 3748 +OBSOLETE_ER_XA_CANT_CREATE_MDL_BACKUP = 3749 +ER_TABLE_WITHOUT_PK = 3750 +ER_WARN_DATA_TRUNCATED_FUNCTIONAL_INDEX = 3751 +ER_WARN_DATA_OUT_OF_RANGE_FUNCTIONAL_INDEX = 3752 +ER_FUNCTIONAL_INDEX_ON_JSON_OR_GEOMETRY_FUNCTION = 3753 +ER_FUNCTIONAL_INDEX_REF_AUTO_INCREMENT = 3754 +ER_CANNOT_DROP_COLUMN_FUNCTIONAL_INDEX = 3755 +ER_FUNCTIONAL_INDEX_PRIMARY_KEY = 3756 +ER_FUNCTIONAL_INDEX_ON_LOB = 3757 +ER_FUNCTIONAL_INDEX_FUNCTION_IS_NOT_ALLOWED = 3758 +ER_FULLTEXT_FUNCTIONAL_INDEX = 3759 +ER_SPATIAL_FUNCTIONAL_INDEX = 3760 +ER_WRONG_KEY_COLUMN_FUNCTIONAL_INDEX = 3761 +ER_FUNCTIONAL_INDEX_ON_FIELD = 3762 +ER_GENERATED_COLUMN_NAMED_FUNCTION_IS_NOT_ALLOWED = 3763 +ER_GENERATED_COLUMN_ROW_VALUE = 3764 +ER_GENERATED_COLUMN_VARIABLES = 3765 +ER_DEPENDENT_BY_DEFAULT_GENERATED_VALUE = 3766 +ER_DEFAULT_VAL_GENERATED_NON_PRIOR = 3767 +ER_DEFAULT_VAL_GENERATED_REF_AUTO_INC = 3768 +ER_DEFAULT_VAL_GENERATED_FUNCTION_IS_NOT_ALLOWED = 3769 +ER_DEFAULT_VAL_GENERATED_NAMED_FUNCTION_IS_NOT_ALLOWED = 3770 +ER_DEFAULT_VAL_GENERATED_ROW_VALUE = 3771 +ER_DEFAULT_VAL_GENERATED_VARIABLES = 3772 +ER_DEFAULT_AS_VAL_GENERATED = 3773 +ER_UNSUPPORTED_ACTION_ON_DEFAULT_VAL_GENERATED = 3774 +ER_GTID_UNSAFE_ALTER_ADD_COL_WITH_DEFAULT_EXPRESSION = 3775 +ER_FK_CANNOT_CHANGE_ENGINE = 3776 +ER_WARN_DEPRECATED_USER_SET_EXPR = 3777 +ER_WARN_DEPRECATED_UTF8MB3_COLLATION = 3778 +ER_WARN_DEPRECATED_NESTED_COMMENT_SYNTAX = 3779 +ER_FK_INCOMPATIBLE_COLUMNS = 3780 +ER_GR_HOLD_WAIT_TIMEOUT = 3781 +ER_GR_HOLD_KILLED = 3782 +ER_GR_HOLD_MEMBER_STATUS_ERROR = 3783 +ER_RPL_ENCRYPTION_FAILED_TO_FETCH_KEY = 3784 +ER_RPL_ENCRYPTION_KEY_NOT_FOUND = 3785 +ER_RPL_ENCRYPTION_KEYRING_INVALID_KEY = 3786 +ER_RPL_ENCRYPTION_HEADER_ERROR = 3787 +ER_RPL_ENCRYPTION_FAILED_TO_ROTATE_LOGS = 3788 +ER_RPL_ENCRYPTION_KEY_EXISTS_UNEXPECTED = 3789 +ER_RPL_ENCRYPTION_FAILED_TO_GENERATE_KEY = 3790 +ER_RPL_ENCRYPTION_FAILED_TO_STORE_KEY = 3791 +ER_RPL_ENCRYPTION_FAILED_TO_REMOVE_KEY = 3792 +ER_RPL_ENCRYPTION_UNABLE_TO_CHANGE_OPTION = 3793 +ER_RPL_ENCRYPTION_MASTER_KEY_RECOVERY_FAILED = 3794 +ER_SLOW_LOG_MODE_IGNORED_WHEN_NOT_LOGGING_TO_FILE = 3795 +ER_GRP_TRX_CONSISTENCY_NOT_ALLOWED = 3796 +ER_GRP_TRX_CONSISTENCY_BEFORE = 3797 +ER_GRP_TRX_CONSISTENCY_AFTER_ON_TRX_BEGIN = 3798 +ER_GRP_TRX_CONSISTENCY_BEGIN_NOT_ALLOWED = 3799 +ER_FUNCTIONAL_INDEX_ROW_VALUE_IS_NOT_ALLOWED = 3800 +ER_RPL_ENCRYPTION_FAILED_TO_ENCRYPT = 3801 +ER_PAGE_TRACKING_NOT_STARTED = 3802 +ER_PAGE_TRACKING_RANGE_NOT_TRACKED = 3803 +ER_PAGE_TRACKING_CANNOT_PURGE = 3804 +ER_RPL_ENCRYPTION_CANNOT_ROTATE_BINLOG_MASTER_KEY = 3805 +ER_BINLOG_MASTER_KEY_RECOVERY_OUT_OF_COMBINATION = 3806 +ER_BINLOG_MASTER_KEY_ROTATION_FAIL_TO_OPERATE_KEY = 3807 +ER_BINLOG_MASTER_KEY_ROTATION_FAIL_TO_ROTATE_LOGS = 3808 +ER_BINLOG_MASTER_KEY_ROTATION_FAIL_TO_REENCRYPT_LOG = 3809 
+ER_BINLOG_MASTER_KEY_ROTATION_FAIL_TO_CLEANUP_UNUSED_KEYS = 3810 +ER_BINLOG_MASTER_KEY_ROTATION_FAIL_TO_CLEANUP_AUX_KEY = 3811 +ER_NON_BOOLEAN_EXPR_FOR_CHECK_CONSTRAINT = 3812 +ER_COLUMN_CHECK_CONSTRAINT_REFERENCES_OTHER_COLUMN = 3813 +ER_CHECK_CONSTRAINT_NAMED_FUNCTION_IS_NOT_ALLOWED = 3814 +ER_CHECK_CONSTRAINT_FUNCTION_IS_NOT_ALLOWED = 3815 +ER_CHECK_CONSTRAINT_VARIABLES = 3816 +ER_CHECK_CONSTRAINT_ROW_VALUE = 3817 +ER_CHECK_CONSTRAINT_REFERS_AUTO_INCREMENT_COLUMN = 3818 +ER_CHECK_CONSTRAINT_VIOLATED = 3819 +ER_CHECK_CONSTRAINT_REFERS_UNKNOWN_COLUMN = 3820 +ER_CHECK_CONSTRAINT_NOT_FOUND = 3821 +ER_CHECK_CONSTRAINT_DUP_NAME = 3822 +ER_CHECK_CONSTRAINT_CLAUSE_USING_FK_REFER_ACTION_COLUMN = 3823 +WARN_UNENCRYPTED_TABLE_IN_ENCRYPTED_DB = 3824 +ER_INVALID_ENCRYPTION_REQUEST = 3825 +ER_CANNOT_SET_TABLE_ENCRYPTION = 3826 +ER_CANNOT_SET_DATABASE_ENCRYPTION = 3827 +ER_CANNOT_SET_TABLESPACE_ENCRYPTION = 3828 +ER_TABLESPACE_CANNOT_BE_ENCRYPTED = 3829 +ER_TABLESPACE_CANNOT_BE_DECRYPTED = 3830 +ER_TABLESPACE_TYPE_UNKNOWN = 3831 +ER_TARGET_TABLESPACE_UNENCRYPTED = 3832 +ER_CANNOT_USE_ENCRYPTION_CLAUSE = 3833 +ER_INVALID_MULTIPLE_CLAUSES = 3834 +ER_UNSUPPORTED_USE_OF_GRANT_AS = 3835 +ER_UKNOWN_AUTH_ID_OR_ACCESS_DENIED_FOR_GRANT_AS = 3836 +ER_DEPENDENT_BY_FUNCTIONAL_INDEX = 3837 +ER_PLUGIN_NOT_EARLY = 3838 +ER_INNODB_REDO_LOG_ARCHIVE_START_SUBDIR_PATH = 3839 +ER_INNODB_REDO_LOG_ARCHIVE_START_TIMEOUT = 3840 +ER_INNODB_REDO_LOG_ARCHIVE_DIRS_INVALID = 3841 +ER_INNODB_REDO_LOG_ARCHIVE_LABEL_NOT_FOUND = 3842 +ER_INNODB_REDO_LOG_ARCHIVE_DIR_EMPTY = 3843 +ER_INNODB_REDO_LOG_ARCHIVE_NO_SUCH_DIR = 3844 +ER_INNODB_REDO_LOG_ARCHIVE_DIR_CLASH = 3845 +ER_INNODB_REDO_LOG_ARCHIVE_DIR_PERMISSIONS = 3846 +ER_INNODB_REDO_LOG_ARCHIVE_FILE_CREATE = 3847 +ER_INNODB_REDO_LOG_ARCHIVE_ACTIVE = 3848 +ER_INNODB_REDO_LOG_ARCHIVE_INACTIVE = 3849 +ER_INNODB_REDO_LOG_ARCHIVE_FAILED = 3850 +ER_INNODB_REDO_LOG_ARCHIVE_SESSION = 3851 +ER_STD_REGEX_ERROR = 3852 +ER_INVALID_JSON_TYPE = 3853 +ER_CANNOT_CONVERT_STRING = 3854 +ER_DEPENDENT_BY_PARTITION_FUNC = 3855 +ER_WARN_DEPRECATED_FLOAT_AUTO_INCREMENT = 3856 +ER_RPL_CANT_STOP_SLAVE_WHILE_LOCKED_BACKUP = 3857 +ER_WARN_DEPRECATED_FLOAT_DIGITS = 3858 +ER_WARN_DEPRECATED_FLOAT_UNSIGNED = 3859 +ER_WARN_DEPRECATED_INTEGER_DISPLAY_WIDTH = 3860 +ER_WARN_DEPRECATED_ZEROFILL = 3861 +ER_CLONE_DONOR = 3862 +ER_CLONE_PROTOCOL = 3863 +ER_CLONE_DONOR_VERSION = 3864 +ER_CLONE_OS = 3865 +ER_CLONE_PLATFORM = 3866 +ER_CLONE_CHARSET = 3867 +ER_CLONE_CONFIG = 3868 +ER_CLONE_SYS_CONFIG = 3869 +ER_CLONE_PLUGIN_MATCH = 3870 +ER_CLONE_LOOPBACK = 3871 +ER_CLONE_ENCRYPTION = 3872 +ER_CLONE_DISK_SPACE = 3873 +ER_CLONE_IN_PROGRESS = 3874 +ER_CLONE_DISALLOWED = 3875 +ER_CANNOT_GRANT_ROLES_TO_ANONYMOUS_USER = 3876 +ER_SECONDARY_ENGINE_PLUGIN = 3877 +ER_SECOND_PASSWORD_CANNOT_BE_EMPTY = 3878 +ER_DB_ACCESS_DENIED = 3879 +ER_DA_AUTH_ID_WITH_SYSTEM_USER_PRIV_IN_MANDATORY_ROLES = 3880 +ER_DA_RPL_GTID_TABLE_CANNOT_OPEN = 3881 +ER_GEOMETRY_IN_UNKNOWN_LENGTH_UNIT = 3882 +ER_DA_PLUGIN_INSTALL_ERROR = 3883 +ER_NO_SESSION_TEMP = 3884 +ER_DA_UNKNOWN_ERROR_NUMBER = 3885 +ER_COLUMN_CHANGE_SIZE = 3886 +ER_REGEXP_INVALID_CAPTURE_GROUP_NAME = 3887 +ER_DA_SSL_LIBRARY_ERROR = 3888 +ER_SECONDARY_ENGINE = 3889 +ER_SECONDARY_ENGINE_DDL = 3890 +ER_INCORRECT_CURRENT_PASSWORD = 3891 +ER_MISSING_CURRENT_PASSWORD = 3892 +ER_CURRENT_PASSWORD_NOT_REQUIRED = 3893 +ER_PASSWORD_CANNOT_BE_RETAINED_ON_PLUGIN_CHANGE = 3894 +ER_CURRENT_PASSWORD_CANNOT_BE_RETAINED = 3895 +ER_PARTIAL_REVOKES_EXIST = 3896 +ER_CANNOT_GRANT_SYSTEM_PRIV_TO_MANDATORY_ROLE = 
3897 +ER_XA_REPLICATION_FILTERS = 3898 +ER_UNSUPPORTED_SQL_MODE = 3899 +ER_REGEXP_INVALID_FLAG = 3900 +ER_PARTIAL_REVOKE_AND_DB_GRANT_BOTH_EXISTS = 3901 +ER_UNIT_NOT_FOUND = 3902 +ER_INVALID_JSON_VALUE_FOR_FUNC_INDEX = 3903 +ER_JSON_VALUE_OUT_OF_RANGE_FOR_FUNC_INDEX = 3904 +ER_EXCEEDED_MV_KEYS_NUM = 3905 +ER_EXCEEDED_MV_KEYS_SPACE = 3906 +ER_FUNCTIONAL_INDEX_DATA_IS_TOO_LONG = 3907 +ER_WRONG_MVI_VALUE = 3908 +ER_WARN_FUNC_INDEX_NOT_APPLICABLE = 3909 +ER_GRP_RPL_UDF_ERROR = 3910 +ER_UPDATE_GTID_PURGED_WITH_GR = 3911 +ER_GROUPING_ON_TIMESTAMP_IN_DST = 3912 +ER_TABLE_NAME_CAUSES_TOO_LONG_PATH = 3913 +ER_AUDIT_LOG_INSUFFICIENT_PRIVILEGE = 3914 +OBSOLETE_ER_AUDIT_LOG_PASSWORD_HAS_BEEN_COPIED = 3915 +ER_DA_GRP_RPL_STARTED_AUTO_REJOIN = 3916 +ER_SYSVAR_CHANGE_DURING_QUERY = 3917 +ER_GLOBSTAT_CHANGE_DURING_QUERY = 3918 +ER_GRP_RPL_MESSAGE_SERVICE_INIT_FAILURE = 3919 +ER_CHANGE_MASTER_WRONG_COMPRESSION_ALGORITHM_CLIENT = 3920 +ER_CHANGE_MASTER_WRONG_COMPRESSION_LEVEL_CLIENT = 3921 +ER_WRONG_COMPRESSION_ALGORITHM_CLIENT = 3922 +ER_WRONG_COMPRESSION_LEVEL_CLIENT = 3923 +ER_CHANGE_MASTER_WRONG_COMPRESSION_ALGORITHM_LIST_CLIENT = 3924 +ER_CLIENT_PRIVILEGE_CHECKS_USER_CANNOT_BE_ANONYMOUS = 3925 +ER_CLIENT_PRIVILEGE_CHECKS_USER_DOES_NOT_EXIST = 3926 +ER_CLIENT_PRIVILEGE_CHECKS_USER_CORRUPT = 3927 +ER_CLIENT_PRIVILEGE_CHECKS_USER_NEEDS_RPL_APPLIER_PRIV = 3928 +ER_WARN_DA_PRIVILEGE_NOT_REGISTERED = 3929 +ER_CLIENT_KEYRING_UDF_KEY_INVALID = 3930 +ER_CLIENT_KEYRING_UDF_KEY_TYPE_INVALID = 3931 +ER_CLIENT_KEYRING_UDF_KEY_TOO_LONG = 3932 +ER_CLIENT_KEYRING_UDF_KEY_TYPE_TOO_LONG = 3933 +ER_JSON_SCHEMA_VALIDATION_ERROR_WITH_DETAILED_REPORT = 3934 +ER_DA_UDF_INVALID_CHARSET_SPECIFIED = 3935 +ER_DA_UDF_INVALID_CHARSET = 3936 +ER_DA_UDF_INVALID_COLLATION = 3937 +ER_DA_UDF_INVALID_EXTENSION_ARGUMENT_TYPE = 3938 +ER_MULTIPLE_CONSTRAINTS_WITH_SAME_NAME = 3939 +ER_CONSTRAINT_NOT_FOUND = 3940 +ER_ALTER_CONSTRAINT_ENFORCEMENT_NOT_SUPPORTED = 3941 +ER_TABLE_VALUE_CONSTRUCTOR_MUST_HAVE_COLUMNS = 3942 +ER_TABLE_VALUE_CONSTRUCTOR_CANNOT_HAVE_DEFAULT = 3943 +ER_CLIENT_QUERY_FAILURE_INVALID_NON_ROW_FORMAT = 3944 +ER_REQUIRE_ROW_FORMAT_INVALID_VALUE = 3945 +ER_FAILED_TO_DETERMINE_IF_ROLE_IS_MANDATORY = 3946 +ER_FAILED_TO_FETCH_MANDATORY_ROLE_LIST = 3947 +ER_CLIENT_LOCAL_FILES_DISABLED = 3948 +ER_IMP_INCOMPATIBLE_CFG_VERSION = 3949 +ER_DA_OOM = 3950 +ER_DA_UDF_INVALID_ARGUMENT_TO_SET_CHARSET = 3951 +ER_DA_UDF_INVALID_RETURN_TYPE_TO_SET_CHARSET = 3952 +ER_MULTIPLE_INTO_CLAUSES = 3953 +ER_MISPLACED_INTO = 3954 +ER_USER_ACCESS_DENIED_FOR_USER_ACCOUNT_BLOCKED_BY_PASSWORD_LOCK = 3955 +ER_WARN_DEPRECATED_YEAR_UNSIGNED = 3956 +ER_CLONE_NETWORK_PACKET = 3957 +ER_SDI_OPERATION_FAILED_MISSING_RECORD = 3958 +ER_DEPENDENT_BY_CHECK_CONSTRAINT = 3959 +ER_GRP_OPERATION_NOT_ALLOWED_GR_MUST_STOP = 3960 +ER_WARN_DEPRECATED_JSON_TABLE_ON_ERROR_ON_EMPTY = 3961 +ER_WARN_DEPRECATED_INNER_INTO = 3962 +ER_WARN_DEPRECATED_VALUES_FUNCTION_ALWAYS_NULL = 3963 +ER_WARN_DEPRECATED_SQL_CALC_FOUND_ROWS = 3964 +ER_WARN_DEPRECATED_FOUND_ROWS = 3965 +ER_MISSING_JSON_VALUE = 3966 +ER_MULTIPLE_JSON_VALUES = 3967 +ER_HOSTNAME_TOO_LONG = 3968 +ER_WARN_CLIENT_DEPRECATED_PARTITION_PREFIX_KEY = 3969 +ER_GROUP_REPLICATION_USER_EMPTY_MSG = 3970 +ER_GROUP_REPLICATION_USER_MANDATORY_MSG = 3971 +ER_GROUP_REPLICATION_PASSWORD_LENGTH = 3972 +ER_SUBQUERY_TRANSFORM_REJECTED = 3973 +ER_DA_GRP_RPL_RECOVERY_ENDPOINT_FORMAT = 3974 +ER_DA_GRP_RPL_RECOVERY_ENDPOINT_INVALID = 3975 +ER_WRONG_VALUE_FOR_VAR_PLUS_ACTIONABLE_PART = 3976 +ER_STATEMENT_NOT_ALLOWED_AFTER_START_TRANSACTION = 
3977 +ER_FOREIGN_KEY_WITH_ATOMIC_CREATE_SELECT = 3978 +ER_NOT_ALLOWED_WITH_START_TRANSACTION = 3979 +ER_INVALID_JSON_ATTRIBUTE = 3980 +ER_ENGINE_ATTRIBUTE_NOT_SUPPORTED = 3981 +ER_INVALID_USER_ATTRIBUTE_JSON = 3982 +ER_INNODB_REDO_DISABLED = 3983 +ER_INNODB_REDO_ARCHIVING_ENABLED = 3984 +ER_MDL_OUT_OF_RESOURCES = 3985 +ER_IMPLICIT_COMPARISON_FOR_JSON = 3986 +ER_FUNCTION_DOES_NOT_SUPPORT_CHARACTER_SET = 3987 +ER_IMPOSSIBLE_STRING_CONVERSION = 3988 +ER_SCHEMA_READ_ONLY = 3989 +ER_RPL_ASYNC_RECONNECT_GTID_MODE_OFF = 3990 +ER_RPL_ASYNC_RECONNECT_AUTO_POSITION_OFF = 3991 +ER_DISABLE_GTID_MODE_REQUIRES_ASYNC_RECONNECT_OFF = 3992 +ER_DISABLE_AUTO_POSITION_REQUIRES_ASYNC_RECONNECT_OFF = 3993 +ER_INVALID_PARAMETER_USE = 3994 +ER_CHARACTER_SET_MISMATCH = 3995 +ER_WARN_VAR_VALUE_CHANGE_NOT_SUPPORTED = 3996 +ER_INVALID_TIME_ZONE_INTERVAL = 3997 +ER_INVALID_CAST = 3998 +ER_HYPERGRAPH_NOT_SUPPORTED_YET = 3999 +ER_WARN_HYPERGRAPH_EXPERIMENTAL = 4000 +ER_DA_NO_ERROR_LOG_PARSER_CONFIGURED = 4001 +ER_DA_ERROR_LOG_TABLE_DISABLED = 4002 +ER_DA_ERROR_LOG_MULTIPLE_FILTERS = 4003 +ER_DA_CANT_OPEN_ERROR_LOG = 4004 +ER_USER_REFERENCED_AS_DEFINER = 4005 +ER_CANNOT_USER_REFERENCED_AS_DEFINER = 4006 +ER_REGEX_NUMBER_TOO_BIG = 4007 +ER_SPVAR_NONINTEGER_TYPE = 4008 +WARN_UNSUPPORTED_ACL_TABLES_READ = 4009 +ER_BINLOG_UNSAFE_ACL_TABLE_READ_IN_DML_DDL = 4010 +ER_STOP_REPLICA_MONITOR_IO_THREAD_TIMEOUT = 4011 +ER_STARTING_REPLICA_MONITOR_IO_THREAD = 4012 +ER_CANT_USE_ANONYMOUS_TO_GTID_WITH_GTID_MODE_NOT_ON = 4013 +ER_CANT_COMBINE_ANONYMOUS_TO_GTID_AND_AUTOPOSITION = 4014 +ER_ASSIGN_GTIDS_TO_ANONYMOUS_TRANSACTIONS_REQUIRES_GTID_MODE_ON = 4015 +ER_SQL_REPLICA_SKIP_COUNTER_USED_WITH_GTID_MODE_ON = 4016 +ER_USING_ASSIGN_GTIDS_TO_ANONYMOUS_TRANSACTIONS_AS_LOCAL_OR_UUID = 4017 +ER_CANT_SET_ANONYMOUS_TO_GTID_AND_WAIT_UNTIL_SQL_THD_AFTER_GTIDS = 4018 +ER_CANT_SET_SQL_AFTER_OR_BEFORE_GTIDS_WITH_ANONYMOUS_TO_GTID = 4019 +ER_ANONYMOUS_TO_GTID_UUID_SAME_AS_GROUP_NAME = 4020 +ER_CANT_USE_SAME_UUID_AS_GROUP_NAME = 4021 +ER_GRP_RPL_RECOVERY_CHANNEL_STILL_RUNNING = 4022 +ER_INNODB_INVALID_AUTOEXTEND_SIZE_VALUE = 4023 +ER_INNODB_INCOMPATIBLE_WITH_TABLESPACE = 4024 +ER_INNODB_AUTOEXTEND_SIZE_OUT_OF_RANGE = 4025 +ER_CANNOT_USE_AUTOEXTEND_SIZE_CLAUSE = 4026 +ER_ROLE_GRANTED_TO_ITSELF = 4027 +ER_TABLE_MUST_HAVE_A_VISIBLE_COLUMN = 4028 +ER_INNODB_COMPRESSION_FAILURE = 4029 +ER_WARN_ASYNC_CONN_FAILOVER_NETWORK_NAMESPACE = 4030 +ER_CLIENT_INTERACTION_TIMEOUT = 4031 +ER_INVALID_CAST_TO_GEOMETRY = 4032 +ER_INVALID_CAST_POLYGON_RING_DIRECTION = 4033 +ER_GIS_DIFFERENT_SRIDS_AGGREGATION = 4034 +ER_RELOAD_KEYRING_FAILURE = 4035 +ER_SDI_GET_KEYS_INVALID_TABLESPACE = 4036 +ER_CHANGE_RPL_SRC_WRONG_COMPRESSION_ALGORITHM_SIZE = 4037 +ER_WARN_DEPRECATED_TLS_VERSION_FOR_CHANNEL_CLI = 4038 +ER_CANT_USE_SAME_UUID_AS_VIEW_CHANGE_UUID = 4039 +ER_ANONYMOUS_TO_GTID_UUID_SAME_AS_VIEW_CHANGE_UUID = 4040 +ER_GRP_RPL_VIEW_CHANGE_UUID_FAIL_GET_VARIABLE = 4041 +ER_WARN_ADUIT_LOG_MAX_SIZE_AND_PRUNE_SECONDS = 4042 +ER_WARN_ADUIT_LOG_MAX_SIZE_CLOSE_TO_ROTATE_ON_SIZE = 4043 +ER_KERBEROS_CREATE_USER = 4044 +ER_INSTALL_PLUGIN_CONFLICT_CLIENT = 4045 +ER_DA_ERROR_LOG_COMPONENT_FLUSH_FAILED = 4046 +ER_WARN_SQL_AFTER_MTS_GAPS_GAP_NOT_CALCULATED = 4047 +ER_INVALID_ASSIGNMENT_TARGET = 4048 +ER_OPERATION_NOT_ALLOWED_ON_GR_SECONDARY = 4049 +ER_GRP_RPL_FAILOVER_CHANNEL_STATUS_PROPAGATION = 4050 +ER_WARN_AUDIT_LOG_FORMAT_UNIX_TIMESTAMP_ONLY_WHEN_JSON = 4051 +ER_INVALID_MFA_PLUGIN_SPECIFIED = 4052 +ER_IDENTIFIED_BY_UNSUPPORTED = 4053 +ER_INVALID_PLUGIN_FOR_REGISTRATION = 4054 
+ER_PLUGIN_REQUIRES_REGISTRATION = 4055 +ER_MFA_METHOD_EXISTS = 4056 +ER_MFA_METHOD_NOT_EXISTS = 4057 +ER_AUTHENTICATION_POLICY_MISMATCH = 4058 +ER_PLUGIN_REGISTRATION_DONE = 4059 +ER_INVALID_USER_FOR_REGISTRATION = 4060 +ER_USER_REGISTRATION_FAILED = 4061 +ER_MFA_METHODS_INVALID_ORDER = 4062 +ER_MFA_METHODS_IDENTICAL = 4063 +ER_INVALID_MFA_OPERATIONS_FOR_PASSWORDLESS_USER = 4064 +ER_CHANGE_REPLICATION_SOURCE_NO_OPTIONS_FOR_GTID_ONLY = 4065 +ER_CHANGE_REP_SOURCE_CANT_DISABLE_REQ_ROW_FORMAT_WITH_GTID_ONLY = 4066 +ER_CHANGE_REP_SOURCE_CANT_DISABLE_AUTO_POSITION_WITH_GTID_ONLY = 4067 +ER_CHANGE_REP_SOURCE_CANT_DISABLE_GTID_ONLY_WITHOUT_POSITIONS = 4068 +ER_CHANGE_REP_SOURCE_CANT_DISABLE_AUTO_POS_WITHOUT_POSITIONS = 4069 +ER_CHANGE_REP_SOURCE_GR_CHANNEL_WITH_GTID_MODE_NOT_ON = 4070 +ER_CANT_USE_GTID_ONLY_WITH_GTID_MODE_NOT_ON = 4071 +ER_WARN_C_DISABLE_GTID_ONLY_WITH_SOURCE_AUTO_POS_INVALID_POS = 4072 +ER_DA_SSL_FIPS_MODE_ERROR = 4073 +CR_UNKNOWN_ERROR = 2000 +CR_SOCKET_CREATE_ERROR = 2001 +CR_CONNECTION_ERROR = 2002 +CR_CONN_HOST_ERROR = 2003 +CR_IPSOCK_ERROR = 2004 +CR_UNKNOWN_HOST = 2005 +CR_SERVER_GONE_ERROR = 2006 +CR_VERSION_ERROR = 2007 +CR_OUT_OF_MEMORY = 2008 +CR_WRONG_HOST_INFO = 2009 +CR_LOCALHOST_CONNECTION = 2010 +CR_TCP_CONNECTION = 2011 +CR_SERVER_HANDSHAKE_ERR = 2012 +CR_SERVER_LOST = 2013 +CR_COMMANDS_OUT_OF_SYNC = 2014 +CR_NAMEDPIPE_CONNECTION = 2015 +CR_NAMEDPIPEWAIT_ERROR = 2016 +CR_NAMEDPIPEOPEN_ERROR = 2017 +CR_NAMEDPIPESETSTATE_ERROR = 2018 +CR_CANT_READ_CHARSET = 2019 +CR_NET_PACKET_TOO_LARGE = 2020 +CR_EMBEDDED_CONNECTION = 2021 +CR_PROBE_SLAVE_STATUS = 2022 +CR_PROBE_SLAVE_HOSTS = 2023 +CR_PROBE_SLAVE_CONNECT = 2024 +CR_PROBE_MASTER_CONNECT = 2025 +CR_SSL_CONNECTION_ERROR = 2026 +CR_MALFORMED_PACKET = 2027 +CR_WRONG_LICENSE = 2028 +CR_NULL_POINTER = 2029 +CR_NO_PREPARE_STMT = 2030 +CR_PARAMS_NOT_BOUND = 2031 +CR_DATA_TRUNCATED = 2032 +CR_NO_PARAMETERS_EXISTS = 2033 +CR_INVALID_PARAMETER_NO = 2034 +CR_INVALID_BUFFER_USE = 2035 +CR_UNSUPPORTED_PARAM_TYPE = 2036 +CR_SHARED_MEMORY_CONNECTION = 2037 +CR_SHARED_MEMORY_CONNECT_REQUEST_ERROR = 2038 +CR_SHARED_MEMORY_CONNECT_ANSWER_ERROR = 2039 +CR_SHARED_MEMORY_CONNECT_FILE_MAP_ERROR = 2040 +CR_SHARED_MEMORY_CONNECT_MAP_ERROR = 2041 +CR_SHARED_MEMORY_FILE_MAP_ERROR = 2042 +CR_SHARED_MEMORY_MAP_ERROR = 2043 +CR_SHARED_MEMORY_EVENT_ERROR = 2044 +CR_SHARED_MEMORY_CONNECT_ABANDONED_ERROR = 2045 +CR_SHARED_MEMORY_CONNECT_SET_ERROR = 2046 +CR_CONN_UNKNOW_PROTOCOL = 2047 +CR_INVALID_CONN_HANDLE = 2048 +CR_UNUSED_1 = 2049 +CR_FETCH_CANCELED = 2050 +CR_NO_DATA = 2051 +CR_NO_STMT_METADATA = 2052 +CR_NO_RESULT_SET = 2053 +CR_NOT_IMPLEMENTED = 2054 +CR_SERVER_LOST_EXTENDED = 2055 +CR_STMT_CLOSED = 2056 +CR_NEW_STMT_METADATA = 2057 +CR_ALREADY_CONNECTED = 2058 +CR_AUTH_PLUGIN_CANNOT_LOAD = 2059 +CR_DUPLICATE_CONNECTION_ATTR = 2060 +CR_AUTH_PLUGIN_ERR = 2061 +CR_INSECURE_API_ERR = 2062 +CR_FILE_NAME_TOO_LONG = 2063 +CR_SSL_FIPS_MODE_ERR = 2064 +CR_DEPRECATED_COMPRESSION_NOT_SUPPORTED = 2065 +CR_COMPRESSION_WRONGLY_CONFIGURED = 2066 +CR_KERBEROS_USER_NOT_FOUND = 2067 +CR_LOAD_DATA_LOCAL_INFILE_REJECTED = 2068 +CR_LOAD_DATA_LOCAL_INFILE_REALPATH_FAIL = 2069 +CR_DNS_SRV_LOOKUP_FAILED = 2070 +CR_MANDATORY_TRACKER_NOT_FOUND = 2071 +CR_INVALID_FACTOR_NO = 2072 +# End MySQL Errors + +# Start X Plugin Errors +ER_X_BAD_MESSAGE = 5000 +ER_X_CAPABILITIES_PREPARE_FAILED = 5001 +ER_X_CAPABILITY_NOT_FOUND = 5002 +ER_X_INVALID_PROTOCOL_DATA = 5003 +ER_X_BAD_CONNECTION_SESSION_ATTRIBUTE_VALUE_LENGTH = 5004 +ER_X_BAD_CONNECTION_SESSION_ATTRIBUTE_KEY_LENGTH 
= 5005 +ER_X_BAD_CONNECTION_SESSION_ATTRIBUTE_EMPTY_KEY = 5006 +ER_X_BAD_CONNECTION_SESSION_ATTRIBUTE_LENGTH = 5007 +ER_X_BAD_CONNECTION_SESSION_ATTRIBUTE_TYPE = 5008 +ER_X_CAPABILITY_SET_NOT_ALLOWED = 5009 +ER_X_SERVICE_ERROR = 5010 +ER_X_SESSION = 5011 +ER_X_INVALID_ARGUMENT = 5012 +ER_X_MISSING_ARGUMENT = 5013 +ER_X_BAD_INSERT_DATA = 5014 +ER_X_CMD_NUM_ARGUMENTS = 5015 +ER_X_CMD_ARGUMENT_TYPE = 5016 +ER_X_CMD_ARGUMENT_VALUE = 5017 +ER_X_BAD_UPSERT_DATA = 5018 +ER_X_DUPLICATED_CAPABILITIES = 5019 +ER_X_CMD_ARGUMENT_OBJECT_EMPTY = 5020 +ER_X_CMD_INVALID_ARGUMENT = 5021 +ER_X_BAD_UPDATE_DATA = 5050 +ER_X_BAD_TYPE_OF_UPDATE = 5051 +ER_X_BAD_COLUMN_TO_UPDATE = 5052 +ER_X_BAD_MEMBER_TO_UPDATE = 5053 +ER_X_BAD_STATEMENT_ID = 5110 +ER_X_BAD_CURSOR_ID = 5111 +ER_X_BAD_SCHEMA = 5112 +ER_X_BAD_TABLE = 5113 +ER_X_BAD_PROJECTION = 5114 +ER_X_DOC_ID_MISSING = 5115 +ER_X_DUPLICATE_ENTRY = 5116 +ER_X_DOC_REQUIRED_FIELD_MISSING = 5117 +ER_X_PROJ_BAD_KEY_NAME = 5120 +ER_X_BAD_DOC_PATH = 5121 +ER_X_CURSOR_EXISTS = 5122 +ER_X_CURSOR_REACHED_EOF = 5123 +ER_X_PREPARED_STATMENT_CAN_HAVE_ONE_CURSOR = 5131 +ER_X_PREPARED_EXECUTE_ARGUMENT_NOT_SUPPORTED = 5133 +ER_X_PREPARED_EXECUTE_ARGUMENT_CONSISTENCY = 5134 +ER_X_EXPR_BAD_OPERATOR = 5150 +ER_X_EXPR_BAD_NUM_ARGS = 5151 +ER_X_EXPR_MISSING_ARG = 5152 +ER_X_EXPR_BAD_TYPE_VALUE = 5153 +ER_X_EXPR_BAD_VALUE = 5154 +ER_X_INVALID_COLLECTION = 5156 +ER_X_INVALID_ADMIN_COMMAND = 5157 +ER_X_EXPECT_NOT_OPEN = 5158 +ER_X_EXPECT_NO_ERROR_FAILED = 5159 +ER_X_EXPECT_BAD_CONDITION = 5160 +ER_X_EXPECT_BAD_CONDITION_VALUE = 5161 +ER_X_INVALID_NAMESPACE = 5162 +ER_X_BAD_NOTICE = 5163 +ER_X_CANNOT_DISABLE_NOTICE = 5164 +ER_X_BAD_CONFIGURATION = 5165 +ER_X_MYSQLX_ACCOUNT_MISSING_PERMISSIONS = 5167 +ER_X_EXPECT_FIELD_EXISTS_FAILED = 5168 +ER_X_BAD_LOCKING = 5169 +ER_X_FRAME_COMPRESSION_DISABLED = 5170 +ER_X_DECOMPRESSION_FAILED = 5171 +ER_X_BAD_COMPRESSED_FRAME = 5174 +ER_X_CAPABILITY_COMPRESSION_INVALID_ALGORITHM = 5175 +ER_X_CAPABILITY_COMPRESSION_INVALID_SERVER_STYLE = 5176 +ER_X_CAPABILITY_COMPRESSION_INVALID_CLIENT_STYLE = 5177 +ER_X_CAPABILITY_COMPRESSION_INVALID_OPTION = 5178 +ER_X_CAPABILITY_COMPRESSION_MISSING_REQUIRED_FIELDS = 5179 +ER_X_DOCUMENT_DOESNT_MATCH_EXPECTED_SCHEMA = 5180 +ER_X_COLLECTION_OPTION_DOESNT_EXISTS = 5181 +ER_X_INVALID_VALIDATION_SCHEMA = 5182 +# End X Plugin Errors diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/errors.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/errors.py new file mode 100644 index 0000000..18595e3 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/errors.py @@ -0,0 +1,342 @@ +# Copyright (c) 2009, 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. 
+# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""Python exceptions.""" +from typing import Dict, Mapping, Optional, Tuple, Type, Union + +from .locales import get_client_error +from .types import StrOrBytes +from .utils import read_bytes, read_int + + +class Error(Exception): + """Exception that is base class for all other error exceptions. + + See [1] for more details. + + References: + [1]: https://dev.mysql.com/doc/connector-python/en/connector-python-api-errors-error.html + """ + + def __init__( + self, + msg: Optional[str] = None, + errno: Optional[int] = None, + values: Optional[Tuple[Union[int, str], ...]] = None, + sqlstate: Optional[str] = None, + ) -> None: + super().__init__() + self.msg = msg + self._full_msg = self.msg + self.errno = errno or -1 + self.sqlstate = sqlstate + + if not self.msg and (2000 <= self.errno < 3000): + self.msg = get_client_error(self.errno) + if values is not None: + try: + self.msg = self.msg % values + except TypeError as err: + self.msg = f"{self.msg} (Warning: {err})" + elif not self.msg: + self._full_msg = self.msg = "Unknown error" + + if self.msg and self.errno != -1: + fields = {"errno": self.errno, "msg": self.msg} + if self.sqlstate: + fmt = "{errno} ({state}): {msg}" + fields["state"] = self.sqlstate + else: + fmt = "{errno}: {msg}" + self._full_msg = fmt.format(**fields) + + self.args = (self.errno, self._full_msg, self.sqlstate) + + def __str__(self) -> str: + return self._full_msg + + +class Warning(Exception): # pylint: disable=redefined-builtin + """Exception for important warnings""" + + +class InterfaceError(Error): + """Exception for errors related to the interface""" + + +class DatabaseError(Error): + """Exception for errors related to the database""" + + +class InternalError(DatabaseError): + """Exception for errors internal database errors""" + + +class OperationalError(DatabaseError): + """Exception for errors related to the database's operation""" + + +class ProgrammingError(DatabaseError): + """Exception for errors programming errors""" + + +class IntegrityError(DatabaseError): + """Exception for errors regarding relational integrity""" + + +class DataError(DatabaseError): + """Exception for errors reporting problems with processed data""" + + +class NotSupportedError(DatabaseError): + """Exception for errors when an unsupported database feature was used""" + + +class PoolError(Error): + """Exception for errors relating to connection pooling""" + + +ErrorClassTypes = Union[ + Type[Error], + Type[InterfaceError], + Type[DatabaseError], + Type[InternalError], + Type[OperationalError], + Type[ProgrammingError], + Type[IntegrityError], + Type[DataError], + Type[NotSupportedError], + Type[PoolError], +] +ErrorTypes = Union[ + Error, + InterfaceError, + DatabaseError, + InternalError, + OperationalError, + ProgrammingError, + IntegrityError, + 
DataError, + NotSupportedError, + PoolError, + Warning, +] +# _CUSTOM_ERROR_EXCEPTIONS holds custom exceptions and is used by the +# function custom_error_exception. _ERROR_EXCEPTIONS (at bottom of module) +# is similar, but hardcoded exceptions. +_CUSTOM_ERROR_EXCEPTIONS: Dict[int, ErrorClassTypes] = {} + + +def custom_error_exception( + error: Optional[Union[int, Dict[int, Optional[ErrorClassTypes]]]] = None, + exception: Optional[ErrorClassTypes] = None, +) -> Mapping[int, Optional[ErrorClassTypes]]: + """Defines custom exceptions for MySQL server errors. + + This function defines custom exceptions for MySQL server errors and + returns the current set customizations. + + To reset the customizations, simply supply an empty dictionary. + + Args: + error: Can be a MySQL Server error number or a dictionary in which case the + key is the server error number and the value is the exception to be raised. + exception: If `error` is a MySQL Server error number then you have to pass + also the exception class, otherwise you don't. + + Returns: + dictionary: Current set customizations. + + Examples: + ``` + import mysql.connector + from mysql.connector import errorcode + + # Server error 1028 should raise a DatabaseError + mysql.connector.custom_error_exception( + 1028, mysql.connector.DatabaseError) + + # Or using a dictionary: + mysql.connector.custom_error_exception({ + 1028: mysql.connector.DatabaseError, + 1029: mysql.connector.OperationalError, + }) + + # Reset + mysql.connector.custom_error_exception({}) + ``` + """ + global _CUSTOM_ERROR_EXCEPTIONS # pylint: disable=global-statement + + if isinstance(error, dict) and not error: + _CUSTOM_ERROR_EXCEPTIONS = {} + return _CUSTOM_ERROR_EXCEPTIONS + + if not error and not exception: + return _CUSTOM_ERROR_EXCEPTIONS + + if not isinstance(error, (int, dict)): + raise ValueError("The error argument should be either an integer or dictionary") + + if isinstance(error, int): + error = {error: exception} + + for errno, _exception in error.items(): + if not isinstance(errno, int): + raise ValueError("Error number should be an integer") + try: + if _exception is None or not issubclass(_exception, Exception): + raise TypeError + except TypeError as err: + raise ValueError("Exception should be subclass of Exception") from err + _CUSTOM_ERROR_EXCEPTIONS[errno] = _exception + + return _CUSTOM_ERROR_EXCEPTIONS + + +def get_mysql_exception( + errno: int, + msg: Optional[str] = None, + sqlstate: Optional[str] = None, + warning: Optional[bool] = False, +) -> ErrorTypes: + """Get the exception matching the MySQL error + + This function will return an exception based on the SQLState. The given + message will be passed on in the returned exception. + + The exception returned can be customized using the + mysql.connector.custom_error_exception() function. 
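Editor's note: get_mysql_exception() resolves the class in that order: custom registrations made through custom_error_exception() first, then the hardcoded _ERROR_EXCEPTIONS table, and finally the first two characters of the SQLSTATE, with DatabaseError as the fallback. A small sketch, using server error 1146 / SQLSTATE 42S02 purely as an illustration:

from mysql.connector.errors import ProgrammingError, get_mysql_exception

# SQLSTATE class "42" (syntax error or access rule violation) maps to ProgrammingError.
exc = get_mysql_exception(1146, msg="Table 'test.t1' doesn't exist", sqlstate="42S02")
assert isinstance(exc, ProgrammingError)
assert exc.errno == 1146 and exc.sqlstate == "42S02"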
+ + Returns an Exception + """ + try: + return _CUSTOM_ERROR_EXCEPTIONS[errno](msg=msg, errno=errno, sqlstate=sqlstate) + except KeyError: + # Error was not mapped to particular exception + pass + + try: + return _ERROR_EXCEPTIONS[errno](msg=msg, errno=errno, sqlstate=sqlstate) + except KeyError: + # Error was not mapped to particular exception + pass + + if not sqlstate: + if warning: + return Warning(errno, msg) + return DatabaseError(msg=msg, errno=errno) + + try: + return _SQLSTATE_CLASS_EXCEPTION[sqlstate[0:2]]( + msg=msg, errno=errno, sqlstate=sqlstate + ) + except KeyError: + # Return default InterfaceError + return DatabaseError(msg=msg, errno=errno, sqlstate=sqlstate) + + +def get_exception(packet: bytes) -> ErrorTypes: + """Returns an exception object based on the MySQL error + + Returns an exception object based on the MySQL error in the given + packet. + + Returns an Error-Object. + """ + errno = errmsg = None + + try: + if packet[4] != 255: + raise ValueError("Packet is not an error packet") + except IndexError as err: + return InterfaceError(f"Failed getting Error information ({err})") + + sqlstate: Optional[StrOrBytes] = None + try: + packet = packet[5:] + packet, errno = read_int(packet, 2) + if packet[0] != 35: + # Error without SQLState + if isinstance(packet, (bytes, bytearray)): + errmsg = packet.decode("utf8") + else: + errmsg = packet + else: + packet, sqlstate = read_bytes(packet[1:], 5) + sqlstate = sqlstate.decode("utf8") + errmsg = packet.decode("utf8") + except (IndexError, UnicodeError) as err: + return InterfaceError(f"Failed getting Error information ({err})") + return get_mysql_exception(errno, errmsg, sqlstate) # type: ignore[arg-type] + + +_SQLSTATE_CLASS_EXCEPTION: Dict[str, ErrorClassTypes] = { + "02": DataError, # no data + "07": DatabaseError, # dynamic SQL error + "08": OperationalError, # connection exception + "0A": NotSupportedError, # feature not supported + "21": DataError, # cardinality violation + "22": DataError, # data exception + "23": IntegrityError, # integrity constraint violation + "24": ProgrammingError, # invalid cursor state + "25": ProgrammingError, # invalid transaction state + "26": ProgrammingError, # invalid SQL statement name + "27": ProgrammingError, # triggered data change violation + "28": ProgrammingError, # invalid authorization specification + "2A": ProgrammingError, # direct SQL syntax error or access rule violation + "2B": DatabaseError, # dependent privilege descriptors still exist + "2C": ProgrammingError, # invalid character set name + "2D": DatabaseError, # invalid transaction termination + "2E": DatabaseError, # invalid connection name + "33": DatabaseError, # invalid SQL descriptor name + "34": ProgrammingError, # invalid cursor name + "35": ProgrammingError, # invalid condition number + "37": ProgrammingError, # dynamic SQL syntax error or access rule violation + "3C": ProgrammingError, # ambiguous cursor name + "3D": ProgrammingError, # invalid catalog name + "3F": ProgrammingError, # invalid schema name + "40": InternalError, # transaction rollback + "42": ProgrammingError, # syntax error or access rule violation + "44": InternalError, # with check option violation + "HZ": OperationalError, # remote database access + "XA": IntegrityError, + "0K": OperationalError, + "HY": DatabaseError, # default when no SQLState provided by MySQL server +} + +_ERROR_EXCEPTIONS: Dict[int, ErrorClassTypes] = { + 1243: ProgrammingError, + 1210: ProgrammingError, + 2002: InterfaceError, + 2013: OperationalError, + 2049: 
NotSupportedError, + 2055: OperationalError, + 2061: InterfaceError, + 2026: InterfaceError, +} diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/locales/__init__.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/locales/__init__.py new file mode 100644 index 0000000..f6727ee --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/locales/__init__.py @@ -0,0 +1,80 @@ +# Copyright (c) 2012, 2022, Oracle and/or its affiliates. All rights reserved. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""Translations.""" + +from typing import List, Optional, Union + +__all__: List[str] = ["get_client_error"] + +from .. import errorcode + + +def get_client_error(error: Union[int, str], language: str = "eng") -> Optional[str]: + """Lookup client error + + This function will lookup the client error message based on the given + error and return the error message. If the error was not found, + None will be returned. + + Error can be either an integer or a string. For example: + error: 2000 + error: CR_UNKNOWN_ERROR + + The language attribute can be used to retrieve a localized message, when + available. + + Returns a string or None. 
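Editor's note: get_client_error() accepts either the numeric client-error code or its CR_* symbol and returns the English template defined in locales/eng/client_error.py, or None when the name is unknown. A quick illustration:

from mysql.connector.locales import get_client_error

print(get_client_error(2013))              # Lost connection to MySQL server during query
print(get_client_error("CR_SERVER_LOST"))  # same message, looked up by symbol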
+ """ + try: + tmp = __import__( + f"mysql.connector.locales.{language}", + globals(), + locals(), + ["client_error"], + ) + except ImportError: + raise ImportError( + f"No localization support for language '{language}'" + ) from None + client_error = tmp.client_error + + if isinstance(error, int): + errno = error + for key, value in errorcode.__dict__.items(): + if value == errno: + error = key + break + + if isinstance(error, (str)): + try: + return getattr(client_error, error) + except AttributeError: + return None + + raise ValueError("error argument needs to be either an integer or string") diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/locales/__pycache__/__init__.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/locales/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..ee9af85 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/locales/__pycache__/__init__.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/locales/eng/__init__.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/locales/eng/__init__.py new file mode 100644 index 0000000..2e1c02b --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/locales/eng/__init__.py @@ -0,0 +1,30 @@ +# Copyright (c) 2012, 2017, Oracle and/or its affiliates. All rights reserved. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. 
+# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""English Content +""" diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/locales/eng/__pycache__/__init__.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/locales/eng/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..7149e56 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/locales/eng/__pycache__/__init__.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/locales/eng/__pycache__/client_error.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/locales/eng/__pycache__/client_error.cpython-310.pyc new file mode 100644 index 0000000..9e621b8 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/locales/eng/__pycache__/client_error.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/locales/eng/client_error.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/locales/eng/client_error.py new file mode 100644 index 0000000..89fb355 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/locales/eng/client_error.py @@ -0,0 +1,152 @@ +# -*- coding: utf-8 -*- + +# Copyright (c) 2013, 2022, Oracle and/or its affiliates. All rights reserved. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""MySQL Error Messages.""" + +# This file was auto-generated. 
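Editor's note: the constants below are plain %-style templates; for client errors in the 2000-2999 range, Error.__init__ (errors.py above) fetches the template via get_client_error() and interpolates the values tuple supplied by the caller. A minimal illustration with made-up host, port, and reason values:

from mysql.connector.errors import InterfaceError

err = InterfaceError(errno=2003, values=("127.0.0.1", 3306, "Connection refused"))
print(err)
# 2003: Can't connect to MySQL server on '127.0.0.1:3306' (Connection refused)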
+_GENERATED_ON = "2021-08-11" +_MYSQL_VERSION = (8, 0, 27) + +# pylint: disable=line-too-long +# Start MySQL Error messages +CR_UNKNOWN_ERROR = "Unknown MySQL error" +CR_SOCKET_CREATE_ERROR = "Can't create UNIX socket (%s)" +CR_CONNECTION_ERROR = ( + "Can't connect to local MySQL server through socket '%-.100s' (%s)" +) +CR_CONN_HOST_ERROR = "Can't connect to MySQL server on '%-.100s:%u' (%s)" +CR_IPSOCK_ERROR = "Can't create TCP/IP socket (%s)" +CR_UNKNOWN_HOST = "Unknown MySQL server host '%-.100s' (%s)" +CR_SERVER_GONE_ERROR = "MySQL server has gone away" +CR_VERSION_ERROR = "Protocol mismatch; server version = %s, client version = %s" +CR_OUT_OF_MEMORY = "MySQL client ran out of memory" +CR_WRONG_HOST_INFO = "Wrong host info" +CR_LOCALHOST_CONNECTION = "Localhost via UNIX socket" +CR_TCP_CONNECTION = "%-.100s via TCP/IP" +CR_SERVER_HANDSHAKE_ERR = "Error in server handshake" +CR_SERVER_LOST = "Lost connection to MySQL server during query" +CR_COMMANDS_OUT_OF_SYNC = "Commands out of sync; you can't run this command now" +CR_NAMEDPIPE_CONNECTION = "Named pipe: %-.32s" +CR_NAMEDPIPEWAIT_ERROR = "Can't wait for named pipe to host: %-.64s pipe: %-.32s (%s)" +CR_NAMEDPIPEOPEN_ERROR = "Can't open named pipe to host: %-.64s pipe: %-.32s (%s)" +CR_NAMEDPIPESETSTATE_ERROR = ( + "Can't set state of named pipe to host: %-.64s pipe: %-.32s (%s)" +) +CR_CANT_READ_CHARSET = "Can't initialize character set %-.32s (path: %-.100s)" +CR_NET_PACKET_TOO_LARGE = "Got packet bigger than 'max_allowed_packet' bytes" +CR_EMBEDDED_CONNECTION = "Embedded server" +CR_PROBE_SLAVE_STATUS = "Error on SHOW SLAVE STATUS:" +CR_PROBE_SLAVE_HOSTS = "Error on SHOW SLAVE HOSTS:" +CR_PROBE_SLAVE_CONNECT = "Error connecting to slave:" +CR_PROBE_MASTER_CONNECT = "Error connecting to master:" +CR_SSL_CONNECTION_ERROR = "SSL connection error: %-.100s" +CR_MALFORMED_PACKET = "Malformed packet" +CR_WRONG_LICENSE = "This client library is licensed only for use with MySQL servers having '%s' license" +CR_NULL_POINTER = "Invalid use of null pointer" +CR_NO_PREPARE_STMT = "Statement not prepared" +CR_PARAMS_NOT_BOUND = "No data supplied for parameters in prepared statement" +CR_DATA_TRUNCATED = "Data truncated" +CR_NO_PARAMETERS_EXISTS = "No parameters exist in the statement" +CR_INVALID_PARAMETER_NO = "Invalid parameter number" +CR_INVALID_BUFFER_USE = ( + "Can't send long data for non-string/non-binary data types (parameter: %s)" +) +CR_UNSUPPORTED_PARAM_TYPE = "Using unsupported buffer type: %s (parameter: %s)" +CR_SHARED_MEMORY_CONNECTION = "Shared memory: %-.100s" +CR_SHARED_MEMORY_CONNECT_REQUEST_ERROR = ( + "Can't open shared memory; client could not create request event (%s)" +) +CR_SHARED_MEMORY_CONNECT_ANSWER_ERROR = ( + "Can't open shared memory; no answer event received from server (%s)" +) +CR_SHARED_MEMORY_CONNECT_FILE_MAP_ERROR = ( + "Can't open shared memory; server could not allocate file mapping (%s)" +) +CR_SHARED_MEMORY_CONNECT_MAP_ERROR = ( + "Can't open shared memory; server could not get pointer to file mapping (%s)" +) +CR_SHARED_MEMORY_FILE_MAP_ERROR = ( + "Can't open shared memory; client could not allocate file mapping (%s)" +) +CR_SHARED_MEMORY_MAP_ERROR = ( + "Can't open shared memory; client could not get pointer to file mapping (%s)" +) +CR_SHARED_MEMORY_EVENT_ERROR = ( + "Can't open shared memory; client could not create %s event (%s)" +) +CR_SHARED_MEMORY_CONNECT_ABANDONED_ERROR = ( + "Can't open shared memory; no answer from server (%s)" +) +CR_SHARED_MEMORY_CONNECT_SET_ERROR = ( + "Can't open shared 
memory; cannot send request event to server (%s)" +) +CR_CONN_UNKNOW_PROTOCOL = "Wrong or unknown protocol" +CR_INVALID_CONN_HANDLE = "Invalid connection handle" +CR_UNUSED_1 = "Connection using old (pre-4.1.1) authentication protocol refused (client option 'secure_auth' enabled)" +CR_FETCH_CANCELED = "Row retrieval was canceled by mysql_stmt_close() call" +CR_NO_DATA = "Attempt to read column without prior row fetch" +CR_NO_STMT_METADATA = "Prepared statement contains no metadata" +CR_NO_RESULT_SET = ( + "Attempt to read a row while there is no result set associated with the statement" +) +CR_NOT_IMPLEMENTED = "This feature is not implemented yet" +CR_SERVER_LOST_EXTENDED = "Lost connection to MySQL server at '%s', system error: %s" +CR_STMT_CLOSED = "Statement closed indirectly because of a preceding %s() call" +CR_NEW_STMT_METADATA = "The number of columns in the result set differs from the number of bound buffers. You must reset the statement, rebind the result set columns, and execute the statement again" +CR_ALREADY_CONNECTED = ( + "This handle is already connected. Use a separate handle for each connection." +) +CR_AUTH_PLUGIN_CANNOT_LOAD = "Authentication plugin '%s' cannot be loaded: %s" +CR_DUPLICATE_CONNECTION_ATTR = "There is an attribute with the same name already" +CR_AUTH_PLUGIN_ERR = "Authentication plugin '%s' reported error: %s" +CR_INSECURE_API_ERR = "Insecure API function call: '%s' Use instead: '%s'" +CR_FILE_NAME_TOO_LONG = "File name is too long" +CR_SSL_FIPS_MODE_ERR = "Set FIPS mode ON/STRICT failed" +CR_DEPRECATED_COMPRESSION_NOT_SUPPORTED = ( + "Compression protocol not supported with asynchronous protocol" +) +CR_COMPRESSION_WRONGLY_CONFIGURED = ( + "Connection failed due to wrongly configured compression algorithm" +) +CR_KERBEROS_USER_NOT_FOUND = ( + "SSO user not found, Please perform SSO authentication using kerberos." +) +CR_LOAD_DATA_LOCAL_INFILE_REJECTED = ( + "LOAD DATA LOCAL INFILE file request rejected due to restrictions on access." +) +CR_LOAD_DATA_LOCAL_INFILE_REALPATH_FAIL = ( + "Determining the real path for '%s' failed with error (%s): %s" +) +CR_DNS_SRV_LOOKUP_FAILED = "DNS SRV lookup failed with error : %s" +CR_MANDATORY_TRACKER_NOT_FOUND = ( + "Client does not recognise tracker type %s marked as mandatory by server." +) +CR_INVALID_FACTOR_NO = "Invalid first argument for MYSQL_OPT_USER_PASSWORD option. Valid value should be between 1 and 3 inclusive." +# End MySQL Error messages diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/logger.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/logger.py new file mode 100644 index 0000000..c64162a --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/logger.py @@ -0,0 +1,33 @@ +# Copyright (c) 2022, 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. 
+# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""Setup of the `mysql.connector` logger.""" + +import logging + +logger = logging.getLogger("mysql.connector") diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/network.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/network.py new file mode 100644 index 0000000..9af0d97 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/network.py @@ -0,0 +1,769 @@ +# Copyright (c) 2012, 2023, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. +# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +"""Module implementing low-level socket communication with MySQL servers. +""" + +import socket +import struct +import warnings +import zlib + +from abc import ABC, abstractmethod +from collections import deque +from typing import Any, Deque, List, Optional, Tuple, Union + +try: + import ssl + + TLS_VERSIONS = { + "TLSv1": ssl.PROTOCOL_TLSv1, + "TLSv1.1": ssl.PROTOCOL_TLSv1_1, + "TLSv1.2": ssl.PROTOCOL_TLSv1_2, + "TLSv1.3": ssl.PROTOCOL_TLS, + } + TLS_V1_3_SUPPORTED = hasattr(ssl, "HAS_TLSv1_3") and ssl.HAS_TLSv1_3 +except ImportError: + # If import fails, we don't have SSL support. + TLS_V1_3_SUPPORTED = False + ssl = None + +from .errors import ( + InterfaceError, + NotSupportedError, + OperationalError, + ProgrammingError, +) + +MIN_COMPRESS_LENGTH = 50 +MAX_PAYLOAD_LENGTH = 2**24 - 1 +PACKET_HEADER_LENGTH = 4 +COMPRESSED_PACKET_HEADER_LENGTH = 7 + + +def _strioerror(err: IOError) -> str: + """Reformat the IOError error message. 
+ + This function reformats the IOError error message. + """ + return str(err) if not err.errno else f"{err.errno} {err.strerror}" + + +class NetworkBroker(ABC): + """Broker class interface. + + The network object is a broker used as a delegate by a socket object. Whenever the + socket wants to deliver or get packets to or from the MySQL server it needs to rely + on its network broker (netbroker). + + The netbroker sends `payloads` and receives `packets`. + + A packet is a bytes sequence, it has a header and body (referred to as payload). + The first `PACKET_HEADER_LENGTH` or `COMPRESSED_PACKET_HEADER_LENGTH` + (as appropriate) bytes correspond to the `header`, the remaining ones represent the + `payload`. + + The maximum payload length allowed to be sent per packet to the server is + `MAX_PAYLOAD_LENGTH`. When `send` is called with a payload whose length is greater + than `MAX_PAYLOAD_LENGTH` the netbroker breaks it down into packets, so the caller + of `send` can provide payloads of arbitrary length. + + Finally, data received by the netbroker comes directly from the server, expect to + get a packet for each call to `recv`. The received packet contains a header and + payload, the latter respecting `MAX_PAYLOAD_LENGTH`. + """ + + @abstractmethod + def send( + self, + sock: socket.socket, + address: str, + payload: bytes, + packet_number: Optional[int] = None, + compressed_packet_number: Optional[int] = None, + ) -> None: + """Send `payload` to the MySQL server. + + If provided a payload whose length is greater than `MAX_PAYLOAD_LENGTH`, it is + broken down into packets. + + Args: + sock: Object holding the socket connection. + address: Socket's location. + payload: Packet's body to send. + packet_number: Sequence id (packet ID) to attach to the header when sending + plain packets. + compressed_packet_number: Same as `packet_number` but used when sending + compressed packets. + + Raises: + :class:`OperationalError`: If something goes wrong while sending packets to + the MySQL server. + """ + + @abstractmethod + def recv(self, sock: socket.socket, address: str) -> bytearray: + """Get the next available packet from the MySQL server. + + Args: + sock: Object holding the socket connection. + address: Socket's location. + + Returns: + packet: A packet from the MySQL server. + + Raises: + :class:`OperationalError`: If something goes wrong while receiving packets + from the MySQL server. + :class:`InterfaceError`: If something goes wrong while receiving packets + from the MySQL server. 
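Editor's note: on the wire, the plain broker below frames every payload with a 4-byte header, a 3-byte little-endian payload length followed by a 1-byte sequence id that wraps at 256, and payloads of MAX_PAYLOAD_LENGTH (2**24 - 1) bytes or more are split across several packets. A standalone sketch of that header layout (illustrative only, not the connector's own helper):

import struct

def frame(payload: bytes, seq: int) -> bytes:
    # 3-byte little-endian length + 1-byte sequence id, then the payload itself.
    return struct.pack("<I", len(payload))[:3] + struct.pack("<B", seq % 256) + payload

pkt = frame(b"\x03SELECT 1", 0)
assert pkt[:4] == b"\x09\x00\x00\x00"   # length 9, sequence id 0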
+ """ + + +class NetworkBrokerPlain(NetworkBroker): + """Broker class for MySQL socket communication.""" + + def __init__(self) -> None: + self._pktnr: int = -1 # packet number + + def _set_next_pktnr(self) -> None: + """Increment packet id.""" + self._pktnr = (self._pktnr + 1) % 256 + + def _send_pkt(self, sock: socket.socket, address: str, pkt: bytes) -> None: + """Write packet to the comm channel.""" + try: + sock.sendall(pkt) + except IOError as err: + raise OperationalError( + errno=2055, values=(address, _strioerror(err)) + ) from err + except AttributeError as err: + raise OperationalError(errno=2006) from err + + def _recv_chunk(self, sock: socket.socket, size: int = 0) -> bytearray: + """Read `size` bytes from the comm channel.""" + pkt = bytearray(size) + pkt_view = memoryview(pkt) + while size: + read = sock.recv_into(pkt_view, size) + if read == 0 and size > 0: + raise InterfaceError(errno=2013) + pkt_view = pkt_view[read:] + size -= read + return pkt + + def send( + self, + sock: socket.socket, + address: str, + payload: bytes, + packet_number: Optional[int] = None, + compressed_packet_number: Optional[int] = None, + ) -> None: + """Send payload to the MySQL server. + + If provided a payload whose length is greater than `MAX_PAYLOAD_LENGTH`, it is + broken down into packets. + """ + if packet_number is None: + self._set_next_pktnr() + else: + self._pktnr = packet_number + + # If the payload is larger than or equal to MAX_PAYLOAD_LENGTH + # the length is set to 2^24 - 1 (ff ff ff) and additional + # packets are sent with the rest of the payload until the + # payload of a packet is less than MAX_PAYLOAD_LENGTH. + if len(payload) >= MAX_PAYLOAD_LENGTH: + offset = 0 + for _ in range(len(payload) // MAX_PAYLOAD_LENGTH): + # payload_len, sequence_id, payload + self._send_pkt( + sock, + address, + b"\xff\xff\xff" + + struct.pack(" bytearray: + """Receive `one` packet from the MySQL server.""" + try: + # Read the header of the MySQL packet + header = self._recv_chunk(sock, size=PACKET_HEADER_LENGTH) + + # Pull the payload length and sequence id + payload_len, self._pktnr = ( + struct.unpack(" None: + super().__init__() + self._compressed_pktnr = -1 + self._queue_read: Deque[bytearray] = deque() + + @staticmethod + def _prepare_packets(payload: bytes, pktnr: int) -> List[bytes]: + """Prepare a payload for sending to the MySQL server.""" + pkts = [] + + # If the payload is larger than or equal to MAX_PAYLOAD_LENGTH + # the length is set to 2^24 - 1 (ff ff ff) and additional + # packets are sent with the rest of the payload until the + # payload of a packet is less than MAX_PAYLOAD_LENGTH. + if len(payload) >= MAX_PAYLOAD_LENGTH: + offset = 0 + for _ in range(len(payload) // MAX_PAYLOAD_LENGTH): + # payload length + sequence id + payload + pkts.append( + b"\xff\xff\xff" + + struct.pack(" None: + """Increment packet id.""" + self._compressed_pktnr = (self._compressed_pktnr + 1) % 256 + + def _send_pkt(self, sock: socket.socket, address: str, pkt: bytes) -> None: + """Compress packet and write it to the comm channel.""" + compressed_pkt = zlib.compress(pkt) + pkt = ( + struct.pack(" None: + """Send `payload` as compressed packets to the MySQL server. + + If provided a payload whose length is greater than `MAX_PAYLOAD_LENGTH`, it is + broken down into packets. 
+ """ + # get next packet numbers + if packet_number is None: + self._set_next_pktnr() + else: + self._pktnr = packet_number + if compressed_packet_number is None: + self._set_next_compressed_pktnr() + else: + self._compressed_pktnr = compressed_packet_number + + payload_prep = bytearray(b"").join(self._prepare_packets(payload, self._pktnr)) + if len(payload) >= MAX_PAYLOAD_LENGTH - PACKET_HEADER_LENGTH: + # sending a MySQL payload of the size greater or equal to 2^24 - 5 + # via compression leads to at least one extra compressed packet + # WHY? let's say len(payload) is MAX_PAYLOAD_LENGTH - 3; when preparing + # the payload, a header of size PACKET_HEADER_LENGTH is pre-appended + # to the payload. This means that len(payload_prep) is + # MAX_PAYLOAD_LENGTH - 3 + PACKET_HEADER_LENGTH = MAX_PAYLOAD_LENGTH + 1 + # surpassing the maximum allowed payload size per packet. + offset = 0 + + # send several MySQL packets + for _ in range(len(payload_prep) // MAX_PAYLOAD_LENGTH): + self._send_pkt( + sock, address, payload_prep[offset : offset + MAX_PAYLOAD_LENGTH] + ) + self._set_next_compressed_pktnr() + offset += MAX_PAYLOAD_LENGTH + self._send_pkt(sock, address, payload_prep[offset:]) + else: + # send one MySQL packet + # For small packets it may be too costly to compress the packet. + # Usually payloads less than 50 bytes (MIN_COMPRESS_LENGTH) + # aren't compressed (see MySQL source code Documentation). + if len(payload) > MIN_COMPRESS_LENGTH: + # perform compression + self._send_pkt(sock, address, payload_prep) + else: + # skip compression + super()._send_pkt( + sock, + address, + struct.pack(" None: + """Handle reading of a compressed packet.""" + # compressed_pll stands for compressed payload length. + # Recalling that if uncompressed payload length == 0, the packet + # comes in uncompressed, so no decompression is needed. + compressed_pkt = super()._recv_chunk(sock, size=compressed_pll) + pkt = ( + compressed_pkt + if uncompressed_pll == 0 + else bytearray(zlib.decompress(compressed_pkt)) + ) + + offset = 0 + while offset < len(pkt): + # pll stands for payload length + pll = struct.unpack( + " len(pkt) - offset: + # More bytes need to be consumed + # Read the header of the next MySQL packet + header = super()._recv_chunk(sock, size=COMPRESSED_PACKET_HEADER_LENGTH) + + # compressed payload length, sequence id, uncompressed payload length + ( + compressed_pll, + self._compressed_pktnr, + uncompressed_pll, + ) = ( + struct.unpack(" bytearray: + """Receive `one` or `several` packets from the MySQL server, enqueue them, and + return the packet at the head. + """ + if not self._queue_read: + try: + # Read the header of the next MySQL packet + header = super()._recv_chunk(sock, size=COMPRESSED_PACKET_HEADER_LENGTH) + + # compressed payload length, sequence id, uncompressed payload length + ( + compressed_pll, + self._compressed_pktnr, + uncompressed_pll, + ) = ( + struct.unpack(" None: + """Network layer where transactions are made with plain (uncompressed) packets + is enabled by default. 
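Editor's note: for the compressed broker above, the header grows to COMPRESSED_PACKET_HEADER_LENGTH (7) bytes: the zlib-compressed length (3 bytes, little-endian), the compressed sequence id, and the original uncompressed length (3 bytes); payloads of MIN_COMPRESS_LENGTH (50) bytes or fewer are sent uncompressed with that last field set to 0. A sketch of the compressed framing under those assumptions:

import struct
import zlib

def compressed_frame(payload: bytes, seq: int) -> bytes:
    body = zlib.compress(payload)
    return (
        struct.pack("<I", len(body))[:3]        # compressed payload length
        + struct.pack("<B", seq % 256)          # compressed sequence id
        + struct.pack("<I", len(payload))[:3]   # uncompressed payload length
        + body
    )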
+ """ + # holds the socket connection + self.sock: Optional[socket.socket] = None + self._connection_timeout: Optional[int] = None + self.server_host: Optional[str] = None + self._netbroker: NetworkBroker = NetworkBrokerPlain() + + def switch_to_compressed_mode(self) -> None: + """Enable network layer where transactions are made with compressed packets.""" + self._netbroker = NetworkBrokerCompressed() + + def shutdown(self) -> None: + """Shut down the socket before closing it.""" + try: + self.sock.shutdown(socket.SHUT_RDWR) + self.sock.close() + except (AttributeError, OSError): + pass + + def close_connection(self) -> None: + """Close the socket.""" + try: + self.sock.close() + except (AttributeError, OSError): + pass + + def __del__(self) -> None: + self.shutdown() + + def set_connection_timeout(self, timeout: Optional[int]) -> None: + """Set the connection timeout.""" + self._connection_timeout = timeout + if self.sock: + self.sock.settimeout(timeout) + + def switch_to_ssl(self, ssl_context: Any, host: str) -> None: + """Upgrade an existing connection to TLS. + + Args: + ssl_context (ssl.SSLContext): The SSL Context to be used. + host (str): Server host name. + + Returns: + None. + + Raises: + ProgrammingError: If the transport does not expose the socket instance. + NotSupportedError: If Python installation has no SSL support. + """ + # Ensure that self.sock is already created + assert self.sock is not None + + if self.sock.family == 1: # socket.AF_UNIX + raise ProgrammingError("SSL is not supported when using Unix sockets") + + if ssl is None: + raise NotSupportedError("Python installation has no SSL support") + + try: + self.sock = ssl_context.wrap_socket(self.sock, server_hostname=host) + except NameError as err: + raise NotSupportedError("Python installation has no SSL support") from err + except (ssl.SSLError, IOError) as err: + raise InterfaceError( + errno=2055, values=(self.address, _strioerror(err)) + ) from err + except ssl.CertificateError as err: + raise InterfaceError(str(err)) from err + except NotImplementedError as err: + raise InterfaceError(str(err)) from err + + def build_ssl_context( + self, + ssl_ca: Optional[str] = None, + ssl_cert: Optional[str] = None, + ssl_key: Optional[str] = None, + ssl_verify_cert: Optional[bool] = False, + ssl_verify_identity: Optional[bool] = False, + tls_versions: Optional[List[str]] = None, + tls_cipher_suites: Optional[List[str]] = None, + ) -> Any: + """Build a SSLContext. + + Args: + ssl_ca: Certificate authority, opptional. + ssl_cert: SSL certificate, optional. + ssl_key: Private key, optional. + ssl_verify_cert: Verify the SSL certificate if `True`. + ssl_verify_identity: Verify host identity if `True`. + tls_versions: TLS protocol versions, optional. + tls_cipher_suites: Set of steps that helps to establish a secure connection. + + Returns: + ssl_context (ssl.SSLContext): An SSL Context ready be used. + + Raises: + NotSupportedError: Python installation has no SSL support. + InterfaceError: Socket undefined or invalid ssl data. 
+ """ + if not self.sock: + raise InterfaceError(errno=2048) + + if ssl is None: + raise NotSupportedError("Python installation has no SSL support") + + if tls_versions is None: + tls_versions = [] + + if tls_cipher_suites is None: + tls_cipher_suites = [] + + try: + if tls_versions: + tls_versions.sort(reverse=True) + tls_version = tls_versions[0] + ssl_protocol = TLS_VERSIONS[tls_version] + context = ssl.SSLContext(ssl_protocol) + + if tls_version == "TLSv1.3": + if "TLSv1.2" not in tls_versions: + context.options |= ssl.OP_NO_TLSv1_2 + if "TLSv1.1" not in tls_versions: + context.options |= ssl.OP_NO_TLSv1_1 + if "TLSv1" not in tls_versions: + context.options |= ssl.OP_NO_TLSv1 + else: + # `check_hostname` is True by default + context = ssl.create_default_context() + + context.check_hostname = ssl_verify_identity + + if ssl_verify_cert: + context.verify_mode = ssl.CERT_REQUIRED + elif ssl_verify_identity: + context.verify_mode = ssl.CERT_OPTIONAL + else: + context.verify_mode = ssl.CERT_NONE + + context.load_default_certs() + + if ssl_ca: + try: + context.load_verify_locations(ssl_ca) + except (IOError, ssl.SSLError) as err: + raise InterfaceError(f"Invalid CA Certificate: {err}") from err + if ssl_cert: + try: + context.load_cert_chain(ssl_cert, ssl_key) + except (IOError, ssl.SSLError) as err: + raise InterfaceError(f"Invalid Certificate/Key: {err}") from err + + if tls_cipher_suites: + context.set_ciphers(":".join(tls_cipher_suites)) + + return context + except NameError as err: + raise NotSupportedError("Python installation has no SSL support") from err + except ( + IOError, + NotImplementedError, + ssl.CertificateError, + ssl.SSLError, + ) as err: + raise InterfaceError(str(err)) from err + + def send( + self, + payload: bytes, + packet_number: Optional[int] = None, + compressed_packet_number: Optional[int] = None, + ) -> None: + """Send `payload` to the MySQL server. + + NOTE: if `payload` is an instance of `bytearray`, then `payload` might be + changed by this method - `bytearray` is similar to passing a variable by + reference. + + If you're sure you won't read `payload` after invoking `send()`, + then you can use `bytearray.` Otherwise, you must use `bytes`. + """ + return self._netbroker.send( + self.sock, + self.address, + payload, + packet_number=packet_number, + compressed_packet_number=compressed_packet_number, + ) + + def recv(self) -> bytearray: + """Get packet from the MySQL server comm channel.""" + return self._netbroker.recv(self.sock, self.address) + + @abstractmethod + def open_connection(self) -> None: + """Open the socket.""" + + @property + @abstractmethod + def address(self) -> str: + """Get the location of the socket.""" + + +class MySQLUnixSocket(MySQLSocket): + """MySQL socket class using UNIX sockets. + + Opens a connection through the UNIX socket of the MySQL Server. 
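Editor's note: build_ssl_context() above is, roughly, the standard-library recipe for a client-side TLS context: a default context, host-name checking driven by ssl_verify_identity, verification mode driven by ssl_verify_cert, plus optional CA and client-certificate material. A rough standalone equivalent, with hypothetical file paths:

import ssl

context = ssl.create_default_context()
context.check_hostname = True                        # ssl_verify_identity
context.verify_mode = ssl.CERT_REQUIRED              # ssl_verify_cert
context.load_verify_locations("/path/to/ca.pem")     # ssl_ca (hypothetical path)
context.load_cert_chain("/path/to/client-cert.pem",
                        "/path/to/client-key.pem")   # ssl_cert / ssl_key
# tls_cipher_suites would be applied with context.set_ciphers("suite1:suite2")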
+ """ + + def __init__(self, unix_socket: str = "/tmp/mysql.sock") -> None: + super().__init__() + self.unix_socket: str = unix_socket + self._address: str = unix_socket + + @property + def address(self) -> str: + return self._address + + def open_connection(self) -> None: + try: + self.sock = socket.socket( + # pylint: disable=no-member + socket.AF_UNIX, + socket.SOCK_STREAM, + ) + self.sock.settimeout(self._connection_timeout) + self.sock.connect(self.unix_socket) + except IOError as err: + raise InterfaceError( + errno=2002, values=(self.address, _strioerror(err)) + ) from err + except Exception as err: + raise InterfaceError(str(err)) from err + + def switch_to_ssl( + self, *args: Any, **kwargs: Any # pylint: disable=unused-argument + ) -> None: + """Switch the socket to use SSL.""" + warnings.warn( + "SSL is disabled when using unix socket connections", + Warning, + ) + + +class MySQLTCPSocket(MySQLSocket): + """MySQL socket class using TCP/IP. + + Opens a TCP/IP connection to the MySQL Server. + """ + + def __init__( + self, + host: str = "127.0.0.1", + port: int = 3306, + force_ipv6: bool = False, + ) -> None: + super().__init__() + self.server_host: str = host + self.server_port: int = port + self.force_ipv6: bool = force_ipv6 + self._family: int = 0 + self._address: str = f"{host}:{port}" + + @property + def address(self) -> str: + return self._address + + def open_connection(self) -> None: + """Open the TCP/IP connection to the MySQL server.""" + # pylint: disable=no-member + # Get address information + addrinfo: Union[ + Tuple[None, None, None, None, None], + Tuple[ + socket.AddressFamily, + socket.SocketKind, + int, + str, + Union[Tuple[str, int], Tuple[str, int, int, int]], + ], + ] = (None, None, None, None, None) + try: + addrinfos = socket.getaddrinfo( + self.server_host, + self.server_port, + 0, + socket.SOCK_STREAM, + socket.SOL_TCP, + ) + # If multiple results we favor IPv4, unless IPv6 was forced. 
+ for info in addrinfos: + if self.force_ipv6 and info[0] == socket.AF_INET6: + addrinfo = info + break + if info[0] == socket.AF_INET: + addrinfo = info + break + if self.force_ipv6 and addrinfo[0] is None: + raise InterfaceError(f"No IPv6 address found for {self.server_host}") + if addrinfo[0] is None: + addrinfo = addrinfos[0] + except IOError as err: + raise InterfaceError( + errno=2003, values=(self.address, _strioerror(err)) + ) from err + + (self._family, socktype, proto, _, sockaddr) = addrinfo + + # Instanciate the socket and connect + try: + self.sock = socket.socket(self._family, socktype, proto) + self.sock.settimeout(self._connection_timeout) + self.sock.connect(sockaddr) + except IOError as err: + raise InterfaceError( + errno=2003, + values=( + self.server_host, + self.server_port, + _strioerror(err), + ), + ) from err + except Exception as err: + raise OperationalError(str(err)) from err diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/opentelemetry/__init__.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/opentelemetry/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/opentelemetry/__pycache__/__init__.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/opentelemetry/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000..d85dd15 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/opentelemetry/__pycache__/__init__.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/opentelemetry/__pycache__/constants.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/opentelemetry/__pycache__/constants.cpython-310.pyc new file mode 100644 index 0000000..40215eb Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/opentelemetry/__pycache__/constants.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/opentelemetry/__pycache__/context_propagation.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/opentelemetry/__pycache__/context_propagation.cpython-310.pyc new file mode 100644 index 0000000..8791118 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/opentelemetry/__pycache__/context_propagation.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/opentelemetry/__pycache__/instrumentation.cpython-310.pyc b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/opentelemetry/__pycache__/instrumentation.cpython-310.pyc new file mode 100644 index 0000000..d3ae1b2 Binary files /dev/null and b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/opentelemetry/__pycache__/instrumentation.cpython-310.pyc differ diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/opentelemetry/constants.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/opentelemetry/constants.py new file mode 100644 index 0000000..7e0090c --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/opentelemetry/constants.py @@ -0,0 +1,56 @@ +"""Constants used by the opentelemetry instrumentation implementation.""" +# mypy: disable-error-code="no-redef,assignment" + +# pylint: disable=unused-import +OTEL_ENABLED = True +try: + # try to load otel from the system + from opentelemetry 
import trace # check api + from opentelemetry.sdk.trace import TracerProvider # check sdk + from opentelemetry.semconv.trace import SpanAttributes # check semconv +except ImportError: + # falling back to the bundled installation + try: + from mysql.opentelemetry import trace + from mysql.opentelemetry.sdk.trace import TracerProvider + from mysql.opentelemetry.semconv.trace import SpanAttributes + except ImportError: + # bundled installation has missing dependencies + OTEL_ENABLED = False + + +OPTION_CNX_SPAN = "_span" +""" +Connection option name used to inject the connection span. +This connection option name must not be used, is reserved. +""" + +OPTION_CNX_TRACER = "_tracer" +""" +Connection option name used to inject the opentelemetry tracer. +This connection option name must not be used, is reserved. +""" + +CONNECTION_SPAN_NAME = "connection" +""" +Connection span name to be used by the instrumentor. +""" + +FIRST_SUPPORTED_VERSION = "8.1.0" +""" +First mysql-connector-python version to support opentelemetry instrumentation. +""" + +TRACEPARENT_HEADER_NAME = "traceparent" + +DB_SYSTEM = "mysql" +DEFAULT_THREAD_NAME = "main" +DEFAULT_THREAD_ID = 0 + +# Reference: https://github.com/open-telemetry/opentelemetry-specification/blob/main/ +# specification/trace/semantic_conventions/span-general.md +NET_SOCK_FAMILY = "net.sock.family" +NET_SOCK_PEER_ADDR = "net.sock.peer.addr" +NET_SOCK_PEER_PORT = "net.sock.peer.port" +NET_SOCK_HOST_ADDR = "net.sock.host.addr" +NET_SOCK_HOST_PORT = "net.sock.host.port" diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/opentelemetry/context_propagation.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/opentelemetry/context_propagation.py new file mode 100644 index 0000000..bb66b34 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/opentelemetry/context_propagation.py @@ -0,0 +1,89 @@ +"""Trace context propagation utilities.""" +# mypy: disable-error-code="no-redef" +# pylint: disable=invalid-name + +from typing import TYPE_CHECKING, Any, Callable + +from .constants import OTEL_ENABLED, TRACEPARENT_HEADER_NAME + +if OTEL_ENABLED: + from .instrumentation import OTEL_SYSTEM_AVAILABLE + + if OTEL_SYSTEM_AVAILABLE: + # pylint: disable=import-error + # load otel from the system + from opentelemetry import trace + from opentelemetry.trace.span import format_span_id, format_trace_id + else: + # load otel from the bundled installation + from mysql.opentelemetry import trace + from mysql.opentelemetry.trace.span import format_span_id, format_trace_id + + +if TYPE_CHECKING: + from ..abstracts import MySQLConnectionAbstract + + +def build_traceparent_header(span: Any) -> str: + """Build a traceparent header according to the provided span. + + The context information from the provided span is used to build the traceparent + header that will be propagated to the MySQL server. For particulars regarding + the header creation, refer to [1]. + + This method assumes version 0 of the W3C specification. + + Args: + span (opentelemetry.trace.span.Span): current span in trace. + + Returns: + traceparent_header (str): HTTP header field that identifies requests in a + tracing system. 
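Editor's note: the resulting header is always four dash-separated fields: a version octet, 32 hex characters of trace id, 16 hex characters of span id, and a flags octet (this code pins version and flags to "00"). A standalone sketch using the example ids from the W3C document:

# Hypothetical ids, rendered the way format_trace_id()/format_span_id() render them.
trace_id = 0x4BF92F3577B34DA6A3CE929D0E0E4736
span_id = 0x00F067AA0BA902B7
header = "-".join(["00", format(trace_id, "032x"), format(span_id, "016x"), "00"])
# -> 00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-00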
+ + References: + [1]: https://www.w3.org/TR/trace-context/#traceparent-header + """ + ctx = span.get_span_context() + + version = "00" # version 0 of the W3C specification + trace_id = format_trace_id(ctx.trace_id) + span_id = format_span_id(ctx.span_id) + trace_flags = "00" # sampled flag is off + + return "-".join([version, trace_id, span_id, trace_flags]) + + +def with_context_propagation(method: Callable) -> Callable: + """Perform trace context propagation. + + The trace context is propagated via query attributes. The `traceparent` header + from W3C specification [1] is used, in this sense, the attribute name is + `traceparent` (is RESERVED, avoid using it), and its value is built as per + instructed in [1]. + + If opentelemetry API/SDK is unavailable or there is no recording span, + trace context propagation is skipped. + + References: + [1]: https://www.w3.org/TR/trace-context/#traceparent-header + """ + + def wrapper(cnx: "MySQLConnectionAbstract", *args: Any, **kwargs: Any) -> Any: + """Context propagation decorator.""" + if not OTEL_ENABLED or not cnx.otel_context_propagation: + return method(cnx, *args, **kwargs) + + current_span = trace.get_current_span() + tp_header = None + if current_span.is_recording(): + tp_header = build_traceparent_header(current_span) + cnx.query_attrs_append(value=(TRACEPARENT_HEADER_NAME, tp_header)) + + try: + result = method(cnx, *args, **kwargs) + finally: + if tp_header is not None: + cnx.query_attrs_remove(name=TRACEPARENT_HEADER_NAME) + return result + + return wrapper diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/opentelemetry/instrumentation.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/opentelemetry/instrumentation.py new file mode 100644 index 0000000..1e41d80 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/opentelemetry/instrumentation.py @@ -0,0 +1,531 @@ +"""MySQL instrumentation supporting mysql-connector.""" +# mypy: disable-error-code="no-redef" +# pylint: disable=protected-access,global-statement,invalid-name + +from __future__ import annotations + +import functools +import re + +from abc import ABC, abstractmethod +from contextlib import nullcontext +from typing import TYPE_CHECKING, Any, Callable, Collection, Dict, Optional, Union + +# pylint: disable=cyclic-import +if TYPE_CHECKING: + # `TYPE_CHECKING` is always False at run time, hence circular import + # will not happen at run time (no error happens whatsoever). + # Since pylint is a static checker it happens that `TYPE_CHECKING` + # is True when analyzing the code which makes pylint believe there + # is a circular import issue when there isn't. + + from ..abstracts import MySQLConnectionAbstract, MySQLCursorAbstract + from ..connection import MySQLConnection + from ..pooling import PooledMySQLConnection + + try: + from ..connection_cext import CMySQLConnection + except ImportError: + # The cext is not available. + pass + +from ... 
import connector +from ..constants import CNX_POOL_ARGS, DEFAULT_CONFIGURATION +from ..logger import logger +from ..version import VERSION_TEXT + +try: + # pylint: disable=unused-import + # try to load otel from the system + from opentelemetry import trace # check api + from opentelemetry.sdk.trace import TracerProvider # check sdk + from opentelemetry.semconv.trace import SpanAttributes # check semconv + + OTEL_SYSTEM_AVAILABLE = True +except ImportError: + try: + # falling back to the bundled installation + from mysql.opentelemetry import trace + from mysql.opentelemetry.semconv.trace import SpanAttributes + + OTEL_SYSTEM_AVAILABLE = False + except ImportError as missing_dependencies_err: + raise connector.errors.ProgrammingError( + "Bundled installation has missing dependencies. " + "Please use `pip install mysql-connector-python[opentelemetry]`, " + "or for an editable install use `pip install -e '.[opentelemetry]'`, " + "to install the dependencies required by the bundled opentelemetry package." + ) from missing_dependencies_err + + +from .constants import ( + CONNECTION_SPAN_NAME, + DB_SYSTEM, + DEFAULT_THREAD_ID, + DEFAULT_THREAD_NAME, + FIRST_SUPPORTED_VERSION, + NET_SOCK_FAMILY, + NET_SOCK_HOST_ADDR, + NET_SOCK_HOST_PORT, + NET_SOCK_PEER_ADDR, + NET_SOCK_PEER_PORT, + OPTION_CNX_SPAN, + OPTION_CNX_TRACER, +) + +leading_comment_remover: re.Pattern = re.compile(r"^/\*.*?\*/") + + +def record_exception_event(span: trace.Span, exc: Optional[Exception]) -> None: + """Records an exeception event.""" + if not span or not span.is_recording() or not exc: + return + + span.set_status(trace.Status(trace.StatusCode.ERROR)) + span.record_exception(exc) + + +def end_span(span: trace.Span) -> None: + """Ends span.""" + if not span or not span.is_recording(): + return + + span.end() + + +def get_operation_name(operation: str) -> str: + """Parse query to extract operation name.""" + if operation and isinstance(operation, str): + # Strip leading comments so we get the operation name. + return leading_comment_remover.sub("", operation).split()[0] + return "" + + +def set_connection_span_attrs( + cnx: Optional["MySQLConnectionAbstract"], + cnx_span: trace.Span, + cnx_kwargs: Optional[Dict[str, Any]] = None, +) -> None: + """Defines connection span attributes. If `cnx` is None then we use `cnx_kwargs` + to get basic net information. Basic net attributes are defined such as: + + * DB_SYSTEM + * NET_TRANSPORT + * NET_SOCK_FAMILY + + Socket-level attributes [*] are also defined [**]. + + [*]: Socket-level attributes identify peer and host that are directly connected to + each other. Since instrumentations may have limited knowledge on network + information, instrumentations SHOULD populate such attributes to the best of + their knowledge when populate them at all. + + [**]: `CMySQLConnection` connections have no access to socket-level + details so socket-level attributes aren't included. `MySQLConnection` + connections, on the other hand, do include socket-level attributes. 
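Editor's note: get_operation_name() above derives the span name from the first token of the statement after stripping a single leading /* ... */ comment. A standalone restatement of that logic (simplified: it assumes a non-empty statement):

import re

leading_comment_remover = re.compile(r"^/\*.*?\*/")

def operation_name(statement: str) -> str:
    return leading_comment_remover.sub("", statement).split()[0]

assert operation_name("/* app hint */ SELECT 1") == "SELECT"
assert operation_name("INSERT INTO t VALUES (1)") == "INSERT"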
+ + References: + [1]: https://github.com/open-telemetry/opentelemetry-specification/blob/main/\ + specification/trace/semantic_conventions/span-general.md + """ + # pylint: disable=broad-exception-caught + if not cnx_span or not cnx_span.is_recording(): + return + + if cnx_kwargs is None: + cnx_kwargs = {} + + is_tcp = not cnx._unix_socket if cnx else "unix_socket" not in cnx_kwargs + + attrs: Dict[str, Any] = { + SpanAttributes.DB_SYSTEM: DB_SYSTEM, + SpanAttributes.NET_TRANSPORT: "ip_tcp" if is_tcp else "inproc", + NET_SOCK_FAMILY: "inet" if is_tcp else "unix", + } + + # Only socket and tcp connections are supported. + if is_tcp: + attrs[SpanAttributes.NET_PEER_NAME] = ( + cnx._host if cnx else cnx_kwargs.get("host", DEFAULT_CONFIGURATION["host"]) + ) + attrs[SpanAttributes.NET_PEER_PORT] = ( + cnx._port if cnx else cnx_kwargs.get("port", DEFAULT_CONFIGURATION["port"]) + ) + + if hasattr(cnx, "_socket") and cnx._socket: + try: + ( + attrs[NET_SOCK_PEER_ADDR], + sock_peer_port, + ) = cnx._socket.sock.getpeername() + + ( + attrs[NET_SOCK_HOST_ADDR], + attrs[NET_SOCK_HOST_PORT], + ) = cnx._socket.sock.getsockname() + except Exception as sock_err: + logger.warning("Connection socket is down %s", sock_err) + else: + if attrs[SpanAttributes.NET_PEER_PORT] != sock_peer_port: + # NET_SOCK_PEER_PORT is recommended if different than net.peer.port + # and if net.sock.peer.addr is set. + attrs[NET_SOCK_PEER_PORT] = sock_peer_port + else: + # For Unix domain socket, net.sock.peer.addr attribute represents + # destination name and net.peer.name SHOULD NOT be set. + attrs[NET_SOCK_PEER_ADDR] = ( + cnx._unix_socket if cnx else cnx_kwargs.get("unix_socket") + ) + + if hasattr(cnx, "_socket") and cnx._socket: + try: + attrs[NET_SOCK_HOST_ADDR] = cnx._socket.sock.getsockname() + except Exception as sock_err: + logger.warning("Connection socket is down %s", sock_err) + + cnx_span.set_attributes(attrs) + + +def with_connection_span_attached(method: Callable) -> Callable: + """Attach the connection span while executing a connection method.""" + + def wrapper( + cnx: Union["MySQLConnection", "CMySQLConnection"], *args: Any, **kwargs: Any + ) -> Any: + """Connection span attacher decorator.""" + with trace.use_span( + cnx._span, end_on_exit=False + ) if cnx._span and cnx._span.is_recording() else nullcontext(): + return method(cnx, *args, **kwargs) + + return wrapper + + +def _instrument_execution( + query_method: Callable, + tracer: trace.Tracer, + connection_span_link: trace.Link, + wrapped: "MySQLCursorAbstract", + *args: Any, + **kwargs: Any, +) -> Callable: + """Instruments the execution of `query_method`. + + A query span with a link to the corresponding connection span is generated. + """ + connection: Union["MySQLConnection", "CMySQLConnection"] = ( + getattr(wrapped, "_connection") + if hasattr(wrapped, "_connection") + else getattr(wrapped, "_cnx") + ) + + # SpanAttributes.DB_NAME: connection.database or ""; introduces performance + # degradation, at this time the database attribute is something nice to have but + # not a requirement. 
+ query_span_attributes: Dict = { + SpanAttributes.DB_SYSTEM: DB_SYSTEM, + SpanAttributes.DB_USER: connection._user, + SpanAttributes.THREAD_ID: DEFAULT_THREAD_ID, + SpanAttributes.THREAD_NAME: DEFAULT_THREAD_NAME, + "cursor_type": wrapped.__class__.__name__, + } + with tracer.start_as_current_span( + name=get_operation_name(args[0]) or "SQL statement", + kind=trace.SpanKind.CLIENT, + links=[connection_span_link], + attributes=query_span_attributes, + ): + return query_method(*args, **kwargs) + + +class BaseMySQLTracer(ABC): + """Base class that provides basic object wrapper functionality.""" + + @abstractmethod + def __init__(self) -> None: + """Must be implemented by subclasses.""" + + def __getattr__(self, attr: str) -> Any: + """Gets an attribute. + + Attributes defined in the wrapper object have higher precedence + than those wrapped object equivalent. Attributes not found in + the wrapper are then searched in the wrapped object. + """ + if attr in self.__dict__: + # this object has it + return getattr(self, attr) + # proxy to the wrapped object + return getattr(self._wrapped, attr) + + def __setattr__(self, name: str, value: Any) -> None: + if "_wrapped" not in self.__dict__: + self.__dict__["_wrapped"] = value + return + + if name in self.__dict__: + # this object has it + super().__setattr__(name, value) + return + # proxy to the wrapped object + self._wrapped.__setattr__(name, value) + + def __enter__(self) -> Any: + """Magic method.""" + self._wrapped.__enter__() + return self + + def __exit__(self, *args: Any, **kwargs: Any) -> None: + """Magic method.""" + self._wrapped.__exit__(*args, **kwargs) + + def get_wrapped_class(self) -> str: + """Gets the wrapped class name.""" + return self._wrapped.__class__.__name__ + + +class TracedMySQLCursor(BaseMySQLTracer): + """Wrapper class for a `MySQLCursor` or `CMySQLCursor` object.""" + + def __init__( + self, + wrapped: "MySQLCursorAbstract", + tracer: trace.Tracer, + connection_span: trace.Span, + ): + """Constructor.""" + self._wrapped: "MySQLCursorAbstract" = wrapped + self._tracer: trace.Tracer = tracer + self._connection_span_link: trace.Link = trace.Link( + connection_span.get_span_context() + ) + + def execute(self, *args: Any, **kwargs: Any) -> Any: + """Instruments execute method.""" + return _instrument_execution( + self._wrapped.execute, + self._tracer, + self._connection_span_link, + self._wrapped, + *args, + **kwargs, + ) + + def executemany(self, *args: Any, **kwargs: Any) -> Any: + """Instruments executemany method.""" + return _instrument_execution( + self._wrapped.executemany, + self._tracer, + self._connection_span_link, + self._wrapped, + *args, + **kwargs, + ) + + def callproc(self, *args: Any, **kwargs: Any) -> Any: + """Instruments callproc method.""" + return _instrument_execution( + self._wrapped.callproc, + self._tracer, + self._connection_span_link, + self._wrapped, + *args, + **kwargs, + ) + + +class TracedMySQLConnection(BaseMySQLTracer): + """Wrapper class for a `MySQLConnection` or `CMySQLConnection` object.""" + + def __init__(self, wrapped: "MySQLConnectionAbstract") -> None: + """Constructor.""" + self._wrapped: "MySQLConnectionAbstract" = wrapped + + # call `sql_mode` so its value is cached internally and querying it does not + # interfere when recording query span events later. 
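+        # (reading the property here performs its lazy lookup once, up front,
+        # so that lookup does not surface later inside a traced cursor span)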
+ _ = self._wrapped.sql_mode + + def cursor(self, *args: Any, **kwargs: Any) -> TracedMySQLCursor: + """Wraps the object method.""" + return TracedMySQLCursor( + wrapped=self._wrapped.cursor(*args, **kwargs), + tracer=self._tracer, + connection_span=self._span, + ) + + @with_connection_span_attached + def cmd_change_user(self, *args: Any, **kwargs: Any) -> Any: + """Wraps the object method.""" + return self._wrapped.cmd_change_user(*args, **kwargs) + + +def _instrument_connect( + connect: Callable[..., Union["MySQLConnectionAbstract", "PooledMySQLConnection"]], + tracer_provider: Optional[trace.TracerProvider] = None, +) -> Callable[..., Union["MySQLConnectionAbstract", "PooledMySQLConnection"]]: + """Retrurn the instrumented version of `connect`.""" + + # let's preserve `connect` identity. + @functools.wraps(connect) + def wrapper( + *args: Any, **kwargs: Any + ) -> Union["MySQLConnectionAbstract", "PooledMySQLConnection"]: + """Wraps the connection object returned by the method `connect`. + + Instrumentation for PooledConnections is not supported. + """ + if any(key in kwargs for key in CNX_POOL_ARGS): + logger.warning("Instrumentation for pooled connections not supported") + return connect(*args, **kwargs) + + tracer = trace.get_tracer( + instrumenting_module_name="MySQL Connector/Python", + instrumenting_library_version=VERSION_TEXT, + tracer_provider=tracer_provider, + ) + + # The connection span is passed in as an argument so the connection object can + # keep a pointer to it. + kwargs[OPTION_CNX_SPAN] = tracer.start_span( + name=CONNECTION_SPAN_NAME, kind=trace.SpanKind.CLIENT + ) + kwargs[OPTION_CNX_TRACER] = tracer + + # attach connection span + with trace.use_span(kwargs[OPTION_CNX_SPAN], end_on_exit=False) as cnx_span: + # Add basic net information. + set_connection_span_attrs(None, cnx_span, kwargs) + + # Connection may fail at this point, in case it does, basic net info is already + # included so the user can check the net configuration she/he provided. + cnx = connect(*args, **kwargs) + + # connection went ok, let's refine the net information. + set_connection_span_attrs(cnx, cnx_span, kwargs) # type: ignore[arg-type] + + return TracedMySQLConnection( + wrapped=cnx, # type: ignore[return-value, arg-type] + ) + + return wrapper + + +class MySQLInstrumentor: + """MySQL instrumentation supporting mysql-connector-python.""" + + _instance: Optional[MySQLInstrumentor] = None + + def __new__(cls, *args: Any, **kwargs: Any) -> MySQLInstrumentor: + """Singlenton. + + Restricts the instantiation to a singular instance. + """ + if cls._instance is None: + # create instance + cls._instance = object.__new__(cls, *args, **kwargs) + # keep a pointer to the uninstrumented connect method + setattr(cls._instance, "_original_connect", connector.connect) + return cls._instance + + def instrumentation_dependencies(self) -> Collection[str]: + """Return a list of python packages with versions + that the will be instrumented (e.g., versions >= 8.1.0).""" + return [f"mysql-connector-python >= {FIRST_SUPPORTED_VERSION}"] + + def instrument(self, **kwargs: Any) -> None: + """Instrument the library. + + Args: + trace_module: reference to the 'trace' module from opentelemetry. + tracer_provider (optional): TracerProvider instance. + + NOTE: Instrumentation for pooled connections not supported. 
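+
+        Illustrative usage (placeholder credentials; tracer provider setup via
+        the OpenTelemetry SDK is assumed to happen elsewhere):
+
+            import mysql.connector
+            from mysql.connector.opentelemetry.instrumentation import MySQLInstrumentor
+
+            MySQLInstrumentor().instrument()
+            cnx = mysql.connector.connect(user="scott", password="***", host="127.0.0.1")
+            cur = cnx.cursor()
+            cur.execute("SELECT VERSION()")  # runs inside a CLIENT span linked to the connection span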
+ """ + if connector.connect != getattr(self, "_original_connect"): + logger.warning("MySQL Connector/Python module already instrumented.") + return + connector.connect = _instrument_connect( + connect=getattr(self, "_original_connect"), + tracer_provider=kwargs.get("tracer_provider"), + ) + + def instrument_connection( + self, + connection: "MySQLConnectionAbstract", + tracer_provider: Optional[trace.TracerProvider] = None, + ) -> "MySQLConnectionAbstract": + """Enable instrumentation in a MySQL connection. + + Args: + connection: uninstrumented connection instance. + trace_module: reference to the 'trace' module from opentelemetry. + tracer_provider (optional): TracerProvider instance. + + Returns: + connection: instrumented connection instace. + + NOTE: Instrumentation for pooled connections not supported. + """ + if isinstance(connection, TracedMySQLConnection): + logger.warning("Connection already instrumented.") + return connection + + if not hasattr(connection, "_span") or not hasattr(connection, "_tracer"): + logger.warning( + "Instrumentation for class %s not supported.", + connection.__class__.__name__, + ) + return connection + + tracer = trace.get_tracer( + instrumenting_module_name="MySQL Connector/Python", + instrumenting_library_version=VERSION_TEXT, + tracer_provider=tracer_provider, + ) + connection._span = tracer.start_span( + name=CONNECTION_SPAN_NAME, kind=trace.SpanKind.CLIENT + ) + connection._tracer = tracer + + set_connection_span_attrs(connection, connection._span) + + return TracedMySQLConnection(wrapped=connection) # type: ignore[return-value] + + def uninstrument(self, **kwargs: Any) -> None: + """Uninstrument the library.""" + # pylint: disable=unused-argument + if connector.connect == getattr(self, "_original_connect"): + logger.warning("MySQL Connector/Python module already uninstrumented.") + return + connector.connect = getattr(self, "_original_connect") + + def uninstrument_connection( + self, connection: "MySQLConnectionAbstract" + ) -> "MySQLConnectionAbstract": + """Disable instrumentation in a MySQL connection. + + Args: + connection: instrumented connection instance. + + Returns: + connection: uninstrumented connection instace. + + NOTE: Instrumentation for pooled connections not supported. + """ + if not hasattr(connection, "_span"): + logger.warning( + "Uninstrumentation for class %s not supported.", + connection.__class__.__name__, + ) + return connection + + if not isinstance(connection, TracedMySQLConnection): + logger.warning("Connection already uninstrumented.") + return connection + + # stop connection span recording + if connection._span and connection._span.is_recording(): + connection._span.end() + connection._span = None + + return connection._wrapped diff --git a/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/optionfiles.py b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/optionfiles.py new file mode 100644 index 0000000..13f7132 --- /dev/null +++ b/src9/store/.venv/lib/python3.10/site-packages/mysql/connector/optionfiles.py @@ -0,0 +1,357 @@ +# Copyright (c) 2014, 2022, Oracle and/or its affiliates. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License, version 2.0, as +# published by the Free Software Foundation. 
+# +# This program is also distributed with certain software (including +# but not limited to OpenSSL) that is licensed under separate terms, +# as designated in a particular file or component or in included license +# documentation. The authors of MySQL hereby grant you an +# additional permission to link the program and your derivative works +# with the separately licensed software that they have included with +# MySQL. +# +# Without limiting anything contained in the foregoing, this file, +# which is part of MySQL Connector/Python, is also subject to the +# Universal FOSS Exception, version 1.0, a copy of which can be found at +# http://oss.oracle.com/licenses/universal-foss-exception. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. +# See the GNU General Public License, version 2.0, for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA + +# mypy: disable-error-code="attr-defined" + +"""Implements parser to parse MySQL option files.""" + +import codecs +import io +import os +import re + +from configparser import ConfigParser as SafeConfigParser, MissingSectionHeaderError +from typing import Any, Dict, List, Optional, Tuple, Union + +from .constants import CNX_POOL_ARGS, DEFAULT_CONFIGURATION + +DEFAULT_EXTENSIONS: Dict[str, Tuple[str, ...]] = { + "nt": ("ini", "cnf"), + "posix": ("cnf",), +} + + +def read_option_files(**config: Union[str, List[str]]) -> Dict[str, Any]: + """ + Read option files for connection parameters. + + Checks if connection arguments contain option file arguments, and then + reads option files accordingly. 
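+
+    Illustrative call (the file path is a placeholder):
+
+        config = read_option_files(option_files="/etc/mysql/my.cnf", user="scott")
+
+    Options found in the default "client" and "connector_python" groups of the
+    file are merged into the returned dictionary; keyword arguments passed
+    explicitly keep precedence over values read from the option file.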
+ """ + if "option_files" in config: + try: + if isinstance(config["option_groups"], str): + config["option_groups"] = [config["option_groups"]] + groups = config["option_groups"] + del config["option_groups"] + except KeyError: + groups = ["client", "connector_python"] + + if isinstance(config["option_files"], str): + config["option_files"] = [config["option_files"]] + option_parser = MySQLOptionsParser( + list(config["option_files"]), keep_dashes=False + ) + del config["option_files"] + + config_from_file = option_parser.get_groups_as_dict_with_priority(*groups) + config_options: Dict[str, Tuple[str, int]] = {} + for group in groups: + try: + for option, value in config_from_file[group].items(): + try: + if option == "socket": + option = "unix_socket" + + if option not in CNX_POOL_ARGS and option != "failover": + _ = DEFAULT_CONFIGURATION[option] + + if ( + option not in config_options + or config_options[option][1] <= value[1] + ): + config_options[option] = value + except KeyError: + if group == "connector_python": + raise AttributeError( + f"Unsupported argument '{option}'" + ) from None + except KeyError: + continue + + not_evaluate = ("password", "passwd") + for option, value in config_options.items(): + if option not in config: + try: + if option in not_evaluate: + config[option] = value[0] + else: + config[option] = eval(value[0]) # pylint: disable=eval-used + except (NameError, SyntaxError): + config[option] = value[0] + + return config + + +class MySQLOptionsParser(SafeConfigParser): + """This class implements methods to parse MySQL option files""" + + def __init__( + self, files: Optional[Union[List[str], str]] = None, keep_dashes: bool = True + ) -> None: + """Initialize + + If defaults is True, default option files are read first + + Raises ValueError if defaults is set to True but defaults files + cannot be found. + """ + + # Regular expression to allow options with no value(For Python v2.6) + self.optcre: re.Pattern = re.compile( + r"(?P