.. dialect:: mssql+pyodbc
    :name: PyODBC
    :dbapi: pyodbc
    :connectstring: mssql+pyodbc://<username>:<password>@<dsnname>
    :url: https://pypi.org/project/pyodbc/

Connecting to PyODBC
--------------------

The URL here is translated into a PyODBC connection string, as
detailed in `ConnectionStrings <https://code.google.com/p/pyodbc/wiki/ConnectionStrings>`_.

DSN Connections
^^^^^^^^^^^^^^^

A DSN connection in ODBC means that a pre-existing ODBC datasource is
configured on the client machine.   The application then specifies the name
of this datasource, which encompasses details such as the specific ODBC driver
in use as well as the network address of the database.   Assuming a datasource
is configured on the client, a basic DSN-based connection looks like::

    engine = create_engine("mssql+pyodbc://scott:tiger@some_dsn")

The URL above passes the following connection string to PyODBC:

.. sourcecode:: text

    DSN=some_dsn;UID=scott;PWD=tiger

If the username and password are omitted, the DSN form will also add
the ``Trusted_Connection=yes`` directive to the ODBC string.
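
For example, a credential-less DSN URL such as the following (a sketch, assuming
the DSN itself is configured for Windows integrated authentication)::

    engine = create_engine("mssql+pyodbc://@some_dsn")

would pass an ODBC string along the lines of:

.. sourcecode:: text

    DSN=some_dsn;Trusted_Connection=Yes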

Hostname Connections
^^^^^^^^^^^^^^^^^^^^

Hostname-based connections are also supported by pyodbc.  These are often
easier to use than a DSN and have the additional advantage that the specific
database name to connect to may be specified locally in the URL, rather
than being fixed as part of a datasource configuration.

When using a hostname connection, the driver name must also be specified in the
query parameters of the URL.  As these names usually have spaces in them, the
name must be URL encoded, which means using plus signs for spaces::

    engine = create_engine(
        "mssql+pyodbc://scott:tiger@myhost:port/databasename?driver=ODBC+Driver+17+for+SQL+Server"
    )

The ``driver`` keyword is significant to the pyodbc dialect and must be
specified in lowercase.

Any other names passed in the query string are passed through in the pyodbc
connect string, such as ``authentication``, ``TrustServerCertificate``, etc.
Multiple keyword arguments must be separated by an ampersand (``&``); these
will be translated to semicolons when the pyodbc connect string is generated
internally::

    e = create_engine(
        "mssql+pyodbc://scott:tiger@mssql2017:1433/test?"
        "driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes"
        "&authentication=ActiveDirectoryIntegrated"
    )

The equivalent URL can be constructed using :class:`_sa.engine.URL`::

    from sqlalchemy.engine import URL

    connection_url = URL.create(
        "mssql+pyodbc",
        username="scott",
        password="tiger",
        host="mssql2017",
        port=1433,
        database="test",
        query={
            "driver": "ODBC Driver 18 for SQL Server",
            "TrustServerCertificate": "yes",
            "authentication": "ActiveDirectoryIntegrated",
        },
    )

Pass through exact Pyodbc string
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

A PyODBC connection string can also be sent in pyodbc's format directly, as
specified in `the PyODBC documentation
<https://github.com/mkleehammer/pyodbc/wiki/Connecting-to-databases>`_,
using the parameter ``odbc_connect``.  A :class:`_sa.engine.URL` object
can help make this easier::

    from sqlalchemy.engine import URL

    connection_string = "DRIVER={SQL Server Native Client 10.0};SERVER=dagger;DATABASE=test;UID=user;PWD=password"
    connection_url = URL.create(
        "mssql+pyodbc", query={"odbc_connect": connection_string}
    )

    engine = create_engine(connection_url)

.. _mssql_pyodbc_access_tokens:

Connecting to databases with access tokens
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Some database servers are set up to only accept access tokens for login. For
example, SQL Server allows the use of Azure Active Directory tokens to connect
to databases. This requires creating a credential object using the
``azure-identity`` library. More information about the authentication step can be
found in `Microsoft's documentation
<https://docs.microsoft.com/en-us/azure/developer/python/azure-sdk-authenticate?tabs=bash>`_.

After getting an engine, the credentials need to be sent to ``pyodbc.connect``
each time a connection is requested. One way to do this is to set up an event
listener on the engine that adds the credential token to the dialect's connect
call. This is discussed more generally in :ref:`engines_dynamic_tokens`. For
SQL Server in particular, this is passed as an ODBC connection attribute with
a data structure `described by Microsoft
<https://docs.microsoft.com/en-us/sql/connect/odbc/using-azure-active-directory#authenticating-with-an-access-token>`_.

The following code snippet will create an engine that connects to an Azure SQL
database using Azure credentials::

    import struct
    from sqlalchemy import create_engine, event
    from sqlalchemy.engine.url import URL
    from azure import identity

    # Connection option for access tokens, as defined in msodbcsql.h
    SQL_COPT_SS_ACCESS_TOKEN = 1256
    TOKEN_URL = "https://database.windows.net/"  # The token URL for any Azure SQL database

    connection_string = "mssql+pyodbc://@my-server.database.windows.net/myDb?driver=ODBC+Driver+17+for+SQL+Server"

    engine = create_engine(connection_string)

    azure_credentials = identity.DefaultAzureCredential()


    @event.listens_for(engine, "do_connect")
    def provide_token(dialect, conn_rec, cargs, cparams):
        # remove the "Trusted_Connection" parameter that SQLAlchemy adds
        cargs[0] = cargs[0].replace(";Trusted_Connection=Yes", "")

        # create token credential
        raw_token = azure_credentials.get_token(TOKEN_URL).token.encode(
            "utf-16-le"
        )
        token_struct = struct.pack(
            f"<I{len(raw_token)}s", len(raw_token), raw_token
        )

        # apply it to keyword arguments
        cparams["attrs_before"] = {SQL_COPT_SS_ACCESS_TOKEN: token_struct}

.. tip::

    The ``Trusted_Connection`` token is currently added by the SQLAlchemy
    pyodbc dialect when no username or password is present.  This needs
    to be removed per Microsoft's
    `documentation for Azure access tokens
    <https://docs.microsoft.com/en-us/sql/connect/odbc/using-azure-active-directory#authenticating-with-an-access-token>`_,
    which states that a connection string using an access token must not contain
    ``UID``, ``PWD``, ``Authentication``, or ``Trusted_Connection`` parameters.

.. _azure_synapse_ignore_no_transaction_on_rollback:

Avoiding transaction-related exceptions on Azure Synapse Analytics
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Azure Synapse Analytics has a significant difference in its transaction
handling compared to plain SQL Server; in some cases an error within a Synapse
transaction can cause it to be arbitrarily terminated on the server side, which
then causes the DBAPI ``.rollback()`` method (as well as ``.commit()``) to
fail. The issue prevents the usual DBAPI contract of allowing ``.rollback()``
to pass silently if no transaction is present, as the driver does not expect
this condition. The symptom of this failure is an exception with a message
resembling 'No corresponding transaction found. (111214)' when attempting to
emit a ``.rollback()`` after a previous operation failed in some way.

This specific case can be handled by passing ``ignore_no_transaction_on_rollback=True`` to
the SQL Server dialect via the :func:`_sa.create_engine` function as follows::

    engine = create_engine(
        connection_url, ignore_no_transaction_on_rollback=True
    )

Using the above parameter, the dialect will catch ``ProgrammingError``
exceptions raised during ``connection.rollback()`` and, if the error message
contains code ``111214``, will emit a warning instead of re-raising the
exception.

.. versionadded:: 1.4.40  Added the
   ``ignore_no_transaction_on_rollback=True`` parameter.

Enable autocommit for Azure SQL Data Warehouse (DW) connections
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Azure SQL Data Warehouse does not support transactions,
and that can cause problems with SQLAlchemy's "autobegin" (and implicit
commit/rollback) behavior. We can avoid these problems by enabling autocommit
at both the pyodbc and engine levels::

    connection_url = sa.engine.URL.create(
        "mssql+pyodbc",
        username="scott",
        password="tiger",
        host="dw.azure.example.com",
        database="mydb",
        query={
            "driver": "ODBC Driver 17 for SQL Server",
            "autocommit": "True",
        },
    )

    engine = create_engine(connection_url).execution_options(
        isolation_level="AUTOCOMMIT"
    )

Avoiding sending large string parameters as TEXT/NTEXT
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

By default, for historical reasons, Microsoft's ODBC drivers for SQL Server
send long string parameters (greater than 4000 SBCS characters or 2000 Unicode
characters) as TEXT/NTEXT values. TEXT and NTEXT have been deprecated for many
years and are starting to cause compatibility issues with newer versions of
SQL Server/Azure. For example, see `this
issue <https://github.com/mkleehammer/pyodbc/issues/835>`_.

Starting with ODBC Driver 18 for SQL Server, we can override the legacy
behavior and pass long strings as varchar(max)/nvarchar(max) using the
``LongAsMax=Yes`` connection string parameter::

    connection_url = sa.engine.URL.create(
        "mssql+pyodbc",
        username="scott",
        password="tiger",
        host="mssqlserver.example.com",
        database="mydb",
        query={
            "driver": "ODBC Driver 18 for SQL Server",
            "LongAsMax": "Yes",
        },
    )

Pyodbc Pooling / connection close behavior
------------------------------------------

PyODBC uses internal `pooling
<https://github.com/mkleehammer/pyodbc/wiki/The-pyodbc-Module#pooling>`_ by
default, which means connections will be longer lived than they are within
SQLAlchemy itself.  As SQLAlchemy has its own pooling behavior, it is often
preferable to disable PyODBC's pooling.  This can only be done globally at the
PyODBC module level, **before** any connections are made::

    import pyodbc

    pyodbc.pooling = False

    # don't use the engine before pooling is set to False
    engine = create_engine("mssql+pyodbc://user:pass@dsn")

If this variable is left at its default value of ``True``, **the application
will continue to maintain active database connections**, even when the
SQLAlchemy engine itself fully discards a connection or if the engine is
disposed.

.. seealso::

    `pooling <https://github.com/mkleehammer/pyodbc/wiki/The-pyodbc-Module#pooling>`_ -
    in the PyODBC documentation.

Driver / Unicode Support
-------------------------

PyODBC works best with Microsoft ODBC drivers, particularly in the area
of Unicode support on both Python 2 and Python 3.

Using the FreeTDS ODBC drivers on Linux or OSX with PyODBC is **not**
recommended; there have historically been many Unicode-related issues
in this area, including before Microsoft offered ODBC drivers for Linux
and OSX.   Now that Microsoft offers drivers for all platforms, these are
recommended for PyODBC use.  FreeTDS remains relevant for non-ODBC drivers
such as pymssql, where it works very well.


Rowcount Support
----------------

Previous limitations with the SQLAlchemy ORM's "versioned rows" feature with
Pyodbc have been resolved as of SQLAlchemy 2.0.5. See the notes at
:ref:`mssql_rowcount_versioning`.

.. _mssql_pyodbc_fastexecutemany:

Fast Executemany Mode
---------------------

The PyODBC driver includes support for a "fast executemany" mode of execution
which greatly reduces round trips for a DBAPI ``executemany()`` call when using
Microsoft ODBC drivers, for **limited size batches that fit in memory**.  The
feature is enabled by setting the attribute ``.fast_executemany`` on the DBAPI
cursor when an executemany call is to be used.   The SQLAlchemy PyODBC SQL
Server dialect supports this feature via the ``fast_executemany`` parameter to
:func:`_sa.create_engine`, when using the **Microsoft ODBC driver only**::

    engine = create_engine(
        "mssql+pyodbc://scott:tiger@mssql2017:1433/test?driver=ODBC+Driver+17+for+SQL+Server",
        fast_executemany=True,
    )

.. versionchanged:: 2.0.9 - the ``fast_executemany`` parameter now has its
   intended effect: the PyODBC feature takes effect for all INSERT statements
   that are executed with multiple parameter sets and that don't include
   RETURNING.  Previously, SQLAlchemy 2.0's :term:`insertmanyvalues` feature
   would cause ``fast_executemany`` to not be used in most cases even if
   specified.

.. versionadded:: 1.3

.. seealso::

    `fast executemany <https://github.com/mkleehammer/pyodbc/wiki/Features-beyond-the-DB-API#fast_executemany>`_
    - on github

.. _mssql_pyodbc_setinputsizes:

Setinputsizes Support
-----------------------

As of version 2.0, the pyodbc ``cursor.setinputsizes()`` method is used for
all statement executions, except for ``cursor.executemany()`` calls when
``fast_executemany=True``, where it is not supported (assuming
:ref:`insertmanyvalues <engine_insertmanyvalues>` is kept enabled,
"fastexecutemany" will not take place for INSERT statements in any case).

The use of ``cursor.setinputsizes()`` can be disabled by passing
``use_setinputsizes=False`` to :func:`_sa.create_engine`.
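
For example (a minimal sketch; ``connection_url`` stands in for any of the URLs
constructed in the sections above)::

    engine = create_engine(connection_url, use_setinputsizes=False)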

When ``use_setinputsizes`` is left at its default of ``True``, the
specific per-type symbols passed to ``cursor.setinputsizes()`` can be
programmatically customized using the :meth:`.DialectEvents.do_setinputsizes`
hook. See that method for usage examples.
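
As a hedged illustration only (the listener name and the choice to drop
``SQL_WVARCHAR`` entries here are hypothetical, and ``connection_url`` stands
in for any of the URLs constructed above)::

    import pyodbc

    from sqlalchemy import create_engine, event

    engine = create_engine(connection_url)

    @event.listens_for(engine, "do_setinputsizes")
    def _skip_wvarchar(inputsizes, cursor, statement, parameters, context):
        # inputsizes maps BindParameter objects to the pyodbc type symbols
        # (or (type, size, scale) tuples) that are about to be passed to
        # cursor.setinputsizes(); deleting an entry lets pyodbc infer that
        # parameter's type on its own.
        for bindparam, dbapitype in list(inputsizes.items()):
            if dbapitype == pyodbc.SQL_WVARCHAR:
                del inputsizes[bindparam]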

.. versionchanged:: 2.0  The mssql+pyodbc dialect now defaults to using
   ``use_setinputsizes=True`` for all statement executions with the exception of
   ``cursor.executemany()`` calls when ``fast_executemany=True``.  The behavior
   can be turned off by passing ``use_setinputsizes=False`` to
   :func:`_sa.create_engine`.
