
Commit 3c6100e

Merge branch 'main' into saumya/numeric-precision-loss
2 parents: 9d6e6bb + 5a0bc59


5 files changed: +47 / -47 lines


PyPI_Description.md

Lines changed: 5 additions & 5 deletions

```diff
@@ -39,12 +39,12 @@ PyBind11 provides:
 
 We are currently in **Public Preview**.
 
-## What's new in v0.13.0
+## What's new in v0.13.1
 
-- **Enhanced Batch Operations:** Complete support for UNIQUEIDENTIFIER and DATETIMEOFFSET in `executemany()` operations with automatic type inference, enabling efficient bulk inserts of complex data types including UUIDs and timezone-aware datetimes.
-- **Streaming Large Values:** Robust handling of large objects (NVARCHAR/VARCHAR/VARBINARY(MAX)) in `executemany()` with automatic Data-At-Execution detection and fallback, supporting streaming inserts and fetches for massive datasets.
-- **Improved Cursor Reliability:** Enhanced `cursor.rowcount` accuracy across all fetch operations, including proper handling of empty result sets and consistent behavior for SELECT, INSERT, and UPDATE operations.
-- **Critical Stability Fixes:** Resolved memory leaks with secure token buffer handling, fixed resource cleanup to prevent segmentation faults during Python shutdown, and corrected type inference bugs in batch operations.
+- **Authentication Reliability:** Fixed token handling for Microsoft Entra ID authentication to ensure stable and reliable connections.
+- **Timezone Preservation:** Removed forced UTC conversion for `datetimeoffset` values, preserving original timezone information in Python `datetime` objects for accurate cross-timezone data handling.
+- **Connection Pooling Stability:** Enhanced pool shutdown mechanism with proper tracking to prevent resource leaks and ensure cleanup operations execute reliably.
+- **Predictable Type Handling:** Refined UUID string parameter handling to prevent automatic type coercion, ensuring strings are processed as intended.
 
 For more information, please visit the project link on Github: https://github.com/microsoft/mssql-python
```
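The "Timezone Preservation" entry above is the user-visible effect of the C++ changes in this commit. Below is a minimal sketch of the new fetch behaviour, assuming a DB-API style `connect()` from `mssql_python`; the connection string, temp table, and variable names are illustrative placeholders, not part of this commit.

```python
# Sketch of the v0.13.1 behaviour described by the "Timezone Preservation" entry.
# Assumes a reachable SQL Server; the connection string and table are placeholders.
from datetime import datetime, timedelta, timezone

from mssql_python import connect

conn = connect("Server=localhost;Database=tempdb;Trusted_Connection=yes;")
cursor = conn.cursor()

cursor.execute("CREATE TABLE #dto_demo (val DATETIMEOFFSET);")
original = datetime(2025, 5, 14, 12, 35, 52, 501000, tzinfo=timezone(timedelta(hours=1)))
cursor.execute("INSERT INTO #dto_demo (val) VALUES (?);", original)

cursor.execute("SELECT val FROM #dto_demo;")
fetched = cursor.fetchone()[0]

# Before this release the driver normalised the value to UTC (+00:00);
# with the forced conversion removed, the stored +01:00 offset is preserved.
assert fetched.utcoffset() == timedelta(hours=1)
assert fetched == original
```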

mssql_python/pybind/connection/connection.cpp

Lines changed: 2 additions & 6 deletions

```diff
@@ -173,7 +173,7 @@ SQLRETURN Connection::setAttribute(SQLINTEGER attribute, py::object value) {
     LOG("Setting SQL attribute");
     SQLPOINTER ptr = nullptr;
     SQLINTEGER length = 0;
-    std::string buffer; // to hold sensitive data temporarily
+    static std::string buffer; // to hold sensitive data temporarily
 
     if (py::isinstance<py::int_>(value)) {
         int intValue = value.cast<int>();
@@ -196,10 +196,6 @@ SQLRETURN Connection::setAttribute(SQLINTEGER attribute, py::object value) {
         LOG("Set attribute successfully");
     }
 
-    // Zero out sensitive data if used
-    if (!buffer.empty()) {
-        std::fill(buffer.begin(), buffer.end(), static_cast<char>(0));
-    }
     return ret;
 }
 
@@ -383,4 +379,4 @@ py::object ConnectionHandle::getInfo(SQLUSMALLINT infoType) const {
         ThrowStdException("Connection object is not initialized");
     }
     return _conn->getInfo(infoType);
-}
+}
```

mssql_python/pybind/ddbc_bindings.cpp

Lines changed: 0 additions & 2 deletions

```diff
@@ -2809,7 +2809,6 @@ SQLRETURN SQLGetData_wrap(SqlHandlePtr StatementHandle, SQLUSMALLINT colCount, p
                 microseconds,
                 tzinfo
             );
-            py_dt = py_dt.attr("astimezone")(datetime.attr("timezone").attr("utc"));
             row.append(py_dt);
         } else {
             LOG("Error fetching DATETIMEOFFSET for column {}, ret={}", i, ret);
@@ -3322,7 +3321,6 @@ SQLRETURN FetchBatchData(SQLHSTMT hStmt, ColumnBuffers& buffers, py::list& colum
                 dtoValue.fraction / 1000, // ns → µs
                 tzinfo
             );
-            py_dt = py_dt.attr("astimezone")(datetime.attr("timezone").attr("utc"));
             row.append(py_dt);
         } else {
             row.append(py::none());
```
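The two deleted lines are the pybind11 equivalent of calling `astimezone(timezone.utc)` on every fetched DATETIMEOFFSET value. A plain-Python illustration of the behavioural difference follows; the variable names are mine and only stand in for the C++ `py_dt` object.

```python
# Illustration only: what the deleted py_dt.attr("astimezone")(... utc) calls did,
# expressed in plain Python. Names here are illustrative, not from the source.
from datetime import datetime, timedelta, timezone

# A DATETIMEOFFSET column value of 2025-05-14 12:35:52.501 +01:00
py_dt = datetime(2025, 5, 14, 12, 35, 52, 501000, tzinfo=timezone(timedelta(hours=1)))

# Old behaviour (removed in this commit): rewrite the offset to UTC before
# appending the value to the result row.
old_value = py_dt.astimezone(timezone.utc)
assert old_value.utcoffset() == timedelta(0)   # original +01:00 offset lost

# New behaviour: append py_dt as constructed, original offset intact.
assert py_dt.utcoffset() == timedelta(hours=1)
assert old_value == py_dt                      # same instant, different offset
```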

setup.py

Lines changed: 1 addition & 1 deletion

```diff
@@ -83,7 +83,7 @@ def finalize_options(self):
 
 setup(
     name='mssql-python',
-    version='0.13.0',
+    version='0.13.1',
     description='A Python library for interacting with Microsoft SQL Server',
     long_description=open('PyPI_Description.md', encoding='utf-8').read(),
     long_description_content_type='text/markdown',
```

tests/test_004_cursor.py

Lines changed: 39 additions & 33 deletions

```diff
@@ -7829,12 +7829,7 @@ def test_datetimeoffset_read_write(cursor, db_connection):
         assert row is not None
         fetched_id, fetched_dt = row
         assert fetched_dt.tzinfo is not None
-        expected_utc = dt.astimezone(timezone.utc)
-        fetched_utc = fetched_dt.astimezone(timezone.utc)
-        # Ignore sub-microsecond differences
-        expected_utc = expected_utc.replace(microsecond=int(expected_utc.microsecond / 1000) * 1000)
-        fetched_utc = fetched_utc.replace(microsecond=int(fetched_utc.microsecond / 1000) * 1000)
-        assert fetched_utc == expected_utc
+        assert fetched_dt == dt
     finally:
         cursor.execute("DROP TABLE IF EXISTS #pytest_datetimeoffset_read_write;")
         db_connection.commit()
@@ -7868,12 +7863,7 @@ def test_datetimeoffset_max_min_offsets(cursor, db_connection):
             assert fetched_id == expected_id, f"ID mismatch: expected {expected_id}, got {fetched_id}"
             assert fetched_dt.tzinfo is not None, f"Fetched datetime object is naive for id {fetched_id}"
 
-            # Compare in UTC to avoid offset differences
-            expected_utc = expected_dt.astimezone(timezone.utc).replace(tzinfo=None)
-            fetched_utc = fetched_dt.astimezone(timezone.utc).replace(tzinfo=None)
-            assert fetched_utc == expected_utc, (
-                f"Value mismatch for id {expected_id}: expected UTC {expected_utc}, got {fetched_utc}"
-            )
+            assert fetched_dt == expected_dt, f"Value mismatch for id {expected_id}: expected {expected_dt}, got {fetched_dt}"
 
     finally:
         cursor.execute("DROP TABLE IF EXISTS #pytest_datetimeoffset_read_write;")
@@ -7928,12 +7918,7 @@ def test_datetimeoffset_dst_transitions(cursor, db_connection):
            assert fetched_id == expected_id, f"ID mismatch: expected {expected_id}, got {fetched_id}"
            assert fetched_dt.tzinfo is not None, f"Fetched datetime object is naive for id {fetched_id}"
 
-           # Compare UTC time to avoid issues due to offsets changing in DST
-           expected_utc = expected_dt.astimezone(timezone.utc).replace(tzinfo=None)
-           fetched_utc = fetched_dt.astimezone(timezone.utc).replace(tzinfo=None)
-           assert fetched_utc == expected_utc, (
-               f"Value mismatch for id {expected_id}: expected UTC {expected_utc}, got {fetched_utc}"
-           )
+           assert fetched_dt == expected_dt, f"Value mismatch for id {expected_id}: expected {expected_dt}, got {fetched_dt}"
 
     finally:
         cursor.execute("DROP TABLE IF EXISTS #pytest_datetimeoffset_dst_transitions;")
@@ -8010,17 +7995,7 @@ def test_datetimeoffset_executemany(cursor, db_connection):
            fetched_id, fetched_dto = rows[i]
            assert fetched_dto.tzinfo is not None, "Fetched datetime object is naive."
 
-           expected_utc = python_dt.astimezone(timezone.utc).replace(tzinfo=None)
-           fetched_utc = fetched_dto.astimezone(timezone.utc).replace(tzinfo=None)
-
-           # Round microseconds to nearest millisecond for comparison
-           expected_utc = expected_utc.replace(microsecond=int(expected_utc.microsecond / 1000) * 1000)
-           fetched_utc = fetched_utc.replace(microsecond=int(fetched_utc.microsecond / 1000) * 1000)
-
-           assert fetched_utc == expected_utc, (
-               f"Value mismatch for test case {i}. "
-               f"Expected UTC: {expected_utc}, Got UTC: {fetched_utc}"
-           )
+           assert fetched_dto == python_dt, f"Value mismatch for id {fetched_id}: expected {python_dt}, got {fetched_dto}"
     finally:
         cursor.execute("IF OBJECT_ID('tempdb..#pytest_dto', 'U') IS NOT NULL DROP TABLE #pytest_dto;")
         db_connection.commit()
@@ -8086,13 +8061,44 @@ def test_datetimeoffset_extreme_offsets(cursor, db_connection):
         for i, dt in enumerate(extreme_offsets):
             _, fetched = rows[i]
             assert fetched.tzinfo is not None
-            # Round-trip comparison via UTC
-            expected_utc = dt.astimezone(timezone.utc).replace(tzinfo=None)
-            fetched_utc = fetched.astimezone(timezone.utc).replace(tzinfo=None)
-            assert expected_utc == fetched_utc, f"Extreme offset round-trip failed for {dt.tzinfo}"
+            assert fetched == dt, f"Value mismatch for id {i}: expected {dt}, got {fetched}"
     finally:
         cursor.execute("IF OBJECT_ID('tempdb..#pytest_dto', 'U') IS NOT NULL DROP TABLE #pytest_dto;")
         db_connection.commit()
+
+def test_datetimeoffset_native_vs_string_simple(cursor, db_connection):
+    """
+    Replicates the user's testing scenario: fetch DATETIMEOFFSET as native datetime
+    and as string using CONVERT(nvarchar(35), ..., 121).
+    """
+    try:
+        cursor.execute("CREATE TABLE #pytest_dto_user_test (id INT PRIMARY KEY, Systime DATETIMEOFFSET);")
+        db_connection.commit()
+
+        # Insert rows similar to user's example
+        test_rows = [
+            (1, datetime(2025, 5, 14, 12, 35, 52, 501000, tzinfo=timezone(timedelta(hours=1)))),
+            (2, datetime(2025, 5, 14, 15, 20, 30, 123000, tzinfo=timezone(timedelta(hours=-5))))
+        ]
+
+        for i, dt in test_rows:
+            cursor.execute("INSERT INTO #pytest_dto_user_test (id, Systime) VALUES (?, ?);", i, dt)
+        db_connection.commit()
+
+        # Native fetch (like the user's first execute)
+        cursor.execute("SELECT Systime FROM #pytest_dto_user_test WHERE id=1;")
+        dt_native = cursor.fetchone()[0]
+        assert dt_native.tzinfo is not None
+        assert dt_native == test_rows[0][1]
+
+        # String fetch (like the user's convert to nvarchar)
+        cursor.execute("SELECT CONVERT(nvarchar(35), Systime, 121) FROM #pytest_dto_user_test WHERE id=1;")
+        dt_str = cursor.fetchone()[0]
+        assert dt_str.endswith("+01:00")  # original offset preserved
+
+    finally:
+        cursor.execute("DROP TABLE IF EXISTS #pytest_dto_user_test;")
+        db_connection.commit()
 
 def test_lowercase_attribute(cursor, db_connection):
     """Test that the lowercase attribute properly converts column names to lowercase"""
```
