diff --git a/src/snowflake/connector/connection.py b/src/snowflake/connector/connection.py index 83b2986dc8..a46db80e34 100644 --- a/src/snowflake/connector/connection.py +++ b/src/snowflake/connector/connection.py @@ -72,7 +72,7 @@ OCSPMode, QueryStatus, ) -from .converter import SnowflakeConverter +from .converter import SnowflakeConverter, infer_snowflake_type from .cursor import LOG_MAX_QUERY_LENGTH, SnowflakeCursor from .description import ( CLIENT_NAME, @@ -119,6 +119,7 @@ from .sqlstate import SQLSTATE_CONNECTION_NOT_EXISTS, SQLSTATE_FEATURE_NOT_SUPPORTED from .telemetry import TelemetryClient, TelemetryData, TelemetryField from .time_util import HeartBeatTimer, get_time_millis +from .type_wrappers import snowflake_type_wrapper from .url_util import extract_top_level_domain_from_hostname from .util_text import construct_hostname, parse_account, split_statements from .wif_util import AttestationProvider @@ -1737,7 +1738,7 @@ def _get_snowflake_type_and_binding( ) snowflake_type, v = v else: - snowflake_type = self.converter.snowflake_type(v) + snowflake_type = infer_snowflake_type(v) if snowflake_type is None: Error.errorhandler_wrapper( self, @@ -1756,6 +1757,9 @@ def _get_snowflake_type_and_binding( self.converter.to_snowflake_bindings(snowflake_type, v), ) + def _is_complex_type(self, snowflake_type: str): + return snowflake_type in ("VARIANT", "OBJECT", "ARRAY", "MAP") + # TODO we could probably rework this to not make dicts like this: {'1': 'value', '2': '13'} def _process_params_qmarks( self, @@ -1769,8 +1773,12 @@ def _process_params_qmarks( get_type_and_binding = partial(self._get_snowflake_type_and_binding, cursor) for idx, v in enumerate(params): - if isinstance(v, list): - snowflake_type = self.converter.snowflake_type(v) + if isinstance(v, snowflake_type_wrapper): + processed_params[str(idx + 1)] = ( + self.converter.to_snowflake_bindings_dict("", v) + ) + elif isinstance(v, list): + snowflake_type = infer_snowflake_type(v) all_param_data = 
list(map(get_type_and_binding, v)) first_type = all_param_data[0].type # if all elements have the same snowflake type, update snowflake_type diff --git a/src/snowflake/connector/converter.py b/src/snowflake/connector/converter.py index 8202351990..ea9628af2a 100644 --- a/src/snowflake/connector/converter.py +++ b/src/snowflake/connector/converter.py @@ -8,6 +8,7 @@ from datetime import date, datetime from datetime import time as dt_t from datetime import timedelta, timezone, tzinfo +from decimal import Decimal from functools import partial from logging import getLogger from math import ceil @@ -22,6 +23,12 @@ from .errors import ProgrammingError from .sfbinaryformat import binary_to_python, binary_to_snowflake from .sfdatetime import sfdatetime_total_seconds_from_timedelta +from .type_wrappers import ( + snowflake_array, + snowflake_map, + snowflake_object, + snowflake_variant, +) if TYPE_CHECKING: from numpy import bool_, int64 @@ -81,6 +88,8 @@ # Type alias SnowflakeConverterType = Callable[[Any], Any] +JSON_FORMAT_STR = "json" + def convert_datetime_to_epoch(dt: datetime) -> float: """Converts datetime to epoch time in seconds. @@ -147,6 +156,12 @@ def _generate_tzinfo_from_tzoffset(tzoffset_minutes: int) -> tzinfo: return pytz.FixedOffset(tzoffset_minutes) +def infer_snowflake_type(value: Any) -> str | None: + """Returns Snowflake data type for the value. This is used for qmark parameter style.""" + type_name = value.__class__.__name__.lower() + return PYTHON_TO_SNOWFLAKE_TYPE.get(type_name) + + class SnowflakeConverter: def __init__(self, **kwargs) -> None: self._parameters: dict[str, str | int | bool] = {} @@ -355,11 +370,6 @@ def _BOOLEAN_to_python( ) -> Callable: return lambda value: value in ("1", "TRUE") - def snowflake_type(self, value: Any) -> str | None: - """Returns Snowflake data type for the value. 
This is used for qmark parameter style.""" - type_name = value.__class__.__name__.lower() - return PYTHON_TO_SNOWFLAKE_TYPE.get(type_name) - def to_snowflake_bindings(self, snowflake_type: str, value: Any) -> str: """Converts Python data to snowflake data for qmark and numeric parameter style. @@ -370,10 +380,32 @@ def to_snowflake_bindings(self, snowflake_type: str, value: Any) -> str: snowflake_type, value ) + def to_snowflake_bindings_dict( + self, snowflake_type: str, value: Any + ) -> dict[str, Any]: + """Converts Python data to snowflake bindings dict for qmark and numeric parameter style. + + The output is bound in a query in the server side. + """ + type_name = value.__class__.__name__.lower() + return getattr(self, f"_{type_name}_to_snowflake_bindings_dict")( + snowflake_type, value + ) + def _str_to_snowflake_bindings(self, _, value: str) -> str: # NOTE: str type is always taken as a text data and never binary return str(value) + def _str_to_snowflake_bindings_dict( + self, snowflake_type: str, value: str + ) -> dict[str, Any]: + return { + "type": snowflake_type, + "value": str(value), + "schema": None, + "fmt": JSON_FORMAT_STR, + } + def _date_to_snowflake_bindings_in_bulk_insertion(self, value: date) -> str: # notes: this is for date type bulk insertion, it's different from non-bulk date type insertion flow milliseconds = _convert_date_to_epoch_milliseconds(value) @@ -403,6 +435,14 @@ def _bool_to_snowflake_bindings(self, _, value: bool) -> str: def _nonetype_to_snowflake_bindings(self, *_) -> None: return None + def _nonetype_to_snowflake_bindings_dict(self, *_) -> dict[str, Any]: + return { + "type": "ANY", + "value": None, + "schema": None, + "fmt": JSON_FORMAT_STR, + } + def _date_to_snowflake_bindings(self, _, value: date) -> str: # this is for date type non-bulk insertion, it's different from bulk date type insertion flow # milliseconds @@ -474,6 +514,183 @@ def _timedelta_to_snowflake_bindings( str(hours * 3600 + mins * 60 + secs) + 
f"{value.microseconds:06d}" + "000" ) + def _python_object_to_structured_type_field(self, value: Any) -> Any: + """Converts a Python object to a structured type field.""" + if isinstance(value, datetime): + return value.strftime("%a, %d %b %Y %H:%M:%S %Z") + elif isinstance(value, date): + return value.strftime("%Y-%m-%d") + elif isinstance(value, time.struct_time) or isinstance(value, dt_t): + return time.strftime("%a, %d %b %Y %H:%M:%S %Z", value) + elif isinstance(value, timedelta): + return self._timedelta_to_snowflake_bindings("TIME", value) + elif isinstance(value, bytes) or isinstance(value, bytearray): + return self._bytes_to_snowflake_bindings(None, value) + elif isinstance(value, numpy.int64): + return int(value) + elif isinstance(value, Decimal): + return float(value) + elif isinstance(value, snowflake_array): + return self._snowflake_array_to_snowflake_bindings(value) + elif isinstance(value, snowflake_object): + return self._snowflake_object_to_snowflake_bindings(value) + else: + return value + + def _snowflake_array_to_snowflake_bindings( + self, value: snowflake_array + ) -> list[Any]: + if not value: + return [] + + converted_values = [] + # TODO: is this an edge case that needs to be handled + + if value.original_type == bytearray: + # bytearray when converted to snowflake_array becomes an array of int. The reasonable expectation would be + # for it to be binded as an array of individual bytes the same way as array of bytes value is binded. 
+ return [self._bytes_to_snowflake_bindings(None, bytes([v])) for v in value] + + for v in value: + if isinstance(v, snowflake_object): + converted_values.append(self._snowflake_object_to_snowflake_bindings(v)) + elif isinstance(v, snowflake_array): + converted_values.append(self._snowflake_array_to_snowflake_bindings(v)) + else: + converted_values.append(self._python_object_to_structured_type_field(v)) + + return converted_values + + def _snowflake_array_to_snowflake_bindings_dict( + self, _, value: snowflake_array + ) -> dict[str, Any]: + if not value: + return { + "type": "ARRAY", + "value": "[]", + "fmt": JSON_FORMAT_STR, + "schema": None, + } + + return { + "type": "ARRAY", + "value": json.dumps(self._snowflake_array_to_snowflake_bindings(value)), + "fmt": JSON_FORMAT_STR, + } + + def _snowflake_object_to_snowflake_bindings( + self, value: snowflake_object + ) -> dict[str, Any]: + if not value: + return {} + + converted_object = {} + + for key, v in value.items(): + if type(key) is not str: + logger.info( + "snowflake_object key %s is not a string. 
Converting to string.", + key, + ) + key = str(key) + + converted_object[key] = self._python_object_to_structured_type_field(v) + + return converted_object + + def _snowflake_object_to_snowflake_bindings_dict( + self, _, value: snowflake_object + ) -> dict[str, Any]: + if not value: + return { + "type": "OBJECT", + "value": "{}", + "fmt": JSON_FORMAT_STR, + "schema": None, + } + + return { + "type": "OBJECT", + "value": json.dumps(self._snowflake_object_to_snowflake_bindings(value)), + "fmt": JSON_FORMAT_STR, + "schema": None, + } + + def _snowflake_variant_to_snowflake_bindings_dict( + self, _, value: snowflake_variant + ) -> dict[str, Any]: + return { + "type": "VARIANT", + "value": json.dumps(value.value), + "fmt": JSON_FORMAT_STR, + "schema": None, + } + + def _snowflake_map_to_snowflake_bindings( + self, value: snowflake_map + ) -> dict[Any, Any]: + """Converts snowflake_map to a dictionary for binding.""" + if not value: + return {} + + converted_map = {} + key_type = None + value_type = None + for key, v in value.items(): + new_key_type = infer_snowflake_type(key) + new_value_type = infer_snowflake_type(v) + + if key_type and new_key_type != key_type: + raise ValueError("Keys in snowflake_map must be of the same type.") + else: + key_type = new_key_type + + if value_type and new_value_type != value_type: + raise ValueError("Values in snowflake_map must be of the same type.") + else: + value_type = new_value_type + + if key in converted_map: + logger.warning( + "Duplicate key found in snowflake_map: %s. 
Overwriting the value.", + key, + ) + converted_map[key] = self._python_object_to_structured_type_field(v) + + return converted_map + + def _snowflake_map_to_snowflake_bindings_dict( + self, _, value: snowflake_map + ) -> dict[str, Any]: + if not value: + return { + "type": "OBJECT", + "value": "{}", + "fmt": JSON_FORMAT_STR, + "schema": None, + } + + return { + "type": "OBJECT", + "value": json.dumps(self._snowflake_map_to_snowflake_bindings(value)), + "nullable": True, + "fmt": JSON_FORMAT_STR, + "schema": { + "type": "MAP", + "nullable": True, + "fields": [ + { + "type": value.key_type, + **value.key_attributes, + }, + { + "type": value.value_type, + **value.value_attributes, + }, + ], + }, + } + def to_snowflake(self, value: Any) -> Any: """Converts Python data to Snowflake data for pyformat/format style. @@ -690,7 +907,7 @@ def to_csv_bindings(self, value: tuple[str, Any] | Any) -> str | None: # to_csv_bindings is only used in bulk insertion logic val = self._date_to_snowflake_bindings_in_bulk_insertion(value) else: - _type = self.snowflake_type(value) + _type = infer_snowflake_type(value) val = self.to_snowflake_bindings(_type, value) return self.escape_for_csv(val) diff --git a/src/snowflake/connector/type_wrappers.py b/src/snowflake/connector/type_wrappers.py new file mode 100644 index 0000000000..ec85f0d3e3 --- /dev/null +++ b/src/snowflake/connector/type_wrappers.py @@ -0,0 +1,90 @@ +from __future__ import annotations + +from typing import Any + +from snowflake.connector import converter + + +class snowflake_type_wrapper: + pass + + +class snowflake_array(snowflake_type_wrapper, list): + def __init__(self, seq=()): + super().__init__(seq) + self.original_type = type(seq) + + +class snowflake_object(snowflake_type_wrapper, dict): + def __init__(self, seq=None, **kwargs): + super().__init__(seq or {}, **kwargs) + self.original_type = type(seq) if seq is not None else None + + +class snowflake_map(snowflake_type_wrapper, dict): + def __init__(self, seq=None, 
**kwargs): + super().__init__(seq or {}, **kwargs) + self.original_type = type(seq) if seq is not None else None + self.key_type = None + self.key_attributes = { + "nullable": True, + "length": 0, + "scale": 0, + "precision": 36, + } + self.value_type = None + self.value_attributes = { + "nullable": True, + "length": 0, + "scale": 0, + "precision": 36, + } + if seq: + keys = list(self.keys()) + values = list(self.values()) + if keys: + self.key_type = converter.infer_snowflake_type(keys[0]) + if not all( + converter.infer_snowflake_type(k) == self.key_type for k in keys + ): + raise ValueError("All keys must have the same snowflake_type.") + if values: + self.value_type = converter.infer_snowflake_type(values[0]) + if not all( + converter.infer_snowflake_type(v) == self.value_type for v in values + ): + raise ValueError("All values must have the same snowflake_type.") + + if self.key_type == "TIME": + self.key_attributes = { + "nullable": True, + "length": 0, + "scale": 9, + "precision": 0, + } + + if self.value_type == "TIME": + self.value_attributes = { + "nullable": True, + "length": 0, + "scale": 9, + "precision": 0, + } + + def __setitem__(self, key, value): + key_type = converter.infer_snowflake_type(key) + value_type = converter.infer_snowflake_type(value) + if self.key_type is not None and key_type != self.key_type: + raise ValueError("Key snowflake_type does not match existing key_type.") + if self.value_type is not None and value_type != self.value_type: + raise ValueError("Value snowflake_type does not match existing value_type.") + if self.key_type is None: + self.key_type = key_type + if self.value_type is None: + self.value_type = value_type + super().__setitem__(key, value) + + +class snowflake_variant(snowflake_type_wrapper): + def __init__(self, value: Any): + raise NotImplementedError("snowflake_variant is currently unsupported.") diff --git a/test/integ/test_bindings.py b/test/integ/test_bindings.py index e5820c199b..154a34fed8 100644 --- 
a/test/integ/test_bindings.py +++ b/test/integ/test_bindings.py @@ -2,6 +2,8 @@ from __future__ import annotations import calendar +import json +import math import tempfile import time from datetime import date, datetime @@ -13,9 +15,16 @@ import pendulum import pytest import pytz +from numpy import long from snowflake.connector.converter import convert_datetime_to_epoch from snowflake.connector.errors import ForbiddenError, ProgrammingError +from snowflake.connector.type_wrappers import ( + snowflake_array, + snowflake_map, + snowflake_object, + snowflake_variant, +) try: from snowflake.connector.util_text import random_string @@ -695,3 +704,293 @@ def test_timestamp_bindings( assert r[0].replace(tzinfo=None) == expected.replace(tzinfo=None) else: assert r[0] == expected + + +@pytest.mark.skipolddriver +def test_binding_variant(conn_cnx): + pytest.skip("Server-side binding of VARIANT type is not supported") + bind_query = "INSERT INTO TEST_TABLE1 SELECT (?)" + with conn_cnx(paramstyle="qmark") as cnx, cnx.cursor() as cursor: + snowflake_type = "VARIANT" + cursor.execute(f"CREATE OR REPLACE TABLE TEST_TABLE1 (col1 {snowflake_type});") + cursor.execute(bind_query, params=(snowflake_variant(None),)) + cursor.execute(bind_query, params=(snowflake_variant(""),)) + cursor.execute(bind_query, params=(snowflake_variant([1, 2, 3]),)) + cursor.execute( + bind_query, + params=(snowflake_variant("{'a': 1, 'b': 2, 'c': 3}"),), + ) + cursor.execute( + bind_query, + params=(snowflake_variant({"a": 1, "b": 2, "c": 3}),), + ) + + results = cursor.execute("SELECT * FROM TEST_TABLE1").fetchall() + + assert results[0][0] is None + assert results[1][0] is None + assert json.loads(results[2][0]) == [1, 2, 3] + assert json.loads(results[3][0]) == {"a": 1, "b": 2, "c": 3} + assert json.loads(results[4][0]) == {"a": 1, "b": 2, "c": 3} + + +@pytest.mark.skipolddriver +@pytest.mark.parametrize( + "write_value, read_value", + [ + ([], []), + ([None], [None]), + ([1, 2, 3], [1, 2, 3]), + 
(["a", "b", "c"], ["a", "b", "c"]), + ([1, "2", 3], [1, "2", 3]), + ( + [ + datetime.strptime("2020-01-01", "%Y-%m-%d"), + datetime.strptime("2020-01-02", "%Y-%m-%d"), + ], + [ + "Wed, 01 Jan 2020 00:00:00 ", + "Thu, 02 Jan 2020 00:00:00 ", + ], + ), + ( + [ + time.strptime("30 Nov 00", "%d %b %y"), + time.strptime("30 Nov 01", "%d %b %y"), + ], + [ + "Thu, 30 Nov 2000 00:00:00 ", + "Fri, 30 Nov 2001 00:00:00 ", + ], + ), + ( + [ + timedelta(days=365), + timedelta(hours=1), + ], + ["31536000000000000", "3600000000000"], + ), + ([True, False, True], [True, False, True]), + ([b"123", b"HEX", b"3"], ["313233", "484558", "33"]), + ( + [long(10), long(-2147483647), long(2147483647)], + [10, -2147483647, 2147483647], + ), + ([math.pi, 1.1, 1.2], [3.141592653589793, 1.1, 1.2]), + ([Decimal(10.10), Decimal(-5.001), Decimal(-5.5)], [10.1, -5.001, -5.5]), + ([bytearray(b"abc"), bytearray(b"def")], ["616263", "646566"]), + (bytearray(b"123"), ["31", "32", "33"]), + ([1, snowflake_array([1, 2, 3])], [1, [1, 2, 3]]), + ( + [1, snowflake_object({"a": 1, "b": 2, "c": 3})], + [1, {"a": 1, "b": 2, "c": 3}], + ), + ], + ids=[ + "empty_array", + "array_with_nulls", + "array_with_integers", + "array_with_strings", + "array_with_mixed_types", + "array_with_dates", + "array_with_times", + "array_with_timedeltas", + "array_with_booleans", + "array_with_bytes", + "array_with_long_integers", + "array_with_floats", + "array_with_decimals", + "array_with_bytearrays", + "array_with_bytearray_single_value", + "nested_array", + "nested_object", + ], +) +def test_binding_array_without_schema(conn_cnx, write_value, read_value): + bind_query = "INSERT INTO TEST_TABLE1 SELECT (?)" + with conn_cnx(paramstyle="qmark") as cnx, cnx.cursor() as cursor: + cursor.execute("CREATE OR REPLACE TABLE TEST_TABLE1 (col1 ARRAY);") + cursor.execute(bind_query, params=(snowflake_array(write_value),)) + + results = cursor.execute("SELECT * FROM TEST_TABLE1").fetchall() + + assert json.loads(results[0][0]) == 
read_value + + +@pytest.mark.skipolddriver +@pytest.mark.parametrize( + "write_value, read_value", + [ + (None, {}), + ({}, {}), + ({"a": 1, "b": 2, "c": 3}, {"a": 1, "b": 2, "c": 3}), + ({1: 1, 2: 2, "c": 3}, {"1": 1, "2": 2, "c": 3}), + ( + {"Jan 1st 2020": datetime.strptime("2020-01-01", "%Y-%m-%d")}, + {"Jan 1st 2020": "Wed, 01 Jan 2020 00:00:00 "}, + ), + ( + { + "Nov 30th 2000": time.strptime("30 Nov 00", "%d %b %y"), + "Nov 30th 2001": time.strptime("30 Nov 01", "%d %b %y"), + }, + { + "Nov 30th 2000": "Thu, 30 Nov 2000 00:00:00 ", + "Nov 30th 2001": "Fri, 30 Nov 2001 00:00:00 ", + }, + ), + ( + { + "year": timedelta(days=365), + "hour": timedelta(hours=1), + }, + {"year": "31536000000000000", "hour": "3600000000000"}, + ), + ({"a": True, "b": False, "c": False}, {"a": True, "b": False, "c": False}), + ({1: b"123", 2: b"HEX", 3: b"3"}, {"1": "313233", "2": "484558", "3": "33"}), + ( + {1: long(10), 2: long(-2147483647), 3: long(2147483647)}, + {"1": 10, "2": -2147483647, "3": 2147483647}, + ), + ({1: math.pi, 2: 1.1, 3: 1.2}, {"1": math.pi, "2": 1.1, "3": 1.2}), + ( + {1: Decimal(10.10), 2: Decimal(-5.001), 3: Decimal(-5.5)}, + {"1": 10.1, "2": -5.001, "3": -5.5}, + ), + ( + {1: bytearray(b"abc"), 2: bytearray(b"def")}, + {"1": "616263", "2": "646566"}, + ), + ({1: [1, 2, 3]}, {"1": [1, 2, 3]}), + ({1: {1: 1, 2: 2, 3: 3}}, {"1": {"1": 1, "2": 2, "3": 3}}), + ], + ids=[ + "none_object", + "empty_object", + "object_with_integers", + "object_with_mixed_types", + "object_with_dates", + "object_with_times", + "object_with_timedeltas", + "object_with_booleans", + "object_with_bytes", + "object_with_long_integers", + "object_with_floats", + "object_with_decimals", + "object_with_bytearrays", + "nested_array", + "nested_object", + ], +) +def test_binding_object_without_schema(conn_cnx, write_value, read_value): + bind_query = "INSERT INTO TEST_TABLE1 SELECT (?)" + with conn_cnx(paramstyle="qmark") as cnx, cnx.cursor() as cursor: + cursor.execute("CREATE OR REPLACE 
TABLE TEST_TABLE1 (col1 OBJECT);") + cursor.execute(bind_query, params=(snowflake_object(write_value),)) + + results = cursor.execute("SELECT * FROM TEST_TABLE1").fetchall() + + assert json.loads(results[0][0]) == read_value + + +@pytest.mark.skipolddriver +@pytest.mark.parametrize( + "snowflake_type, write_value, read_value", + [ + ("MAP(NUMBER, NUMBER)", {}, {}), + # TODO: JSON format quotes keys, so even though the keys are integers, the assertion needs to expect strings + ("MAP(NUMBER, NUMBER)", {1: 1, 2: 2, 3: 3}, {"1": 1, "2": 2, "3": 3}), + ("MAP(TEXT, NUMBER)", {"1": 1, "2": 2, "3": 3}, {"1": 1, "2": 2, "3": 3}), + ("MAP(NUMBER, TEXT)", {1: "1", 2: "2", 3: "3"}, {"1": "1", "2": "2", "3": "3"}), + ( + "MAP(TEXT, DATE)", + {"a": datetime.strptime("2020-01-01", "%Y-%m-%d").date()}, + {"a": "2020-01-01"}, + ), + ( + "MAP(TEXT, TIMESTAMP_NTZ)", + {"a": datetime.strptime("2020-01-01", "%Y-%m-%d")}, + {"a": "Wed, 01 Jan 2020 00:00:00 Z"}, + ), + ( + "MAP(TEXT, TIME)", + {"year": timedelta(days=365), "hour": timedelta(hours=1)}, + {"year": "31536000000000000", "hour": "3600000000000"}, + ), + ( + "MAP(TEXT, BOOLEAN)", + {"a": True, "b": False, "c": True}, + {"a": True, "b": False, "c": True}, + ), + ( + "MAP(TEXT, BINARY)", + {"1": b"123", "2": b"HEX", "3": b"3"}, + {"1": "313233", "2": "484558", "3": "33"}, + ), + ( + "MAP(TEXT, NUMBER)", + {"1": long(10), "2": long(-2147483647), "3": long(2147483647)}, + {"1": 10, "2": -2147483647, "3": 2147483647}, + ), + ], + ids=[ + "empty_map", + "map_number_number", + "map_text_number", + "map_number_text", + "map_text_date", + "map_text_timestamp_ntz", + "map_text_timedelta", + "map_text_boolean", + "map_text_binary", + "map_text_long", + ], +) +def test_binding_structured_map(conn_cnx, snowflake_type, write_value, read_value): + bind_query = "INSERT INTO TEST_TABLE1 SELECT (?)" + with conn_cnx(paramstyle="qmark") as cnx, cnx.cursor() as cursor: + cursor.execute(f"CREATE OR REPLACE TABLE TEST_TABLE1 (col1 
{snowflake_type});") + cursor.execute(bind_query, params=(snowflake_map(write_value),)) + + results = cursor.execute("SELECT * FROM TEST_TABLE1").fetchone() + + print(write_value, read_value) + assert json.loads(results[0]) == read_value + + +@pytest.mark.skipolddriver +def test_structured_array_binding_timestamp(conn_cnx): + bind_query = "INSERT INTO TEST_TABLE1 SELECT (?)" + with conn_cnx(paramstyle="qmark") as cnx, cnx.cursor() as cursor: + cursor.execute( + "CREATE OR REPLACE TABLE TEST_TABLE1 (col1 ARRAY(TIMESTAMP_LTZ))" + ) + cursor.execute( + bind_query, + params=( + snowflake_array( + [datetime.strptime("2020-01-01", "%Y-%m-%d")], + ), + ), + ) + result = cursor.execute("SELECT col1 FROM TEST_TABLE1").fetchone() + + assert json.loads(result[0]) == ["Wed, 01 Jan 2020 00:00:00 Z"] + + +@pytest.mark.skipolddriver +def test_structured_array_binding(conn_cnx): + bind_query = "INSERT INTO TEST_TABLE1 SELECT (?)" + with conn_cnx(paramstyle="qmark") as cnx, cnx.cursor() as cursor: + cursor.execute("CREATE OR REPLACE TABLE TEST_TABLE1 (col1 ARRAY(BINARY))") + cursor.execute( + bind_query, + params=( + snowflake_array( + [bytearray(b"abc"), b"1", b"2"], + ), + ), + ) + results = cursor.execute("SELECT * FROM TEST_TABLE1").fetchall() + + assert json.loads(results[0][0]) == ["616263", "31", "32"]