diff --git a/Snowflake.Data.Tests/IntegrationTests/SFDbDataReaderIT.cs b/Snowflake.Data.Tests/IntegrationTests/SFDbDataReaderIT.cs index b0e555185..c6952f84a 100755 --- a/Snowflake.Data.Tests/IntegrationTests/SFDbDataReaderIT.cs +++ b/Snowflake.Data.Tests/IntegrationTests/SFDbDataReaderIT.cs @@ -20,14 +20,14 @@ namespace Snowflake.Data.Tests.IntegrationTests class SFDbDataReaderIT : SFBaseTest { protected override string TestName => base.TestName + _resultFormat; - + private readonly ResultFormat _resultFormat; - + public SFDbDataReaderIT(ResultFormat resultFormat) { _resultFormat = resultFormat; } - + private void ValidateResultFormat(IDataReader reader) { Assert.AreEqual(_resultFormat, ((SnowflakeDbDataReader)reader).ResultFormat); @@ -39,7 +39,7 @@ public void TestRecordsAffected() using (var conn = CreateAndOpenConnection()) { CreateOrReplaceTable(conn, TableName, new []{"cola NUMBER"}); - + IDbCommand cmd = conn.CreateCommand(); string insertCommand = $"insert into {TableName} values (1),(1),(1)"; @@ -67,7 +67,7 @@ public void TestGetNumber() using (var conn = CreateAndOpenConnection()) { CreateOrReplaceTable(conn, TableName, new []{"cola NUMBER"}); - + IDbCommand cmd = conn.CreateCommand(); int numInt = 10000; @@ -114,7 +114,7 @@ public void TestGetNumber() Assert.IsFalse(reader.Read()); reader.Close(); - + CloseConnection(conn); } @@ -152,9 +152,9 @@ public void TestGetDouble() cmd.CommandText = $"select * from {TableName}"; IDataReader reader = cmd.ExecuteReader(); - + ValidateResultFormat(reader); - + Assert.IsTrue(reader.Read()); Assert.AreEqual(numFloat, reader.GetFloat(0)); Assert.AreEqual((decimal)numFloat, reader.GetDecimal(0)); @@ -235,7 +235,7 @@ public void TestGetTime(string inputTimeStr, int? 
precision) [TestCase("11:22:33.4455667")] [TestCase("23:59:59.9999999")] [TestCase("16:20:00.6666666")] - [TestCase("00:00:00.0000000")] + [TestCase("00:00:00.0000000")] [TestCase("00:00:00")] [TestCase("23:59:59.1")] [TestCase("23:59:59.12")] @@ -284,7 +284,7 @@ public void TestGetTimeSpan(string inputTimeStr) Assert.AreEqual(dateTimeTime.Minute, timeSpanTime.Minutes); Assert.AreEqual(dateTimeTime.Second, timeSpanTime.Seconds); Assert.AreEqual(dateTimeTime.Millisecond, timeSpanTime.Milliseconds); - + CloseConnection(conn); } } @@ -336,7 +336,7 @@ public void TestGetTimeSpanError() IDataReader reader = cmd.ExecuteReader(); ValidateResultFormat(reader); - + Assert.IsTrue(reader.Read()); // All types except TIME fail conversion when calling GetTimeSpan @@ -344,19 +344,19 @@ public void TestGetTimeSpanError() { try { - + ((SnowflakeDbDataReader)reader).GetTimeSpan(i); Assert.Fail("Data should not be converted to TIME"); } catch (SnowflakeDbException e) { - Assert.AreEqual(270003, e.ErrorCode); + Assert.AreEqual(270003, e.ErrorCode); } } // Null value // Null value can not be converted to TimeSpan because it is a non-nullable type - + try { ((SnowflakeDbDataReader)reader).GetTimeSpan(12); @@ -371,7 +371,7 @@ public void TestGetTimeSpanError() TimeSpan timeSpanTime = ((SnowflakeDbDataReader)reader).GetTimeSpan(13); reader.Close(); - + CloseConnection(conn); } } @@ -425,9 +425,9 @@ private void TestGetDateAndOrTime(string inputTimeStr, int? precision, SFDataTyp cmd.CommandText = $"select * from {TableName}"; IDataReader reader = cmd.ExecuteReader(); - + ValidateResultFormat(reader); - + Assert.IsTrue(reader.Read()); // For time, we getDateTime on the column and ignore date part @@ -435,7 +435,7 @@ private void TestGetDateAndOrTime(string inputTimeStr, int? 
precision, SFDataTyp if (dataType == SFDataType.DATE) { - Assert.AreEqual(inputTime.Date, reader.GetDateTime(0)); + Assert.AreEqual(inputTime.Date, actualTime); Assert.AreEqual(inputTime.Date.ToString("yyyy-MM-dd"), reader.GetString(0)); } if (dataType != SFDataType.DATE) @@ -449,14 +449,17 @@ private void TestGetDateAndOrTime(string inputTimeStr, int? precision, SFDataTyp { if (precision == 9) { - Assert.AreEqual(inputTime, reader.GetDateTime(0)); + Assert.AreEqual(inputTime, actualTime); } else { - Assert.AreEqual(inputTime.Date, reader.GetDateTime(0).Date); + Assert.AreEqual(inputTime.Date, actualTime.Date); } } + // DATE, TIME and TIMESTAMP_NTZ should be returned with DateTimeKind.Unspecified + Assert.AreEqual(DateTimeKind.Unspecified, actualTime.Kind); + reader.Close(); CloseConnection(conn); @@ -495,9 +498,9 @@ public void TestGetTimestampTZ(int timezoneOffsetInHours) using (var conn = CreateAndOpenConnection()) { CreateOrReplaceTable(conn, TableName, new []{"cola TIMESTAMP_TZ"}); - + DateTimeOffset now = DateTimeOffset.Now.ToOffset(TimeSpan.FromHours(timezoneOffsetInHours)); - + IDbCommand cmd = conn.CreateCommand(); string insertCommand = $"insert into {TableName} values (?)"; @@ -514,9 +517,9 @@ public void TestGetTimestampTZ(int timezoneOffsetInHours) cmd.CommandText = $"select * from {TableName}"; IDataReader reader = cmd.ExecuteReader(); - + ValidateResultFormat(reader); - + Assert.IsTrue(reader.Read()); DateTimeOffset dtOffset = (DateTimeOffset)reader.GetValue(0); reader.Close(); @@ -535,9 +538,9 @@ public void TestGetTimestampLTZ() using (var conn = CreateAndOpenConnection()) { CreateOrReplaceTable(conn, TableName, new []{"cola TIMESTAMP_LTZ"}); - + DateTimeOffset now = DateTimeOffset.Now; - + IDbCommand cmd = conn.CreateCommand(); string insertCommand = $"insert into {TableName} values (?)"; @@ -555,9 +558,9 @@ public void TestGetTimestampLTZ() cmd.CommandText = $"select * from {TableName}"; IDataReader reader = cmd.ExecuteReader(); - + 
ValidateResultFormat(reader); - + Assert.IsTrue(reader.Read()); DateTimeOffset dtOffset = (DateTimeOffset)reader.GetValue(0); reader.Close(); @@ -592,9 +595,9 @@ public void TestGetBoolean([Values]bool value) cmd.CommandText = $"select * from {TableName}"; IDataReader reader = cmd.ExecuteReader(); - + ValidateResultFormat(reader); - + Assert.IsTrue(reader.Read()); Assert.AreEqual(value, reader.GetBoolean(0)); reader.Close(); @@ -655,18 +658,18 @@ public void TestGetBinary() "col2 VARCHAR(50)", "col3 DOUBLE" }); - + byte[] testBytes = Encoding.UTF8.GetBytes("TEST_GET_BINARAY"); string testChars = "TEST_GET_CHARS"; double testDouble = 1.2345678; string insertCommand = $"insert into {TableName} values (?, '{testChars}',{testDouble.ToString()})"; IDbCommand cmd = conn.CreateCommand(); cmd.CommandText = insertCommand; - + var p1 = cmd.CreateParameter(); p1.ParameterName = "1"; p1.DbType = DbType.Binary; - p1.Value = testBytes; + p1.Value = testBytes; cmd.Parameters.Add(p1); var count = cmd.ExecuteNonQuery(); @@ -674,9 +677,9 @@ public void TestGetBinary() cmd.CommandText = $"select * from {TableName}"; IDataReader reader = cmd.ExecuteReader(); - + ValidateResultFormat(reader); - + Assert.IsTrue(reader.Read()); // Auto type conversion Assert.IsTrue(testBytes.SequenceEqual((byte[])reader.GetValue(0))); @@ -714,7 +717,7 @@ public void TestGetBinary() Assert.AreEqual(read, toReadLength); Assert.IsTrue(testSubBytes.SequenceEqual(sub)); - // Read subset 'GET_BINARAY' from actual 'TEST_GET_BINARAY' data + // Read subset 'GET_BINARAY' from actual 'TEST_GET_BINARAY' data // and copy inside existing buffer replacing Xs toReadLength = 11; byte[] testSubBytesWithTargetOffset = Encoding.UTF8.GetBytes("OFFSET GET_BINARAY EXTRA"); @@ -731,7 +734,7 @@ public void TestGetBinary() //** Invalid data offsets **/ try { - // Data offset > data length + // Data offset > data length reader.GetBytes(0, 25, sub, 7, toReadLength); Assert.Fail(); } @@ -754,7 +757,7 @@ public void TestGetBinary() 
//** Invalid buffer offsets **// try { - // Buffer offset > buffer length + // Buffer offset > buffer length reader.GetBytes(0, 6, sub, 25, toReadLength); Assert.Fail(); } @@ -775,7 +778,7 @@ public void TestGetBinary() } //** Null buffer **// - // If null, this method returns the size required of the array in order to fit all + // If null, this method returns the size required of the array in order to fit all // of the specified data. read = reader.GetBytes(0, 6, null, 0, toReadLength); Assert.AreEqual(testBytes.Length, read); @@ -828,7 +831,7 @@ public void TestGetChars() "col2 BINARY", "col3 DOUBLE" }); - + string testChars = "TEST_GET_CHARS"; byte[] testBytes = Encoding.UTF8.GetBytes("TEST_GET_BINARY"); double testDouble = 1.2345678; @@ -849,7 +852,7 @@ public void TestGetChars() IDataReader reader = cmd.ExecuteReader(); ValidateResultFormat(reader); - + Assert.IsTrue(reader.Read()); // Auto type conversion Assert.IsTrue(testChars.Equals(reader.GetValue(0))); @@ -889,7 +892,7 @@ public void TestGetChars() Assert.IsTrue(testSubChars.SequenceEqual(sub)); - // Read subset 'GET_CHARS' from actual 'TEST_GET_CHARS' data + // Read subset 'GET_CHARS' from actual 'TEST_GET_CHARS' data // and copy inside existing buffer replacing Xs char[] testSubCharsWithTargetOffset = "OFFSET GET_CHARS EXTRA".ToArray(); toReadLength = 9; @@ -906,7 +909,7 @@ public void TestGetChars() //** Invalid data offsets **// try { - // Data offset > data length + // Data offset > data length reader.GetChars(0, 25, sub, 7, toReadLength); Assert.Fail(); } @@ -929,7 +932,7 @@ public void TestGetChars() //** Invalid buffer offsets **// try { - // Buffer offset > buffer length + // Buffer offset > buffer length reader.GetChars(0, 6, sub, 25, toReadLength); Assert.Fail(); } @@ -950,7 +953,7 @@ public void TestGetChars() } //** Null buffer **// - // If null, this method returns the size required of the array in order to fit all + // If null, this method returns the size required of the array in order to 
fit all // of the specified data. read = reader.GetChars(0, 6, null, 0, toReadLength); Assert.AreEqual(testChars.Length, read); @@ -1016,7 +1019,7 @@ public void TestGetStream() "col2 BINARY", "col3 DOUBLE" }); - + string testChars = "TEST_GET_CHARS"; byte[] testBytes = Encoding.UTF8.GetBytes("TEST_GET_BINARY"); double testDouble = 1.2345678; @@ -1037,7 +1040,7 @@ public void TestGetStream() DbDataReader reader = (DbDataReader) cmd.ExecuteReader(); ValidateResultFormat(reader); - + Assert.IsTrue(reader.Read()); // Auto type conversion @@ -1087,9 +1090,9 @@ public void TestGetValueIndexOutOfBound() IDbCommand cmd = conn.CreateCommand(); cmd.CommandText = "select 1"; IDataReader reader = cmd.ExecuteReader(); - + ValidateResultFormat(reader); - + Assert.IsTrue(reader.Read()); try @@ -1128,7 +1131,7 @@ public void TestBasicDataReader() using (IDataReader reader = cmd.ExecuteReader()) { ValidateResultFormat(reader); - + Assert.AreEqual(2, reader.FieldCount); Assert.AreEqual(0, reader.Depth); Assert.IsTrue(((SnowflakeDbDataReader)reader).HasRows); @@ -1151,7 +1154,7 @@ public void TestBasicDataReader() reader.Close(); Assert.IsTrue(reader.IsClosed); - + try { reader.Read(); @@ -1199,7 +1202,7 @@ public void TestReadOutNullVal() using (IDataReader reader = cmd.ExecuteReader()) { ValidateResultFormat(reader); - + reader.Read(); object nullVal = reader.GetValue(0); Assert.AreEqual(DBNull.Value, nullVal); @@ -1211,7 +1214,7 @@ public void TestReadOutNullVal() } CloseConnection(conn); - } + } } [Test] @@ -1238,9 +1241,9 @@ public void TestGetGuid() cmd.CommandText = $"select * from {TableName}"; IDataReader reader = cmd.ExecuteReader(); - + ValidateResultFormat(reader); - + Assert.IsTrue(reader.Read()); Assert.AreEqual(val, reader.GetGuid(0)); @@ -1302,7 +1305,7 @@ public void TestCopyCmdResultSet() cmd.CommandText = $"create or replace stage {stageName}"; cmd.ExecuteNonQuery(); - cmd.CommandText = $"copy into {TableName} from @{stageName}"; + cmd.CommandText = $"copy into 
{TableName} from @{stageName}"; using (var rdr = cmd.ExecuteReader()) { // Can read the first row @@ -1433,7 +1436,7 @@ public void TestResultSetMetadata() CloseConnection(conn); } } - + [Test] public void TestHasRows() { @@ -1441,9 +1444,9 @@ public void TestHasRows() { DbCommand cmd = conn.CreateCommand(); cmd.CommandText = "select 1 where 1=2"; - + DbDataReader reader = cmd.ExecuteReader(); - + ValidateResultFormat(reader); Assert.IsFalse(reader.HasRows); @@ -1451,7 +1454,7 @@ public void TestHasRows() CloseConnection(conn); } } - + [Test] public void TestHasRowsMultiStatement() { @@ -1460,15 +1463,15 @@ public void TestHasRowsMultiStatement() DbCommand cmd = conn.CreateCommand(); cmd.CommandText = "select 1;" + "select 1 where 1=2;" + - "select 1;" + + "select 1;" + "select 1 where 1=2;"; - + DbParameter param = cmd.CreateParameter(); param.ParameterName = "MULTI_STATEMENT_COUNT"; param.DbType = DbType.Int16; param.Value = 4; cmd.Parameters.Add(param); - + DbDataReader reader = cmd.ExecuteReader(); // multi statements are always returned in JSON @@ -1483,7 +1486,7 @@ public void TestHasRowsMultiStatement() // select 1 where 1=2 Assert.IsFalse(reader.HasRows); reader.NextResult(); - + // select 1 Assert.IsTrue(reader.HasRows); reader.Read(); @@ -1494,12 +1497,12 @@ public void TestHasRowsMultiStatement() Assert.IsFalse(reader.HasRows); reader.NextResult(); Assert.IsFalse(reader.HasRows); - + reader.Close(); CloseConnection(conn); } } - + [Test] [TestCase("99")] // Int8 [TestCase("9.9")] // Int8 + scale @@ -1564,23 +1567,23 @@ public void TestTimestampTz(string testValue, int scale) using (var conn = CreateAndOpenConnection()) { DbCommand cmd = conn.CreateCommand(); - + cmd.CommandText = $"select '{testValue}'::TIMESTAMP_TZ({scale})"; using (SnowflakeDbDataReader reader = (SnowflakeDbDataReader)cmd.ExecuteReader()) { ValidateResultFormat(reader); reader.Read(); - + var expectedValue = DateTimeOffset.Parse(testValue); Assert.AreEqual(expectedValue, 
reader.GetValue(0)); } - + CloseConnection(conn); } } - + [Test] [TestCase("2019-01-01 12:12:12.1234567 +0500", 7)] [TestCase("2019-01-01 12:12:12.1234567 +1400", 7)] @@ -1591,23 +1594,23 @@ public void TestTimestampLtz(string testValue, int scale) using (var conn = CreateAndOpenConnection()) { DbCommand cmd = conn.CreateCommand(); - + cmd.CommandText = $"select '{testValue}'::TIMESTAMP_LTZ({scale})"; using (SnowflakeDbDataReader reader = (SnowflakeDbDataReader)cmd.ExecuteReader()) { ValidateResultFormat(reader); reader.Read(); - + var expectedValue = DateTimeOffset.Parse(testValue).ToLocalTime(); Assert.AreEqual(expectedValue, reader.GetValue(0)); } - + CloseConnection(conn); } } - + [Test] [TestCase("2019-01-01 12:12:12.1234567", 7)] [TestCase("0001-01-01 00:00:00.0000000", 9)] @@ -1617,19 +1620,19 @@ public void TestTimestampNtz(string testValue, int scale) using (var conn = CreateAndOpenConnection()) { DbCommand cmd = conn.CreateCommand(); - + cmd.CommandText = $"select '{testValue}'::TIMESTAMP_NTZ({scale})"; using (SnowflakeDbDataReader reader = (SnowflakeDbDataReader)cmd.ExecuteReader()) { ValidateResultFormat(reader); reader.Read(); - + var expectedValue = DateTime.Parse(testValue); Assert.AreEqual(expectedValue, reader.GetValue(0)); } - + CloseConnection(conn); } } diff --git a/Snowflake.Data.Tests/IntegrationTests/StructuredTypesWithEmbeddedUnstructuredIT.cs b/Snowflake.Data.Tests/IntegrationTests/StructuredTypesWithEmbeddedUnstructuredIT.cs index 6f88126d9..784aa4132 100644 --- a/Snowflake.Data.Tests/IntegrationTests/StructuredTypesWithEmbeddedUnstructuredIT.cs +++ b/Snowflake.Data.Tests/IntegrationTests/StructuredTypesWithEmbeddedUnstructuredIT.cs @@ -312,13 +312,55 @@ public void TestSelectDateTime(string dbValue, string dbType, DateTime? 
expected internal static IEnumerable DateTimeConversionCases() { - yield return new object[] { "2024-07-11 14:20:05", SFDataType.TIMESTAMP_NTZ.ToString(), DateTime.Parse("2024-07-11 14:20:05").ToUniversalTime(), DateTime.Parse("2024-07-11 14:20:05").ToUniversalTime() }; - yield return new object[] { "2024-07-11 14:20:05 +5:00", SFDataType.TIMESTAMP_TZ.ToString(), null, DateTime.Parse("2024-07-11 09:20:05").ToUniversalTime() }; - yield return new object[] {"2024-07-11 14:20:05 -7:00", SFDataType.TIMESTAMP_LTZ.ToString(), null, DateTime.Parse("2024-07-11 21:20:05").ToUniversalTime() }; - yield return new object[] { "2024-07-11", SFDataType.DATE.ToString(), DateTime.Parse("2024-07-11").ToUniversalTime(), DateTime.Parse("2024-07-11").ToUniversalTime() }; - yield return new object[] { "2024-07-11 14:20:05.123456789", SFDataType.TIMESTAMP_NTZ.ToString(), DateTime.Parse("2024-07-11 14:20:05.1234567").ToUniversalTime(), DateTime.Parse("2024-07-11 14:20:05.1234568").ToUniversalTime()}; - yield return new object[] { "2024-07-11 14:20:05.123456789 +5:00", SFDataType.TIMESTAMP_TZ.ToString(), null, DateTime.Parse("2024-07-11 09:20:05.1234568").ToUniversalTime() }; - yield return new object[] {"2024-07-11 14:20:05.123456789 -7:00", SFDataType.TIMESTAMP_LTZ.ToString(), null, DateTime.Parse("2024-07-11 21:20:05.1234568").ToUniversalTime() }; + yield return new object[] + { + "2024-07-11 14:20:05", + SFDataType.TIMESTAMP_NTZ.ToString(), + DateTime.Parse("2024-07-11 14:20:05"), + DateTime.Parse("2024-07-11 14:20:05") // kind -> Unspecified + }; + yield return new object[] + { + "2024-07-11 14:20:05 +5:00", + SFDataType.TIMESTAMP_TZ.ToString(), + null, + DateTime.SpecifyKind(DateTime.Parse("2024-07-11 09:20:05"), DateTimeKind.Utc) + }; + yield return new object[] + { + "2024-07-11 14:20:05 -7:00", + SFDataType.TIMESTAMP_LTZ.ToString(), + null, + DateTime.Parse("2024-07-11 21:20:05").ToLocalTime() + }; + yield return new object[] + { + "2024-07-11", + SFDataType.DATE.ToString(), + 
DateTime.SpecifyKind(DateTime.Parse("2024-07-11"), DateTimeKind.Unspecified), + DateTime.SpecifyKind(DateTime.Parse("2024-07-11"), DateTimeKind.Unspecified) + }; + yield return new object[] + { + "2024-07-11 14:20:05.123456789", + SFDataType.TIMESTAMP_NTZ.ToString(), + DateTime.Parse("2024-07-11 14:20:05.1234567"), + DateTime.Parse("2024-07-11 14:20:05.1234568") + }; + yield return new object[] + { + "2024-07-11 14:20:05.123456789 +5:00", + SFDataType.TIMESTAMP_TZ.ToString(), + null, + DateTime.SpecifyKind(DateTime.Parse("2024-07-11 09:20:05.1234568"), DateTimeKind.Utc) + }; + yield return new object[] + { + "2024-07-11 14:20:05.123456789 -7:00", + SFDataType.TIMESTAMP_LTZ.ToString(), + null, + DateTime.Parse("2024-07-11 21:20:05.1234568").ToLocalTime() + }; } [Test] @@ -354,13 +396,55 @@ public void TestSelectDateTimeOffset(string dbValue, string dbType, DateTime? ex internal static IEnumerable DateTimeOffsetConversionCases() { - yield return new object[] {"2024-07-11 14:20:05", SFDataType.TIMESTAMP_NTZ.ToString(), DateTime.Parse("2024-07-11 14:20:05").ToUniversalTime(), DateTimeOffset.Parse("2024-07-11 14:20:05Z")}; - yield return new object[] {"2024-07-11 14:20:05 +5:00", SFDataType.TIMESTAMP_TZ.ToString(), null, DateTimeOffset.Parse("2024-07-11 14:20:05 +5:00")}; - yield return new object[] {"2024-07-11 14:20:05 -7:00", SFDataType.TIMESTAMP_LTZ.ToString(), null, DateTimeOffset.Parse("2024-07-11 14:20:05 -7:00")}; - yield return new object[] {"2024-07-11", SFDataType.DATE.ToString(), DateTime.Parse("2024-07-11").ToUniversalTime(), DateTimeOffset.Parse("2024-07-11Z")}; - yield return new object[] {"2024-07-11 14:20:05.123456789", SFDataType.TIMESTAMP_NTZ.ToString(), DateTime.Parse("2024-07-11 14:20:05.1234567").ToUniversalTime(), DateTimeOffset.Parse("2024-07-11 14:20:05.1234568Z")}; - yield return new object[] {"2024-07-11 14:20:05.123456789 +5:00", SFDataType.TIMESTAMP_TZ.ToString(), null, DateTimeOffset.Parse("2024-07-11 14:20:05.1234568 +5:00")}; - yield 
return new object[] {"2024-07-11 14:20:05.123456789 -7:00", SFDataType.TIMESTAMP_LTZ.ToString(), null, DateTimeOffset.Parse("2024-07-11 14:20:05.1234568 -7:00")}; + yield return new object[] + { + "2024-07-11 14:20:05", + SFDataType.TIMESTAMP_NTZ.ToString(), + DateTime.Parse("2024-07-11 14:20:05"), + DateTimeOffset.Parse("2024-07-11 14:20:05Z") + }; + yield return new object[] + { + "2024-07-11 14:20:05 +5:00", + SFDataType.TIMESTAMP_TZ.ToString(), + null, + DateTimeOffset.Parse("2024-07-11 14:20:05 +5:00") + }; + yield return new object[] + { + "2024-07-11 14:20:05 -7:00", + SFDataType.TIMESTAMP_LTZ.ToString(), + null, + DateTimeOffset.Parse("2024-07-11 14:20:05 -7:00").ToLocalTime() + }; + yield return new object[] + { + "2024-07-11", + SFDataType.DATE.ToString(), + DateTime.SpecifyKind(DateTime.Parse("2024-07-11"), DateTimeKind.Unspecified), + DateTimeOffset.Parse("2024-07-11Z") + }; + yield return new object[] + { + "2024-07-11 14:20:05.123456789", + SFDataType.TIMESTAMP_NTZ.ToString(), + DateTime.Parse("2024-07-11 14:20:05.1234567"), + DateTimeOffset.Parse("2024-07-11 14:20:05.1234568Z") + }; + yield return new object[] + { + "2024-07-11 14:20:05.123456789 +5:00", + SFDataType.TIMESTAMP_TZ.ToString(), + null, + DateTimeOffset.Parse("2024-07-11 14:20:05.1234568 +5:00") + }; + yield return new object[] + { + "2024-07-11 14:20:05.123456789 -7:00", + SFDataType.TIMESTAMP_LTZ.ToString(), + null, + DateTimeOffset.Parse("2024-07-11 14:20:05.1234568 -7:00") + }; } private TimeZoneInfo GetTimeZone(SnowflakeDbConnection connection) diff --git a/Snowflake.Data.Tests/UnitTests/StructuredTypesTest.cs b/Snowflake.Data.Tests/UnitTests/StructuredTypesTest.cs index a10b4660c..0a91fdab5 100644 --- a/Snowflake.Data.Tests/UnitTests/StructuredTypesTest.cs +++ b/Snowflake.Data.Tests/UnitTests/StructuredTypesTest.cs @@ -23,24 +23,29 @@ public void TestTimeConversions(string value, string sfTypeString, object expect // assert Assert.AreEqual(expected, result); + + if (csharpType == 
typeof(DateTime)) + { + Assert.AreEqual(((DateTime)expected).Kind,((DateTime)result).Kind); + } } internal static IEnumerable TimeConversionCases() { - yield return new object[] {"2024-07-11 14:20:05", SFDataType.TIMESTAMP_NTZ.ToString(), DateTime.Parse("2024-07-11 14:20:05").ToUniversalTime()}; + yield return new object[] {"2024-07-11 14:20:05", SFDataType.TIMESTAMP_NTZ.ToString(), DateTime.Parse("2024-07-11 14:20:05")}; yield return new object[] {"2024-07-11 14:20:05", SFDataType.TIMESTAMP_NTZ.ToString(), DateTimeOffset.Parse("2024-07-11 14:20:05Z")}; yield return new object[] {"2024-07-11 14:20:05 +5:00", SFDataType.TIMESTAMP_TZ.ToString(), DateTimeOffset.Parse("2024-07-11 14:20:05 +5:00")}; - yield return new object[] {"2024-07-11 14:20:05 +5:00", SFDataType.TIMESTAMP_TZ.ToString(), DateTime.Parse("2024-07-11 09:20:05").ToUniversalTime()}; + yield return new object[] {"2024-07-11 14:20:05 +5:00", SFDataType.TIMESTAMP_TZ.ToString(), DateTime.SpecifyKind(DateTime.Parse("2024-07-11 09:20:05"), DateTimeKind.Utc)}; yield return new object[] {"2024-07-11 14:20:05 -7:00", SFDataType.TIMESTAMP_LTZ.ToString(), DateTimeOffset.Parse("2024-07-11 14:20:05 -7:00")}; - yield return new object[] {"2024-07-11 14:20:05 -7:00", SFDataType.TIMESTAMP_LTZ.ToString(), DateTime.Parse("2024-07-11 21:20:05").ToUniversalTime()}; + yield return new object[] {"2024-07-11 14:20:05 -7:00", SFDataType.TIMESTAMP_LTZ.ToString(), DateTime.Parse("2024-07-11 21:20:05").ToLocalTime()}; yield return new object[] {"14:20:05", SFDataType.TIME.ToString(), TimeSpan.Parse("14:20:05")}; yield return new object[] {"2024-07-11", SFDataType.DATE.ToString(), DateTime.Parse("2024-07-11")}; - yield return new object[] {"2024-07-11 14:20:05.123456", SFDataType.TIMESTAMP_NTZ.ToString(), DateTime.Parse("2024-07-11 14:20:05.123456").ToUniversalTime()}; + yield return new object[] {"2024-07-11 14:20:05.123456", SFDataType.TIMESTAMP_NTZ.ToString(), DateTime.Parse("2024-07-11 14:20:05.123456")}; yield return new 
object[] {"2024-07-11 14:20:05.123456", SFDataType.TIMESTAMP_NTZ.ToString(), DateTimeOffset.Parse("2024-07-11 14:20:05.123456Z")}; yield return new object[] {"2024-07-11 14:20:05.123456 +5:00", SFDataType.TIMESTAMP_TZ.ToString(), DateTimeOffset.Parse("2024-07-11 14:20:05.123456 +5:00")}; - yield return new object[] {"2024-07-11 14:20:05.123456 +5:00", SFDataType.TIMESTAMP_TZ.ToString(), DateTime.Parse("2024-07-11 09:20:05.123456").ToUniversalTime()}; + yield return new object[] {"2024-07-11 14:20:05.123456 +5:00", SFDataType.TIMESTAMP_TZ.ToString(), DateTime.SpecifyKind(DateTime.Parse("2024-07-11 09:20:05.123456"), DateTimeKind.Utc)}; yield return new object[] {"2024-07-11 14:20:05.123456 -7:00", SFDataType.TIMESTAMP_LTZ.ToString(), DateTimeOffset.Parse("2024-07-11 14:20:05.123456 -7:00")}; - yield return new object[] {"2024-07-11 14:20:05.123456 -7:00", SFDataType.TIMESTAMP_LTZ.ToString(), DateTime.Parse("2024-07-11 21:20:05.123456").ToUniversalTime()}; + yield return new object[] {"2024-07-11 14:20:05.123456 -7:00", SFDataType.TIMESTAMP_LTZ.ToString(), DateTime.Parse("2024-07-11 21:20:05.123456").ToLocalTime()}; yield return new object[] {"14:20:05.123456", SFDataType.TIME.ToString(), TimeSpan.Parse("14:20:05.123456")}; } } diff --git a/Snowflake.Data/Core/ArrowResultChunk.cs b/Snowflake.Data/Core/ArrowResultChunk.cs index 1616ec42a..85e5de62c 100755 --- a/Snowflake.Data/Core/ArrowResultChunk.cs +++ b/Snowflake.Data/Core/ArrowResultChunk.cs @@ -14,16 +14,16 @@ internal class ArrowResultChunk : BaseResultChunk internal override ResultFormat ResultFormat => ResultFormat.ARROW; private static readonly DateTimeOffset s_epochDate = SFDataConverter.UnixEpoch; - - private static readonly long[] s_powersOf10 = { - 1, - 10, - 100, - 1000, - 10000, - 100000, - 1000000, - 10000000, + + private static readonly long[] s_powersOf10 = { + 1, + 10, + 100, + 1000, + 10000, + 100000, + 1000000, + 10000000, 100000000, 1000000000 }; @@ -62,7 +62,7 @@ public 
ArrowResultChunk(RecordBatch recordBatch) RowCount = recordBatch.Length; ColumnCount = recordBatch.ColumnCount; ChunkIndex = -1; - + ResetTempTables(); } @@ -81,11 +81,11 @@ public void AddRecordBatch(RecordBatch recordBatch) { RecordBatch.Add(recordBatch); } - + internal override void Reset(ExecResponseChunk chunkInfo, int chunkIndex) { base.Reset(chunkInfo, chunkIndex); - + _currentBatchIndex = 0; _currentRecordIndex = -1; RecordBatch.Clear(); @@ -97,7 +97,7 @@ internal override bool Next() { if (_currentBatchIndex >= RecordBatch.Count) return false; - + _currentRecordIndex += 1; if (_currentRecordIndex < RecordBatch[_currentBatchIndex].Length) return true; @@ -149,7 +149,7 @@ public object ExtractCell(int columnIndex, SFDataType srcType, long scale) if (column.IsNull(_currentRecordIndex)) return DBNull.Value; - + switch (srcType) { case SFDataType.FIXED: @@ -170,7 +170,7 @@ public object ExtractCell(int columnIndex, SFDataType srcType, long scale) if (scale == 0) return _short[columnIndex][_currentRecordIndex]; return _short[columnIndex][_currentRecordIndex] / (decimal)s_powersOf10[scale]; - + case Int32Array array: if (_int[columnIndex] == null) _int[columnIndex] = array.Values.ToArray(); @@ -184,7 +184,7 @@ public object ExtractCell(int columnIndex, SFDataType srcType, long scale) if (scale == 0) return _long[columnIndex][_currentRecordIndex]; return _long[columnIndex][_currentRecordIndex] / (decimal)s_powersOf10[scale]; - + case Decimal128Array array: return array.GetValue(_currentRecordIndex); } @@ -210,8 +210,8 @@ public object ExtractCell(int columnIndex, SFDataType srcType, long scale) _int[columnIndex] = ((StringArray)column).ValueOffsets.ToArray(); } return StringArray.DefaultEncoding.GetString( - _byte[columnIndex], - _int[columnIndex][_currentRecordIndex], + _byte[columnIndex], + _int[columnIndex][_currentRecordIndex], _int[columnIndex][_currentRecordIndex + 1] - _int[columnIndex][_currentRecordIndex]); case SFDataType.VECTOR: @@ -250,16 +250,16 @@ 
public object ExtractCell(int columnIndex, SFDataType srcType, long scale) case SFDataType.BINARY: return ((BinaryArray)column).GetBytes(_currentRecordIndex).ToArray(); - + case SFDataType.DATE: if (_int[columnIndex] == null) _int[columnIndex] = ((Date32Array)column).Values.ToArray(); - return SFDataConverter.UnixEpoch.AddTicks(_int[columnIndex][_currentRecordIndex] * TicksPerDay); - + return DateTime.SpecifyKind(SFDataConverter.UnixEpoch.AddTicks(_int[columnIndex][_currentRecordIndex] * TicksPerDay), DateTimeKind.Unspecified); + case SFDataType.TIME: { long value; - + if (column.GetType() == typeof(Int32Array)) { if (_int[columnIndex] == null) @@ -278,7 +278,7 @@ public object ExtractCell(int columnIndex, SFDataType srcType, long scale) value = _long[columnIndex][_currentRecordIndex]; } - + if (scale == 0) return DateTimeOffset.FromUnixTimeSeconds(value).DateTime; if (scale <= 3) @@ -292,7 +292,7 @@ public object ExtractCell(int columnIndex, SFDataType srcType, long scale) var structCol = (StructArray)column; if (_long[columnIndex] == null) _long[columnIndex] = ((Int64Array)structCol.Fields[0]).Values.ToArray(); - + if (structCol.Fields.Count == 2) { if (_int[columnIndex] == null) @@ -309,7 +309,7 @@ public object ExtractCell(int columnIndex, SFDataType srcType, long scale) _fraction[columnIndex] = ((Int32Array)structCol.Fields[1]).Values.ToArray(); if (_int[columnIndex] == null) _int[columnIndex] = ((Int32Array)structCol.Fields[2]).Values.ToArray(); - + var epoch = _long[columnIndex][_currentRecordIndex]; var fraction = _fraction[columnIndex][_currentRecordIndex]; var timezone = _int[columnIndex][_currentRecordIndex]; @@ -331,7 +331,7 @@ public object ExtractCell(int columnIndex, SFDataType srcType, long scale) { if (_long[columnIndex] == null) _long[columnIndex] = ((Int64Array)column).Values.ToArray(); - + var value = _long[columnIndex][_currentRecordIndex]; var epoch = ExtractEpoch(value, scale); var fraction = ExtractFraction(value, scale); @@ -353,7 +353,7 @@ 
public object ExtractCell(int columnIndex, SFDataType srcType, long scale) { if (_long[columnIndex] == null) _long[columnIndex] = ((Int64Array)column).Values.ToArray(); - + var value = _long[columnIndex][_currentRecordIndex]; var epoch = ExtractEpoch(value, scale); var fraction = ExtractFraction(value, scale); @@ -362,7 +362,7 @@ public object ExtractCell(int columnIndex, SFDataType srcType, long scale) } throw new NotSupportedException($"Type {srcType} is not supported."); } - + private long ExtractEpoch(long value, long scale) { return value / s_powersOf10[scale]; diff --git a/Snowflake.Data/Core/ArrowResultSet.cs b/Snowflake.Data/Core/ArrowResultSet.cs index 56a636c4e..a3a6e2628 100755 --- a/Snowflake.Data/Core/ArrowResultSet.cs +++ b/Snowflake.Data/Core/ArrowResultSet.cs @@ -18,7 +18,7 @@ class ArrowResultSet : SFBaseResultSet internal override ResultFormat ResultFormat => ResultFormat.ARROW; private static readonly SFLogger s_logger = SFLoggerFactory.GetLogger(); - + private readonly int _totalChunkCount; private BaseResultChunk _currentChunk; private readonly IChunkDownloader _chunkDownloader; @@ -44,7 +44,7 @@ public ArrowResultSet(QueryExecResponseData responseData, SFStatement sfStatemen isClosed = false; queryId = responseData.queryId; - + ReadChunk(responseData); } catch(Exception ex) @@ -95,21 +95,21 @@ internal override async Task NextAsync() return false; } - + internal override bool Next() { ThrowIfClosed(); if (_currentChunk.Next()) return true; - + if (_totalChunkCount > 0) { s_logger.Debug($"Get next chunk from chunk downloader, chunk: {_currentChunk.ChunkIndex + 1}/{_totalChunkCount}" + $" rows: {_currentChunk.RowCount}, size compressed: {_currentChunk.CompressedSize}," + $" size uncompressed: {_currentChunk.UncompressedSize}"); _currentChunk = Task.Run(async() => await (_chunkDownloader.GetNextChunkAsync()).ConfigureAwait(false)).Result; - + return _currentChunk?.Next() ?? 
false; } @@ -154,21 +154,21 @@ internal override bool Rewind() return false; } - + private object GetObjectInternal(int ordinal) { ThrowIfClosed(); ThrowIfOutOfBounds(ordinal); - + var type = sfResultSetMetaData.GetTypesByIndex(ordinal).Item1; var scale = sfResultSetMetaData.GetScaleByIndex(ordinal); - + var value = ((ArrowResultChunk)_currentChunk).ExtractCell(ordinal, type, (int)scale); return value ?? DBNull.Value; - + } - + internal override object GetValue(int ordinal) { var value = GetObjectInternal(ordinal); @@ -176,7 +176,7 @@ internal override object GetValue(int ordinal) { return value; } - + object obj; checked { @@ -196,6 +196,10 @@ internal override object GetValue(int ordinal) break; case bool ret: obj = ret; break; + case DateTime ret: obj = ret; + break; + case DateTimeOffset ret: obj = ret; + break; default: { var dstType = sfResultSetMetaData.GetCSharpTypeByIndex(ordinal); @@ -217,7 +221,7 @@ internal override bool GetBoolean(int ordinal) { return (bool)GetObjectInternal(ordinal); } - + internal override byte GetByte(int ordinal) { var value = GetObjectInternal(ordinal); @@ -244,7 +248,7 @@ internal override char GetChar(int ordinal) { return ((string)GetObjectInternal(ordinal))[0]; } - + internal override long GetChars(int ordinal, long dataOffset, char[] buffer, int bufferOffset, int length) { return ReadSubset(ordinal, dataOffset, buffer, bufferOffset, length); @@ -303,7 +307,7 @@ internal override double GetDouble(int ordinal) case int ret: return ret; case short ret: return ret; case sbyte ret: return ret; - default: return (double)value; + default: return (double)value; } } @@ -374,7 +378,7 @@ internal override long GetInt64(int ordinal) } } } - + internal override string GetString(int ordinal) { var value = GetObjectInternal(ordinal); @@ -394,14 +398,14 @@ internal override string GetString(int ordinal) return Convert.ToString(value); } - + private void UpdateSessionStatus(QueryExecResponseData responseData) { SFSession session = 
this.sfStatement.SfSession; session.UpdateSessionProperties(responseData); session.UpdateSessionParameterMap(responseData.parameters); } - + private long ReadSubset(int ordinal, long dataOffset, T[] buffer, int bufferOffset, int length) where T : struct { if (dataOffset < 0) @@ -417,7 +421,7 @@ private long ReadSubset(int ordinal, long dataOffset, T[] buffer, int bufferO if (buffer != null && bufferOffset > buffer.Length) { throw new System.ArgumentException( - "Destination buffer is not long enough. Check the buffer offset, length, and the buffer's lower bounds.", + "Destination buffer is not long enough. Check the buffer offset, length, and the buffer's lower bounds.", nameof(buffer)); } @@ -446,14 +450,14 @@ private long ReadSubset(int ordinal, long dataOffset, T[] buffer, int bufferO "Source data is not long enough. Check the data offset, length, and the data's lower bounds.", nameof(dataOffset)); } - + long dataLength = data.Length - dataOffset; long elementsRead = Math.Min(length, dataLength); Array.Copy(data, dataOffset, buffer, bufferOffset, elementsRead); return elementsRead; - + } - + } } diff --git a/Snowflake.Data/Core/Converter/TimeConverter.cs b/Snowflake.Data/Core/Converter/TimeConverter.cs index 3f1252762..7a95de580 100644 --- a/Snowflake.Data/Core/Converter/TimeConverter.cs +++ b/Snowflake.Data/Core/Converter/TimeConverter.cs @@ -12,15 +12,15 @@ public object Convert(string value, SFDataType timestampType, Type fieldType) } if (timestampType == SFDataType.TIMESTAMP_NTZ) { - var dateTimeUtc = DateTime.Parse(value).ToUniversalTime(); + var dateTimeNoTz = DateTime.Parse(value); if (fieldType == typeof(DateTime) || fieldType == typeof(DateTime?)) { - return dateTimeUtc; + return dateTimeNoTz; } if (fieldType == typeof(DateTimeOffset) || fieldType == typeof(DateTimeOffset?)) { - return (DateTimeOffset) dateTimeUtc; + return (DateTimeOffset) DateTime.SpecifyKind(dateTimeNoTz, DateTimeKind.Utc); } throw new StructuredTypesReadingException($"Cannot read 
TIMESTAMP_NTZ into {fieldType} type"); @@ -35,21 +35,21 @@ public object Convert(string value, SFDataType timestampType, Type fieldType) } if (fieldType == typeof(DateTime) || fieldType == typeof(DateTime?)) { - return dateTimeOffset.ToUniversalTime().DateTime.ToUniversalTime(); + return dateTimeOffset.UtcDateTime; } throw new StructuredTypesReadingException($"Cannot read TIMESTAMP_TZ into {fieldType} type"); } if (timestampType == SFDataType.TIMESTAMP_LTZ) { - var dateTimeOffset = DateTimeOffset.Parse(value); + var dateTimeOffsetLocal = DateTimeOffset.Parse(value).ToLocalTime(); if (fieldType == typeof(DateTimeOffset) || fieldType == typeof(DateTimeOffset?)) { - return dateTimeOffset; + return dateTimeOffsetLocal; } if (fieldType == typeof(DateTime) || fieldType == typeof(DateTime?)) { - return dateTimeOffset.UtcDateTime; + return dateTimeOffsetLocal.LocalDateTime; } throw new StructuredTypesReadingException($"Cannot read TIMESTAMP_LTZ into {fieldType} type"); } @@ -63,13 +63,14 @@ public object Convert(string value, SFDataType timestampType, Type fieldType) } if (timestampType == SFDataType.DATE) { - if (fieldType == typeof(DateTimeOffset) || fieldType == typeof(DateTimeOffset?)) + var dateTime = DateTime.Parse(value); + if (fieldType == typeof(DateTime) || fieldType == typeof(DateTime?)) { - return DateTimeOffset.Parse(value).ToUniversalTime(); + return dateTime; } - if (fieldType == typeof(DateTime) || fieldType == typeof(DateTime?)) + if (fieldType == typeof(DateTimeOffset) || fieldType == typeof(DateTimeOffset?)) { - return DateTime.Parse(value).ToUniversalTime(); + return (DateTimeOffset) DateTime.SpecifyKind(dateTime, DateTimeKind.Utc); } throw new StructuredTypesReadingException($"Cannot not read DATE into {fieldType} type"); } diff --git a/Snowflake.Data/Core/SFDataConverter.cs b/Snowflake.Data/Core/SFDataConverter.cs index a415e5058..90e956314 100755 --- a/Snowflake.Data/Core/SFDataConverter.cs +++ b/Snowflake.Data/Core/SFDataConverter.cs @@ -152,12 
+152,12 @@ private static DateTime ConvertToDateTime(UTF8Buffer srcVal, SFDataType srcType) { case SFDataType.DATE: long srcValLong = FastParser.FastParseInt64(srcVal.Buffer, srcVal.offset, srcVal.length); - return UnixEpoch.AddDays(srcValLong); + return DateTime.SpecifyKind(UnixEpoch.AddDays(srcValLong), DateTimeKind.Unspecified); case SFDataType.TIME: case SFDataType.TIMESTAMP_NTZ: var tickDiff = GetTicksFromSecondAndNanosecond(srcVal); - return UnixEpoch.AddTicks(tickDiff); + return DateTime.SpecifyKind(UnixEpoch.AddTicks(tickDiff), DateTimeKind.Unspecified); default: throw new SnowflakeDbException(SFError.INVALID_DATA_CONVERSION, srcVal, srcType, typeof(DateTime));