@@ -4,6 +4,7 @@
 
 import collections
 import datetime
+import decimal
 import operator
 
 from google.cloud.bigquery import schema
@@ -46,6 +47,29 @@ def test_dataframe_to_bigquery_fields_w_named_index(module_under_test):
                     ),
                 ],
             ),
+            # Need to fall back to Arrow to avoid data loss and to
+            # disambiguate NUMERIC from BIGNUMERIC. We don't want to pick
+            # too small a type and lose precision. See:
+            # https://github.com/googleapis/python-bigquery/issues/1650
+            #
+            (
+                "bignumeric_column",
+                [
+                    # Start with a lower-precision Decimal to make sure we
+                    # aren't trying to determine the type from just one value.
+                    decimal.Decimal("1.25"),
+                    decimal.Decimal("0.1234567891"),
+                ],
+            ),
+            (
+                "numeric_column",
+                [
+                    # Minimum value greater than 0 that can be handled: 1e-9
+                    # https://cloud.google.com/bigquery/docs/reference/standard-sql/data-types#numeric_types
+                    decimal.Decimal("0.000000001"),
+                    decimal.Decimal("-0.000000001"),
+                ],
+            ),
         ]
     )
     dataframe = pandas.DataFrame(df_data).set_index("str_index", drop=True)
@@ -64,6 +88,8 @@ def test_dataframe_to_bigquery_fields_w_named_index(module_under_test):
         schema.SchemaField("boolean_column", "BOOLEAN", "NULLABLE"),
         schema.SchemaField("datetime_column", "DATETIME", "NULLABLE"),
         schema.SchemaField("timestamp_column", "TIMESTAMP", "NULLABLE"),
+        schema.SchemaField("bignumeric_column", "BIGNUMERIC", "NULLABLE"),
+        schema.SchemaField("numeric_column", "NUMERIC", "NULLABLE"),
     )
     assert returned_schema == expected_schema
 
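The new test columns exercise the Arrow fallback described in the comments above. As a minimal sketch of the idea (not the library's actual implementation), one can let pyarrow infer a common decimal type across every value in a column and compare its precision and scale against NUMERIC's limits (at most 29 integer digits and 9 fractional digits); anything wider falls through to BIGNUMERIC. The helper name `bigquery_decimal_type` below is hypothetical:

```python
import decimal

import pyarrow


def bigquery_decimal_type(values):
    # Hypothetical helper, not part of pandas-gbq or google-cloud-bigquery.
    # pyarrow scans all values and infers a common decimal128(precision, scale),
    # so a low-precision first value like Decimal("1.25") can't skew the result.
    arrow_type = pyarrow.array(values).type
    integer_digits = arrow_type.precision - arrow_type.scale
    # BigQuery NUMERIC holds at most 29 integer digits and 9 fractional
    # digits; anything wider needs BIGNUMERIC to avoid losing precision.
    if arrow_type.scale <= 9 and integer_digits <= 29:
        return "NUMERIC"
    return "BIGNUMERIC"


# 0.1234567891 needs scale 10, one more fractional digit than NUMERIC allows.
bignumeric_values = [decimal.Decimal("1.25"), decimal.Decimal("0.1234567891")]
assert bigquery_decimal_type(bignumeric_values) == "BIGNUMERIC"

# 1e-9 is exactly NUMERIC's smallest positive value (scale 9), so it fits.
numeric_values = [decimal.Decimal("0.000000001"), decimal.Decimal("-0.000000001")]
assert bigquery_decimal_type(numeric_values) == "NUMERIC"
```

Note that `Decimal("1.25")` on its own would fit NUMERIC comfortably; the test deliberately leads with it so that a detector looking at only the first value would pick the wrong type, which is exactly the failure mode the comment about "just one value" guards against.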