Skip to content
This repository was archived by the owner on Mar 6, 2026. It is now read-only.

Commit e26d879

Browse files
authored
chore: sync v3 with master (#851)
(no issue) Just a regular update of the `v3` branch.
1 parent 9319eb1 commit e26d879

File tree

11 files changed

+95
-30
lines changed

11 files changed

+95
-30
lines changed

.github/CODEOWNERS

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,4 +8,4 @@
88
* @googleapis/api-bigquery @googleapis/yoshi-python
99

1010
# The python-samples-reviewers team is the default owner for samples changes
11-
/samples/ @googleapis/python-samples-owners
11+
/samples/ @googleapis/api-bigquery @googleapis/python-samples-owners

.github/sync-repo-settings.yaml

Lines changed: 14 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,20 @@
33
branchProtectionRules:
44
# Identifies the protection rule pattern. Name of the branch to be protected.
55
# Defaults to `master`
6-
- pattern: '{master,v3}'
6+
- pattern: master
7+
requiresCodeOwnerReviews: true
8+
requiresStrictStatusChecks: true
9+
requiredStatusCheckContexts:
10+
- 'Kokoro'
11+
- 'Kokoro snippets-3.8'
12+
- 'cla/google'
13+
- 'Samples - Lint'
14+
- 'Samples - Python 3.6'
15+
- 'Samples - Python 3.7'
16+
- 'Samples - Python 3.8'
17+
- pattern: v3
18+
requiresCodeOwnerReviews: true
19+
requiresStrictStatusChecks: true
720
requiredStatusCheckContexts:
821
- 'Kokoro'
922
- 'Kokoro snippets-3.8'

CHANGELOG.md

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,6 +5,20 @@
55
[1]: https://pypi.org/project/google-cloud-bigquery/#history
66

77

8+
### [2.23.2](https://www.github.com/googleapis/python-bigquery/compare/v2.23.1...v2.23.2) (2021-07-29)
9+
10+
11+
### Dependencies
12+
13+
* expand pyarrow pins to support 5.x releases ([#833](https://www.github.com/googleapis/python-bigquery/issues/833)) ([80e3a61](https://www.github.com/googleapis/python-bigquery/commit/80e3a61c60419fb19b70b664c6415cd01ba82f5b))
14+
15+
### [2.23.1](https://www.github.com/googleapis/python-bigquery/compare/v2.23.0...v2.23.1) (2021-07-28)
16+
17+
18+
### Bug Fixes
19+
20+
* `insert_rows()` accepts float column values as strings again ([#824](https://www.github.com/googleapis/python-bigquery/issues/824)) ([d9378af](https://www.github.com/googleapis/python-bigquery/commit/d9378af13add879118a1d004529b811f72c325d6))
21+
822
## [2.23.0](https://www.github.com/googleapis/python-bigquery/compare/v2.22.1...v2.23.0) (2021-07-27)
923

1024

google/cloud/bigquery/_helpers.py

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919
import decimal
2020
import math
2121
import re
22+
from typing import Union
2223

2324
from google.cloud._helpers import UTC
2425
from google.cloud._helpers import _date_from_iso8601_date
@@ -312,14 +313,15 @@ def _int_to_json(value):
312313
return value
313314

314315

315-
def _float_to_json(value):
316+
def _float_to_json(value) -> Union[None, str, float]:
316317
"""Coerce 'value' to a JSON-compatible representation."""
317318
if value is None:
318319
return None
319-
elif math.isnan(value) or math.isinf(value):
320-
return str(value)
321-
else:
322-
return float(value)
320+
321+
if isinstance(value, str):
322+
value = float(value)
323+
324+
return str(value) if (math.isnan(value) or math.isinf(value)) else float(value)
323325

324326

325327
def _decimal_to_json(value):

google/cloud/bigquery/version.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,4 +12,4 @@
1212
# See the License for the specific language governing permissions and
1313
# limitations under the License.
1414

15-
__version__ = "2.23.0"
15+
__version__ = "2.23.2"

samples/geography/requirements.txt

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
geojson==2.5.0
2-
google-cloud-bigquery==2.22.1
3-
google-cloud-bigquery-storage==2.6.0
2+
google-cloud-bigquery==2.23.2
3+
google-cloud-bigquery-storage==2.6.2
44
Shapely==1.7.1
Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,3 @@
1-
google-cloud-testutils==0.3.0
1+
google-cloud-testutils==1.0.0
22
pytest==6.2.4
33
mock==4.0.3

samples/snippets/requirements.txt

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,12 @@
1-
google-cloud-bigquery==2.22.1
2-
google-cloud-bigquery-storage==2.6.0
3-
google-auth-oauthlib==0.4.4
1+
google-cloud-bigquery==2.23.2
2+
google-cloud-bigquery-storage==2.6.2
3+
google-auth-oauthlib==0.4.5
44
grpcio==1.39.0
55
ipython==7.16.1; python_version < '3.7'
66
ipython==7.17.0; python_version >= '3.7'
77
matplotlib==3.3.4; python_version < '3.7'
88
matplotlib==3.4.1; python_version >= '3.7'
99
pandas==1.1.5; python_version < '3.7'
1010
pandas==1.2.0; python_version >= '3.7'
11-
pyarrow==4.0.1
11+
pyarrow==5.0.0
1212
pytz==2021.1

setup.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -43,7 +43,7 @@
4343
"google-resumable-media >= 0.6.0, < 3.0dev",
4444
"packaging >= 14.3",
4545
"protobuf >= 3.12.0",
46-
"pyarrow >= 3.0.0, < 5.0dev",
46+
"pyarrow >= 3.0.0, < 6.0dev",
4747
"requests >= 2.18.0, < 3.0.0dev",
4848
]
4949
extras = {

tests/system/test_pandas.py

Lines changed: 26 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -21,6 +21,7 @@
2121
import io
2222
import operator
2323

24+
import google.api_core.retry
2425
import pkg_resources
2526
import pytest
2627
import pytz
@@ -37,6 +38,10 @@
3738
PANDAS_INT64_VERSION = pkg_resources.parse_version("1.0.0")
3839

3940

41+
class MissingDataError(Exception):
42+
pass
43+
44+
4045
def test_load_table_from_dataframe_w_automatic_schema(bigquery_client, dataset_id):
4146
"""Test that a DataFrame with dtypes that map well to BigQuery types
4247
can be uploaded without specifying a schema.
@@ -657,27 +662,34 @@ def test_insert_rows_from_dataframe(bigquery_client, dataset_id):
657662
)
658663
for errors in chunk_errors:
659664
assert not errors
660-
661-
# Use query to fetch rows instead of listing directly from the table so
662-
# that we get values from the streaming buffer.
663-
rows = list(
664-
bigquery_client.query(
665-
"SELECT * FROM `{}.{}.{}`".format(
666-
table.project, table.dataset_id, table.table_id
667-
)
668-
)
669-
)
670-
671-
sorted_rows = sorted(rows, key=operator.attrgetter("int_col"))
672-
row_tuples = [r.values() for r in sorted_rows]
673665
expected = [
674666
# Pandas often represents NULL values as NaN. Convert to None for
675667
# easier comparison.
676668
tuple(None if col != col else col for col in data_row)
677669
for data_row in dataframe.itertuples(index=False)
678670
]
679671

680-
assert len(row_tuples) == len(expected)
672+
# Use query to fetch rows instead of listing directly from the table so
673+
# that we get values from the streaming buffer "within a few seconds".
674+
# https://cloud.google.com/bigquery/streaming-data-into-bigquery#dataavailability
675+
@google.api_core.retry.Retry(
676+
predicate=google.api_core.retry.if_exception_type(MissingDataError)
677+
)
678+
def get_rows():
679+
rows = list(
680+
bigquery_client.query(
681+
"SELECT * FROM `{}.{}.{}`".format(
682+
table.project, table.dataset_id, table.table_id
683+
)
684+
)
685+
)
686+
if len(rows) != len(expected):
687+
raise MissingDataError()
688+
return rows
689+
690+
rows = get_rows()
691+
sorted_rows = sorted(rows, key=operator.attrgetter("int_col"))
692+
row_tuples = [r.values() for r in sorted_rows]
681693

682694
for row, expected_row in zip(row_tuples, expected):
683695
assert (

0 commit comments

Comments
 (0)