Merged
Changes from 9 commits
7 changes: 7 additions & 0 deletions cloudbuild.yaml
@@ -41,3 +41,10 @@ steps:
- 'NOX_SESSION=integration_bigquery'
- 'PROJECT_ID=pso-kokoro-resources'
waitFor: ['-']
- id: integration_spanner
name: 'gcr.io/pso-kokoro-resources/python-multi'
args: ['bash', './ci/build.sh']
env:
- 'NOX_SESSION=integration_spanner'
- 'PROJECT_ID=pso-kokoro-resources'
waitFor: ['-']
16 changes: 16 additions & 0 deletions noxfile.py
@@ -137,3 +137,19 @@ def integration_bigquery(session):
raise Exception("Expected Env Var: %s" % env_var)

session.run("pytest", test_path, *session.posargs)


@nox.session(python=PYTHON_VERSIONS, venv_backend="venv")
def integration_spanner(session):
"""Run Spanner integration tests.
Ensure Spanner validation is running as expected.
"""
_setup_session_requirements(session, extra_packages=[])

expected_env_vars = ["PROJECT_ID"]
for env_var in expected_env_vars:
if not os.environ.get(env_var, ""):
raise Exception("Expected Env Var: %s" % env_var)

# TODO: Add tests for DVT data sources. See integration_bigquery.
session.run("pytest", "third_party/ibis/ibis_cloud_spanner/tests", *session.posargs)
1 change: 1 addition & 0 deletions requirements.txt
@@ -16,5 +16,6 @@ pyarrow==3.0.0
pydata-google-auth==1.1.0
google-cloud-bigquery==2.7.0
google-cloud-bigquery-storage==2.2.1
google-cloud-spanner==3.1.0
setuptools>=34.0.0
jellyfish==0.8.2
2 changes: 2 additions & 0 deletions third_party/ibis/ibis_cloud_spanner/__init__.py
@@ -0,0 +1,2 @@


70 changes: 70 additions & 0 deletions third_party/ibis/ibis_cloud_spanner/api.py
@@ -0,0 +1,70 @@
# Copyright 2021 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.


"""CloudScanner public API."""
Collaborator — suggested change:
-"""CloudScanner public API."""
+"""Cloud Spanner public API."""

from third_party.ibis.ibis_cloud_spanner.client import CloudSpannerClient
from third_party.ibis.ibis_cloud_spanner.compiler import dialect

import google.cloud.spanner # noqa: F401, fail early if spanner is missing
import ibis.common.exceptions as com

__all__ = ("compile", "connect", "verify")


def compile(expr, params=None):
"""Compile an expression for Cloud Spanner.

Returns
-------
compiled : str

See Also
--------
ibis.expr.types.Expr.compile

"""
from third_party.ibis.ibis_cloud_spanner.compiler import to_sql

return to_sql(expr, dialect.make_context(params=params))


def verify(expr, params=None):
"""Check if an expression can be compiled using Cloud Spanner."""
try:
compile(expr, params=params)
return True
except com.TranslationError:
return False


def connect(instance_id, database_id) -> CloudSpannerClient:
"""Create a CloudSpannerClient for use with Ibis.

Parameters
----------
instance_id : str
A Cloud Spanner Instance id.
database_id : str
A database id inside the Cloud Spanner Instance.

Returns
-------
CloudSpannerClient

"""

return CloudSpannerClient(instance_id=instance_id, database_id=database_id)
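For illustration, a minimal usage sketch of the entry points above; the instance and database IDs are placeholders, and `client.table()` assumes `CloudSpannerClient` follows the usual Ibis client interface (it is not shown in this diff):

```python
# Usage sketch for the Cloud Spanner Ibis entry points defined above.
# Placeholders: "my-instance", "my-database", "my_table".
# client.table() is assumed from the standard Ibis client interface and
# is not part of this diff.
from third_party.ibis.ibis_cloud_spanner import api

client = api.connect(instance_id="my-instance", database_id="my-database")

table = client.table("my_table")   # assumed Ibis-style table accessor
expr = table.limit(10)

if api.verify(expr):               # True when the expression can be compiled
    print(api.compile(expr))       # Cloud Spanner SQL for the expression
```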