diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Table.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Table.java
index d76692635508..0a980eb21df4 100644
--- a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Table.java
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Table.java
@@ -139,6 +139,13 @@ public Table build() {
/**
* Checks if this table exists.
*
+ * <p>Example of ensuring that a table exists.
+ * <pre> {@code
+ * if (!table.exists()) {
+ *   throw new RuntimeException("Table does not exist.");
+ * }
+ * }</pre>
+ *
* @return {@code true} if this table exists, {@code false} otherwise
* @throws BigQueryException upon failure
*/
@@ -149,6 +156,13 @@ public boolean exists() {
/**
* Fetches current table's latest information. Returns {@code null} if the table does not exist.
*
+ * <p>Example of fetching a table's latest information, specifying particular table field options.
+ * <pre> {@code
+ * TableField field1 = TableField.LAST_MODIFIED_TIME;
+ * TableField field2 = TableField.NUM_ROWS;
+ * Table reloaded = table.reload(TableOption.fields(field1, field2));
+ * }</pre>
+ *
* @param options table options
* @return a {@code Table} object with latest information or {@code null} if not found
* @throws BigQueryException upon failure
@@ -161,6 +175,13 @@ public Table reload(TableOption... options) {
* Updates the table's information with this table's information. Dataset's and table's
* user-defined ids cannot be changed. A new {@code Table} object is returned.
*
+ * <p>Example of updating a table's information, specifying particular table field options.
+ * <pre> {@code
+ * TableField field1 = TableField.LAST_MODIFIED_TIME;
+ * TableField field2 = TableField.NUM_ROWS;
+ * Table updated = table.update(TableOption.fields(field1, field2));
+ * }</pre>
+ *
* @param options dataset options
* @return a {@code Table} object with updated information
* @throws BigQueryException upon failure
@@ -172,6 +193,11 @@ public Table update(TableOption... options) {
/**
* Deletes this table.
*
+ * <p>Example of deleting a table.
+ * <pre> {@code
+ * table.delete();
+ * }</pre>
+ *
* @return {@code true} if table was deleted, {@code false} if it was not found
* @throws BigQueryException upon failure
*/
@@ -182,6 +208,23 @@ public boolean delete() {
/**
* Insert rows into the table.
*
+ * <p>Example of inserting rows into a table.
+ * <pre> {@code
+ * String rowId1 = "rowId1";
+ * String rowId2 = "rowId2";
+ * List<RowToInsert> rows = new ArrayList<>();
+ * Map<String, Object> row1 = new HashMap<>();
+ * row1.put("stringField", "value1");
+ * row1.put("booleanField", true);
+ * Map<String, Object> row2 = new HashMap<>();
+ * row2.put("stringField", "value2");
+ * row2.put("booleanField", false);
+ * rows.add(RowToInsert.of(rowId1, row1));
+ * rows.add(RowToInsert.of(rowId2, row2));
+ * InsertAllResponse response = table.insert(rows);
+ * // do something with response
+ * }</pre>
+ *
* @param rows rows to be inserted
* @throws BigQueryException upon failure
*/
@@ -193,6 +236,23 @@ public InsertAllResponse insert(Iterable<InsertAllRequest.RowToInsert> rows)
/**
* Insert rows into the table.
*
+ * <p>Example of inserting rows into a table, skipping invalid rows.
+ * <pre> {@code
+ * String rowId1 = "rowId1";
+ * String rowId2 = "rowId2";
+ * List<RowToInsert> rows = new ArrayList<>();
+ * Map<String, Object> row1 = new HashMap<>();
+ * row1.put("stringField", 1);
+ * row1.put("booleanField", true);
+ * Map<String, Object> row2 = new HashMap<>();
+ * row2.put("stringField", "value2");
+ * row2.put("booleanField", false);
+ * rows.add(RowToInsert.of(rowId1, row1));
+ * rows.add(RowToInsert.of(rowId2, row2));
+ * InsertAllResponse response = table.insert(rows, true, true);
+ * // do something with response
+ * }</pre>
+ *
* @param rows rows to be inserted
* @param skipInvalidRows whether to insert all valid rows, even if invalid rows exist. If not set
* the entire insert operation will fail if rows to be inserted contain an invalid row
@@ -213,6 +273,12 @@ public InsertAllResponse insert(Iterable<InsertAllRequest.RowToInsert> rows,
/**
* Returns the paginated list rows in this table.
*
+ * <p>Example of getting a paginated list of rows in a table.
+ * <pre> {@code
+ * Page<List<FieldValue>> page = table.list(TableDataListOption.pageSize(100));
+ * // do something with page
+ * }</pre>
+ *
* @param options table data list options
* @throws BigQueryException upon failure
*/
@@ -225,6 +291,27 @@ public Page<List<FieldValue>> list(TableDataListOption... options)
* Starts a BigQuery Job to copy the current table to the provided destination table. Returns the
* started {@link Job} object.
*
+ * <p>Example of copying a table to a destination table and dataset referenced by name.
+ * <pre> {@code
+ * String datasetName = "my_dataset";
+ * String tableName = "my_destination_table";
+ * Job job = table.copy(datasetName, tableName);
+ *
+ * // Wait for the job to complete.
+ * try {
+ *   Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+ *       WaitForOption.timeout(60, TimeUnit.SECONDS));
+ *   if (completedJob != null && completedJob.status().error() == null) {
+ *     // Job completed successfully.
+ *   } else {
+ *     // Handle error case.
+ *   }
+ * } catch (InterruptedException | TimeoutException e) {
+ *   // Handle interrupted wait.
+ * }
+ *
+ * }</pre>
+ *
* @param destinationDataset the user-defined id of the destination dataset
* @param destinationTable the user-defined id of the destination table
* @param options job options
@@ -239,6 +326,29 @@ public Job copy(String destinationDataset, String destinationTable, JobOption...
* Starts a BigQuery Job to copy the current table to the provided destination table. Returns the
* started {@link Job} object.
*
+ * <p>Example of copying a table to a destination table referenced by table ID.
+ * <pre> {@code
+ * String dataset = "my_dataset";
+ * String tableName = "copy_destination";
+ * TableId destinationId = TableId.of(dataset, tableName);
+ * JobOption options = JobOption.fields(JobField.STATUS, JobField.USER_EMAIL);
+ *
+ * Job job = table.copy(destinationId, options);
+ *
+ * // Wait for the job to complete.
+ * try {
+ *   Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+ *       WaitForOption.timeout(60, TimeUnit.SECONDS));
+ *   if (completedJob != null && completedJob.status().error() == null) {
+ *     // Job completed successfully.
+ *   } else {
+ *     // Handle error case.
+ *   }
+ * } catch (InterruptedException | TimeoutException e) {
+ *   // Handle interrupted wait.
+ * }
+ * }</pre>
+ *
* @param destinationTable the destination table of the copy job
* @param options job options
* @throws BigQueryException upon failure
@@ -253,6 +363,26 @@ public Job copy(TableId destinationTable, JobOption... options)
* Starts a BigQuery Job to extract the current table to the provided destination URI. Returns the
* started {@link Job} object.
*
+ * <p>Example of extracting data to a single Google Cloud Storage file.
+ * <pre> {@code
+ * String format = "CSV";
+ * String gcsUrl = "gs://myapp.appspot.com/filename.csv";
+ * Job job = table.extract(format, gcsUrl);
+ *
+ * // Wait for the job to complete.
+ * try {
+ *   Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+ *       WaitForOption.timeout(60, TimeUnit.SECONDS));
+ *   if (completedJob != null && completedJob.status().error() == null) {
+ *     // Job completed successfully.
+ *   } else {
+ *     // Handle error case.
+ *   }
+ * } catch (InterruptedException | TimeoutException e) {
+ *   // Handle interrupted wait.
+ * }
+ * }</pre>
+ *
* @param format the format of the extracted data
* @param destinationUri the fully-qualified Google Cloud Storage URI (e.g. gs://bucket/path)
* where the extracted table should be written
@@ -268,6 +398,31 @@ public Job extract(String format, String destinationUri, JobOption... options)
* Starts a BigQuery Job to extract the current table to the provided destination URIs. Returns
* the started {@link Job} object.
*
+ * <p>Example of extracting data to a list of Google Cloud Storage files.
+ * <pre> {@code
+ * String format = "CSV";
+ * String gcsUrl1 = "gs://myapp.appspot.com/PartitionA_*.csv";
+ * String gcsUrl2 = "gs://myapp.appspot.com/PartitionB_*.csv";
+ * List<String> destinationUris = new ArrayList<>();
+ * destinationUris.add(gcsUrl1);
+ * destinationUris.add(gcsUrl2);
+ *
+ * Job job = table.extract(format, destinationUris);
+ *
+ * // Wait for the job to complete.
+ * try {
+ *   Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+ *       WaitForOption.timeout(60, TimeUnit.SECONDS));
+ *   if (completedJob != null && completedJob.status().error() == null) {
+ *     // Job completed successfully.
+ *   } else {
+ *     // Handle error case.
+ *   }
+ * } catch (InterruptedException | TimeoutException e) {
+ *   // Handle interrupted wait.
+ * }
+ * }</pre>
+ *
* @param format the format of the exported data
* @param destinationUris the fully-qualified Google Cloud Storage URIs (e.g. gs://bucket/path)
* where the extracted table should be written
@@ -285,6 +440,25 @@ public Job extract(String format, List<String> destinationUris, JobOption... opt
* Starts a BigQuery Job to load data into the current table from the provided source URI. Returns
* the started {@link Job} object.
*
+ * <p>Example of loading data from a single Google Cloud Storage file.
+ * <pre> {@code
+ * String sourceUri = "gs://myapp.appspot.com/filename.csv";
+ * Job job = table.load(FormatOptions.csv(), sourceUri);
+ *
+ * // Wait for the job to complete.
+ * try {
+ *   Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+ *       WaitForOption.timeout(60, TimeUnit.SECONDS));
+ *   if (completedJob != null && completedJob.status().error() == null) {
+ *     // Job completed successfully.
+ *   } else {
+ *     // Handle error case.
+ *   }
+ * } catch (InterruptedException | TimeoutException e) {
+ *   // Handle interrupted wait.
+ * }
+ * }</pre>
+ *
* @param format the format of the data to load
* @param sourceUri the fully-qualified Google Cloud Storage URI (e.g. gs://bucket/path) from
* which to load the data
@@ -300,6 +474,30 @@ public Job load(FormatOptions format, String sourceUri, JobOption... options)
* Starts a BigQuery Job to load data into the current table from the provided source URIs.
* Returns the started {@link Job} object.
*
+ * <p>Example of loading data from a list of Google Cloud Storage files.
+ * <pre> {@code
+ * String gcsUrl1 = "gs://myapp.appspot.com/PartitionA_000000000000.csv";
+ * String gcsUrl2 = "gs://myapp.appspot.com/PartitionB_000000000000.csv";
+ * List<String> sourceUris = new ArrayList<>();
+ * sourceUris.add(gcsUrl1);
+ * sourceUris.add(gcsUrl2);
+ *
+ * Job job = table.load(FormatOptions.csv(), sourceUris);
+ *
+ * // Wait for the job to complete.
+ * try {
+ *   Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+ *       WaitForOption.timeout(60, TimeUnit.SECONDS));
+ *   if (completedJob != null && completedJob.status().error() == null) {
+ *     // Job completed successfully.
+ *   } else {
+ *     // Handle error case.
+ *   }
+ * } catch (InterruptedException | TimeoutException e) {
+ *   // Handle interrupted wait.
+ * }
+ * }</pre>
+ *
* @param format the format of the exported data
* @param sourceUris the fully-qualified Google Cloud Storage URIs (e.g. gs://bucket/path) from
* which to load the data
diff --git a/google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/TableSnippets.java b/google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/TableSnippets.java
new file mode 100644
index 000000000000..8f9e0d46bc91
--- /dev/null
+++ b/google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/TableSnippets.java
@@ -0,0 +1,327 @@
+/*
+ * Copyright 2016 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.examples.bigquery.snippets;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+
+import com.google.cloud.Page;
+import com.google.cloud.WaitForOption;
+import com.google.cloud.bigquery.BigQuery.JobField;
+import com.google.cloud.bigquery.BigQuery.JobOption;
+import com.google.cloud.bigquery.BigQuery.TableDataListOption;
+import com.google.cloud.bigquery.BigQuery.TableField;
+import com.google.cloud.bigquery.BigQuery.TableOption;
+import com.google.cloud.bigquery.BigQueryException;
+import com.google.cloud.bigquery.FieldValue;
+import com.google.cloud.bigquery.FormatOptions;
+import com.google.cloud.bigquery.InsertAllRequest.RowToInsert;
+import com.google.cloud.bigquery.InsertAllResponse;
+import com.google.cloud.bigquery.Job;
+import com.google.cloud.bigquery.Table;
+import com.google.cloud.bigquery.TableId;
+
+/*
+ * EDITING INSTRUCTIONS
+ * This file is referenced in Table’s javadoc. Any change to this file should be reflected in
+ * Table’s javadoc.
+ */
+public class TableSnippets {
+ private final Table table;
+
+ public TableSnippets(Table table) {
+ this.table = table;
+ }
+
+ /**
+ * Example of ensuring that a table exists.
+ */
+ // [TARGET exists()]
+ public void checkExists() {
+ // [START checkExists]
+ if (!table.exists()) {
+ throw new RuntimeException("Table does not exist.");
+ }
+ // [END checkExists]
+ }
+
+ /**
+ * Example of fetching a table's latest information, specifying particular table field options.
+ */
+ // [TARGET reload(TableOption...)]
+ // [VARIABLE TableField.LAST_MODIFIED_TIME]
+ // [VARIABLE TableField.NUM_ROWS]
+ public Table reloadTableWithFields(TableField field1, TableField field2) {
+ // [START reloadTableWithFields]
+ Table reloaded = table.reload(TableOption.fields(field1, field2));
+ // [END reloadTableWithFields]
+ return reloaded;
+ }
+
+ /**
+ * Example of updating a table's information, specifying particular table field options.
+ */
+ // [TARGET update(TableOption...)]
+ // [VARIABLE TableField.LAST_MODIFIED_TIME]
+ // [VARIABLE TableField.NUM_ROWS]
+ public Table updateTableWithFields(TableField field1, TableField field2) {
+ // [START updateTableWithFields]
+ Table updated = table.update(TableOption.fields(field1, field2));
+ // [END updateTableWithFields]
+ return updated;
+ }
+
+ /**
+ * Example of deleting a table.
+ */
+ // [TARGET delete()]
+ public void delete() {
+ // [START delete]
+ table.delete();
+ // [END delete]
+ }
+
+ /**
+ * Example of inserting rows into a table.
+ */
+ // [TARGET insert(Iterable)]
+ // [VARIABLE "rowId1"]
+ // [VARIABLE "rowId2"]
+ public InsertAllResponse insert(String rowId1, String rowId2) {
+ // [START insert]
+ List<RowToInsert> rows = new ArrayList<>();
+ Map<String, Object> row1 = new HashMap<>();
+ row1.put("stringField", "value1");
+ row1.put("booleanField", true);
+ Map<String, Object> row2 = new HashMap<>();
+ row2.put("stringField", "value2");
+ row2.put("booleanField", false);
+ rows.add(RowToInsert.of(rowId1, row1));
+ rows.add(RowToInsert.of(rowId2, row2));
+ InsertAllResponse response = table.insert(rows);
+ // do something with response
+ // [END insert]
+ return response;
+ }
+
+ /**
+ * Example of inserting rows into a table, skipping invalid rows.
+ */
+ // [TARGET insert(Iterable, boolean, boolean)]
+ // [VARIABLE "rowId1"]
+ // [VARIABLE "rowId2"]
+ public InsertAllResponse insertWithParams(String rowId1, String rowId2) {
+ // [START insertWithParams]
+ List<RowToInsert> rows = new ArrayList<>();
+ Map<String, Object> row1 = new HashMap<>();
+ row1.put("stringField", 1);
+ row1.put("booleanField", true);
+ Map<String, Object> row2 = new HashMap<>();
+ row2.put("stringField", "value2");
+ row2.put("booleanField", false);
+ rows.add(RowToInsert.of(rowId1, row1));
+ rows.add(RowToInsert.of(rowId2, row2));
+ InsertAllResponse response = table.insert(rows, true, true);
+ // do something with response
+ // [END insertWithParams]
+ return response;
+ }
+
+ /**
+ * Example of getting a paginated list of rows in a table.
+ */
+ // [TARGET list(TableDataListOption...)]
+ public Page<List<FieldValue>> list() {
+ // [START list]
+ Page<List<FieldValue>> page = table.list(TableDataListOption.pageSize(100));
+ // do something with page
+ // [END list]
+ return page;
+ }
+
+ /**
+ * Example of copying a table to a destination table and dataset referenced by name.
+ */
+ // [TARGET copy(String, String, JobOption...)]
+ // [VARIABLE "my_dataset"]
+ // [VARIABLE "my_destination_table"]
+ public Job copy(String datasetName, String tableName) {
+ // [START copy]
+ Job job = table.copy(datasetName, tableName);
+
+ // Wait for the job to complete.
+ try {
+ Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+ WaitForOption.timeout(60, TimeUnit.SECONDS));
+ if (completedJob != null && completedJob.status().error() == null) {
+ // Job completed successfully.
+ } else {
+ // Handle error case.
+ }
+ } catch (InterruptedException | TimeoutException e) {
+ // Handle interrupted wait.
+ }
+
+ // [END copy]
+ return job;
+ }
+
+ /**
+ * Example of copying a table to a destination table referenced by table ID.
+ */
+ // [TARGET copy(TableId, JobOption...)]
+ // [VARIABLE "my_dataset"]
+ // [VARIABLE "copy_destination"]
+ public Job copyTableId(String dataset, String tableName) throws BigQueryException {
+ // [START copyTableId]
+ TableId destinationId = TableId.of(dataset, tableName);
+ JobOption options = JobOption.fields(JobField.STATUS, JobField.USER_EMAIL);
+
+ Job job = table.copy(destinationId, options);
+
+ // Wait for the job to complete.
+ try {
+ Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+ WaitForOption.timeout(60, TimeUnit.SECONDS));
+ if (completedJob != null && completedJob.status().error() == null) {
+ // Job completed successfully.
+ } else {
+ // Handle error case.
+ }
+ } catch (InterruptedException | TimeoutException e) {
+ // Handle interrupted wait.
+ }
+ // [END copyTableId]
+ return job;
+ }
+
+ /**
+ * Example of extracting data to a list of Google Cloud Storage files.
+ */
+ // [TARGET extract(String, List, JobOption...)]
+ // [VARIABLE "CSV"]
+ // [VARIABLE "gs://myapp.appspot.com/PartitionA_*.csv"]
+ // [VARIABLE "gs://myapp.appspot.com/PartitionB_*.csv"]
+ public Job extractList(String format, String gcsUrl1, String gcsUrl2) {
+ // [START extractList]
+ List<String> destinationUris = new ArrayList<>();
+ destinationUris.add(gcsUrl1);
+ destinationUris.add(gcsUrl2);
+
+ Job job = table.extract(format, destinationUris);
+
+ // Wait for the job to complete.
+ try {
+ Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+ WaitForOption.timeout(60, TimeUnit.SECONDS));
+ if (completedJob != null && completedJob.status().error() == null) {
+ // Job completed successfully.
+ } else {
+ // Handle error case.
+ }
+ } catch (InterruptedException | TimeoutException e) {
+ // Handle interrupted wait.
+ }
+ // [END extractList]
+ return job;
+ }
+
+ /**
+ * Example of extracting data to a single Google Cloud Storage file.
+ */
+ // [TARGET extract(String, String, JobOption...)]
+ // [VARIABLE "CSV"]
+ // [VARIABLE "gs://myapp.appspot.com/filename.csv"]
+ public Job extractSingle(String format, String gcsUrl) {
+ // [START extractSingle]
+ Job job = table.extract(format, gcsUrl);
+
+ // Wait for the job to complete.
+ try {
+ Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+ WaitForOption.timeout(60, TimeUnit.SECONDS));
+ if (completedJob != null && completedJob.status().error() == null) {
+ // Job completed successfully.
+ } else {
+ // Handle error case.
+ }
+ } catch (InterruptedException | TimeoutException e) {
+ // Handle interrupted wait.
+ }
+ // [END extractSingle]
+ return job;
+ }
+
+ /**
+ * Example of loading data from a list of Google Cloud Storage files.
+ */
+ // [TARGET load(FormatOptions, List, JobOption...)]
+ // [VARIABLE "gs://myapp.appspot.com/PartitionA_000000000000.csv"]
+ // [VARIABLE "gs://myapp.appspot.com/PartitionB_000000000000.csv"]
+ public Job loadList(String gcsUrl1, String gcsUrl2) {
+ // [START loadList]
+ List<String> sourceUris = new ArrayList<>();
+ sourceUris.add(gcsUrl1);
+ sourceUris.add(gcsUrl2);
+
+ Job job = table.load(FormatOptions.csv(), sourceUris);
+
+ // Wait for the job to complete.
+ try {
+ Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+ WaitForOption.timeout(60, TimeUnit.SECONDS));
+ if (completedJob != null && completedJob.status().error() == null) {
+ // Job completed successfully.
+ } else {
+ // Handle error case.
+ }
+ } catch (InterruptedException | TimeoutException e) {
+ // Handle interrupted wait.
+ }
+ // [END loadList]
+ return job;
+ }
+
+ /**
+ * Example of loading data from a single Google Cloud Storage file.
+ */
+ // [TARGET load(FormatOptions, String, JobOption...)]
+ // [VARIABLE "gs://myapp.appspot.com/filename.csv"]
+ public Job loadSingle(String sourceUri) {
+ // [START loadSingle]
+ Job job = table.load(FormatOptions.csv(), sourceUri);
+
+ // Wait for the job to complete.
+ try {
+ Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
+ WaitForOption.timeout(60, TimeUnit.SECONDS));
+ if (completedJob != null && completedJob.status().error() == null) {
+ // Job completed successfully.
+ } else {
+ // Handle error case.
+ }
+ } catch (InterruptedException | TimeoutException e) {
+ // Handle interrupted wait.
+ }
+ // [END loadSingle]
+ return job;
+ }
+}
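+
+// For orientation only: a rough sketch of how this snippets class might be wired up in
+// application code. The dataset/table names and the use of default credentials here are
+// illustrative assumptions, not part of the documented API:
+//
+//   BigQuery bigquery = BigQueryOptions.defaultInstance().service();
+//   Table table = bigquery.getTable(TableId.of("my_dataset", "my_table"));
+//   TableSnippets snippets = new TableSnippets(table);
+//   Page<List<FieldValue>> rows = snippets.list();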
diff --git a/google-cloud-examples/src/test/java/com/google/cloud/examples/bigquery/snippets/ITTableSnippets.java b/google-cloud-examples/src/test/java/com/google/cloud/examples/bigquery/snippets/ITTableSnippets.java
new file mode 100644
index 000000000000..e2b313112820
--- /dev/null
+++ b/google-cloud-examples/src/test/java/com/google/cloud/examples/bigquery/snippets/ITTableSnippets.java
@@ -0,0 +1,325 @@
+/*
+ * Copyright 2016 Google Inc. All Rights Reserved.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.examples.bigquery.snippets;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.util.List;
+import java.util.Set;
+import java.util.logging.Logger;
+
+import org.junit.After;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import com.google.cloud.Page;
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQuery.DatasetDeleteOption;
+import com.google.cloud.bigquery.BigQuery.TableDataListOption;
+import com.google.cloud.bigquery.BigQuery.TableField;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.cloud.bigquery.DatasetInfo;
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.Field.Type;
+import com.google.cloud.bigquery.FieldValue;
+import com.google.cloud.bigquery.InsertAllResponse;
+import com.google.cloud.bigquery.Schema;
+import com.google.cloud.bigquery.StandardTableDefinition;
+import com.google.cloud.bigquery.Table;
+import com.google.cloud.bigquery.TableId;
+import com.google.cloud.bigquery.TableInfo;
+import com.google.common.base.Function;
+import com.google.common.base.Objects;
+import com.google.common.collect.FluentIterable;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+
+/**
+ * Integration tests for {@link TableSnippets}.
+ */
+public class ITTableSnippets {
+ private static final String BASE_TABLE_NAME = "my_table";
+ private static final String DATASET_NAME = "my_dataset";
+ private static final String COPY_DATASET_NAME = "my_copy_dataset";
+ private static final Value ROW1 = new Value("value1", true);
+ private static final Value ROW2 = new Value("value2", false);
+ private static final Logger log = Logger.getLogger(ITTableSnippets.class.getName());
+
+ private static BigQuery bigquery;
+ private Table table;
+ private TableSnippets tableSnippets;
+
+ private static final String DOOMED_TABLE_NAME = "doomed_table";
+ private static final String DOOMED_DATASET_NAME = "doomed_dataset";
+ public static final TableId DOOMED_TABLE_ID = TableId.of(DOOMED_DATASET_NAME, DOOMED_TABLE_NAME);
+
+ private static Table doomedTable;
+ private static TableSnippets doomedTableSnippets;
+
+ private static int nextTableNumber;
+
+ @BeforeClass
+ public static void beforeClass() {
+ bigquery = BigQueryOptions.defaultInstance().service();
+ bigquery.create(DatasetInfo.builder(DATASET_NAME).build());
+ bigquery.create(DatasetInfo.builder(COPY_DATASET_NAME).build());
+ bigquery.create(DatasetInfo.builder(DOOMED_DATASET_NAME).build());
+ }
+
+ @Before
+ public void before() {
+ ++nextTableNumber;
+ StandardTableDefinition.Builder builder = StandardTableDefinition.builder();
+ builder.schema(
+ Schema.of(Field.of("stringField", Type.string()), Field.of("booleanField", Type.bool())));
+ table = bigquery.create(TableInfo.of(getTableId(), builder.build()));
+ bigquery.create(TableInfo.of(getCopyTableId(), builder.build()));
+ tableSnippets = new TableSnippets(table);
+
+ doomedTable = bigquery.create(TableInfo.of(DOOMED_TABLE_ID, builder.build()));
+ doomedTableSnippets = new TableSnippets(doomedTable);
+ }
+
+ @After
+ public void after() {
+ bigquery.delete(getTableId());
+ bigquery.delete(getCopyTableId());
+ bigquery.delete(DOOMED_TABLE_ID);
+ }
+
+ @AfterClass
+ public static void afterClass() {
+ bigquery.delete(DATASET_NAME, DatasetDeleteOption.deleteContents());
+ bigquery.delete(COPY_DATASET_NAME, DatasetDeleteOption.deleteContents());
+ bigquery.delete(DOOMED_DATASET_NAME, DatasetDeleteOption.deleteContents());
+ }
+
+ private String getTableName() {
+ return BASE_TABLE_NAME + nextTableNumber;
+ }
+
+ private TableId getTableId() {
+ return TableId.of(DATASET_NAME, getTableName());
+ }
+
+ private String getCopyTableName() {
+ return BASE_TABLE_NAME + "_copy_" + nextTableNumber;
+ }
+
+ private TableId getCopyTableId() {
+ return TableId.of(COPY_DATASET_NAME, getCopyTableName());
+ }
+
+ @Test
+ public void testCheckExists() {
+ log.info("testCheckExists");
+ tableSnippets.checkExists();
+ }
+
+ @Test
+ public void testReloadTableWithFields() {
+ log.info("testReloadTableWithFields");
+ tableSnippets.reloadTableWithFields(TableField.LAST_MODIFIED_TIME, TableField.NUM_ROWS);
+ }
+
+ @Test
+ public void testUpdateTableWithFields() {
+ log.info("testUpdateTableWithFields");
+ tableSnippets.updateTableWithFields(TableField.LAST_MODIFIED_TIME, TableField.NUM_ROWS);
+ }
+
+ @Test
+ public void testDelete() {
+ log.info("testDelete");
+ doomedTableSnippets.delete();
+ }
+
+ @Test
+ public void testInsert() {
+ log.info("testInsert");
+ InsertAllResponse response = tableSnippets.insert("row1", "row2");
+ assertFalse(response.hasErrors());
+ verifyTestRows(table);
+ }
+
+ @Test
+ public void testInsertParams() throws InterruptedException {
+ InsertAllResponse response = tableSnippets.insertWithParams("row1", "row2");
+ assertTrue(response.hasErrors());
+ List<List<FieldValue>> rows = ImmutableList.copyOf(tableSnippets.list().values());
+ while (rows.isEmpty()) {
+ Thread.sleep(500);
+ rows = ImmutableList.copyOf(tableSnippets.list().values());
+ }
+ Set<Value> values =
+ FluentIterable.from(rows).transform(new Function<List<FieldValue>, Value>() {
+ @Override
+ public Value apply(List<FieldValue> row) {
+ return new Value(row.get(0).stringValue(), row.get(1).booleanValue());
+ }
+ }).toSet();
+ assertEquals(ImmutableSet.of(ROW2), values);
+ }
+
+ @Test
+ public void testList() throws InterruptedException {
+ List<List<FieldValue>> rows = ImmutableList.copyOf(tableSnippets.list().values());
+ assertEquals(0, rows.size());
+
+ InsertAllResponse response = tableSnippets.insert("row1", "row2");
+ assertFalse(response.hasErrors());
+ rows = ImmutableList.copyOf(tableSnippets.list().values());
+ while (rows.isEmpty()) {
+ Thread.sleep(500);
+ rows = ImmutableList.copyOf(tableSnippets.list().values());
+ }
+ assertEquals(2, rows.size());
+ }
+
+ @Test
+ public void testCopy() {
+ tableSnippets.copy(COPY_DATASET_NAME, BASE_TABLE_NAME);
+ }
+
+ @Test
+ public void testCopyTableId() {
+ log.info("testCopyTableId");
+ tableSnippets.copyTableId(COPY_DATASET_NAME, getCopyTableName());
+ }
+
+ @Test
+ public void testExtractList() {
+ log.info("testExtractList");
+ String projectId = bigquery.options().projectId();
+ String gcsFile1 = "gs://" + projectId + ".appspot.com/extractTestA_*.csv";
+ String gcsFile2 = "gs://" + projectId + ".appspot.com/extractTestB_*.csv";
+ tableSnippets.extractList("CSV", gcsFile1, gcsFile2);
+ }
+
+ @Test
+ public void testExtractSingle() {
+ log.info("testExtractSingle");
+ String projectId = bigquery.options().projectId();
+ String gcsFile = "gs://" + projectId + ".appspot.com/extractTest.csv";
+ tableSnippets.extractSingle("CSV", gcsFile);
+ }
+
+ @Test
+ public void testLoadList() {
+ log.info("testLoadList");
+ String projectId = bigquery.options().projectId();
+ String gcsFile1 = "gs://" + projectId + ".appspot.com/loadTest1.csv";
+ String gcsFile2 = "gs://" + projectId + ".appspot.com/loadTest2.csv";
+
+ // Before we can load, we should make sure those files exist.
+ tableSnippets.extractSingle("CSV", gcsFile1);
+ tableSnippets.extractSingle("CSV", gcsFile2);
+
+ tableSnippets.loadList(gcsFile1, gcsFile2);
+ }
+
+ @Test
+ public void testLoadSingle() {
+ log.info("testLoadSingle");
+ String projectId = bigquery.options().projectId();
+ String gcsFile = "gs://" + projectId + ".appspot.com/loadSingle.csv";
+
+ // Before we can load, we should make sure the file exists.
+ tableSnippets.extractSingle("CSV", gcsFile);
+
+ tableSnippets.loadSingle(gcsFile);
+ }
+
+ private static class Value {
+ final String stringField;
+ final boolean booleanField;
+
+ Value(String stringField, boolean booleanField) {
+ this.stringField = stringField;
+ this.booleanField = booleanField;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj instanceof Value) {
+ Value o = (Value) obj;
+ return Objects.equal(stringField, o.stringField) && booleanField == o.booleanField;
+ }
+ return false;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hashCode(stringField, booleanField);
+ }
+
+ @Override
+ public String toString() {
+ return "";
+ }
+ }
+
+ /**
+ * Verifies that the given table contains the two test rows inserted by the insert snippet.
+ *
+ * @param checkTable The table to query.
+ */
+ private void verifyTestRows(Table checkTable) {
+ List<List<FieldValue>> rows = waitForTableRows(checkTable, 2);
+ // Verify that the table data matches what it's supposed to.
+ Set<Value> values =
+ FluentIterable.from(rows).transform(new Function<List<FieldValue>, Value>() {
+ @Override
+ public Value apply(List<FieldValue> row) {
+ return new Value(row.get(0).stringValue(), row.get(1).booleanValue());
+ }
+ }).toSet();
+ assertEquals(ImmutableSet.of(ROW2, ROW1), values);
+ }
+
+ /**
+ * Waits for a specified number of rows to appear in the given table. This is used by
+ * verifyTestRows to wait for data to appear before verifying.
+ *
+ * @param checkTable the table to poll for rows
+ * @param numRows the number of rows to wait for
+ * @return the rows from the table
+ */
+ private List<List<FieldValue>> waitForTableRows(Table checkTable, int numRows) {
+ // Wait for the data to appear.
+ Page<List<FieldValue>> page = checkTable.list(TableDataListOption.pageSize(100));
+ List<List<FieldValue>> rows = ImmutableList.copyOf(page.values());
+ int numSleeps = 0;
+ while (rows.size() != numRows) {
+ assertTrue(numSleeps < 10);
+ log.info("Sleeping and waiting for " + numRows + " test rows to appear (currently "
+ + rows.size() + ")...");
+ try {
+ ++numSleeps;
+ Thread.sleep(5000);
+ } catch (InterruptedException e) {
+ }
+ page = checkTable.list(TableDataListOption.pageSize(100));
+ rows = ImmutableList.copyOf(page.values());
+ }
+ return rows;
+ }
+}
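+
+// Note on running this suite: the tests above talk to live BigQuery and Google Cloud Storage.
+// Judging from the code (an inference, not an authoritative setup guide), they assume
+// application default credentials for a project with BigQuery enabled and a GCS bucket named
+// "<projectId>.appspot.com", since the extract and load tests read and write
+// gs://<projectId>.appspot.com/... objects.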