diff --git a/google-cloud-clients/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuery.java b/google-cloud-clients/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuery.java index bab3d3370518..ee370bea5a58 100644 --- a/google-cloud-clients/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuery.java +++ b/google-cloud-clients/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuery.java @@ -560,22 +560,24 @@ public int hashCode() { * String sourceUri = "gs://cloud-samples-data/bigquery/us-states/us-states.json"; * TableId tableId = TableId.of(datasetName, tableName); * // Table field definition - * Field[] fields = new Field[] { - * Field.of("name", LegacySQLTypeName.STRING), - * Field.of("post_abbr", LegacySQLTypeName.STRING) - * }; + * Field[] fields = + * new Field[] { + * Field.of("name", LegacySQLTypeName.STRING), + * Field.of("post_abbr", LegacySQLTypeName.STRING) + * }; * // Table schema definition * Schema schema = Schema.of(fields); - * LoadJobConfiguration configuration = LoadJobConfiguration.builder(tableId, sourceUri) - * .setFormatOptions(FormatOptions.json()) - * .setCreateDisposition(CreateDisposition.CREATE_IF_NEEDED) - * .setSchema(schema) - * .build(); + * LoadJobConfiguration configuration = + * LoadJobConfiguration.builder(tableId, sourceUri) + * .setFormatOptions(FormatOptions.json()) + * .setCreateDisposition(CreateDisposition.CREATE_IF_NEEDED) + * .setSchema(schema) + * .build(); * // Load the table - * Job remoteLoadJob = bigquery.create(JobInfo.of(configuration)); - * remoteLoadJob = remoteLoadJob.waitFor(); + * Job loadJob = bigquery.create(JobInfo.of(configuration)); + * loadJob = loadJob.waitFor(); * // Check the table - * System.out.println("State: " + remoteLoadJob.getStatus().getState()); + * System.out.println("State: " + loadJob.getStatus().getState()); * return ((StandardTableDefinition) bigquery.getTable(tableId).getDefinition()).getNumRows(); * } * @@ -771,9 +773,25 
@@ public int hashCode() { * String datasetName = "my_dataset_name"; * String tableName = "my_table_name"; * String newDescription = "new_description"; - * Table oldTable = bigquery.getTable(datasetName, tableName); - * TableInfo tableInfo = oldTable.toBuilder().setDescription(newDescription).build(); - * Table newTable = bigquery.update(tableInfo); + * Table beforeTable = bigquery.getTable(datasetName, tableName); + * TableInfo tableInfo = beforeTable.toBuilder() + * .setDescription(newDescription) + * .build(); + * Table afterTable = bigquery.update(tableInfo); + * } + * + *

Example of updating a table by changing its expiration. + *

 {@code
+   * String datasetName = "my_dataset_name";
+   * String tableName = "my_table_name";
+   * Table beforeTable = bigquery.getTable(datasetName, tableName);
+   * 
+   * // Set table to expire 5 days from now.
+   * long expirationMillis = DateTime.now().plusDays(5).getMillis();
+   * TableInfo tableInfo = beforeTable.toBuilder()
+   *     .setExpirationTime(expirationMillis)
+   *     .build();
+   * Table afterTable = bigquery.update(tableInfo);
    * }
* * @throws BigQueryException upon failure @@ -869,10 +887,12 @@ public int hashCode() { * Map recordsContent = new HashMap<>(); * recordsContent.put("stringField", "Hello, World!"); * rowContent.put("recordField", recordsContent); - * InsertAllResponse response = bigquery.insertAll(InsertAllRequest.newBuilder(tableId) - * .addRow("rowId", rowContent) - * // More rows can be added in the same RPC by invoking .addRow() on the builder - * .build()); + * InsertAllResponse response = + * bigquery.insertAll( + * InsertAllRequest.newBuilder(tableId) + * .addRow("rowId", rowContent) + * // More rows can be added in the same RPC by invoking .addRow() on the builder + * .build()); * if (response.hasErrors()) { * // If any of the insertions failed, this lets you inspect the errors * for (Entry> entry : response.getInsertErrors().entrySet()) { @@ -936,8 +956,7 @@ public int hashCode() { * String tableName = "my_table_name"; * Schema schema = ...; * String field = "field"; - * TableResult tableData = - * bigquery.listTableData(datasetName, tableName, schema); + * TableResult tableData = bigquery.listTableData(datasetName, tableName, schema); * for (FieldValueList row : tableData.iterateAll()) { * row.get(field); * } @@ -1083,10 +1102,8 @@ TableResult listTableData( *

Example of running a query. *

 {@code
    * // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
-   * String query =
-   *     "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
-   * QueryJobConfiguration queryConfig =
-   *     QueryJobConfiguration.newBuilder(query).build();
+   * String query = "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
+   * QueryJobConfiguration queryConfig = QueryJobConfiguration.newBuilder(query).build();
    * 
    * // Print the results.
    * for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
@@ -1145,9 +1162,7 @@ TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption...
    * String csvData = "StringValue1\nStringValue2\n";
    * TableId tableId = TableId.of(datasetName, tableName);
    * WriteChannelConfiguration writeChannelConfiguration =
-   *     WriteChannelConfiguration.newBuilder(tableId)
-   *         .setFormatOptions(FormatOptions.csv())
-   *         .build();
+   *     WriteChannelConfiguration.newBuilder(tableId).setFormatOptions(FormatOptions.csv()).build();
    * TableDataWriteChannel writer = bigquery.writer(writeChannelConfiguration);
    * // Write data to writer
    * try {
@@ -1170,9 +1185,7 @@ TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption...
    * String location = "us";
    * TableId tableId = TableId.of(datasetName, tableName);
    * WriteChannelConfiguration writeChannelConfiguration =
-   *     WriteChannelConfiguration.newBuilder(tableId)
-   *         .setFormatOptions(FormatOptions.csv())
-   *         .build();
+   *     WriteChannelConfiguration.newBuilder(tableId).setFormatOptions(FormatOptions.csv()).build();
    * // The location must be specified; other fields can be auto-detected.
    * JobId jobId = JobId.newBuilder().setLocation(location).build();
    * TableDataWriteChannel writer = bigquery.writer(jobId, writeChannelConfiguration);
diff --git a/google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/BigQuerySnippets.java b/google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/BigQuerySnippets.java
index f7ec0642a256..b1edd909c041 100644
--- a/google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/BigQuerySnippets.java
+++ b/google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/BigQuerySnippets.java
@@ -59,6 +59,8 @@
 import com.google.cloud.bigquery.TableInfo;
 import com.google.cloud.bigquery.TableResult;
 import com.google.cloud.bigquery.WriteChannelConfiguration;
+import org.joda.time.DateTime;
 import java.io.IOException;
 import java.io.OutputStream;
 import java.nio.ByteBuffer;
@@ -123,13 +125,35 @@ public Dataset updateDataset(String datasetName, String newDescription) {
   // [VARIABLE "my_dataset_name"]
   // [VARIABLE "my_table_name"]
   // [VARIABLE "new_description"]
-  public Table updateTable(String datasetName, String tableName, String newDescription) {
+  public Table updateTableDescription(String datasetName, String tableName, String newDescription) {
     // [START bigquery_update_table_description]
-    Table oldTable = bigquery.getTable(datasetName, tableName);
-    TableInfo tableInfo = oldTable.toBuilder().setDescription(newDescription).build();
-    Table newTable = bigquery.update(tableInfo);
+    Table beforeTable = bigquery.getTable(datasetName, tableName);
+    TableInfo tableInfo = beforeTable.toBuilder()
+        .setDescription(newDescription)
+        .build();
+    Table afterTable = bigquery.update(tableInfo);
     // [END bigquery_update_table_description]
-    return newTable;
+    return afterTable;
+  }
+
+  /**
+   * Example of updating a table by changing its expiration.
+   */
+  // [TARGET update(TableInfo, TableOption...)]
+  // [VARIABLE "my_dataset_name"]
+  // [VARIABLE "my_table_name"]
+  public Table updateTableExpiration(String datasetName, String tableName) {
+    // [START bigquery_update_table_expiration]
+    Table beforeTable = bigquery.getTable(datasetName, tableName);
+
+    // Set table to expire 5 days from now.
+    long expirationMillis = DateTime.now().plusDays(5).getMillis();
+    TableInfo tableInfo = beforeTable.toBuilder()
+        .setExpirationTime(expirationMillis)
+        .build();
+    Table afterTable = bigquery.update(tableInfo);
+    // [END bigquery_update_table_expiration]
+    return afterTable;
+  }
   }
 
   /**
diff --git a/google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/CloudSnippets.java b/google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/CloudSnippets.java
index eff8f48061ad..a0fcd2f331fb 100644
--- a/google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/CloudSnippets.java
+++ b/google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/CloudSnippets.java
@@ -17,6 +17,7 @@
 package com.google.cloud.examples.bigquery.snippets;
 
 import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.CopyJobConfiguration;
 import com.google.cloud.bigquery.FieldValue;
 import com.google.cloud.bigquery.FieldValueList;
 import com.google.cloud.bigquery.FormatOptions;
@@ -27,9 +28,13 @@
 import com.google.cloud.bigquery.QueryParameterValue;
 import com.google.cloud.bigquery.StandardTableDefinition;
 import com.google.cloud.bigquery.TableId;
+import java.util.Arrays;
 import java.util.concurrent.TimeoutException;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
+import org.joda.time.Instant;
 import org.joda.time.format.DateTimeFormatter;
 import org.joda.time.format.ISODateTimeFormat;
 
@@ -309,4 +314,84 @@ public void loadTableGcsParquet(String datasetName) throws InterruptedException
     System.out.printf("Loaded %d rows.\n", destinationTable.getNumRows());
     // [END bigquery_load_table_gcs_parquet]
   }
+
+  /**
+   * Creates a small table via a "CREATE TABLE ... AS SELECT" DDL query and blocks until the job
+   * finishes. The table gets one row per element of GENERATE_ARRAY(0,5,1) (6 rows), each with a
+   * random {@code year} and {@code token} column.
+   *
+   * <p>NOTE(review): the Job returned by waitFor() (which carries the final status) is discarded,
+   * so a failed DDL job goes unnoticed here — consider checking its status.
+   */
+  private void generateTableWithDdl(String datasetId, String tableId) throws InterruptedException {
+    String sql = String.format(
+        "CREATE TABLE %s.%s " +
+        "AS " +
+        "SELECT " +
+        "2000 + CAST(18 * RAND() as INT64) AS year, " +
+        "IF(RAND() > 0.5,\"foo\",\"bar\") AS token " +
+        "FROM " +
+        "UNNEST(GENERATE_ARRAY(0,5,1)) AS r", datasetId, tableId);
+    Job job = bigquery.create(JobInfo.of(QueryJobConfiguration.newBuilder(sql).build()));
+    job.waitFor();
+  }
+
+  /**
+   * Example of copying multiple source tables into a single destination table.
+   *
+   * <p>Two throwaway source tables are generated first so the snippet has data to copy.
+   *
+   * @param datasetId dataset that holds both the source tables and the destination table
+   * @param destinationTableId table that receives the combined rows
+   * @throws InterruptedException if interrupted while waiting for the DDL or copy jobs
+   */
+  public void copyTables(String datasetId, String destinationTableId) throws InterruptedException {
+    generateTableWithDdl(datasetId, "table1");
+    generateTableWithDdl(datasetId, "table2");
+
+    // [START bigquery_copy_table_multiple_source]
+    TableId destinationTable = TableId.of(datasetId, destinationTableId);
+    CopyJobConfiguration configuration =
+        CopyJobConfiguration.newBuilder(
+            destinationTable,
+            Arrays.asList(
+                TableId.of(datasetId, "table1"),
+                TableId.of(datasetId, "table2")))
+        .build();
+
+    // Copy the tables.
+    Job job = bigquery.create(JobInfo.of(configuration));
+    job = job.waitFor();
+
+    // Check the table
+    StandardTableDefinition table = bigquery.getTable(destinationTable).getDefinition();
+    System.out.println("State: " + job.getStatus().getState());
+    System.out.printf("Copied %d rows.\n", table.getNumRows());
+    // [END bigquery_copy_table_multiple_source]
+  }
+
+  /**
+   * Example of recovering ("undeleting") a recently deleted table using a snapshot decorator.
+   *
+   * <p>A throwaway table is generated first so the snippet has a table to delete and recover.
+   *
+   * @param datasetId dataset in which the table is created, deleted, and recovered
+   * @throws InterruptedException if interrupted while waiting for the DDL or copy jobs
+   */
+  public void undeleteTable(String datasetId) throws InterruptedException {
+    generateTableWithDdl(datasetId, "oops_undelete_me");
+
+    // [START bigquery_undelete_table]
+    // String datasetId = "my_dataset";
+    String tableId = "oops_undelete_me";
+
+    // Record the current time. We'll use this as the snapshot time
+    // for recovering the table.
+    long snapTime = Instant.now().getMillis();
+
+    // "Accidentally" delete the table.
+    bigquery.delete(TableId.of(datasetId, tableId));
+
+    // Construct the restore-from tableID using a snapshot decorator.
+    String snapshotTableId = String.format("%s@%d", tableId, snapTime);
+    // Choose a new table ID for the recovered table data.
+    String recoverTableId = String.format("%s_recovered", tableId);
+
+    // Construct and run a copy job.
+    CopyJobConfiguration configuration =
+        CopyJobConfiguration.newBuilder(
+            TableId.of(datasetId, recoverTableId),
+            TableId.of(datasetId, snapshotTableId))
+        .build();
+    Job job = bigquery.create(JobInfo.of(configuration));
+    job = job.waitFor();
+
+    // Check the table
+    StandardTableDefinition table = bigquery.getTable(
+            TableId.of(datasetId, recoverTableId)).getDefinition();
+    System.out.println("State: " + job.getStatus().getState());
+    System.out.printf("Recovered %d rows.\n", table.getNumRows());
+    // [END bigquery_undelete_table]
+  }
 }
diff --git a/google-cloud-examples/src/test/java/com/google/cloud/examples/bigquery/snippets/ITBigQuerySnippets.java b/google-cloud-examples/src/test/java/com/google/cloud/examples/bigquery/snippets/ITBigQuerySnippets.java
index 9218a7b497f9..815aecef69d2 100644
--- a/google-cloud-examples/src/test/java/com/google/cloud/examples/bigquery/snippets/ITBigQuerySnippets.java
+++ b/google-cloud-examples/src/test/java/com/google/cloud/examples/bigquery/snippets/ITBigQuerySnippets.java
@@ -123,7 +123,9 @@ public void testCreateGetAndDeleteTable() throws InterruptedException {
     TableId tableId = TableId.of(bigquery.getOptions().getProjectId(), DATASET, tableName);
     assertEquals(
         tableId, bigquerySnippets.getTable(tableId.getDataset(), tableId.getTable()).getTableId());
-    assertNotNull(bigquerySnippets.updateTable(DATASET, tableName, "new description"));
+    assertNotNull(bigquerySnippets.updateTableDescription(DATASET, tableName, "new description"));
+    table = bigquerySnippets.updateTableExpiration(DATASET, tableName);
+    assertNotNull(table.getExpirationTime());
     assertEquals(
         "new description",
         bigquerySnippets
diff --git a/google-cloud-examples/src/test/java/com/google/cloud/examples/bigquery/snippets/ITCloudSnippets.java b/google-cloud-examples/src/test/java/com/google/cloud/examples/bigquery/snippets/ITCloudSnippets.java
index 69b9046f380b..a00707b248cb 100644
--- a/google-cloud-examples/src/test/java/com/google/cloud/examples/bigquery/snippets/ITCloudSnippets.java
+++ b/google-cloud-examples/src/test/java/com/google/cloud/examples/bigquery/snippets/ITCloudSnippets.java
@@ -127,4 +127,18 @@ public void testLoadTableGcsParquet() throws InterruptedException {
     assertTrue(got.contains("DONE"));
     assertTrue(got.contains("Loaded 50 rows."));
   }
+
+  @Test
+  public void testCopyTables() throws InterruptedException {
+    cloudSnippets.copyTables(DATASET, "copytablesdestination");
+    String got = bout.toString();
+    // The snippet prints the terminal job state; a successful copy reports DONE.
+    assertTrue(got.contains("DONE"));
+  }
+
+  @Test
+  public void testUndeleteTable() throws InterruptedException {
+    cloudSnippets.undeleteTable(DATASET);
+    String got = bout.toString();
+    // The snippet prints the terminal job state; a successful recovery reports DONE.
+    assertTrue(got.contains("DONE"));
+  }
 }