Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
34 commits
Select commit Hold shift + click to select a range
cc8de4a
add bigquery snippets and tests (in progress)
tangiel Sep 16, 2016
a47e9b3
Starting work on copy Table ID snippet
kdeus Sep 16, 2016
7508764
fix test setup/teardown
tangiel Sep 16, 2016
ff3133d
Still working toward a copy tableID test
kdeus Sep 16, 2016
37b33f6
Fixed merge conflict.
kdeus Sep 16, 2016
56b385f
Fixed the other merge conflict.
kdeus Sep 16, 2016
8068576
Working copy by table ID snippet.
kdeus Sep 16, 2016
866cf43
Moved parameters. Better snippet.
kdeus Sep 16, 2016
60bc8fa
Moved parameters. Better snippet.
kdeus Sep 16, 2016
10fdfac
add working first TableSnippets test
tangiel Sep 16, 2016
1faa1e3
Added todo, did some cleanup
kdeus Sep 16, 2016
3335d54
Merge branch 'master' into kdeus-snippets
kdeus Sep 16, 2016
fddd00e
change Before/AfterClass to Before/After
tangiel Sep 16, 2016
0efb3c1
Made table names unique, added sample data to copy table.
kdeus Sep 16, 2016
0626a79
Fixed merge conflicts.
kdeus Sep 16, 2016
b25e3ca
Unique copy table names, and helper functions.
kdeus Sep 16, 2016
32576ac
add testInsertParams, testList, and testCopy
tangiel Sep 16, 2016
c1e416a
Add testExtract, and some minor cleanup.
kdeus Sep 16, 2016
88b9378
Merge branch 'master' into kdeus-snippets
kdeus Sep 16, 2016
7049cd4
Remove unnecessary imports.
kdeus Sep 16, 2016
524f37b
fix TARGET parameters
tangiel Sep 16, 2016
63de7b4
Add tests for Table exists, reload, update, delete
tcoffee-google Sep 16, 2016
c5e31c6
Better extract multiple, and added test for extract single.
kdeus Sep 16, 2016
9c4d44e
Merge branch 'master' into kdeus-snippets
kdeus Sep 16, 2016
b8f6b79
Add test logs for Table exists, reload, update, delete
tcoffee-google Sep 17, 2016
740247c
Added loadList snippet and test.
kdeus Sep 17, 2016
56fd8ed
Load test creates files first.
kdeus Sep 17, 2016
1667315
Merge branch 'master' into kdeus-snippets
kdeus Sep 17, 2016
ee12c01
Added loadSing.e
kdeus Sep 17, 2016
1c03b0a
Add docs for Table exists, reload, update, delete
tcoffee-google Sep 17, 2016
ba88911
run formatter over code
tangiel Sep 17, 2016
9733dcc
update Table javadoc
tangiel Sep 17, 2016
84904d8
TableSnippets: cleanup unused imports
tangiel Sep 19, 2016
2647a4e
cleanup ITTableSnippets
tangiel Sep 19, 2016
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -139,6 +139,13 @@ public Table build() {
/**
* Checks if this table exists.
*
* <p>Example of ensuring that a table exists.
* <pre> {@code
* if (!table.exists()) {
* throw new RuntimeException("Table does not exist.");
* }
* }</pre>
*
* @return {@code true} if this table exists, {@code false} otherwise
* @throws BigQueryException upon failure
*/
Expand All @@ -149,6 +156,13 @@ public boolean exists() {
/**
* Fetches current table's latest information. Returns {@code null} if the table does not exist.
*
* <p>Example of fetching a table's latest information, specifying particular table field options.
* <pre> {@code
* TableField field1 = TableField.LAST_MODIFIED_TIME;
* TableField field2 = TableField.NUM_ROWS;
* Table reloaded = table.reload(TableOption.fields(field1, field2));
* }</pre>
*
* @param options table options
* @return a {@code Table} object with latest information or {@code null} if not found
* @throws BigQueryException upon failure
Expand All @@ -161,6 +175,13 @@ public Table reload(TableOption... options) {
* Updates the table's information with this table's information. Dataset's and table's
* user-defined ids cannot be changed. A new {@code Table} object is returned.
*
* <p>Example of updating a table's information, specifying particular table field options.
* <pre> {@code
* TableField field1 = TableField.LAST_MODIFIED_TIME;
* TableField field2 = TableField.NUM_ROWS;
* Table updated = table.update(TableOption.fields(field1, field2));
* }</pre>
*
 * @param options table options
* @return a {@code Table} object with updated information
* @throws BigQueryException upon failure
Expand All @@ -172,6 +193,11 @@ public Table update(TableOption... options) {
/**
* Deletes this table.
*
* <p>Example of deleting a table.
* <pre> {@code
* table.delete();
* }</pre>
*
* @return {@code true} if table was deleted, {@code false} if it was not found
* @throws BigQueryException upon failure
*/
Expand All @@ -182,6 +208,23 @@ public boolean delete() {
/**
* Insert rows into the table.
*
* <p>Example of inserting rows into a table.
* <pre> {@code
* String rowId1 = "rowId1";
* String rowId2 = "rowId2";
* List<RowToInsert> rows = new ArrayList<>();
* Map<String, Object> row1 = new HashMap<>();
* row1.put("stringField", "value1");
* row1.put("booleanField", true);
* Map<String, Object> row2 = new HashMap<>();
* row2.put("stringField", "value2");
* row2.put("booleanField", false);
* rows.add(RowToInsert.of(rowId1, row1));
* rows.add(RowToInsert.of(rowId2, row2));
* InsertAllResponse response = table.insert(rows);
* // do something with response
* }</pre>
*
* @param rows rows to be inserted
* @throws BigQueryException upon failure
*/
Expand All @@ -193,6 +236,23 @@ public InsertAllResponse insert(Iterable<InsertAllRequest.RowToInsert> rows)
/**
* Insert rows into the table.
*
 * <p>Example of inserting rows into a table, ignoring invalid rows.
* <pre> {@code
* String rowId1 = "rowId1";
* String rowId2 = "rowId2";
* List<RowToInsert> rows = new ArrayList<>();
* Map<String, Object> row1 = new HashMap<>();
* row1.put("stringField", 1);
* row1.put("booleanField", true);
* Map<String, Object> row2 = new HashMap<>();
* row2.put("stringField", "value2");
* row2.put("booleanField", false);
* rows.add(RowToInsert.of(rowId1, row1));
* rows.add(RowToInsert.of(rowId2, row2));
* InsertAllResponse response = table.insert(rows, true, true);
* // do something with response
* }</pre>
*
* @param rows rows to be inserted
* @param skipInvalidRows whether to insert all valid rows, even if invalid rows exist. If not set
* the entire insert operation will fail if rows to be inserted contain an invalid row
Expand All @@ -213,6 +273,12 @@ public InsertAllResponse insert(Iterable<InsertAllRequest.RowToInsert> rows,
/**
* Returns the paginated list rows in this table.
*
* <p>Example of getting a paginated list of rows in a table.
* <pre> {@code
* Page<List<FieldValue>> page = table.list(TableDataListOption.pageSize(100));
* // do something with page
* }</pre>
*
* @param options table data list options
* @throws BigQueryException upon failure
*/
Expand All @@ -225,6 +291,27 @@ public Page<List<FieldValue>> list(TableDataListOption... options)
* Starts a BigQuery Job to copy the current table to the provided destination table. Returns the
* started {@link Job} object.
*
* <p>Example of copying a table to a destination table and dataset referenced by name.
* <pre> {@code
* String datasetName = "my_dataset";
* String tableName = "my_destination_table";
* Job job = table.copy(datasetName, tableName);
*
* // Wait for the job to complete.
* try {
* Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
* WaitForOption.timeout(60, TimeUnit.SECONDS));
* if (completedJob != null && completedJob.status().error() == null) {
* // Job completed successfully.
* } else {
* // Handle error case.
* }
* } catch (InterruptedException | TimeoutException e) {
* // Handle interrupted wait.
* }
 * }</pre>
*
* @param destinationDataset the user-defined id of the destination dataset
* @param destinationTable the user-defined id of the destination table
* @param options job options
Expand All @@ -239,6 +326,29 @@ public Job copy(String destinationDataset, String destinationTable, JobOption...
* Starts a BigQuery Job to copy the current table to the provided destination table. Returns the
* started {@link Job} object.
*
 * <p>Example of copying a table to a destination table referenced by table ID.
* <pre> {@code
* String dataset = "my_dataset";
* String tableName = "copy_destination";
* TableId destinationId = TableId.of(dataset, tableName);
* JobOption options = JobOption.fields(JobField.STATUS, JobField.USER_EMAIL);
*
* Job job = table.copy(destinationId, options);
*
* // Wait for the job to complete.
* try {
* Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
* WaitForOption.timeout(60, TimeUnit.SECONDS));
* if (completedJob != null && completedJob.status().error() == null) {
* // Job completed successfully.
* } else {
* // Handle error case.
* }
* } catch (InterruptedException | TimeoutException e) {
* // Handle interrupted wait.
* }
* }</pre>
*
* @param destinationTable the destination table of the copy job
* @param options job options
* @throws BigQueryException upon failure
Expand All @@ -253,6 +363,26 @@ public Job copy(TableId destinationTable, JobOption... options)
* Starts a BigQuery Job to extract the current table to the provided destination URI. Returns the
* started {@link Job} object.
*
 * <p>Example of extracting data to a single Google Cloud Storage file.
* <pre> {@code
* String format = "CSV";
* String gcsUrl = "gs://myapp.appspot.com/filename.csv";
* Job job = table.extract(format, gcsUrl);
*
* // Wait for the job to complete.
* try {
* Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
* WaitForOption.timeout(60, TimeUnit.SECONDS));
* if (completedJob != null && completedJob.status().error() == null) {
* // Job completed successfully.
* } else {
* // Handle error case.
* }
* } catch (InterruptedException | TimeoutException e) {
* // Handle interrupted wait.
* }
* }</pre>
*
* @param format the format of the extracted data
* @param destinationUri the fully-qualified Google Cloud Storage URI (e.g. gs://bucket/path)
* where the extracted table should be written
Expand All @@ -268,6 +398,31 @@ public Job extract(String format, String destinationUri, JobOption... options)
* Starts a BigQuery Job to extract the current table to the provided destination URIs. Returns
* the started {@link Job} object.
*
 * <p>Example of extracting data to a list of Google Cloud Storage files.
* <pre> {@code
* String format = "CSV";
* String gcsUrl1 = "gs://myapp.appspot.com/PartitionA_*.csv";
* String gcsUrl2 = "gs://myapp.appspot.com/PartitionB_*.csv";
* List<String> destinationUris = new ArrayList<>();
* destinationUris.add(gcsUrl1);
* destinationUris.add(gcsUrl2);
*
* Job job = table.extract(format, destinationUris);
*
* // Wait for the job to complete.
* try {
* Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
* WaitForOption.timeout(60, TimeUnit.SECONDS));
* if (completedJob != null && completedJob.status().error() == null) {
* // Job completed successfully.
* } else {
* // Handle error case.
* }
* } catch (InterruptedException | TimeoutException e) {
* // Handle interrupted wait.
* }
* }</pre>
*
* @param format the format of the exported data
* @param destinationUris the fully-qualified Google Cloud Storage URIs (e.g. gs://bucket/path)
* where the extracted table should be written
Expand All @@ -285,6 +440,25 @@ public Job extract(String format, List<String> destinationUris, JobOption... opt
* Starts a BigQuery Job to load data into the current table from the provided source URI. Returns
* the started {@link Job} object.
*
* <p>Example loading data from a single Google Cloud Storage file.
* <pre> {@code
* String sourceUri = "gs://myapp.appspot.com/filename.csv";
* Job job = table.load(FormatOptions.csv(), sourceUri);
*
* // Wait for the job to complete.
* try {
* Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
* WaitForOption.timeout(60, TimeUnit.SECONDS));
* if (completedJob != null && completedJob.status().error() == null) {
* // Job completed successfully.
* } else {
* // Handle error case.
* }
* } catch (InterruptedException | TimeoutException e) {
* // Handle interrupted wait.
* }
* }</pre>
*
* @param format the format of the data to load
* @param sourceUri the fully-qualified Google Cloud Storage URI (e.g. gs://bucket/path) from
* which to load the data
Expand All @@ -300,6 +474,30 @@ public Job load(FormatOptions format, String sourceUri, JobOption... options)
* Starts a BigQuery Job to load data into the current table from the provided source URIs.
* Returns the started {@link Job} object.
*
* <p>Example loading data from a list of Google Cloud Storage files.
* <pre> {@code
* String gcsUrl1 = "gs://myapp.appspot.com/PartitionA_000000000000.csv";
* String gcsUrl2 = "gs://myapp.appspot.com/PartitionB_000000000000.csv";
* List<String> sourceUris = new ArrayList<>();
* sourceUris.add(gcsUrl1);
* sourceUris.add(gcsUrl2);
*
* Job job = table.load(FormatOptions.csv(), sourceUris);
*
* // Wait for the job to complete.
* try {
* Job completedJob = job.waitFor(WaitForOption.checkEvery(1, TimeUnit.SECONDS),
* WaitForOption.timeout(60, TimeUnit.SECONDS));
* if (completedJob != null && completedJob.status().error() == null) {
* // Job completed successfully.
* } else {
* // Handle error case.
* }
* } catch (InterruptedException | TimeoutException e) {
* // Handle interrupted wait.
* }
* }</pre>
*
 * @param format the format of the data to load
* @param sourceUris the fully-qualified Google Cloud Storage URIs (e.g. gs://bucket/path) from
* which to load the data
Expand Down
Loading