From ec0dc4908311f6753da1807ac698c08fd5a28fbc Mon Sep 17 00:00:00 2001
From: Vadym Matsishevskyi <25311427+vam-google@users.noreply.github.com>
Date: Thu, 10 May 2018 10:54:50 -0700
Subject: [PATCH 0001/3398] Move grpc and proto artifacts to google-cloud-java
from api-client-staging (second part) (#3251)
1) Radically change the structure of the repo. Now the root `pom.xml` file is not deployed to maven and is not a parent pom for any of the other modules, but is simply an aggregator pom, which aggregates mostly mutually independent modules.
2) Update proto and grpc artifact to the latest generated versions as of time of this PR.
3) Add `cloudiot-v1` and `websecurityscanner-v1alpha` proto and grpc artifacts (not released yet even in api-client-staging). Note, gapic clients for these APIs are not added yet.
4) Rename `google-cloud-pom` parent artifact (for manual and gapic clients) to `google-cloud-clients`.
5) Move all manual and gapic clients from root directory to `google-cloud-clients` sub directory.
6) Make `google-cloud-bom` not a child of `google-cloud-clients` (former `google-cloud-pom`) anymore, keep it on root level so it becomes a sibling of `google-cloud-clients` (module which used to be its parent).
7) Similarly make `google-cloud-examples`, `google-cloud-testing` and `google-cloud-util` not children of `google-cloud-clients` and keep them on root level. Make these three modules also excluded from maven deployment (they will not be published to maven anymore).
After this PR is done, additional work is required to fix circleci individual IT tests runs (should be trivial). Also deployment and documentation scripts must be modified accordingly (will be done right after pushing this PR).
---
google-cloud-bigquery/README.md | 248 +++
google-cloud-bigquery/pom.xml | 109 ++
.../cloud/bigquery/benchmark/Benchmark.java | 84 +
.../google/cloud/bigquery/benchmark/README.md | 9 +
.../cloud/bigquery/benchmark/queries.json | 10 +
.../java/com/google/cloud/bigquery/Acl.java | 497 ++++++
.../com/google/cloud/bigquery/BigQuery.java | 1226 ++++++++++++++
.../google/cloud/bigquery/BigQueryError.java | 146 ++
.../cloud/bigquery/BigQueryException.java | 110 ++
.../cloud/bigquery/BigQueryFactory.java | 25 +
.../google/cloud/bigquery/BigQueryImpl.java | 741 +++++++++
.../cloud/bigquery/BigQueryOptions.java | 148 ++
.../cloud/bigquery/CopyJobConfiguration.java | 295 ++++
.../com/google/cloud/bigquery/CsvOptions.java | 288 ++++
.../com/google/cloud/bigquery/Dataset.java | 339 ++++
.../com/google/cloud/bigquery/DatasetId.java | 101 ++
.../google/cloud/bigquery/DatasetInfo.java | 570 +++++++
.../bigquery/DatastoreBackupOptions.java | 105 ++
.../bigquery/EncryptionConfiguration.java | 101 ++
.../bigquery/ExternalTableDefinition.java | 387 +++++
.../bigquery/ExtractJobConfiguration.java | 309 ++++
.../java/com/google/cloud/bigquery/Field.java | 310 ++++
.../com/google/cloud/bigquery/FieldList.java | 118 ++
.../com/google/cloud/bigquery/FieldValue.java | 304 ++++
.../google/cloud/bigquery/FieldValueList.java | 135 ++
.../google/cloud/bigquery/FormatOptions.java | 119 ++
.../cloud/bigquery/GoogleSheetsOptions.java | 123 ++
.../cloud/bigquery/InsertAllRequest.java | 498 ++++++
.../cloud/bigquery/InsertAllResponse.java | 125 ++
.../java/com/google/cloud/bigquery/Job.java | 465 ++++++
.../cloud/bigquery/JobConfiguration.java | 146 ++
.../google/cloud/bigquery/JobException.java | 45 +
.../java/com/google/cloud/bigquery/JobId.java | 124 ++
.../com/google/cloud/bigquery/JobInfo.java | 419 +++++
.../google/cloud/bigquery/JobStatistics.java | 651 ++++++++
.../com/google/cloud/bigquery/JobStatus.java | 195 +++
.../com/google/cloud/bigquery/Labels.java | 95 ++
.../cloud/bigquery/LegacySQLTypeName.java | 110 ++
.../cloud/bigquery/LoadConfiguration.java | 218 +++
.../cloud/bigquery/LoadJobConfiguration.java | 518 ++++++
.../com/google/cloud/bigquery/Option.java | 71 +
.../cloud/bigquery/QueryJobConfiguration.java | 873 ++++++++++
.../cloud/bigquery/QueryParameterValue.java | 375 +++++
.../google/cloud/bigquery/QueryResponse.java | 58 +
.../com/google/cloud/bigquery/QueryStage.java | 764 +++++++++
.../com/google/cloud/bigquery/Schema.java | 109 ++
.../cloud/bigquery/StandardSQLTypeName.java | 51 +
.../bigquery/StandardTableDefinition.java | 232 +++
.../java/com/google/cloud/bigquery/Table.java | 593 +++++++
.../cloud/bigquery/TableDataWriteChannel.java | 168 ++
.../cloud/bigquery/TableDefinition.java | 165 ++
.../com/google/cloud/bigquery/TableId.java | 127 ++
.../com/google/cloud/bigquery/TableInfo.java | 475 ++++++
.../google/cloud/bigquery/TableResult.java | 134 ++
.../cloud/bigquery/TimePartitioning.java | 154 ++
.../google/cloud/bigquery/TimelineSample.java | 124 ++
.../cloud/bigquery/UserDefinedFunction.java | 173 ++
.../google/cloud/bigquery/ViewDefinition.java | 216 +++
.../bigquery/WriteChannelConfiguration.java | 444 +++++
.../google/cloud/bigquery/package-info.java | 46 +
.../bigquery/spi/BigQueryRpcFactory.java | 27 +
.../cloud/bigquery/spi/v2/BigQueryRpc.java | 231 +++
.../bigquery/spi/v2/HttpBigQueryRpc.java | 468 ++++++
.../testing/RemoteBigQueryHelper.java | 156 ++
.../cloud/bigquery/testing/package-info.java | 38 +
.../com/google/cloud/bigquery/AclTest.java | 101 ++
.../cloud/bigquery/BigQueryErrorTest.java | 62 +
.../cloud/bigquery/BigQueryExceptionTest.java | 161 ++
.../cloud/bigquery/BigQueryImplTest.java | 1436 +++++++++++++++++
.../cloud/bigquery/BigQueryOptionsTest.java | 35 +
.../bigquery/CopyJobConfigurationTest.java | 141 ++
.../google/cloud/bigquery/CsvOptionsTest.java | 88 +
.../google/cloud/bigquery/DatasetIdTest.java | 60 +
.../cloud/bigquery/DatasetInfoTest.java | 176 ++
.../google/cloud/bigquery/DatasetTest.java | 387 +++++
.../bigquery/DatastoreBackupOptionsTest.java | 63 +
.../bigquery/ExternalTableDefinitionTest.java | 114 ++
.../bigquery/ExtractJobConfigurationTest.java | 146 ++
.../google/cloud/bigquery/FieldListTest.java | 109 ++
.../com/google/cloud/bigquery/FieldTest.java | 104 ++
.../cloud/bigquery/FieldValueListTest.java | 189 +++
.../google/cloud/bigquery/FieldValueTest.java | 124 ++
.../cloud/bigquery/FormatOptionsTest.java | 58 +
.../bigquery/GoogleSheetsOptionsTest.java | 65 +
.../cloud/bigquery/InsertAllRequestTest.java | 238 +++
.../cloud/bigquery/InsertAllResponseTest.java | 79 +
.../com/google/cloud/bigquery/JobIdTest.java | 62 +
.../google/cloud/bigquery/JobInfoTest.java | 380 +++++
.../cloud/bigquery/JobStatisticsTest.java | 252 +++
.../google/cloud/bigquery/JobStatusTest.java | 67 +
.../com/google/cloud/bigquery/JobTest.java | 470 ++++++
.../com/google/cloud/bigquery/LabelsTest.java | 76 +
.../bigquery/LoadJobConfigurationTest.java | 167 ++
.../com/google/cloud/bigquery/OptionTest.java | 65 +
.../bigquery/QueryJobConfigurationTest.java | 170 ++
.../bigquery/QueryParameterValueTest.java | 266 +++
.../google/cloud/bigquery/QueryStageTest.java | 193 +++
.../com/google/cloud/bigquery/SchemaTest.java | 62 +
.../cloud/bigquery/SerializationTest.java | 278 ++++
.../bigquery/StandardTableDefinitionTest.java | 127 ++
.../bigquery/TableDataWriteChannelTest.java | 368 +++++
.../google/cloud/bigquery/TableIdTest.java | 62 +
.../google/cloud/bigquery/TableInfoTest.java | 248 +++
.../cloud/bigquery/TableResultTest.java | 103 ++
.../com/google/cloud/bigquery/TableTest.java | 417 +++++
.../cloud/bigquery/TimePartitioningTest.java | 97 ++
.../cloud/bigquery/TimelineSampleTest.java | 54 +
.../bigquery/UserDefinedFunctionTest.java | 57 +
.../cloud/bigquery/ViewDefinitionTest.java | 109 ++
.../WriteChannelConfigurationTest.java | 177 ++
.../cloud/bigquery/it/ITBigQueryTest.java | 1318 +++++++++++++++
.../bigquery/spi/v2/HttpBigQueryRpcTest.java | 47 +
.../testing/RemoteBigQueryHelperTest.java | 94 ++
113 files changed, 26560 insertions(+)
create mode 100644 google-cloud-bigquery/README.md
create mode 100644 google-cloud-bigquery/pom.xml
create mode 100644 google-cloud-bigquery/src/benchmark/java/com/google/cloud/bigquery/benchmark/Benchmark.java
create mode 100644 google-cloud-bigquery/src/benchmark/java/com/google/cloud/bigquery/benchmark/README.md
create mode 100644 google-cloud-bigquery/src/benchmark/java/com/google/cloud/bigquery/benchmark/queries.json
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Acl.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuery.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryError.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryException.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryFactory.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryImpl.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CopyJobConfiguration.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CsvOptions.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Dataset.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetId.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetInfo.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatastoreBackupOptions.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/EncryptionConfiguration.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExternalTableDefinition.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExtractJobConfiguration.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Field.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldList.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldValue.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldValueList.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FormatOptions.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/GoogleSheetsOptions.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/InsertAllRequest.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/InsertAllResponse.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Job.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobConfiguration.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobException.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobId.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobInfo.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobStatistics.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/JobStatus.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Labels.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LegacySQLTypeName.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LoadConfiguration.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/LoadJobConfiguration.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Option.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryJobConfiguration.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryParameterValue.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryResponse.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/QueryStage.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Schema.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/StandardSQLTypeName.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/StandardTableDefinition.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Table.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableDataWriteChannel.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableDefinition.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableId.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableInfo.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TableResult.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TimePartitioning.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/TimelineSample.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/UserDefinedFunction.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ViewDefinition.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/WriteChannelConfiguration.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/package-info.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/BigQueryRpcFactory.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/BigQueryRpc.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpc.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelper.java
create mode 100644 google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/testing/package-info.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/AclTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryErrorTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryExceptionTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryImplTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/BigQueryOptionsTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CopyJobConfigurationTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/CsvOptionsTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetIdTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetInfoTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatasetTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/DatastoreBackupOptionsTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExternalTableDefinitionTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ExtractJobConfigurationTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldListTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueListTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FieldValueTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/FormatOptionsTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/GoogleSheetsOptionsTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/InsertAllRequestTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/InsertAllResponseTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobIdTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobInfoTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobStatisticsTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobStatusTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/JobTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/LabelsTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/LoadJobConfigurationTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/OptionTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryJobConfigurationTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryParameterValueTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/QueryStageTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/SchemaTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/SerializationTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/StandardTableDefinitionTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableDataWriteChannelTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableIdTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableInfoTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableResultTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TableTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimePartitioningTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/TimelineSampleTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/UserDefinedFunctionTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/ViewDefinitionTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/WriteChannelConfigurationTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/it/ITBigQueryTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/spi/v2/HttpBigQueryRpcTest.java
create mode 100644 google-cloud-bigquery/src/test/java/com/google/cloud/bigquery/testing/RemoteBigQueryHelperTest.java
diff --git a/google-cloud-bigquery/README.md b/google-cloud-bigquery/README.md
new file mode 100644
index 000000000000..075ea091617a
--- /dev/null
+++ b/google-cloud-bigquery/README.md
@@ -0,0 +1,248 @@
+Google Cloud Java Client for BigQuery
+====================================
+
+Java idiomatic client for [Google Cloud BigQuery][cloud-bigquery].
+
+[](https://circleci.com/gh/GoogleCloudPlatform/google-cloud-java/tree/master)
+[](https://coveralls.io/r/GoogleCloudPlatform/google-cloud-java?branch=master)
+[]( https://img.shields.io/maven-central/v/com.google.cloud/google-cloud-bigquery.svg)
+[](https://www.codacy.com/app/mziccard/google-cloud-java)
+[](https://www.versioneye.com/user/projects/58fe4c8d6ac171426c414772)
+
+- [Product Documentation][bigquery-product-docs]
+- [Client Library Documentation][bigquery-client-lib-docs]
+
+Quickstart
+----------
+[//]: # ({x-version-update-start:google-cloud-bigquery:released})
+If you are using Maven, add this to your pom.xml file
+```xml
+
+ com.google.cloud
+ google-cloud-bigquery
+ 1.29.0
+
+```
+If you are using Gradle, add this to your dependencies
+```Groovy
+compile 'com.google.cloud:google-cloud-bigquery:1.29.0'
+```
+If you are using SBT, add this to your dependencies
+```Scala
+libraryDependencies += "com.google.cloud" % "google-cloud-bigquery" % "1.29.0"
+```
+[//]: # ({x-version-update-end})
+
+Example Application
+-------------------
+- [`BigQueryExample`](../google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/BigQueryExample.java) - A simple command line interface providing some of Cloud BigQuery's functionality.
+Read more about using this application on the [`BigQueryExample` docs page](https://googlecloudplatform.github.io/google-cloud-java/apidocs/?com/google/cloud/examples/bigquery/BigQueryExample.html).
+
+Authentication
+--------------
+
+See the [Authentication](https://github.com/GoogleCloudPlatform/google-cloud-java#authentication) section in the base directory's README.
+
+About Google Cloud BigQuery
+--------------------------
+
+[Google Cloud BigQuery][cloud-bigquery] is a fully managed, NoOps, low cost data analytics service.
+Data can be streamed into BigQuery at millions of rows per second to enable real-time analysis.
+With BigQuery you can easily deploy Petabyte-scale Databases.
+
+Be sure to activate the Google Cloud BigQuery API on the Developer's Console to use BigQuery from
+your project.
+
+See the [BigQuery client library docs][bigquery-client-lib-docs] to learn how to interact
+with Google Cloud BigQuery using this Client Library.
+
+Getting Started
+---------------
+#### Prerequisites
+For this tutorial, you will need a
+[Google Developers Console](https://console.developers.google.com/) project with the BigQuery API
+enabled. You will need to [enable billing](https://support.google.com/cloud/answer/6158867?hl=en) to
+use Google Cloud BigQuery.
+[Follow these instructions](https://cloud.google.com/docs/authentication#preparation) to get your
+project set up. You will also need to set up the local development environment by [installing the
+Google Cloud SDK](https://cloud.google.com/sdk/) and running the following commands in command line:
+`gcloud auth login` and `gcloud config set project [YOUR PROJECT ID]`.
+
+#### Installation and setup
+You'll need to obtain the `google-cloud-bigquery` library. See the [Quickstart](#quickstart) section
+to add `google-cloud-bigquery` as a dependency in your code.
+
+#### Creating an authorized service object
+To make authenticated requests to Google Cloud BigQuery, you must create a service object with
+credentials. You can then make API calls by calling methods on the BigQuery service object. The
+simplest way to authenticate is to use
+[Application Default Credentials](https://developers.google.com/identity/protocols/application-default-credentials).
+These credentials are automatically inferred from your environment, so you only need the following
+code to create your service object:
+
+```java
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQueryOptions;
+
+BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
+```
+
+For other authentication options, see the
+[Authentication](https://github.com/GoogleCloudPlatform/google-cloud-java#authentication) page.
+
+#### Creating a dataset
+With BigQuery you can create datasets. A dataset is a grouping mechanism that holds zero or more
+tables. Add the following import at the top of your file:
+
+```java
+import com.google.cloud.bigquery.DatasetInfo;
+```
+Then, to create the dataset, use the following code:
+
+```java
+// Create a dataset
+String datasetId = "my_dataset_id";
+bigquery.create(DatasetInfo.newBuilder(datasetId).build());
+```
+
+#### Creating a table
+With BigQuery you can create different types of tables: normal tables with an associated schema,
+external tables backed by data stored on [Google Cloud Storage][cloud-storage] and view tables that
+are created from a BigQuery SQL query. In this code snippet we show how to create a normal table
+with only one string field. Add the following imports at the top of your file:
+
+```java
+import com.google.cloud.bigquery.Field;
+import com.google.cloud.bigquery.Schema;
+import com.google.cloud.bigquery.StandardTableDefinition;
+import com.google.cloud.bigquery.Table;
+import com.google.cloud.bigquery.TableId;
+import com.google.cloud.bigquery.TableInfo;
+```
+Then add the following code to create the table:
+
+```java
+TableId tableId = TableId.of(datasetId, "my_table_id");
+// Table field definition
+Field stringField = Field.of("StringField", LegacySQLTypeName.STRING);
+// Table schema definition
+Schema schema = Schema.of(stringField);
+// Create a table
+StandardTableDefinition tableDefinition = StandardTableDefinition.of(schema);
+Table createdTable = bigquery.create(TableInfo.of(tableId, tableDefinition));
+```
+
+#### Loading data into a table
+BigQuery provides several ways to load data into a table: streaming rows or loading data from a
+Google Cloud Storage file. In this code snippet we show how to stream rows into a table.
+Add the following imports at the top of your file:
+
+```java
+import com.google.cloud.bigquery.InsertAllRequest;
+import com.google.cloud.bigquery.InsertAllResponse;
+
+import java.util.HashMap;
+import java.util.Map;
+```
+Then add the following code to insert data:
+
+```java
+Map firstRow = new HashMap<>();
+Map secondRow = new HashMap<>();
+firstRow.put("StringField", "value1");
+secondRow.put("StringField", "value2");
+// Create an insert request
+InsertAllRequest insertRequest = InsertAllRequest.newBuilder(tableId)
+ .addRow(firstRow)
+ .addRow(secondRow)
+ .build();
+// Insert rows
+InsertAllResponse insertResponse = bigquery.insertAll(insertRequest);
+// Check if errors occurred
+if (insertResponse.hasErrors()) {
+ System.out.println("Errors occurred while inserting rows");
+}
+```
+
+#### Querying data
+BigQuery enables querying data by running queries and waiting for the result. Queries can be run
+directly or through a Query Job. In this code snippet we show how to run a query directly and wait
+for the result. Add the following imports at the top of your file:
+
+```java
+import com.google.cloud.bigquery.FieldValueList;
+import com.google.cloud.bigquery.QueryJobConfiguration;
+```
+Then add the following code to run the query and wait for the result:
+
+```java
+// Create a query request
+QueryJobConfiguration queryConfig =
+ QueryJobConfiguration.newBuilder("SELECT my_column FROM my_dataset_id.my_table_id").build();
+// Read rows
+System.out.println("Table rows:");
+for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
+ System.out.println(row);
+}
+```
+#### Complete source code
+
+In
+[InsertDataAndQueryTable.java](../google-cloud-examples/src/main/java/com/google/cloud/examples/bigquery/snippets/InsertDataAndQueryTable.java)
+we put together all the code shown above into one program. The program assumes that you are
+running on Compute Engine or from your own desktop. To run the example on App Engine, simply move
+the code from the main method to your application's servlet class and change the print statements to
+display on your webpage.
+
+Troubleshooting
+---------------
+
+To get help, follow the instructions in the [shared Troubleshooting document](https://github.com/GoogleCloudPlatform/gcloud-common/blob/master/troubleshooting/readme.md#troubleshooting).
+
+Transport
+---------
+BigQuery uses HTTP for the transport layer.
+
+Java Versions
+-------------
+
+Java 7 or above is required for using this client.
+
+Testing
+-------
+
+This library has tools to help make tests for code using Cloud BigQuery.
+
+See [TESTING] to read more about testing.
+
+Versioning
+----------
+
+This library follows [Semantic Versioning](http://semver.org/).
+
+It is currently in a stable major version (``1.y.z``), which means that the
+public API is considered stable and will not change in
+backwards-incompatible ways within the same major version.
+
+Contributing
+------------
+
+Contributions to this library are always welcome and highly encouraged.
+
+See [CONTRIBUTING] for more information on how to get started.
+
+License
+-------
+
+Apache 2.0 - See [LICENSE] for more information.
+
+
+[CONTRIBUTING]:https://github.com/GoogleCloudPlatform/google-cloud-java/blob/master/CONTRIBUTING.md
+[LICENSE]: https://github.com/GoogleCloudPlatform/google-cloud-java/blob/master/LICENSE
+[TESTING]: https://github.com/GoogleCloudPlatform/google-cloud-java/blob/master/TESTING.md#testing-code-that-uses-bigquery
+[cloud-platform]: https://cloud.google.com/
+
+[cloud-bigquery]: https://cloud.google.com/bigquery/
+[cloud-storage]: https://cloud.google.com/storage/
+[bigquery-product-docs]: https://cloud.google.com/bigquery/docs/
+[bigquery-client-lib-docs]: https://googlecloudplatform.github.io/google-cloud-java/latest/apidocs/index.html?com/google/cloud/bigquery/package-summary.html
diff --git a/google-cloud-bigquery/pom.xml b/google-cloud-bigquery/pom.xml
new file mode 100644
index 000000000000..1e5dfc09434b
--- /dev/null
+++ b/google-cloud-bigquery/pom.xml
@@ -0,0 +1,109 @@
+
+
+ 4.0.0
+ google-cloud-bigquery
+ 1.29.1-SNAPSHOT
+ jar
+ Google Cloud BigQuery
+ https://github.com/GoogleCloudPlatform/google-cloud-java/tree/master/google-cloud-bigquery
+
+ Java idiomatic client for Google Cloud BigQuery.
+
+
+ com.google.cloud
+ google-cloud-clients
+ 0.47.1-alpha-SNAPSHOT
+
+
+ google-cloud-bigquery
+
+
+
+ ${project.groupId}
+ google-cloud-core
+
+
+ ${project.groupId}
+ google-cloud-core-http
+
+
+ com.google.auto.value
+ auto-value
+ compile
+
+
+ ${project.groupId}
+ google-cloud-storage
+ test
+
+
+ com.google.apis
+ google-api-services-bigquery
+ compile
+
+
+ com.google.guava
+ guava-jdk5
+
+
+
+
+ ${project.groupId}
+ google-cloud-core
+ test-jar
+ test
+
+
+ junit
+ junit
+ test
+
+
+ com.google.truth
+ truth
+ test
+
+
+ org.easymock
+ easymock
+ test
+
+
+ org.objenesis
+ objenesis
+ test
+
+
+
+
+
+
+ org.codehaus.mojo
+ exec-maven-plugin
+
+ false
+ false
+
+
+
+ org.codehaus.mojo
+ build-helper-maven-plugin
+ 1.7
+
+
+ add-source
+ generate-sources
+
+ add-source
+
+
+
+ src/benchmark/java
+
+
+
+
+
+
+
+
diff --git a/google-cloud-bigquery/src/benchmark/java/com/google/cloud/bigquery/benchmark/Benchmark.java b/google-cloud-bigquery/src/benchmark/java/com/google/cloud/bigquery/benchmark/Benchmark.java
new file mode 100644
index 000000000000..ae3e586303c9
--- /dev/null
+++ b/google-cloud-bigquery/src/benchmark/java/com/google/cloud/bigquery/benchmark/Benchmark.java
@@ -0,0 +1,84 @@
+/*
+ * Copyright 2017 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery.benchmark;
+
+import com.google.api.client.json.jackson.JacksonFactory;
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.BigQueryOptions;
+import com.google.cloud.bigquery.FieldValue;
+import com.google.cloud.bigquery.QueryJobConfiguration;
+import com.google.cloud.bigquery.TableResult;
+import java.io.FileInputStream;
+import java.util.List;
+import org.threeten.bp.Clock;
+import org.threeten.bp.Duration;
+import org.threeten.bp.Instant;
+
+public class Benchmark {
+
+ private static final double NS_PER_SECOND = 1000 * 1000 * 1000;
+
+ private Benchmark() {}
+
+ public static void main(String[] args) throws Exception {
+ if (args.length < 1) {
+ System.out.println("need path to queries.json");
+ return;
+ }
+ String[] requests =
+ new JacksonFactory()
+ .createJsonParser(new FileInputStream(args[0]))
+ .parseAndClose(String[].class);
+
+ Clock clock = Clock.systemUTC();
+ BigQuery bq = BigQueryOptions.getDefaultInstance().getService();
+
+ for (String request : requests) {
+ if (request.isEmpty()) {
+ continue;
+ }
+
+ Instant start = clock.instant();
+ TableResult result =
+ bq.query(QueryJobConfiguration.newBuilder(request).setUseLegacySql(false).build());
+
+ int rows = 0;
+ int cols = 0;
+ Duration firstByte = null;
+      for (List<FieldValue> row : result.iterateAll()) {
+ rows++;
+ if (cols == 0) {
+ cols = row.size();
+ firstByte = Duration.between(start, clock.instant());
+ } else if (cols != row.size()) {
+ throw new IllegalStateException(
+ String.format("expected %d cols, found %d", cols, row.size()));
+ }
+ }
+ Duration total = Duration.between(start, clock.instant());
+
+ assert firstByte != null;
+ double firstByteSec = (double) (firstByte.getNano()) / NS_PER_SECOND + firstByte.getSeconds();
+ double totalSec = (double) (total.getNano()) / NS_PER_SECOND + total.getSeconds();
+
+ System.out.println(
+ String.format(
+ "query \"%s\": read %d rows, %d cols, first byte %f sec, total %f sec",
+ request, rows, cols, firstByteSec, totalSec));
+ }
+ }
+}
diff --git a/google-cloud-bigquery/src/benchmark/java/com/google/cloud/bigquery/benchmark/README.md b/google-cloud-bigquery/src/benchmark/java/com/google/cloud/bigquery/benchmark/README.md
new file mode 100644
index 000000000000..a041db38a072
--- /dev/null
+++ b/google-cloud-bigquery/src/benchmark/java/com/google/cloud/bigquery/benchmark/README.md
@@ -0,0 +1,9 @@
+# BigQuery Benchmark
+This directory contains benchmarks for BigQuery client.
+
+## Usage
+From the `google-cloud-bigquery` directory, run
+`mvn compile exec:java -Dexec.mainClass=com.google.cloud.bigquery.benchmark.Benchmark -Dexec.args="src/benchmark/java/com/google/cloud/bigquery/benchmark/queries.json"`
+
+BigQuery service caches requests so the benchmark should be run
+at least twice, disregarding the first result.
diff --git a/google-cloud-bigquery/src/benchmark/java/com/google/cloud/bigquery/benchmark/queries.json b/google-cloud-bigquery/src/benchmark/java/com/google/cloud/bigquery/benchmark/queries.json
new file mode 100644
index 000000000000..13fed38b52b3
--- /dev/null
+++ b/google-cloud-bigquery/src/benchmark/java/com/google/cloud/bigquery/benchmark/queries.json
@@ -0,0 +1,10 @@
+[
+ "SELECT * FROM `nyc-tlc.yellow.trips` LIMIT 10000",
+ "SELECT * FROM `nyc-tlc.yellow.trips` LIMIT 100000",
+ "SELECT * FROM `nyc-tlc.yellow.trips` LIMIT 1000000",
+ "SELECT title FROM `bigquery-public-data.samples.wikipedia` ORDER BY title LIMIT 1000",
+ "SELECT title, id, timestamp, contributor_ip FROM `bigquery-public-data.samples.wikipedia` WHERE title like 'Blo%' ORDER BY id",
+ "SELECT * FROM `bigquery-public-data.baseball.games_post_wide` ORDER BY gameId",
+ "SELECT * FROM `bigquery-public-data.samples.github_nested` WHERE repository.has_downloads ORDER BY repository.created_at LIMIT 10000",
+ "SELECT repo_name, path FROM `bigquery-public-data.github_repos.files` WHERE path LIKE '%.java' ORDER BY id LIMIT 1000000"
+]
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Acl.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Acl.java
new file mode 100644
index 000000000000..df0ae0a532db
--- /dev/null
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Acl.java
@@ -0,0 +1,497 @@
+/*
+ * Copyright 2015 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.api.core.ApiFunction;
+import com.google.api.services.bigquery.model.Dataset.Access;
+import com.google.cloud.StringEnumType;
+import com.google.cloud.StringEnumValue;
+
+import java.io.Serializable;
+import java.util.Objects;
+
+/**
+ * Access Control for a BigQuery Dataset. BigQuery uses ACLs to manage permissions on datasets. ACLs
+ * are not directly supported on tables. A table inherits its ACL from the dataset that contains it.
+ * Project roles affect your ability to run jobs or manage the project, while dataset roles affect
+ * how you can access or modify the data inside of a project.
+ *
+ * @see <a href="https://cloud.google.com/bigquery/access-control">Access Control</a>
+ */
+public final class Acl implements Serializable {
+
+ private static final long serialVersionUID = 8357269726277191556L;
+
+ private final Entity entity;
+ private final Role role;
+
+ /**
+ * Dataset roles supported by BigQuery.
+ *
+   * @see <a href="https://cloud.google.com/bigquery/access-control#datasetroles">Dataset Roles</a>
+ */
+ public static final class Role extends StringEnumValue {
+ private static final long serialVersionUID = -1992679397135956912L;
+
+    private static final ApiFunction<String, Role> CONSTRUCTOR =
+        new ApiFunction<String, Role>() {
+          @Override
+          public Role apply(String constant) {
+            return new Role(constant);
+          }
+        };
+
+    private static final StringEnumType<Role> type = new StringEnumType<Role>(
+        Role.class,
+        CONSTRUCTOR);
+
+ /**
+ * Can read, query, copy or export tables in the dataset.
+ */
+ public static final Role READER = type.createAndRegister("READER");
+
+ /**
+ * Same as {@link #READER} plus can edit or append data in the dataset.
+ */
+ public static final Role WRITER = type.createAndRegister("WRITER");
+
+ /**
+ * Same as {@link #WRITER} plus can update and delete the dataset.
+ */
+ public static final Role OWNER = type.createAndRegister("OWNER");
+
+ private Role(String constant) {
+ super(constant);
+ }
+
+ /**
+ * Get the Role for the given String constant, and throw an exception if the constant is
+ * not recognized.
+ */
+ public static Role valueOfStrict(String constant) {
+ return type.valueOfStrict(constant);
+ }
+
+ /**
+ * Get the Role for the given String constant, and allow unrecognized values.
+ */
+ public static Role valueOf(String constant) {
+ return type.valueOf(constant);
+ }
+
+ /**
+ * Return the known values for Role.
+ */
+ public static Role[] values() {
+ return type.values();
+ }
+ }
+
+ /**
+ * Base class for BigQuery entities that can be grant access to the dataset.
+ */
+ public abstract static class Entity implements Serializable {
+
+ private static final long serialVersionUID = 8111776788607959944L;
+
+ private final Type type;
+
+ /**
+ * Types of BigQuery entities.
+ */
+ public enum Type {
+ DOMAIN, GROUP, USER, VIEW
+ }
+
+ Entity(Type type) {
+ this.type = type;
+ }
+
+
+ public Type getType() {
+ return type;
+ }
+
+ abstract Access toPb();
+
+ static Entity fromPb(Access access) {
+ if (access.getDomain() != null) {
+ return new Domain(access.getDomain());
+ }
+ if (access.getGroupByEmail() != null) {
+ return new Group(access.getGroupByEmail());
+ }
+ if (access.getSpecialGroup() != null) {
+ return new Group(access.getSpecialGroup());
+ }
+ if (access.getUserByEmail() != null) {
+ return new User(access.getUserByEmail());
+ }
+ if (access.getView() != null) {
+ return new View(TableId.fromPb(access.getView()));
+ }
+ // Unreachable
+ throw new BigQueryException(BigQueryException.UNKNOWN_CODE,
+ "Unrecognized access configuration");
+ }
+ }
+
+ /**
+ * Class for a BigQuery Domain entity. Objects of this class represent a domain to grant access
+ * to. Any users signed in with the domain specified will be granted the specified access.
+ */
+ public static final class Domain extends Entity {
+
+ private static final long serialVersionUID = -3033025857280447253L;
+
+ private final String domain;
+
+ /**
+ * Creates a Domain entity given the domain name.
+ */
+ public Domain(String domain) {
+ super(Type.DOMAIN);
+ this.domain = domain;
+ }
+
+
+ /**
+ * Returns the domain name.
+ */
+ public String getDomain() {
+ return domain;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+ Domain domainEntity = (Domain) obj;
+ return Objects.equals(getType(), domainEntity.getType())
+ && Objects.equals(domain, domainEntity.getDomain());
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(getType(), domain);
+ }
+
+ @Override
+ public String toString() {
+ return toPb().toString();
+ }
+
+ @Override
+ Access toPb() {
+ return new Access().setDomain(domain);
+ }
+ }
+
+ /**
+ * Class for a BigQuery Group entity. Objects of this class represent a group to granted access
+ * to. A Group entity can be created given the group's email or can be a special group:
+ * {@link #ofProjectOwners()}, {@link #ofProjectReaders()}, {@link #ofProjectWriters()} or
+ * {@link #ofAllAuthenticatedUsers()}.
+ */
+ public static final class Group extends Entity {
+
+ private static final String PROJECT_OWNERS = "projectOwners";
+ private static final String PROJECT_READERS = "projectReaders";
+ private static final String PROJECT_WRITERS = "projectWriters";
+ private static final String ALL_AUTHENTICATED_USERS = "allAuthenticatedUsers";
+ private static final long serialVersionUID = 5146829352398103029L;
+
+ private final String identifier;
+
+ /**
+     * Creates a Group entity given its identifier. Identifier can be either a
+     * <a href="https://cloud.google.com/bigquery/docs/reference/v2/datasets#access.specialGroup">
+     * special group identifier</a> or a group email.
+ */
+ public Group(String identifier) {
+ super(Type.GROUP);
+ this.identifier = identifier;
+ }
+
+
+ /**
+     * Returns group's identifier, can be either a
+     * <a href="https://cloud.google.com/bigquery/docs/reference/v2/datasets#access.specialGroup">
+     * special group identifier</a> or a group email.
+ */
+ public String getIdentifier() {
+ return identifier;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+ Group group = (Group) obj;
+ return Objects.equals(getType(), group.getType())
+ && Objects.equals(identifier, group.identifier);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(getType(), identifier);
+ }
+
+ @Override
+ public String toString() {
+ return toPb().toString();
+ }
+
+ @Override
+ Access toPb() {
+ switch (identifier) {
+ case PROJECT_OWNERS:
+ return new Access().setSpecialGroup(PROJECT_OWNERS);
+ case PROJECT_READERS:
+ return new Access().setSpecialGroup(PROJECT_READERS);
+ case PROJECT_WRITERS:
+ return new Access().setSpecialGroup(PROJECT_WRITERS);
+ case ALL_AUTHENTICATED_USERS:
+ return new Access().setSpecialGroup(ALL_AUTHENTICATED_USERS);
+ default:
+ return new Access().setGroupByEmail(identifier);
+ }
+ }
+
+ /**
+ * Returns a Group entity representing all project's owners.
+ */
+ public static Group ofProjectOwners() {
+ return new Group(PROJECT_OWNERS);
+ }
+
+ /**
+ * Returns a Group entity representing all project's readers.
+ */
+ public static Group ofProjectReaders() {
+ return new Group(PROJECT_READERS);
+ }
+
+ /**
+ * Returns a Group entity representing all project's writers.
+ */
+ public static Group ofProjectWriters() {
+ return new Group(PROJECT_WRITERS);
+ }
+
+ /**
+ * Returns a Group entity representing all BigQuery authenticated users.
+ */
+ public static Group ofAllAuthenticatedUsers() {
+ return new Group(ALL_AUTHENTICATED_USERS);
+ }
+ }
+
+ /**
+ * Class for a BigQuery User entity. Objects of this class represent a user to grant access to
+ * given the email address.
+ */
+ public static final class User extends Entity {
+
+ private static final long serialVersionUID = -4942821351073996141L;
+
+ private final String email;
+
+ /**
+ * Creates a User entity given the user's email.
+ */
+ public User(String email) {
+ super(Type.USER);
+ this.email = email;
+ }
+
+
+ /**
+ * Returns user's email.
+ */
+ public String getEmail() {
+ return email;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+ User user = (User) obj;
+ return Objects.equals(getType(), user.getType()) && Objects.equals(email, user.email);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(getType(), email);
+ }
+
+ @Override
+ public String toString() {
+ return toPb().toString();
+ }
+
+ @Override
+ Access toPb() {
+ return new Access().setUserByEmail(email);
+ }
+ }
+
+ /**
+ * Class for a BigQuery View entity. Objects of this class represent a view from a different
+ * dataset to grant access to. Queries executed against that view will have read access to tables
+ * in this dataset. The role field is not required when this field is set. If that view is updated
+ * by any user, access to the view needs to be granted again via an update operation.
+ */
+ public static final class View extends Entity {
+
+ private static final long serialVersionUID = -6851072781269419383L;
+
+ private final TableId id;
+
+ /**
+ * Creates a View entity given the view's id.
+ */
+ public View(TableId id) {
+ super(Type.VIEW);
+ this.id = id;
+ }
+
+
+ /**
+ * Returns table's identity.
+ */
+ public TableId getId() {
+ return id;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+ View view = (View) obj;
+ return Objects.equals(getType(), view.getType()) && Objects.equals(id, view.id);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(getType(), id);
+ }
+
+ @Override
+ public String toString() {
+ return toPb().toString();
+ }
+
+ @Override
+ Access toPb() {
+ return new Access().setView(id.toPb());
+ }
+ }
+
+ private Acl(Entity entity, Role role) {
+ this.entity = checkNotNull(entity);
+ this.role = role;
+ }
+
+
+ /**
+ * Returns the entity for this ACL.
+ */
+ public Entity getEntity() {
+ return entity;
+ }
+
+
+ /**
+ * Returns the role specified by this ACL.
+ */
+ public Role getRole() {
+ return role;
+ }
+
+ /**
+ * Returns an Acl object.
+ *
+ * @param entity the entity for this ACL object
+ * @param role the role to associate to the {@code entity} object
+ */
+ public static Acl of(Entity entity, Role role) {
+ return new Acl(entity, role);
+ }
+
+ /**
+ * Returns an Acl object for a view entity.
+ */
+ public static Acl of(View view) {
+ return new Acl(view, null);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(entity, role);
+ }
+
+ @Override
+ public String toString() {
+ return toPb().toString();
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ if (obj == null || getClass() != obj.getClass()) {
+ return false;
+ }
+ final Acl other = (Acl) obj;
+ return Objects.equals(this.entity, other.entity)
+ && Objects.equals(this.role, other.role);
+ }
+
+ Access toPb() {
+ Access accessPb = entity.toPb();
+ if (role != null) {
+ accessPb.setRole(role.name());
+ }
+ return accessPb;
+ }
+
+ static Acl fromPb(Access access) {
+ return Acl.of(Entity.fromPb(access),
+ access.getRole() != null ? Role.valueOf(access.getRole()) : null);
+ }
+}
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuery.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuery.java
new file mode 100644
index 000000000000..bab3d3370518
--- /dev/null
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQuery.java
@@ -0,0 +1,1226 @@
+/*
+ * Copyright 2015 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery;
+
+import static com.google.common.base.Preconditions.checkArgument;
+
+import com.google.api.core.InternalApi;
+import com.google.api.gax.paging.Page;
+import com.google.cloud.FieldSelector;
+import com.google.cloud.FieldSelector.Helper;
+import com.google.cloud.RetryOption;
+import com.google.cloud.Service;
+import com.google.cloud.bigquery.spi.v2.BigQueryRpc;
+import com.google.common.base.Function;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * An interface for Google Cloud BigQuery.
+ *
+ * @see <a href="https://cloud.google.com/bigquery/">Google Cloud BigQuery</a>
+ */
+public interface BigQuery extends Service<BigQueryOptions> {
+
+ /**
+ * Fields of a BigQuery Dataset resource.
+ *
+   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/datasets#resource">Dataset
+   * Resource</a>
+ */
+ enum DatasetField implements FieldSelector {
+ ACCESS("access"),
+ CREATION_TIME("creationTime"),
+ DATASET_REFERENCE("datasetReference"),
+ DEFAULT_TABLE_EXPIRATION_MS("defaultTableExpirationMsS"),
+ DESCRIPTION("description"),
+ ETAG("etag"),
+ FRIENDLY_NAME("friendlyName"),
+ ID("id"),
+ LABELS("labels"),
+ LAST_MODIFIED_TIME("lastModifiedTime"),
+ LOCATION("location"),
+ SELF_LINK("selfLink");
+
+    static final List<? extends FieldSelector> REQUIRED_FIELDS =
+ ImmutableList.of(DATASET_REFERENCE);
+
+ private final String selector;
+
+ DatasetField(String selector) {
+ this.selector = selector;
+ }
+
+
+ @Override
+ public String getSelector() {
+ return selector;
+ }
+ }
+
+ /**
+ * Fields of a BigQuery Table resource.
+ *
+   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/tables#resource">Table
+   * Resource</a>
+ */
+ enum TableField implements FieldSelector {
+ CREATION_TIME("creationTime"),
+ DESCRIPTION("description"),
+ ETAG("etag"),
+ EXPIRATION_TIME("expirationTime"),
+ EXTERNAL_DATA_CONFIGURATION("externalDataConfiguration"),
+ FRIENDLY_NAME("friendlyName"),
+ ID("id"),
+ LAST_MODIFIED_TIME("lastModifiedTime"),
+ LOCATION("location"),
+ NUM_BYTES("numBytes"),
+ NUM_ROWS("numRows"),
+ SCHEMA("schema"),
+ SELF_LINK("selfLink"),
+ STREAMING_BUFFER("streamingBuffer"),
+ TABLE_REFERENCE("tableReference"),
+ TIME_PARTITIONING("timePartitioning"),
+ TYPE("type"),
+ VIEW("view");
+
+    static final List<? extends FieldSelector> REQUIRED_FIELDS =
+ ImmutableList.of(TABLE_REFERENCE, TYPE);
+
+ private final String selector;
+
+ TableField(String selector) {
+ this.selector = selector;
+ }
+
+
+ @Override
+ public String getSelector() {
+ return selector;
+ }
+ }
+
+ /**
+ * Fields of a BigQuery Job resource.
+ *
+   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/jobs#resource">Job Resource</a>
+ *
+ */
+ enum JobField implements FieldSelector {
+ CONFIGURATION("configuration"),
+ ETAG("etag"),
+ ID("id"),
+ JOB_REFERENCE("jobReference"),
+ SELF_LINK("selfLink"),
+ STATISTICS("statistics"),
+ STATUS("status"),
+ USER_EMAIL("user_email");
+
+    static final List<? extends FieldSelector> REQUIRED_FIELDS =
+ ImmutableList.of(JOB_REFERENCE, CONFIGURATION);
+
+ private final String selector;
+
+ JobField(String selector) {
+ this.selector = selector;
+ }
+
+
+ @Override
+ public String getSelector() {
+ return selector;
+ }
+ }
+
+ /**
+ * Class for specifying dataset list options.
+ */
+ class DatasetListOption extends Option {
+
+ private static final long serialVersionUID = 8660294969063340498L;
+
+ private DatasetListOption(BigQueryRpc.Option option, Object value) {
+ super(option, value);
+ }
+
+ /**
+ * Returns an option to specify the maximum number of datasets returned per page.
+ */
+ public static DatasetListOption pageSize(long pageSize) {
+ return new DatasetListOption(BigQueryRpc.Option.MAX_RESULTS, pageSize);
+ }
+
+ /**
+ * Returns an option to specify the page token from which to start listing datasets.
+ */
+ public static DatasetListOption pageToken(String pageToken) {
+ return new DatasetListOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken);
+ }
+
+ /**
+ * Returns an options to list all datasets, even hidden ones.
+ */
+ public static DatasetListOption all() {
+ return new DatasetListOption(BigQueryRpc.Option.ALL_DATASETS, true);
+ }
+ }
+
+ /**
+ * Class for specifying dataset get, create and update options.
+ */
+ class DatasetOption extends Option {
+
+ private static final long serialVersionUID = 1674133909259913250L;
+
+ private DatasetOption(BigQueryRpc.Option option, Object value) {
+ super(option, value);
+ }
+
+ /**
+ * Returns an option to specify the dataset's fields to be returned by the RPC call. If this
+ * option is not provided all dataset's fields are returned. {@code DatasetOption.fields} can
+ * be used to specify only the fields of interest. {@link Dataset#getDatasetId()} is always
+ * returned, even if not specified.
+ */
+ public static DatasetOption fields(DatasetField... fields) {
+ return new DatasetOption(BigQueryRpc.Option.FIELDS,
+ Helper.selector(DatasetField.REQUIRED_FIELDS, fields));
+ }
+ }
+
+ /**
+ * Class for specifying dataset delete options.
+ */
+ class DatasetDeleteOption extends Option {
+
+ private static final long serialVersionUID = -7166083569900951337L;
+
+ private DatasetDeleteOption(BigQueryRpc.Option option, Object value) {
+ super(option, value);
+ }
+
+ /**
+ * Returns an option to delete a dataset even if non-empty. If not provided, attempting to
+ * delete a non-empty dataset will result in a {@link BigQueryException} being thrown.
+ */
+ public static DatasetDeleteOption deleteContents() {
+ return new DatasetDeleteOption(BigQueryRpc.Option.DELETE_CONTENTS, true);
+ }
+ }
+
+ /**
+ * Class for specifying table list options.
+ */
+ class TableListOption extends Option {
+
+ private static final long serialVersionUID = 8660294969063340498L;
+
+ private TableListOption(BigQueryRpc.Option option, Object value) {
+ super(option, value);
+ }
+
+ /**
+ * Returns an option to specify the maximum number of tables returned per page.
+ */
+ public static TableListOption pageSize(long pageSize) {
+ checkArgument(pageSize >= 0);
+ return new TableListOption(BigQueryRpc.Option.MAX_RESULTS, pageSize);
+ }
+
+ /**
+ * Returns an option to specify the page token from which to start listing tables.
+ */
+ public static TableListOption pageToken(String pageToken) {
+ return new TableListOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken);
+ }
+ }
+
+ /**
+ * Class for specifying table get, create and update options.
+ */
+ class TableOption extends Option {
+
+ private static final long serialVersionUID = -1723870134095936772L;
+
+ private TableOption(BigQueryRpc.Option option, Object value) {
+ super(option, value);
+ }
+
+ /**
+ * Returns an option to specify the table's fields to be returned by the RPC call. If this
+ * option is not provided all table's fields are returned. {@code TableOption.fields} can be
+ * used to specify only the fields of interest. {@link Table#getTableId()} and type (which is part
+ * of {@link Table#getDefinition()}) are always returned, even if not specified.
+ */
+ public static TableOption fields(TableField... fields) {
+ return new TableOption(BigQueryRpc.Option.FIELDS,
+ Helper.selector(TableField.REQUIRED_FIELDS, fields));
+ }
+ }
+
+ /**
+ * Class for specifying table data list options.
+ */
+ class TableDataListOption extends Option {
+
+ private static final long serialVersionUID = 8488823381738864434L;
+
+ private TableDataListOption(BigQueryRpc.Option option, Object value) {
+ super(option, value);
+ }
+
+ /**
+ * Returns an option to specify the maximum number of rows returned per page.
+ */
+ public static TableDataListOption pageSize(long pageSize) {
+ checkArgument(pageSize >= 0);
+ return new TableDataListOption(BigQueryRpc.Option.MAX_RESULTS, pageSize);
+ }
+
+ /**
+ * Returns an option to specify the page token from which to start listing table data.
+ */
+ public static TableDataListOption pageToken(String pageToken) {
+ return new TableDataListOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken);
+ }
+
+ /**
+ * Returns an option that sets the zero-based index of the row from which to start listing table
+ * data.
+ */
+ public static TableDataListOption startIndex(long index) {
+ checkArgument(index >= 0);
+ return new TableDataListOption(BigQueryRpc.Option.START_INDEX, index);
+ }
+ }
+
+ /**
+ * Class for specifying job list options.
+ */
+ class JobListOption extends Option {
+
+ private static final long serialVersionUID = -8207122131226481423L;
+
+ private JobListOption(BigQueryRpc.Option option, Object value) {
+ super(option, value);
+ }
+
+ /**
+ * Returns an option to list all jobs, even the ones issued by other users.
+ */
+ public static JobListOption allUsers() {
+ return new JobListOption(BigQueryRpc.Option.ALL_USERS, true);
+ }
+
+ /**
+ * Returns an option to list only jobs that match the provided state filters.
+ */
+ public static JobListOption stateFilter(JobStatus.State... stateFilters) {
+      List<String> stringFilters = Lists.transform(ImmutableList.copyOf(stateFilters),
+          new Function<JobStatus.State, String>() {
+ @Override
+ public String apply(JobStatus.State state) {
+ return state.name().toLowerCase();
+ }
+ });
+ return new JobListOption(BigQueryRpc.Option.STATE_FILTER, stringFilters);
+ }
+
+ /**
+ * Returns an option to specify the maximum number of jobs returned per page.
+ */
+ public static JobListOption pageSize(long pageSize) {
+ checkArgument(pageSize >= 0);
+ return new JobListOption(BigQueryRpc.Option.MAX_RESULTS, pageSize);
+ }
+
+ /**
+ * Returns an option to specify the page token from which to start listing jobs.
+ */
+ public static JobListOption pageToken(String pageToken) {
+ return new JobListOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken);
+ }
+
+ /**
+ * Returns an option to specify the job's fields to be returned by the RPC call. If this option
+ * is not provided all job's fields are returned. {@code JobOption.fields()} can be used to
+ * specify only the fields of interest. {@link Job#getJobId()}, {@link JobStatus#getState()},
+ * {@link JobStatus#getError()} as well as type-specific configuration (e.g.
+ * {@link QueryJobConfiguration#getQuery()} for Query Jobs) are always returned, even if not
+ * specified. {@link JobField#SELF_LINK} and {@link JobField#ETAG} can not be selected when
+ * listing jobs.
+ */
+ public static JobListOption fields(JobField... fields) {
+ return new JobListOption(BigQueryRpc.Option.FIELDS,
+ Helper.listSelector("jobs", JobField.REQUIRED_FIELDS, fields, "state", "errorResult"));
+ }
+ }
+
+ /**
+ * Class for specifying table get and create options.
+ */
+ class JobOption extends Option {
+
+ private static final long serialVersionUID = -3111736712316353665L;
+
+ private JobOption(BigQueryRpc.Option option, Object value) {
+ super(option, value);
+ }
+
+ /**
+ * Returns an option to specify the job's fields to be returned by the RPC call. If this option
+ * is not provided all job's fields are returned. {@code JobOption.fields()} can be used to
+ * specify only the fields of interest. {@link Job#getJobId()} as well as type-specific
+ * configuration (e.g. {@link QueryJobConfiguration#getQuery()} for Query Jobs) are always
+ * returned, even if not specified.
+ */
+ public static JobOption fields(JobField... fields) {
+ return new JobOption(BigQueryRpc.Option.FIELDS,
+ Helper.selector(JobField.REQUIRED_FIELDS, fields));
+ }
+ }
+
+ /**
+ * Class for specifying query results options.
+ */
+ class QueryResultsOption extends Option {
+
+ private static final long serialVersionUID = 3788898503226985525L;
+
+ private QueryResultsOption(BigQueryRpc.Option option, Object value) {
+ super(option, value);
+ }
+
+ /**
+ * Returns an option to specify the maximum number of rows returned per page.
+ */
+ public static QueryResultsOption pageSize(long pageSize) {
+ checkArgument(pageSize >= 0);
+ return new QueryResultsOption(BigQueryRpc.Option.MAX_RESULTS, pageSize);
+ }
+
+ /**
+ * Returns an option to specify the page token from which to start getting query results.
+ */
+ public static QueryResultsOption pageToken(String pageToken) {
+ return new QueryResultsOption(BigQueryRpc.Option.PAGE_TOKEN, pageToken);
+ }
+
+ /**
+ * Returns an option that sets the zero-based index of the row from which to start getting query
+ * results.
+ */
+ public static QueryResultsOption startIndex(long startIndex) {
+ checkArgument(startIndex >= 0);
+ return new QueryResultsOption(BigQueryRpc.Option.START_INDEX, startIndex);
+ }
+
+ /**
+ * Returns an option that sets how long to wait for the query to complete, in milliseconds,
+ * before returning. Default is 10 seconds.
+ */
+ public static QueryResultsOption maxWaitTime(long maxWaitTime) {
+ checkArgument(maxWaitTime >= 0);
+ return new QueryResultsOption(BigQueryRpc.Option.TIMEOUT, maxWaitTime);
+ }
+ }
+
+ class QueryOption implements Serializable {
+ private static final long serialVersionUID = 6206193419355824689L;
+
+ private final Object option;
+
+ private QueryOption(Object option) {
+ this.option = option;
+ }
+
+ public QueryResultsOption getQueryResultsOption() {
+ return option instanceof QueryResultsOption ? (QueryResultsOption) option : null;
+ }
+
+ public RetryOption getRetryOption() {
+ return option instanceof RetryOption ? (RetryOption) option : null;
+ }
+
+ static QueryResultsOption[] filterQueryResultsOptions(QueryOption... options) {
+      List<QueryResultsOption> queryResultOptions = new ArrayList<>(options.length);
+ for (QueryOption opt : options) {
+ if (opt.getQueryResultsOption() != null) {
+ queryResultOptions.add(opt.getQueryResultsOption());
+ }
+ }
+ return queryResultOptions.toArray(new QueryResultsOption[queryResultOptions.size()]);
+ }
+
+ static RetryOption[] filterRetryOptions(QueryOption... options) {
+      List<RetryOption> retryOptions = new ArrayList<>(options.length);
+ for (QueryOption opt : options) {
+ if (opt.getRetryOption() != null) {
+ retryOptions.add(opt.getRetryOption());
+ }
+ }
+ return retryOptions.toArray(new RetryOption[retryOptions.size()]);
+ }
+
+ public static QueryOption of(QueryResultsOption resultsOption) {
+ return new QueryOption(resultsOption);
+ }
+
+ public static QueryOption of(RetryOption waitOption) {
+ return new QueryOption(waitOption);
+ }
+
+ @Override
+ public boolean equals(Object o) {
+ if (this == o) {
+ return true;
+ }
+ if (o == null || getClass() != o.getClass()) {
+ return false;
+ }
+
+ QueryOption that = (QueryOption) o;
+
+ return option != null ? option.equals(that.option) : that.option == null;
+ }
+
+ @Override
+ public int hashCode() {
+ return option != null ? option.hashCode() : 0;
+ }
+ }
+
+
+ /**
+ * Creates a new dataset.
+ *
+ * <p>Example of creating a dataset.
+ *
+ * <pre>{@code
+ * String datasetName = "my_dataset_name";
+ * Dataset dataset = null;
+ * DatasetInfo datasetInfo = DatasetInfo.newBuilder(datasetName).build();
+ * try {
+ *   dataset = bigquery.create(datasetInfo);
+ *   // the dataset was created
+ * } catch (BigQueryException e) {
+ *   // the dataset was not created
+ * }
+ * }</pre>
+ *
+ * @throws BigQueryException upon failure
+ */
+ Dataset create(DatasetInfo datasetInfo, DatasetOption... options);
+
+ /**
+ * Returns the requested dataset.
+ *
+ * <p>NOTE(review): this hunk looks truncated — the Javadoc here originally read "Creates a new
+ * table", which does not match the declaration below; summary restored from the signature.
+ *
+ * @throws BigQueryException upon failure
+ */
+ Dataset getDataset(DatasetId datasetId, DatasetOption... options);
+
+ /**
+ * Lists the project's datasets. This method returns partial information on each dataset:
+ * ({@link Dataset#getDatasetId()}, {@link Dataset#getFriendlyName()} and
+ * {@link Dataset#getGeneratedId()}). To get complete information use either
+ * {@link #getDataset(String, DatasetOption...)} or
+ * {@link #getDataset(DatasetId, DatasetOption...)}.
+ *
+ * <p>Example of listing datasets, specifying the page size.
+ *
+ * <pre>{@code
+ * // List datasets in the default project
+ * Page<Dataset> datasets = bigquery.listDatasets(DatasetListOption.pageSize(100));
+ * for (Dataset dataset : datasets.iterateAll()) {
+ *   // do something with the dataset
+ * }
+ * }</pre>
+ *
+ * @throws BigQueryException upon failure
+ */
+ // NOTE(review): return type was stripped to raw "Page" in this hunk; restored to Page<Dataset>.
+ Page<Dataset> listDatasets(DatasetListOption... options);
+
+ /**
+ * Deletes the requested dataset.
+ *
+ * <p>NOTE(review): this hunk appears truncated — the Javadoc originally began with the
+ * description and example for {@code listDatasets(String, DatasetListOption...)}, but that
+ * declaration is missing here; the comment documents the delete overload actually declared.
+ *
+ * <p>Example of deleting a dataset from its id, even if non-empty.
+ *
+ * <pre>{@code
+ * String datasetName = "my_dataset_name";
+ * boolean deleted = bigquery.delete(datasetName, DatasetDeleteOption.deleteContents());
+ * if (deleted) {
+ *   // the dataset was deleted
+ * } else {
+ *   // the dataset was not found
+ * }
+ * }</pre>
+ *
+ * @return {@code true} if dataset was deleted, {@code false} if it was not found
+ * @throws BigQueryException upon failure
+ */
+ boolean delete(String datasetId, DatasetDeleteOption... options);
+
+ /**
+ * Deletes the requested dataset.
+ *
+ * <p>Example of deleting a dataset, even if non-empty.
+ *
+ * <pre>{@code
+ * String projectId = "my_project_id";
+ * String datasetName = "my_dataset_name";
+ * DatasetId datasetId = DatasetId.of(projectId, datasetName);
+ * boolean deleted = bigquery.delete(datasetId, DatasetDeleteOption.deleteContents());
+ * if (deleted) {
+ *   // the dataset was deleted
+ * } else {
+ *   // the dataset was not found
+ * }
+ * }</pre>
+ *
+ * @return {@code true} if dataset was deleted, {@code false} if it was not found
+ * @throws BigQueryException upon failure
+ */
+ boolean delete(DatasetId datasetId, DatasetDeleteOption... options);
+
+ /**
+ * Deletes the requested table.
+ *
+ * <p>Example of deleting a table.
+ *
+ * <pre>{@code
+ * String datasetName = "my_dataset_name";
+ * String tableName = "my_table_name";
+ * boolean deleted = bigquery.delete(datasetName, tableName);
+ * if (deleted) {
+ *   // the table was deleted
+ * } else {
+ *   // the table was not found
+ * }
+ * }</pre>
+ *
+ * @return {@code true} if table was deleted, {@code false} if it was not found
+ * @throws BigQueryException upon failure
+ */
+ boolean delete(String datasetId, String tableId);
+
+ /**
+ * Deletes the requested table.
+ *
+ * <p>Example of deleting a table.
+ *
+ * <pre>{@code
+ * String projectId = "my_project_id";
+ * String datasetName = "my_dataset_name";
+ * String tableName = "my_table_name";
+ * TableId tableId = TableId.of(projectId, datasetName, tableName);
+ * boolean deleted = bigquery.delete(tableId);
+ * if (deleted) {
+ *   // the table was deleted
+ * } else {
+ *   // the table was not found
+ * }
+ * }</pre>
+ *
+ * @return {@code true} if table was deleted, {@code false} if it was not found
+ * @throws BigQueryException upon failure
+ */
+ boolean delete(TableId tableId);
+
+ /**
+ * Returns the requested table.
+ *
+ * <p>NOTE(review): the Javadoc in this hunk read "Updates dataset information", which does not
+ * match the declaration; it appears truncated by the diff. Summary restored from the signature.
+ *
+ * @throws BigQueryException upon failure
+ */
+ Table getTable(TableId tableId, TableOption... options);
+
+ /**
+ * Lists the tables in the dataset. This method returns partial information on each table:
+ * ({@link Table#getTableId()}, {@link Table#getFriendlyName()}, {@link Table#getGeneratedId()}
+ * and type, which is part of {@link Table#getDefinition()}). To get complete information use
+ * either {@link #getTable(TableId, TableOption...)} or
+ * {@link #getTable(String, String, TableOption...)}.
+ *
+ * <p>Example of listing the tables in a dataset, specifying the page size.
+ *
+ * <pre>{@code
+ * String datasetName = "my_dataset_name";
+ * Page<Table> tables = bigquery.listTables(datasetName, TableListOption.pageSize(100));
+ * for (Table table : tables.iterateAll()) {
+ *   // do something with the table
+ * }
+ * }</pre>
+ *
+ * @throws BigQueryException upon failure
+ */
+ // NOTE(review): the declaration was split across two lines and its type parameter stripped in
+ // this hunk; rejoined and restored to Page<Table>.
+ Page<Table> listTables(String datasetId, TableListOption... options);
+
+ /**
+ * Lists the table's rows.
+ *
+ * <p>NOTE(review): this hunk appears truncated — the Javadoc originally began with the
+ * {@code listTables(DatasetId, ...)} description, which does not match the declaration below;
+ * the comment documents the listTableData overload actually declared.
+ *
+ * <p>Example of listing table rows, specifying the page size.
+ *
+ * <pre>{@code
+ * String datasetName = "my_dataset_name";
+ * String tableName = "my_table_name";
+ * TableId tableIdObject = TableId.of(datasetName, tableName);
+ * // This example reads the result 100 rows per RPC call. If there's no need to limit the number,
+ * // simply omit the option.
+ * TableResult tableData =
+ *     bigquery.listTableData(tableIdObject, TableDataListOption.pageSize(100));
+ * for (FieldValueList row : tableData.iterateAll()) {
+ *   // do something with the row
+ * }
+ * }</pre>
+ *
+ * @throws BigQueryException upon failure
+ */
+ TableResult listTableData(TableId tableId, TableDataListOption... options);
+
+ /**
+ * Lists the table's rows. If the {@code schema} is not {@code null}, it is available to the
+ * {@link FieldValueList} iterated over.
+ *
+ * @throws BigQueryException upon failure
+ */
+ TableResult listTableData(
+ String datasetId, String tableId, Schema schema, TableDataListOption... options);
+
+ /**
+ * Lists the table's rows. If the {@code schema} is not {@code null}, it is available to the
+ * {@link FieldValueList} iterated over.
+ *
+ * @throws BigQueryException upon failure
+ */
+ TableResult listTableData(TableId tableId, Schema schema, TableDataListOption... options);
+
+ /**
+ * Returns the requested job or {@code null} if not found. If the location of the job is not "US"
+ * or "EU", {@link #getJob(JobId, JobOption...)} must be used instead.
+ *
+ * <p>Example of getting a job.
+ *
+ * <pre>{@code
+ * String jobName = "my_job_name";
+ * Job job = bigquery.getJob(jobName);
+ * if (job == null) {
+ *   // job was not found
+ * }
+ * }</pre>
+ *
+ * @throws BigQueryException upon failure
+ */
+ Job getJob(String jobId, JobOption... options);
+
+ /**
+ * Lists the jobs.
+ *
+ * <p>NOTE(review): this hunk appears truncated — it begins with the Javadoc for
+ * {@code getJob(JobId, JobOption...)}, but that declaration is missing; the comment documents
+ * the listJobs method actually declared.
+ *
+ * <p>Example of listing jobs, specifying the page size.
+ *
+ * <pre>{@code
+ * Page<Job> jobs = bigquery.listJobs(JobListOption.pageSize(100));
+ * for (Job job : jobs.iterateAll()) {
+ *   // do something with the job
+ * }
+ * }</pre>
+ *
+ * @throws BigQueryException upon failure
+ */
+ // NOTE(review): return type was stripped to raw "Page" in this hunk; restored to Page<Job>.
+ Page<Job> listJobs(JobListOption... options);
+
+ /**
+ * Sends a job cancel request. This call will return immediately. The job status can then be
+ * checked using either {@link #getJob(JobId, JobOption...)} or {@link #getJob(String,
+ * JobOption...)}).
+ *
+ * <p>If the location of the job is not "US" or "EU", {@link #cancel(JobId)} must be used instead.
+ *
+ * <p>Example of cancelling a job.
+ *
+ * <pre>{@code
+ * String jobName = "my_job_name";
+ * boolean success = bigquery.cancel(jobName);
+ * if (success) {
+ *   // job was cancelled
+ * } else {
+ *   // job was not found
+ * }
+ * }</pre>
+ *
+ * @return {@code true} if cancel was requested successfully, {@code false} if the job was not
+ *     found
+ * @throws BigQueryException upon failure
+ */
+ boolean cancel(String jobId);
+
+ /**
+ * Sends a job cancel request. This call will return immediately. The job status can then be
+ * checked using either {@link #getJob(JobId, JobOption...)} or {@link #getJob(String,
+ * JobOption...)}).
+ *
+ * <p>If the location of the job is not "US" or "EU", the {@code jobId} must specify the job
+ * location.
+ *
+ * <p>Example of cancelling a job.
+ *
+ * <pre>{@code
+ * String jobName = "my_job_name";
+ * JobId jobId = JobId.of(jobName);
+ * boolean success = bigquery.cancel(jobId);
+ * if (success) {
+ *   // job was cancelled
+ * } else {
+ *   // job was not found
+ * }
+ * }</pre>
+ *
+ * @return {@code true} if cancel was requested successfully, {@code false} if the job was not
+ *     found
+ * @throws BigQueryException upon failure
+ */
+ boolean cancel(JobId jobId);
+
+ /**
+ * Runs the query associated with the request, using an internally-generated random JobId.
+ *
+ * <p>If the location of the job is not "US" or "EU", {@link #query(QueryJobConfiguration, JobId,
+ * JobOption...)} must be used instead.
+ *
+ * <p>This method cannot be used in conjunction with {@link QueryJobConfiguration#dryRun()}
+ * queries. Since dry-run queries are not actually executed, there's no way to retrieve results.
+ *
+ * <p>Example of running a query.
+ *
+ * <pre>{@code
+ * // BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
+ * String query =
+ *     "SELECT corpus FROM `bigquery-public-data.samples.shakespeare` GROUP BY corpus;";
+ * QueryJobConfiguration queryConfig =
+ *     QueryJobConfiguration.newBuilder(query).build();
+ *
+ * // Print the results.
+ * for (FieldValueList row : bigquery.query(queryConfig).iterateAll()) {
+ *   for (FieldValue val : row) {
+ *     System.out.printf("%s,", val.toString());
+ *   }
+ *   System.out.printf("\n");
+ * }
+ * }</pre>
+ *
+ * @throws BigQueryException upon failure
+ * @throws InterruptedException if the current thread gets interrupted while waiting for the query
+ *     to complete
+ * @throws JobException if the job completes unsuccessfully
+ */
+ TableResult query(QueryJobConfiguration configuration, JobOption... options)
+ throws InterruptedException, JobException;
+
+ /**
+ * Runs the query associated with the request, using the given JobId.
+ *
+ * <p>If the location of the job is not "US" or "EU", the {@code jobId} must specify the job
+ * location.
+ *
+ * <p>This method cannot be used in conjunction with {@link QueryJobConfiguration#dryRun()}
+ * queries. Since dry-run queries are not actually executed, there's no way to retrieve results.
+ *
+ * <p>See {@link #query(QueryJobConfiguration, JobOption...)} for examples on populating a {@link
+ * QueryJobConfiguration}.
+ *
+ * @throws BigQueryException upon failure
+ * @throws InterruptedException if the current thread gets interrupted while waiting for the query
+ *     to complete
+ * @throws JobException if the job completes unsuccessfully
+ */
+ TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption... options)
+ throws InterruptedException, JobException;
+
+ /**
+ * Returns results of the query associated with the provided job.
+ *
+ * <p>Users are encouraged to use {@link Job#getQueryResults(QueryResultsOption...)} instead.
+ */
+ @InternalApi
+ QueryResponse getQueryResults(JobId jobId, QueryResultsOption... options);
+
+ /**
+ * Returns a channel to write data to be inserted into a BigQuery table. Data format and other
+ * options can be configured using the {@link WriteChannelConfiguration} parameter. If the job is
+ * not in "US" or "EU", {@link #writer(JobId, WriteChannelConfiguration)} must be used instead.
+ *
+ * <p>NOTE(review): the "Example of creating a channel" code sample that belonged here appears
+ * to have been dropped by the diff truncation.
+ *
+ * @throws BigQueryException upon failure
+ */
+ TableDataWriteChannel writer(WriteChannelConfiguration writeChannelConfiguration);
+
+ /**
+ * Returns a channel to write data to be inserted into a BigQuery table. Data format and other
+ * options can be configured using the {@link WriteChannelConfiguration} parameter. If the job is
+ * not in "US" or "EU", the {@code jobId} must contain the location of the job.
+ *
+ * <p>NOTE(review): the "Example of creating a channel" code sample that belonged here appears
+ * to have been dropped by the diff truncation.
+ */
+ TableDataWriteChannel writer(JobId jobId, WriteChannelConfiguration writeChannelConfiguration);
+}
+}
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryError.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryError.java
new file mode 100644
index 000000000000..7540f6d01e9d
--- /dev/null
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryError.java
@@ -0,0 +1,146 @@
+/*
+ * Copyright 2016 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery;
+
+import com.google.api.services.bigquery.model.ErrorProto;
+import com.google.common.base.Function;
+import com.google.common.base.MoreObjects;
+
+import java.io.Serializable;
+import java.util.Objects;
+
+/**
+ * Google Cloud BigQuery Error. Objects of this class represent errors encountered by the BigQuery
+ * service while executing a request. A BigQuery Job that terminated with an error has a non-null
+ * {@link JobStatus#getError()}. A job can also encounter errors during its execution that do not
+ * cause the whole job to fail (see {@link JobStatus#getExecutionErrors()}). Similarly, queries and
+ * {@code insertAll} requests can cause BigQuery errors that do not mean the whole operation failed
+ * (see {@link JobStatus#getExecutionErrors()} and {@link InsertAllResponse#getInsertErrors()}).
+ * When a {@link BigQueryException} is thrown the BigQuery Error that caused it, if any, can be
+ * accessed with {@link BigQueryException#getError()}.
+ */
+public final class BigQueryError implements Serializable {
+
+  // NOTE(review): the type parameters of both Function constants were stripped to raw "Function"
+  // in this hunk; restored from the apply() signatures below.
+  static final Function<ErrorProto, BigQueryError> FROM_PB_FUNCTION =
+      new Function<ErrorProto, BigQueryError>() {
+        @Override
+        public BigQueryError apply(ErrorProto pb) {
+          return BigQueryError.fromPb(pb);
+        }
+      };
+  static final Function<BigQueryError, ErrorProto> TO_PB_FUNCTION =
+      new Function<BigQueryError, ErrorProto>() {
+        @Override
+        public ErrorProto apply(BigQueryError error) {
+          return error.toPb();
+        }
+      };
+  private static final long serialVersionUID = -6566785320629096688L;
+
+  private final String reason;
+  private final String location;
+  private final String debugInfo;
+  private final String message;
+
+  public BigQueryError(String reason, String location, String message, String debugInfo) {
+    this.reason = reason;
+    this.location = location;
+    this.debugInfo = debugInfo;
+    this.message = message;
+  }
+
+  public BigQueryError(String reason, String location, String message) {
+    this.reason = reason;
+    this.location = location;
+    this.message = message;
+    this.debugInfo = null;
+  }
+
+  /**
+   * Returns short error code that summarizes the error.
+   *
+   * @see <a href="https://cloud.google.com/bigquery/troubleshooting-errors">Troubleshooting
+   *     Errors</a>
+   */
+  public String getReason() {
+    return reason;
+  }
+
+  /** Returns where the error occurred, if present. */
+  public String getLocation() {
+    return location;
+  }
+
+  String getDebugInfo() {
+    return debugInfo;
+  }
+
+  /** Returns a human-readable description of the error. */
+  public String getMessage() {
+    return message;
+  }
+
+  @Override
+  public int hashCode() {
+    // Intentionally excludes debugInfo (equals() compares it via toPb()); two errors differing
+    // only in debugInfo share a hash, which is legal under the equals/hashCode contract.
+    return Objects.hash(reason, location, message);
+  }
+
+  @Override
+  public String toString() {
+    return MoreObjects.toStringHelper(this)
+        .add("reason", reason)
+        .add("location", location)
+        .add("message", message)
+        .toString();
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    return obj == this
+        || obj instanceof BigQueryError
+            && Objects.equals(toPb(), ((BigQueryError) obj).toPb());
+  }
+
+  // Converts to the API protocol object, setting only non-null fields.
+  ErrorProto toPb() {
+    ErrorProto errorPb = new ErrorProto();
+    if (reason != null) {
+      errorPb.setReason(reason);
+    }
+    if (location != null) {
+      errorPb.setLocation(location);
+    }
+    if (message != null) {
+      errorPb.setMessage(message);
+    }
+    if (debugInfo != null) {
+      errorPb.setDebugInfo(debugInfo);
+    }
+    return errorPb;
+  }
+
+  static BigQueryError fromPb(ErrorProto errorPb) {
+    return new BigQueryError(errorPb.getReason(), errorPb.getLocation(), errorPb.getMessage(),
+        errorPb.getDebugInfo());
+  }
+}
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryException.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryException.java
new file mode 100644
index 000000000000..ad709a3f7807
--- /dev/null
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryException.java
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2015 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery;
+
+import com.google.cloud.BaseServiceException;
+import com.google.cloud.RetryHelper.RetryHelperException;
+import com.google.cloud.http.BaseHttpServiceException;
+import com.google.common.collect.ImmutableSet;
+import java.io.IOException;
+import java.util.Objects;
+import java.util.Set;
+import java.util.concurrent.ExecutionException;
+
+/**
+ * BigQuery service exception.
+ *
+ * @see Google Cloud
+ * BigQuery error codes
+ */
+public final class BigQueryException extends BaseHttpServiceException {
+
+ // see: https://cloud.google.com/bigquery/troubleshooting-errors
+ private static final Set RETRYABLE_ERRORS = ImmutableSet.of(
+ new Error(500, null),
+ new Error(502, null),
+ new Error(503, null),
+ new Error(504, null));
+ private static final long serialVersionUID = -5006625989225438209L;
+
+ private final BigQueryError error;
+
+ public BigQueryException(int code, String message) {
+ this(code, message, (Throwable) null);
+ }
+
+ public BigQueryException(int code, String message, Throwable cause) {
+ super(code, message, null, true, RETRYABLE_ERRORS, cause);
+ this.error = null;
+ }
+
+ public BigQueryException(int code, String message, BigQueryError error) {
+ super(code, message, error != null ? error.getReason() : null, true, RETRYABLE_ERRORS);
+ this.error = error;
+ }
+
+ public BigQueryException(IOException exception) {
+ super(exception, true, RETRYABLE_ERRORS);
+ BigQueryError error = null;
+ if (getReason() != null) {
+ error = new BigQueryError(getReason(), getLocation(), getMessage(), getDebugInfo());
+ }
+ this.error = error;
+ }
+
+
+ /**
+ * Returns the {@link BigQueryError} that caused this exception. Returns {@code null} if none
+ * exists.
+ */
+ public BigQueryError getError() {
+ return error;
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof BigQueryException)) {
+ return false;
+ }
+ BigQueryException other = (BigQueryException) obj;
+ return super.equals(other) && Objects.equals(error, other.error);
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(super.hashCode(), error);
+ }
+
+ /**
+ * Translate RetryHelperException to the BigQueryException that caused the error. This method will
+ * always throw an exception.
+ *
+ * @throws BigQueryException when {@code ex} was caused by a {@code BigQueryException}
+ */
+ static BaseServiceException translateAndThrow(RetryHelperException ex) {
+ BaseServiceException.translate(ex);
+ throw new BigQueryException(UNKNOWN_CODE, ex.getMessage(), ex.getCause());
+ }
+
+ static BaseServiceException translateAndThrow(ExecutionException ex) {
+ BaseServiceException.translate(ex);
+ throw new BigQueryException(UNKNOWN_CODE, ex.getMessage(), ex.getCause());
+ }
+}
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryFactory.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryFactory.java
new file mode 100644
index 000000000000..e02103f04a53
--- /dev/null
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryFactory.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright 2015 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery;
+
+import com.google.cloud.ServiceFactory;
+
+/**
+ * An interface for BigQuery factories.
+ */
+// NOTE(review): the type arguments were stripped to raw "ServiceFactory" in this hunk; restored
+// to ServiceFactory<BigQuery, BigQueryOptions> as required by the service/options pair.
+public interface BigQueryFactory extends ServiceFactory<BigQuery, BigQueryOptions> {
+}
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryImpl.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryImpl.java
new file mode 100644
index 000000000000..c4d6f0f7ae88
--- /dev/null
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryImpl.java
@@ -0,0 +1,741 @@
+/*
+ * Copyright 2015 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery;
+
+import static com.google.cloud.RetryHelper.runWithRetries;
+import static com.google.common.base.Preconditions.checkArgument;
+
+import com.google.api.core.InternalApi;
+import com.google.api.gax.paging.Page;
+import com.google.api.services.bigquery.model.ErrorProto;
+import com.google.api.services.bigquery.model.GetQueryResultsResponse;
+import com.google.api.services.bigquery.model.TableDataInsertAllRequest;
+import com.google.api.services.bigquery.model.TableDataInsertAllRequest.Rows;
+import com.google.api.services.bigquery.model.TableDataInsertAllResponse;
+import com.google.api.services.bigquery.model.TableDataList;
+import com.google.api.services.bigquery.model.TableRow;
+import com.google.api.services.bigquery.model.TableSchema;
+import com.google.cloud.BaseService;
+import com.google.cloud.PageImpl;
+import com.google.cloud.PageImpl.NextPageFetcher;
+import com.google.cloud.RetryHelper;
+import com.google.cloud.RetryHelper.RetryHelperException;
+import com.google.cloud.Tuple;
+import com.google.cloud.bigquery.InsertAllRequest.RowToInsert;
+import com.google.cloud.bigquery.spi.v2.BigQueryRpc;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Function;
+import com.google.common.base.Supplier;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Iterables;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.Callable;
+
+final class BigQueryImpl extends BaseService implements BigQuery {
+
+ private static class DatasetPageFetcher implements NextPageFetcher {
+
+ private static final long serialVersionUID = -3057564042439021278L;
+ private final Map requestOptions;
+ private final BigQueryOptions serviceOptions;
+ private final String projectId;
+
+ DatasetPageFetcher(String projectId, BigQueryOptions serviceOptions, String cursor,
+ Map optionMap) {
+ this.projectId = projectId;
+ this.requestOptions =
+ PageImpl.nextRequestOptions(BigQueryRpc.Option.PAGE_TOKEN, cursor, optionMap);
+ this.serviceOptions = serviceOptions;
+ }
+
+ @Override
+ public Page getNextPage() {
+ return listDatasets(projectId, serviceOptions, requestOptions);
+ }
+ }
+
+ // NOTE(review): this hunk is garbled by extraction — generic parameters are stripped (raw
+ // NextPageFetcher, Map) and the lines between the TablePageFetcher getNextPage() declaration
+ // and the tail of the static listTables(...) helper are missing. Code left byte-identical;
+ // reconcile against the upstream BigQueryImpl before relying on this region.
+ private static class TablePageFetcher implements NextPageFetcher
+ {
+
+ private static final long serialVersionUID = 8611248840504201187L;
+ private final Map requestOptions;
+ private final BigQueryOptions serviceOptions;
+ private final DatasetId datasetId;
+
+ TablePageFetcher(DatasetId datasetId, BigQueryOptions serviceOptions, String cursor,
+ Map optionMap) {
+ this.requestOptions =
+ PageImpl.nextRequestOptions(BigQueryRpc.Option.PAGE_TOKEN, cursor, optionMap);
+ this.serviceOptions = serviceOptions;
+ this.datasetId = datasetId;
+ }
+
+ @Override
+ public Page
+ // NOTE(review): truncation point — getNextPage() body and the head of the static
+ // listTables(...) helper are missing from the hunk; the lines below are that helper's tail.
 tables = Iterables.transform(result.y(),
+ new Function() {
+ @Override
+ public Table apply(com.google.api.services.bigquery.model.Table table) {
+ return Table.fromPb(serviceOptions.getService(), table);
+ }
+ });
+ return new PageImpl<>(new TablePageFetcher(datasetId, serviceOptions, cursor, optionsMap),
+ cursor, tables);
+ } catch (RetryHelper.RetryHelperException e) {
+ throw BigQueryException.translateAndThrow(e);
+ }
+ }
+
+ /**
+ * Streams the request's rows into the target table. Retries are only attempted when every row
+ * carries an insertId, since insertIds let the service de-duplicate a retried batch.
+ */
+ // NOTE(review): generic parameters were stripped in this hunk (raw List, Function, Callable);
+ // restored from the surrounding Rows/RowToInsert/TableDataInsertAllResponse usage.
+ @Override
+ public InsertAllResponse insertAll(InsertAllRequest request) {
+ final TableId tableId = request.getTable().setProjectId(getOptions().getProjectId());
+ final TableDataInsertAllRequest requestPb = new TableDataInsertAllRequest();
+ requestPb.setIgnoreUnknownValues(request.ignoreUnknownValues());
+ requestPb.setSkipInvalidRows(request.skipInvalidRows());
+ requestPb.setTemplateSuffix(request.getTemplateSuffix());
+ // Using an array of size 1 here to have a mutable boolean variable, which can be modified in
+ // an anonymous inner class.
+ final boolean[] allInsertIdsSet = {true};
+ List<Rows> rowsPb = Lists.transform(request.getRows(), new Function<RowToInsert, Rows>() {
+ @Override
+ public Rows apply(RowToInsert rowToInsert) {
+ allInsertIdsSet[0] &= rowToInsert.getId() != null;
+ return new Rows().setInsertId(rowToInsert.getId()).setJson(rowToInsert.getContent());
+ }
+ });
+ requestPb.setRows(rowsPb);
+
+ TableDataInsertAllResponse responsePb;
+ if (allInsertIdsSet[0]) {
+ // allowing retries only if all row insertIds are set (used for deduplication)
+ try {
+ responsePb = runWithRetries(
+ new Callable<TableDataInsertAllResponse>() {
+ @Override
+ public TableDataInsertAllResponse call() throws Exception {
+ return bigQueryRpc.insertAll(tableId.getProject(), tableId.getDataset(),
+ tableId.getTable(), requestPb);
+ }
+ }, getOptions().getRetrySettings(), EXCEPTION_HANDLER, getOptions().getClock());
+ } catch (RetryHelperException e) {
+ throw BigQueryException.translateAndThrow(e);
+ }
+ } else {
+ responsePb = bigQueryRpc.insertAll(tableId.getProject(), tableId.getDataset(),
+ tableId.getTable(), requestPb);
+ }
+
+ return InsertAllResponse.fromPb(responsePb);
+ }
+
+ @Override
+ public TableResult listTableData(
+ String datasetId, String tableId, TableDataListOption... options) {
+ return listTableData(TableId.of(datasetId, tableId), options);
+ }
+
+ @Override
+ public TableResult listTableData(TableId tableId, TableDataListOption... options) {
+ return listTableData(tableId, null, options);
+ }
+
+ @Override
+ public TableResult listTableData(
+ String datasetId, String tableId, Schema schema, TableDataListOption... options) {
+ return listTableData(TableId.of(datasetId, tableId), schema, options);
+ }
+
+ /** Lists the table's rows, attaching {@code schema} (possibly null) to the result. */
+ // NOTE(review): the Tuple's type arguments were stripped in this hunk; restored to
+ // Tuple<? extends Page<FieldValueList>, Long> to match the static listTableData helper.
+ @Override
+ public TableResult listTableData(TableId tableId, Schema schema, TableDataListOption... options) {
+ Tuple<? extends Page<FieldValueList>, Long> data =
+ listTableData(tableId, getOptions(), optionMap(options));
+ // x() is the page of rows, y() is the total row count reported by the service.
+ return new TableResult(schema, data.y(), data.x());
+ }
+
+ /**
+ * Fetches one page of table rows with retries and returns it together with the total row count.
+ */
+ // NOTE(review): generic parameters were stripped in this hunk (raw Tuple, Map, Callable);
+ // restored from the TableDataList/FieldValueList usage below.
+ private static Tuple<? extends Page<FieldValueList>, Long> listTableData(
+ final TableId tableId,
+ final BigQueryOptions serviceOptions,
+ final Map<BigQueryRpc.Option, ?> optionsMap) {
+ try {
+ final TableId completeTableId = tableId.setProjectId(serviceOptions.getProjectId());
+ TableDataList result =
+ runWithRetries(
+ new Callable<TableDataList>() {
+ @Override
+ public TableDataList call() {
+ return serviceOptions
+ .getBigQueryRpcV2()
+ .listTableData(
+ completeTableId.getProject(),
+ completeTableId.getDataset(),
+ completeTableId.getTable(),
+ optionsMap);
+ }
+ },
+ serviceOptions.getRetrySettings(),
+ EXCEPTION_HANDLER,
+ serviceOptions.getClock());
+ String cursor = result.getPageToken();
+ return Tuple.of(
+ new PageImpl<>(
+ new TableDataPageFetcher(tableId, serviceOptions, cursor, optionsMap),
+ cursor,
+ transformTableData(result.getRows())),
+ result.getTotalRows());
+ } catch (RetryHelper.RetryHelperException e) {
+ throw BigQueryException.translateAndThrow(e);
+ }
+ }
+
+ /** Converts raw API rows into {@link FieldValueList}s; a null input yields an empty list. */
+ // NOTE(review): generic parameters were stripped in this hunk (raw Iterable, Function);
+ // restored from the TableRow -> FieldValueList conversion below.
+ private static Iterable<FieldValueList> transformTableData(Iterable<TableRow> tableDataPb) {
+ return ImmutableList.copyOf(
+ Iterables.transform(
+ tableDataPb != null ? tableDataPb : ImmutableList.<TableRow>of(),
+ new Function<TableRow, FieldValueList>() {
+ @Override
+ public FieldValueList apply(TableRow rowPb) {
+ // No schema available at this layer; pass null and let callers attach one.
+ return FieldValueList.fromPb(rowPb.getF(), null);
+ }
+ }));
+ }
+
+ @Override
+ public Job getJob(String jobId, JobOption... options) {
+ return getJob(JobId.of(jobId), options);
+ }
+
+ /** Fetches the job with retries; returns {@code null} when the service reports no such job. */
+ // NOTE(review): the Callable's type argument was stripped in this hunk; restored to the
+ // API model Job type returned by call().
+ @Override
+ public Job getJob(JobId jobId, JobOption... options) {
+ final Map<BigQueryRpc.Option, ?> optionsMap = optionMap(options);
+ final JobId completeJobId = jobId.setProjectId(getOptions().getProjectId());
+ try {
+ com.google.api.services.bigquery.model.Job answer =
+ runWithRetries(
+ new Callable<com.google.api.services.bigquery.model.Job>() {
+ @Override
+ public com.google.api.services.bigquery.model.Job call() {
+ return bigQueryRpc.getJob(
+ completeJobId.getProject(),
+ completeJobId.getJob(),
+ completeJobId.getLocation(),
+ optionsMap);
+ }
+ },
+ getOptions().getRetrySettings(),
+ EXCEPTION_HANDLER,
+ getOptions().getClock());
+ return answer == null ? null : Job.fromPb(this, answer);
+ } catch (RetryHelper.RetryHelperException e) {
+ throw BigQueryException.translateAndThrow(e);
+ }
+ }
+
+ /** Lists jobs in this client's default project, delegating to the retrying static overload. */
+ @Override
+ public Page listJobs(JobListOption... options) {
+ return listJobs(getOptions(), optionMap(options));
+ }
+
+ // Fetches one page of jobs via the v2 RPC (retried per the configured RetrySettings),
+ // converts each proto Job to a functional Job bound to this client, and wraps the result
+ // in a PageImpl whose fetcher can retrieve the next page via the returned cursor.
+ // NOTE(review): generic parameters are garbled by extraction in this method
+ // ("Tuple>", "Callable>>") — this text is not compilable as shown; restore from upstream
+ // (Tuple<String, Iterable<com.google.api.services.bigquery.model.Job>>) before building.
+ private static Page listJobs(final BigQueryOptions serviceOptions,
+ final Map optionsMap) {
+ Tuple> result =
+ runWithRetries(new Callable>>() {
+ @Override
+ public Tuple>
+ call() {
+ return serviceOptions.getBigQueryRpcV2().listJobs(serviceOptions.getProjectId(), optionsMap);
+ }
+ }, serviceOptions.getRetrySettings(), EXCEPTION_HANDLER, serviceOptions.getClock());
+ String cursor = result.x();
+ Iterable jobs = Iterables.transform(result.y(),
+ new Function() {
+ @Override
+ public Job apply(com.google.api.services.bigquery.model.Job job) {
+ return Job.fromPb(serviceOptions.getService(), job);
+ }
+ });
+ return new PageImpl<>(new JobPageFetcher(serviceOptions, cursor, optionsMap), cursor, jobs);
+ }
+
+ @Override
+ public boolean cancel(String jobId) {
+ return cancel(JobId.of(jobId));
+ }
+
+ /**
+ * Attempts to cancel the given job (id stamped with this client's default project) and
+ * returns the service's boolean result. The RPC is retried per the configured
+ * {@code RetrySettings}; retry exhaustion is translated to a {@code BigQueryException}.
+ */
+ @Override
+ public boolean cancel(JobId jobId) {
+ final JobId completeJobId = jobId.setProjectId(getOptions().getProjectId());
+ try {
+ return runWithRetries(
+ new Callable() {
+ @Override
+ public Boolean call() {
+ return bigQueryRpc.cancel(
+ completeJobId.getProject(), completeJobId.getJob(), completeJobId.getLocation());
+ }
+ },
+ getOptions().getRetrySettings(),
+ EXCEPTION_HANDLER,
+ getOptions().getClock());
+ } catch (RetryHelper.RetryHelperException e) {
+ throw BigQueryException.translateAndThrow(e);
+ }
+ }
+
+ /**
+ * Runs the query synchronously: rejects dry-run configurations, submits a new job for the
+ * configuration, and waits for its results.
+ *
+ * @throws InterruptedException if the wait for results is interrupted
+ * @throws JobException if the job fails
+ */
+ @Override
+ public TableResult query(QueryJobConfiguration configuration, JobOption... options)
+ throws InterruptedException, JobException {
+ Job.checkNotDryRun(configuration, "query");
+ Job queryJob = create(JobInfo.of(configuration), options);
+ return queryJob.getQueryResults();
+ }
+
+ /**
+ * Runs the query synchronously under the caller-supplied job id: rejects dry-run
+ * configurations, submits the job, and waits for its results.
+ *
+ * @throws InterruptedException if the wait for results is interrupted
+ * @throws JobException if the job fails
+ */
+ @Override
+ public TableResult query(QueryJobConfiguration configuration, JobId jobId, JobOption... options)
+ throws InterruptedException, JobException {
+ Job.checkNotDryRun(configuration, "query");
+ Job queryJob = create(JobInfo.of(jobId, configuration), options);
+ return queryJob.getQueryResults();
+ }
+
+ /**
+ * Fetches query results for the given job, translating the vararg options into the RPC
+ * option map and delegating to the retrying static overload.
+ */
+ @Override
+ public QueryResponse getQueryResults(JobId jobId, QueryResultsOption... options) {
+ return getQueryResults(jobId, getOptions(), optionMap(options));
+ }
+
+ // Calls jobs.getQueryResults for the given job (id stamped with the default project) and
+ // maps the proto response to a QueryResponse: completion flag, schema (if present), total
+ // row count, and any accumulated errors. Retried per serviceOptions' RetrySettings; retry
+ // exhaustion is translated to a BigQueryException.
+ private static QueryResponse getQueryResults(JobId jobId,
+ final BigQueryOptions serviceOptions, final Map optionsMap) {
+ final JobId completeJobId = jobId.setProjectId(serviceOptions.getProjectId());
+ try {
+ GetQueryResultsResponse results =
+ runWithRetries(
+ new Callable() {
+ @Override
+ public GetQueryResultsResponse call() {
+ return serviceOptions
+ .getBigQueryRpcV2()
+ .getQueryResults(
+ completeJobId.getProject(),
+ completeJobId.getJob(),
+ completeJobId.getLocation(),
+ optionsMap);
+ }
+ },
+ serviceOptions.getRetrySettings(),
+ EXCEPTION_HANDLER,
+ serviceOptions.getClock());
+ // Schema may be absent in the response — presumably while the job is still running;
+ // null is propagated to the builder below.
+ TableSchema schemaPb = results.getSchema();
+
+ ImmutableList.Builder errors = ImmutableList.builder();
+ if (results.getErrors() != null) {
+ for (ErrorProto error : results.getErrors()) {
+ errors.add(BigQueryError.fromPb(error));
+ }
+ }
+
+ return QueryResponse.newBuilder()
+ .setCompleted(results.getJobComplete())
+ .setSchema(schemaPb == null ? null : Schema.fromPb(schemaPb))
+ .setTotalRows(results.getTotalRows() == null ? 0 : results.getTotalRows().longValue())
+ .setErrors(errors.build())
+ .build();
+ } catch (RetryHelper.RetryHelperException e) {
+ throw BigQueryException.translateAndThrow(e);
+ }
+ }
+
+ @Override
+ public TableDataWriteChannel writer(WriteChannelConfiguration writeChannelConfiguration) {
+ return writer(JobId.of(), writeChannelConfiguration);
+ }
+
+ @Override
+ public TableDataWriteChannel writer(
+ JobId jobId, WriteChannelConfiguration writeChannelConfiguration) {
+ return new TableDataWriteChannel(
+ getOptions(),
+ jobId.setProjectId(getOptions().getProjectId()),
+ writeChannelConfiguration.setProjectId(getOptions().getProjectId()));
+ }
+
+ /**
+ * Collects the given options into an {@code EnumMap} keyed by RPC option, rejecting
+ * duplicates with an {@code IllegalArgumentException}.
+ */
+ @VisibleForTesting
+ static Map optionMap(Option... options) {
+ Map rpcOptions = Maps.newEnumMap(BigQueryRpc.Option.class);
+ for (Option option : options) {
+ // put(...) returns the previous mapping; a non-null value means a duplicate option.
+ checkArgument(
+ rpcOptions.put(option.getRpcOption(), option.getValue()) == null,
+ "Duplicate option %s",
+ option);
+ }
+ return rpcOptions;
+ }
+}
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java
new file mode 100644
index 000000000000..28dd0a4b6477
--- /dev/null
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/BigQueryOptions.java
@@ -0,0 +1,148 @@
+/*
+ * Copyright 2015 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery;
+
+import com.google.cloud.http.HttpTransportOptions;
+import com.google.cloud.ServiceDefaults;
+import com.google.cloud.ServiceOptions;
+import com.google.cloud.ServiceRpc;
+import com.google.cloud.TransportOptions;
+import com.google.cloud.bigquery.spi.v2.BigQueryRpc;
+import com.google.cloud.bigquery.spi.BigQueryRpcFactory;
+import com.google.cloud.bigquery.spi.v2.HttpBigQueryRpc;
+import com.google.common.collect.ImmutableSet;
+
+import java.util.Set;
+
+/**
+ * Service options for the BigQuery HTTP client: wires the default service and RPC factories,
+ * restricts transport to HTTP, and exposes the BigQuery OAuth scope.
+ * NOTE(review): generic type parameters appear stripped by extraction throughout this class —
+ * upstream declares {@code ServiceOptions<BigQuery, BigQueryOptions>}; confirm before building.
+ */
+public class BigQueryOptions extends ServiceOptions {
+
+ private static final String API_SHORT_NAME = "BigQuery";
+ private static final String BIGQUERY_SCOPE = "https://www.googleapis.com/auth/bigquery";
+ private static final Set SCOPES = ImmutableSet.of(BIGQUERY_SCOPE);
+ private static final long serialVersionUID = -2437598817433266049L;
+
+ /** Default service factory producing {@code BigQueryImpl} instances from these options. */
+ public static class DefaultBigQueryFactory implements BigQueryFactory {
+
+ private static final BigQueryFactory INSTANCE = new DefaultBigQueryFactory();
+
+ @Override
+ public BigQuery create(BigQueryOptions options) {
+ return new BigQueryImpl(options);
+ }
+ }
+
+ /** Default RPC factory producing the HTTP/JSON transport implementation. */
+ public static class DefaultBigQueryRpcFactory implements BigQueryRpcFactory {
+
+ private static final BigQueryRpcFactory INSTANCE = new DefaultBigQueryRpcFactory();
+
+ @Override
+ public ServiceRpc create(BigQueryOptions options) {
+ return new HttpBigQueryRpc(options);
+ }
+ }
+
+ public static class Builder extends
+ ServiceOptions.Builder {
+
+ private Builder() {
+ }
+
+ private Builder(BigQueryOptions options) {
+ super(options);
+ }
+
+ /**
+ * Sets the transport options. Only {@link HttpTransportOptions} is supported for BigQuery.
+ *
+ * @throws IllegalArgumentException if {@code transportOptions} is not HTTP-based
+ */
+ @Override
+ public Builder setTransportOptions(TransportOptions transportOptions) {
+ if (!(transportOptions instanceof HttpTransportOptions)) {
+ throw new IllegalArgumentException(
+ "Only http transport is allowed for " + API_SHORT_NAME + ".");
+ }
+ return super.setTransportOptions(transportOptions);
+ }
+
+ @Override
+ public BigQueryOptions build() {
+ return new BigQueryOptions(this);
+ }
+ }
+
+ private BigQueryOptions(Builder builder) {
+ super(BigQueryFactory.class, BigQueryRpcFactory.class, builder, new BigQueryDefaults());
+ }
+
+ // Supplies the default service/RPC factories and HTTP transport to the ServiceOptions base.
+ private static class BigQueryDefaults implements
+ ServiceDefaults {
+
+ @Override
+ public BigQueryFactory getDefaultServiceFactory() {
+ return DefaultBigQueryFactory.INSTANCE;
+ }
+
+ @Override
+ public BigQueryRpcFactory getDefaultRpcFactory() {
+ return DefaultBigQueryRpcFactory.INSTANCE;
+ }
+
+ @Override
+ public TransportOptions getDefaultTransportOptions() {
+ return getDefaultHttpTransportOptions();
+ }
+ }
+
+ /** Returns HTTP transport options built with library defaults. */
+ public static HttpTransportOptions getDefaultHttpTransportOptions() {
+ return HttpTransportOptions.newBuilder().build();
+ }
+
+ @Override
+ protected Set getScopes() {
+ return SCOPES;
+ }
+
+ // Narrows the generic RPC handle from the base class to the BigQuery v2 RPC interface.
+ protected BigQueryRpc getBigQueryRpcV2() {
+ return (BigQueryRpc) getRpc();
+ }
+
+ @SuppressWarnings("unchecked")
+ @Override
+ public Builder toBuilder() {
+ return new Builder(this);
+ }
+
+ // Equality and hashing are delegated entirely to the base-class state.
+ @Override
+ public int hashCode() {
+ return baseHashCode();
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ if (!(obj instanceof BigQueryOptions)) {
+ return false;
+ }
+ BigQueryOptions other = (BigQueryOptions) obj;
+ return baseEquals(other);
+ }
+
+
+ /** Returns options built entirely from defaults (environment-derived project, etc. — TODO confirm). */
+ public static BigQueryOptions getDefaultInstance() {
+ return newBuilder().build();
+ }
+
+
+ public static Builder newBuilder() {
+ return new Builder();
+ }
+}
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CopyJobConfiguration.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CopyJobConfiguration.java
new file mode 100644
index 000000000000..7558094a2b5c
--- /dev/null
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CopyJobConfiguration.java
@@ -0,0 +1,295 @@
+/*
+ * Copyright 2016 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.api.services.bigquery.model.JobConfigurationTableCopy;
+import com.google.common.base.Function;
+import com.google.common.base.MoreObjects.ToStringHelper;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
+
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * Google BigQuery copy job configuration. A copy job copies an existing table to another new or
+ * existing table. Copy job configurations have {@link JobConfiguration.Type#COPY} type.
+ */
+public final class CopyJobConfiguration extends JobConfiguration {
+
+ private static final long serialVersionUID = 1140509641399762967L;
+
+ // NOTE(review): generic type parameters appear stripped by extraction throughout this class
+ // (e.g. "List" should be "List<TableId>"); verify against upstream before compiling.
+ private final List sourceTables;
+ private final TableId destinationTable;
+ private final JobInfo.CreateDisposition createDisposition;
+ private final JobInfo.WriteDisposition writeDisposition;
+ private final EncryptionConfiguration destinationEncryptionConfiguration;
+
+ /** Builder for {@code CopyJobConfiguration}. */
+ public static final class Builder
+ extends JobConfiguration.Builder {
+
+ private List sourceTables;
+ private TableId destinationTable;
+ private JobInfo.CreateDisposition createDisposition;
+ private JobInfo.WriteDisposition writeDisposition;
+ private EncryptionConfiguration destinationEncryptionConfiguration;
+
+ private Builder() {
+ super(Type.COPY);
+ }
+
+ private Builder(CopyJobConfiguration jobConfiguration) {
+ this();
+ this.sourceTables = jobConfiguration.sourceTables;
+ this.destinationTable = jobConfiguration.destinationTable;
+ this.createDisposition = jobConfiguration.createDisposition;
+ this.writeDisposition = jobConfiguration.writeDisposition;
+ this.destinationEncryptionConfiguration = jobConfiguration.destinationEncryptionConfiguration;
+ }
+
+ // Rebuilds builder state from the service proto; falls back to the scalar sourceTable
+ // field when the repeated sourceTables field is absent.
+ private Builder(com.google.api.services.bigquery.model.JobConfiguration configurationPb) {
+ this();
+ JobConfigurationTableCopy copyConfigurationPb = configurationPb.getCopy();
+ this.destinationTable = TableId.fromPb(copyConfigurationPb.getDestinationTable());
+ if (copyConfigurationPb.getSourceTables() != null) {
+ this.sourceTables =
+ Lists.transform(copyConfigurationPb.getSourceTables(), TableId.FROM_PB_FUNCTION);
+ } else {
+ this.sourceTables = ImmutableList.of(TableId.fromPb(copyConfigurationPb.getSourceTable()));
+ }
+ if (copyConfigurationPb.getCreateDisposition() != null) {
+ this.createDisposition =
+ JobInfo.CreateDisposition.valueOf(copyConfigurationPb.getCreateDisposition());
+ }
+ if (copyConfigurationPb.getWriteDisposition() != null) {
+ this.writeDisposition = JobInfo.WriteDisposition.valueOf(
+ copyConfigurationPb.getWriteDisposition());
+ }
+ if (copyConfigurationPb.getDestinationEncryptionConfiguration() != null) {
+ this.destinationEncryptionConfiguration = new EncryptionConfiguration.Builder(
+ copyConfigurationPb.getDestinationEncryptionConfiguration()).build();
+ }
+ }
+
+
+ /**
+ * Sets the source tables to copy. A defensive immutable copy is taken.
+ */
+ public Builder setSourceTables(List sourceTables) {
+ this.sourceTables = sourceTables != null ? ImmutableList.copyOf(sourceTables) : null;
+ return this;
+ }
+
+
+ /**
+ * Sets the destination table of the copy job.
+ */
+ public Builder setDestinationTable(TableId destinationTable) {
+ this.destinationTable = destinationTable;
+ return this;
+ }
+
+
+ /** Sets the encryption configuration applied to the destination table. */
+ public Builder setDestinationEncryptionConfiguration(
+ EncryptionConfiguration encryptionConfiguration) {
+ this.destinationEncryptionConfiguration = encryptionConfiguration;
+ return this;
+ }
+
+
+ /**
+ * Sets whether the job is allowed to create new tables.
+ *
+ * @see
+ * Create Disposition
+ */
+ public Builder setCreateDisposition(JobInfo.CreateDisposition createDisposition) {
+ this.createDisposition = createDisposition;
+ return this;
+ }
+
+
+ /**
+ * Sets the action that should occur if the destination table already exists.
+ *
+ * @see
+ * Write Disposition
+ */
+ public Builder setWriteDisposition(JobInfo.WriteDisposition writeDisposition) {
+ this.writeDisposition = writeDisposition;
+ return this;
+ }
+
+ public CopyJobConfiguration build() {
+ return new CopyJobConfiguration(this);
+ }
+ }
+
+ // Source and destination tables are mandatory; dispositions and encryption are optional.
+ private CopyJobConfiguration(Builder builder) {
+ super(builder);
+ this.sourceTables = checkNotNull(builder.sourceTables);
+ this.destinationTable = checkNotNull(builder.destinationTable);
+ this.createDisposition = builder.createDisposition;
+ this.writeDisposition = builder.writeDisposition;
+ this.destinationEncryptionConfiguration = builder.destinationEncryptionConfiguration;
+ }
+
+
+ /**
+ * Returns the source tables to copy.
+ */
+ public List getSourceTables() {
+ return sourceTables;
+ }
+
+
+ /**
+ * Returns the destination table to load the data into.
+ */
+ public TableId getDestinationTable() {
+ return destinationTable;
+ }
+
+
+ /** Returns the destination table's encryption configuration, or {@code null} if unset. */
+ public EncryptionConfiguration getDestinationEncryptionConfiguration() {
+ return destinationEncryptionConfiguration;
+ }
+
+
+ /**
+ * Returns whether the job is allowed to create new tables.
+ *
+ * @see
+ * Create Disposition
+ */
+ public JobInfo.CreateDisposition getCreateDisposition() {
+ return this.createDisposition;
+ }
+
+
+ /**
+ * Returns the action that should occur if the destination table already exists.
+ *
+ * @see
+ * Write Disposition
+ */
+ public JobInfo.WriteDisposition getWriteDisposition() {
+ return writeDisposition;
+ }
+
+ @Override
+ public Builder toBuilder() {
+ return new Builder(this);
+ }
+
+ @Override
+ ToStringHelper toStringHelper() {
+ return super.toStringHelper()
+ .add("sourceTables", sourceTables)
+ .add("destinationTable", destinationTable)
+ .add("destinationEncryptionConfiguration", destinationEncryptionConfiguration)
+ .add("createDisposition", createDisposition)
+ .add("writeDisposition", writeDisposition);
+ }
+
+ @Override
+ public boolean equals(Object obj) {
+ return obj == this
+ || obj instanceof CopyJobConfiguration
+ && baseEquals((CopyJobConfiguration) obj);
+ }
+
+ // NOTE(review): destinationEncryptionConfiguration is excluded here although it is part of
+ // the object's state. Legal per the hashCode contract (equal objects still hash equal), but
+ // confirm the omission is intentional.
+ @Override
+ public int hashCode() {
+ return Objects.hash(baseHashCode(), sourceTables, destinationTable, createDisposition,
+ writeDisposition);
+ }
+
+ // Re-scopes every source table and the destination table to the given project.
+ @Override
+ CopyJobConfiguration setProjectId(final String projectId) {
+ Builder builder = toBuilder();
+ builder.setSourceTables(
+ Lists.transform(getSourceTables(), new Function() {
+ @Override
+ public TableId apply(TableId tableId) {
+ return tableId.setProjectId(projectId);
+ }
+ }));
+ builder.setDestinationTable(getDestinationTable().setProjectId(projectId));
+ return builder.build();
+ }
+
+ // Serializes to the service model: a single source uses the scalar sourceTable field,
+ // multiple sources use the repeated sourceTables field; unset options stay null.
+ @Override
+ com.google.api.services.bigquery.model.JobConfiguration toPb() {
+ JobConfigurationTableCopy configurationPb = new JobConfigurationTableCopy();
+ configurationPb.setDestinationTable(destinationTable.toPb());
+ if (sourceTables.size() == 1) {
+ configurationPb.setSourceTable(sourceTables.get(0).toPb());
+ } else {
+ configurationPb.setSourceTables(Lists.transform(sourceTables, TableId.TO_PB_FUNCTION));
+ }
+ if (createDisposition != null) {
+ configurationPb.setCreateDisposition(createDisposition.toString());
+ }
+ if (writeDisposition != null) {
+ configurationPb.setWriteDisposition(writeDisposition.toString());
+ }
+ if (destinationEncryptionConfiguration != null) {
+ configurationPb.setDestinationEncryptionConfiguration(
+ destinationEncryptionConfiguration.toPb());
+ }
+ return new com.google.api.services.bigquery.model.JobConfiguration().setCopy(configurationPb);
+ }
+
+
+ /**
+ * Creates a builder for a BigQuery Copy Job configuration given destination and source table.
+ */
+ public static Builder newBuilder(TableId destinationTable, TableId sourceTable) {
+ return newBuilder(destinationTable, ImmutableList.of(checkNotNull(sourceTable)));
+ }
+
+
+ /**
+ * Creates a builder for a BigQuery Copy Job configuration given destination and source tables.
+ */
+ public static Builder newBuilder(TableId destinationTable, List sourceTables) {
+ return new Builder().setDestinationTable(destinationTable).setSourceTables(sourceTables);
+ }
+
+ /**
+ * Returns a BigQuery Copy Job configuration for the given destination and source table.
+ */
+ public static CopyJobConfiguration of(TableId destinationTable, TableId sourceTable) {
+ return newBuilder(destinationTable, sourceTable).build();
+ }
+
+ /**
+ * Returns a BigQuery Copy Job configuration for the given destination and source tables.
+ */
+ public static CopyJobConfiguration of(TableId destinationTable, List sourceTables) {
+ return newBuilder(destinationTable, sourceTables).build();
+ }
+
+ @SuppressWarnings("unchecked")
+ static CopyJobConfiguration fromPb(
+ com.google.api.services.bigquery.model.JobConfiguration jobPb) {
+ return new Builder(jobPb).build();
+ }
+}
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CsvOptions.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CsvOptions.java
new file mode 100644
index 000000000000..d800ff1c7a0f
--- /dev/null
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/CsvOptions.java
@@ -0,0 +1,288 @@
+/*
+ * Copyright 2015 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery;
+
+import com.google.common.base.MoreObjects;
+
+import java.nio.charset.Charset;
+import java.util.Objects;
+
+/**
+ * Google BigQuery options for CSV format. This class wraps some properties of CSV files used by
+ * BigQuery to parse external data.
+ */
+public final class CsvOptions extends FormatOptions {
+
+ private static final long serialVersionUID = 2193570529308612708L;
+
+ // Boxed/nullable fields: null means "not set", so the service applies its own defaults.
+ private final Boolean allowJaggedRows;
+ private final Boolean allowQuotedNewLines;
+ private final String encoding;
+ private final String fieldDelimiter;
+ private final String quote;
+ private final Long skipLeadingRows;
+
+ /** Builder for {@code CsvOptions}. All settings are optional. */
+ public static final class Builder {
+
+ private Boolean allowJaggedRows;
+ private Boolean allowQuotedNewLines;
+ private String encoding;
+ private String fieldDelimiter;
+ private String quote;
+ private Long skipLeadingRows;
+
+ private Builder() {}
+
+ private Builder(CsvOptions csvOptions) {
+ this.allowJaggedRows = csvOptions.allowJaggedRows;
+ this.allowQuotedNewLines = csvOptions.allowQuotedNewLines;
+ this.encoding = csvOptions.encoding;
+ this.fieldDelimiter = csvOptions.fieldDelimiter;
+ this.quote = csvOptions.quote;
+ this.skipLeadingRows = csvOptions.skipLeadingRows;
+ }
+
+
+ /**
+ * Set whether BigQuery should accept rows that are missing trailing optional columns. If
+ * {@code true}, BigQuery treats missing trailing columns as null values. If {@code false},
+ * records with missing trailing columns are treated as bad records, and if there are too many
+ * bad records, an invalid error is returned in the job result. By default, rows with missing
+ * trailing columns are considered bad records.
+ */
+ public Builder setAllowJaggedRows(boolean allowJaggedRows) {
+ this.allowJaggedRows = allowJaggedRows;
+ return this;
+ }
+
+
+ /**
+ * Sets whether BigQuery should allow quoted data sections that contain newline characters in a
+ * CSV file. By default quoted newline are not allowed.
+ */
+ public Builder setAllowQuotedNewLines(boolean allowQuotedNewLines) {
+ this.allowQuotedNewLines = allowQuotedNewLines;
+ return this;
+ }
+
+
+ /**
+ * Sets the character encoding of the data. The supported values are UTF-8 or ISO-8859-1. The
+ * default value is UTF-8. BigQuery decodes the data after the raw, binary data has been split
+ * using the values set in {@link #setQuote(String)} and {@link #setFieldDelimiter(String)}.
+ */
+ public Builder setEncoding(String encoding) {
+ this.encoding = encoding;
+ return this;
+ }
+
+
+ /**
+ * Sets the character encoding of the data. The supported values are UTF-8 or ISO-8859-1. The
+ * default value is UTF-8. BigQuery decodes the data after the raw, binary data has been split
+ * using the values set in {@link #setQuote(String)} and {@link #setFieldDelimiter(String)}.
+ * This overload stores the charset's canonical name.
+ */
+ public Builder setEncoding(Charset encoding) {
+ this.encoding = encoding.name();
+ return this;
+ }
+
+
+ /**
+ * Sets the separator for fields in a CSV file. BigQuery converts the string to ISO-8859-1
+ * encoding, and then uses the first byte of the encoded string to split the data in its raw,
+ * binary state. BigQuery also supports the escape sequence "\t" to specify a tab separator.
+ * The default value is a comma (',').
+ */
+ public Builder setFieldDelimiter(String fieldDelimiter) {
+ this.fieldDelimiter = fieldDelimiter;
+ return this;
+ }
+
+
+ /**
+ * Sets the value that is used to quote data sections in a CSV file. BigQuery converts the
+ * string to ISO-8859-1 encoding, and then uses the first byte of the encoded string to split
+ * the data in its raw, binary state. The default value is a double-quote ('"'). If your data
+ * does not contain quoted sections, set the property value to an empty string. If your data
+ * contains quoted newline characters, you must also set
+ * {@link #setAllowQuotedNewLines(boolean)} property to {@code true}.
+ */
+ public Builder setQuote(String quote) {
+ this.quote = quote;
+ return this;
+ }
+
+
+ /**
+ * Sets the number of rows at the top of a CSV file that BigQuery will skip when reading the
+ * data. The default value is 0. This property is useful if you have header rows in the file
+ * that should be skipped.
+ */
+ public Builder setSkipLeadingRows(long skipLeadingRows) {
+ this.skipLeadingRows = skipLeadingRows;
+ return this;
+ }
+
+ /**
+ * Creates a {@code CsvOptions} object.
+ */
+ public CsvOptions build() {
+ return new CsvOptions(this);
+ }
+ }
+
+ private CsvOptions(Builder builder) {
+ super(FormatOptions.CSV);
+ this.allowJaggedRows = builder.allowJaggedRows;
+ this.allowQuotedNewLines = builder.allowQuotedNewLines;
+ this.encoding = builder.encoding;
+ this.fieldDelimiter = builder.fieldDelimiter;
+ this.quote = builder.quote;
+ this.skipLeadingRows = builder.skipLeadingRows;
+ }
+
+ /**
+ * Returns whether BigQuery should accept rows that are missing trailing optional columns. If
+ * {@code true}, BigQuery treats missing trailing columns as null values. If {@code false},
+ * records with missing trailing columns are treated as bad records, and if the number of bad
+ * records exceeds {@link ExternalTableDefinition#getMaxBadRecords()}, an invalid error is
+ * returned in the job result. May be {@code null} when unset.
+ */
+ public Boolean allowJaggedRows() {
+ return allowJaggedRows;
+ }
+
+ /**
+ * Returns whether BigQuery should allow quoted data sections that contain newline characters in a
+ * CSV file. May be {@code null} when unset.
+ */
+ public Boolean allowQuotedNewLines() {
+ return allowQuotedNewLines;
+ }
+
+
+ /**
+ * Returns the character encoding of the data. The supported values are UTF-8 or ISO-8859-1. If
+ * not set, UTF-8 is used. BigQuery decodes the data after the raw, binary data has been split
+ * using the values set in {@link #getQuote()} and {@link #getFieldDelimiter()}.
+ */
+ public String getEncoding() {
+ return encoding;
+ }
+
+
+ /**
+ * Returns the separator for fields in a CSV file.
+ */
+ public String getFieldDelimiter() {
+ return fieldDelimiter;
+ }
+
+
+ /**
+ * Returns the value that is used to quote data sections in a CSV file.
+ */
+ public String getQuote() {
+ return quote;
+ }
+
+
+ /**
+ * Returns the number of rows at the top of a CSV file that BigQuery will skip when reading the
+ * data.
+ */
+ public Long getSkipLeadingRows() {
+ return skipLeadingRows;
+ }
+
+ /**
+ * Returns a builder for the {@code CsvOptions} object.
+ */
+ public Builder toBuilder() {
+ return new Builder(this);
+ }
+
+ @Override
+ public String toString() {
+ return MoreObjects.toStringHelper(this)
+ .add("type", getType())
+ .add("allowJaggedRows", allowJaggedRows)
+ .add("allowQuotedNewLines", allowQuotedNewLines)
+ .add("encoding", encoding)
+ .add("fieldDelimiter", fieldDelimiter)
+ .add("quote", quote)
+ .add("skipLeadingRows", skipLeadingRows)
+ .toString();
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(getType(), allowJaggedRows, allowQuotedNewLines, encoding, fieldDelimiter,
+ quote, skipLeadingRows);
+ }
+
+ // Equality is defined over the serialized proto form; consistent with the field-based
+ // hashCode above as long as toPb() maps every field one-to-one (it does, see below).
+ @Override
+ public boolean equals(Object obj) {
+ return obj == this
+ || obj instanceof CsvOptions
+ && Objects.equals(toPb(), ((CsvOptions) obj).toPb());
+ }
+
+ // Null fields are passed through unchanged so the service applies its defaults.
+ com.google.api.services.bigquery.model.CsvOptions toPb() {
+ com.google.api.services.bigquery.model.CsvOptions csvOptions =
+ new com.google.api.services.bigquery.model.CsvOptions();
+ csvOptions.setAllowJaggedRows(allowJaggedRows);
+ csvOptions.setAllowQuotedNewlines(allowQuotedNewLines);
+ csvOptions.setEncoding(encoding);
+ csvOptions.setFieldDelimiter(fieldDelimiter);
+ csvOptions.setQuote(quote);
+ csvOptions.setSkipLeadingRows(skipLeadingRows);
+ return csvOptions;
+ }
+
+
+ /**
+ * Returns a builder for a CsvOptions object.
+ */
+ public static Builder newBuilder() {
+ return new Builder();
+ }
+
+ // Proto getters may return null for unset fields, so each is guarded before copying.
+ static CsvOptions fromPb(com.google.api.services.bigquery.model.CsvOptions csvOptions) {
+ Builder builder = newBuilder();
+ if (csvOptions.getAllowJaggedRows() != null) {
+ builder.setAllowJaggedRows(csvOptions.getAllowJaggedRows());
+ }
+ if (csvOptions.getAllowQuotedNewlines() != null) {
+ builder.setAllowQuotedNewLines(csvOptions.getAllowQuotedNewlines());
+ }
+ if (csvOptions.getEncoding() != null) {
+ builder.setEncoding(csvOptions.getEncoding());
+ }
+ if (csvOptions.getFieldDelimiter() != null) {
+ builder.setFieldDelimiter(csvOptions.getFieldDelimiter());
+ }
+ if (csvOptions.getQuote() != null) {
+ builder.setQuote(csvOptions.getQuote());
+ }
+ if (csvOptions.getSkipLeadingRows() != null) {
+ builder.setSkipLeadingRows(csvOptions.getSkipLeadingRows());
+ }
+ return builder.build();
+ }
+}
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Dataset.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Dataset.java
new file mode 100644
index 000000000000..12208a645faf
--- /dev/null
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Dataset.java
@@ -0,0 +1,339 @@
+/*
+ * Copyright 2015 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.api.gax.paging.Page;
+import com.google.cloud.bigquery.BigQuery.DatasetDeleteOption;
+import com.google.cloud.bigquery.BigQuery.DatasetOption;
+import com.google.cloud.bigquery.BigQuery.TableListOption;
+import com.google.cloud.bigquery.BigQuery.TableOption;
+import java.io.IOException;
+import java.io.ObjectInputStream;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * A Google BigQuery Dataset.
+ *
+ *
Objects of this class are immutable. Operations that modify the dataset like {@link #update}
+ * return a new object. To get a {@code Dataset} object with the most recent information use
+ * {@link #reload}. {@code Dataset} adds a layer of service-related functionality over
+ * {@link DatasetInfo}.
+ *
{@code
+ * boolean exists = dataset.exists();
+ * if (exists) {
+ * // the dataset exists
+ * } else {
+ * // the dataset was not found
+ * }
+ * }
+ *
+ * @return {@code true} if this dataset exists, {@code false} otherwise
+ * @throws BigQueryException upon failure
+ */
+ // True iff the dataset currently exists server-side. fields() with no arguments is passed —
+ // presumably to request minimal metadata, since only existence matters here; confirm.
+ public boolean exists() {
+ return bigquery.getDataset(getDatasetId(), DatasetOption.fields()) != null;
+ }
+
+ /**
+ * Fetches current dataset's latest information. Returns {@code null} if the dataset does not
+ * exist.
+ *
+ *
Example of reloading a dataset.
+ *
{@code
+ * Dataset latestDataset = dataset.reload();
+ * if (latestDataset == null) {
+ * // The dataset was not found
+ * }
+ * }
+ *
+ * @param options dataset options
+ * @return a {@code Dataset} object with latest information or {@code null} if not found
+ * @throws BigQueryException upon failure
+ */
+ // Re-fetches this dataset's metadata; returns null when the dataset no longer exists.
+ public Dataset reload(DatasetOption... options) {
+ return bigquery.getDataset(getDatasetId().getDataset(), options);
+ }
+
+ /**
+ * Updates the dataset's information with this dataset's information. Dataset's user-defined id
+ * cannot be changed. A new {@code Dataset} object is returned.
+ *
+ *
+ *
+ * @param options dataset options
+ * @return a {@code Dataset} object with updated information
+ * @throws BigQueryException upon failure
+ */
+ // Pushes this object's state to the service and returns the updated Dataset.
+ public Dataset update(DatasetOption... options) {
+ return bigquery.update(this, options);
+ }
+
+ /**
+ * Deletes this dataset.
+ *
+ *
Example of deleting a dataset.
+ *
{@code
+ * boolean deleted = dataset.delete();
+ * if (deleted) {
+ * // The dataset was deleted
+ * } else {
+ * // The dataset was not found
+ * }
+ * }
+ *
+ * @return {@code true} if dataset was deleted, {@code false} if it was not found
+ * @throws BigQueryException upon failure
+ */
+ // Deletes this dataset; returns false when the dataset was not found.
+ public boolean delete(DatasetDeleteOption... options) {
+ return bigquery.delete(getDatasetId(), options);
+ }
+
+ /**
+ * Returns the paginated list of tables in this dataset.
+ *
+ *
Example of listing tables in the dataset.
+ *
{@code
+ * Page
tables = dataset.list();
+ * for (Table table : tables.iterateAll()) {
+ * // do something with the table
+ * }
+ * }
+ *
+ * @param options options for listing tables
+ * @throws BigQueryException upon failure
+ */
+ public Page
list(TableListOption... options) {
+ return bigquery.listTables(getDatasetId(), options);
+ }
+
+ /**
+ * Returns the requested table in this dataset or {@code null} if not found.
+ *
+ *
+ *
+ * @param tableId the table's user-defined id
+ * @param definition the table's definition
+ * @param options options for table creation
+ * @return a {@code Table} object for the created table
+ * @throws BigQueryException upon failure
+ */
+ // Creates a new table inside this dataset from the given definition and returns its handle.
+ public Table create(String tableId, TableDefinition definition, TableOption... options) {
+ TableInfo tableInfo =
+ TableInfo.of(TableId.of(getDatasetId().getDataset(), tableId), definition);
+ return bigquery.create(tableInfo, options);
+ }
+
+
+ /**
+ * Returns the dataset's {@code BigQuery} object used to issue requests (re-created from
+ * {@code options.getService()} on deserialization — see {@code readObject}).
+ */
+ public BigQuery getBigQuery() {
+ return bigquery;
+ }
+
+ // Returns a builder seeded from this dataset's current state.
+ @Override
+ public Builder toBuilder() {
+ return new Builder(this);
+ }
+
+ // Requires the exact Dataset class (getClass comparison, not instanceof), so subclasses
+ // never compare equal; state is compared via the proto form plus the service options.
+ @Override
+ public final boolean equals(Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (obj == null || !obj.getClass().equals(Dataset.class)) {
+ return false;
+ }
+ Dataset other = (Dataset) obj;
+ return Objects.equals(toPb(), other.toPb())
+ && Objects.equals(options, other.options);
+ }
+
+ // Combines the superclass hash with the service options, mirroring equals above.
+ @Override
+ public final int hashCode() {
+ return Objects.hash(super.hashCode(), options);
+ }
+
+ // Custom deserialization: re-creates the service handle from the serialized options after
+ // default deserialization (the handle itself is presumably transient — TODO confirm).
+ private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
+ in.defaultReadObject();
+ this.bigquery = options.getService();
+ }
+
+ // Wraps a service proto Dataset into a functional Dataset bound to the given client.
+ static Dataset fromPb(BigQuery bigquery,
+ com.google.api.services.bigquery.model.Dataset datasetPb) {
+ return new Dataset(bigquery, new DatasetInfo.BuilderImpl(datasetPb));
+ }
+}
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetId.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetId.java
new file mode 100644
index 000000000000..0b99a4048652
--- /dev/null
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetId.java
@@ -0,0 +1,101 @@
+/*
+ * Copyright 2015 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.api.services.bigquery.model.DatasetReference;
+
+import java.io.Serializable;
+import java.util.Objects;
+
+/**
+ * Google BigQuery Dataset identity. Pairs an optional project id with the dataset's
+ * user-defined id.
+ */
+public final class DatasetId implements Serializable {
+
+  private static final long serialVersionUID = -6186254820908152300L;
+
+  // project may be null when the id is project-relative (see of(String)).
+  private final String project;
+  private final String dataset;
+
+  /** Returns project's user-defined id, or {@code null} if not set. */
+  public String getProject() {
+    return project;
+  }
+
+  /** Returns dataset's user-defined id. */
+  public String getDataset() {
+    return dataset;
+  }
+
+  private DatasetId(String project, String dataset) {
+    this.project = project;
+    this.dataset = dataset;
+  }
+
+  /** Creates a dataset identity given project's and dataset's user-defined ids. */
+  public static DatasetId of(String project, String dataset) {
+    return new DatasetId(checkNotNull(project), checkNotNull(dataset));
+  }
+
+  /** Creates a dataset identity given only its user-defined id; the project id is left unset. */
+  public static DatasetId of(String dataset) {
+    return new DatasetId(null, checkNotNull(dataset));
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    // Equality is delegated to the proto representation.
+    return obj == this
+        || obj instanceof DatasetId
+        && Objects.equals(toPb(), ((DatasetId) obj).toPb());
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(project, dataset);
+  }
+
+  @Override
+  public String toString() {
+    return toPb().toString();
+  }
+
+  // Returns this id with the given project filled in, unless a project is already set.
+  DatasetId setProjectId(String projectId) {
+    return getProject() != null ? this : DatasetId.of(projectId, getDataset());
+  }
+
+  // Converts this id to its service proto representation.
+  DatasetReference toPb() {
+    return new DatasetReference().setProjectId(project).setDatasetId(dataset);
+  }
+
+  // Creates a DatasetId from its service proto representation.
+  static DatasetId fromPb(DatasetReference datasetRef) {
+    return new DatasetId(
+        datasetRef.getProjectId(),
+        datasetRef.getDatasetId());
+  }
+}
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetInfo.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetInfo.java
new file mode 100644
index 000000000000..702b8360b251
--- /dev/null
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatasetInfo.java
@@ -0,0 +1,570 @@
+/*
+ * Copyright 2015 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery;
+
+import static com.google.common.base.MoreObjects.firstNonNull;
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.api.client.util.Data;
+import com.google.api.services.bigquery.model.Dataset;
+import com.google.api.services.bigquery.model.TableReference;
+import com.google.common.base.Function;
+import com.google.common.base.MoreObjects;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
+import java.io.Serializable;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * Google BigQuery Dataset information. A dataset is a grouping mechanism that holds zero or more
+ * tables. Datasets are the lowest level unit of access control; you cannot control access at the
+ * table level.
+ *
+ * @see <a href="https://cloud.google.com/bigquery/docs/managing_jobs_datasets_projects">
+ *     Managing Jobs, Datasets, and Projects</a>
+ */
+public class DatasetInfo implements Serializable {
+
+  // Converts the service's Dataset proto into a DatasetInfo.
+  static final Function<Dataset, DatasetInfo> FROM_PB_FUNCTION =
+      new Function<Dataset, DatasetInfo>() {
+        @Override
+        public DatasetInfo apply(Dataset pb) {
+          return DatasetInfo.fromPb(pb);
+        }
+      };
+  // Converts a DatasetInfo into the service's Dataset proto.
+  static final Function<DatasetInfo, Dataset> TO_PB_FUNCTION =
+      new Function<DatasetInfo, Dataset>() {
+        @Override
+        public Dataset apply(DatasetInfo datasetInfo) {
+          return datasetInfo.toPb();
+        }
+      };
+ private static final long serialVersionUID = 8469473744160758489L;
+
+  private final DatasetId datasetId;
+  // ACL entries; may be null when unset (setAcl(null) clears the list).
+  private final List<Acl> acl;
+  private final Long creationTime;
+  private final Long defaultTableLifetime;
+  private final String description;
+  private final String etag;
+  private final String friendlyName;
+  private final String generatedId;
+  private final Long lastModified;
+  private final String location;
+  private final String selfLink;
+  private final Labels labels;
+
+  /** A builder for {@code DatasetInfo} objects. */
+  public abstract static class Builder {
+
+    /** Sets the dataset identity. */
+    public abstract Builder setDatasetId(DatasetId datasetId);
+
+    /**
+     * Sets the dataset's access control configuration.
+     *
+     * @see <a href="https://cloud.google.com/bigquery/access-control">Access Control</a>
+     */
+    public abstract Builder setAcl(List<Acl> acl);
+
+    abstract Builder setCreationTime(Long creationTime);
+
+    /**
+     * Sets the default lifetime of all tables in the dataset, in milliseconds. The minimum value is
+     * 3600000 milliseconds (one hour). Once this property is set, all newly-created tables in the
+     * dataset will have an expirationTime property set to the creation time plus the value in this
+     * property, and changing the value will only affect new tables, not existing ones. When the
+     * expirationTime for a given table is reached, that table will be deleted automatically. If a
+     * table's expirationTime is modified or removed before the table expires, or if you provide an
+     * explicit expirationTime when creating a table, that value takes precedence over the default
+     * expiration time indicated by this property. This property is experimental and might be
+     * subject to change or removed.
+     */
+    public abstract Builder setDefaultTableLifetime(Long defaultTableLifetime);
+
+    /** Sets a user-friendly description for the dataset. */
+    public abstract Builder setDescription(String description);
+
+    abstract Builder setEtag(String etag);
+
+    /** Sets a user-friendly name for the dataset. */
+    public abstract Builder setFriendlyName(String friendlyName);
+
+    abstract Builder setGeneratedId(String generatedId);
+
+    abstract Builder setLastModified(Long lastModified);
+
+    /**
+     * Sets the geographic location where the dataset should reside. This property is experimental
+     * and might be subject to change or removed.
+     *
+     * @see <a href="https://cloud.google.com/bigquery/docs/managing_jobs_datasets_projects#dataset-location">
+     *     Dataset Location</a>
+     */
+    public abstract Builder setLocation(String location);
+
+    abstract Builder setSelfLink(String selfLink);
+
+    /** Sets the labels applied to this dataset. */
+    public abstract Builder setLabels(Map<String, String> labels);
+
+    /** Creates a {@code DatasetInfo} object. */
+    public abstract DatasetInfo build();
+  }
+
+  static final class BuilderImpl extends Builder {
+
+    private DatasetId datasetId;
+    private List<Acl> acl;
+    private Long creationTime;
+    private Long defaultTableLifetime;
+    private String description;
+    private String etag;
+    private String friendlyName;
+    private String generatedId;
+    private Long lastModified;
+    private String location;
+    private String selfLink;
+    private Labels labels = Labels.ZERO;
+
+    BuilderImpl() {}
+
+    // Copy constructor used by DatasetInfo.toBuilder().
+    BuilderImpl(DatasetInfo datasetInfo) {
+      this.datasetId = datasetInfo.datasetId;
+      this.acl = datasetInfo.acl;
+      this.creationTime = datasetInfo.creationTime;
+      this.defaultTableLifetime = datasetInfo.defaultTableLifetime;
+      this.description = datasetInfo.description;
+      this.etag = datasetInfo.etag;
+      this.friendlyName = datasetInfo.friendlyName;
+      this.generatedId = datasetInfo.generatedId;
+      this.lastModified = datasetInfo.lastModified;
+      this.location = datasetInfo.location;
+      this.selfLink = datasetInfo.selfLink;
+      this.labels = datasetInfo.labels;
+    }
+
+    // Populates the builder from the service proto representation.
+    BuilderImpl(com.google.api.services.bigquery.model.Dataset datasetPb) {
+      if (datasetPb.getDatasetReference() != null) {
+        this.datasetId = DatasetId.fromPb(datasetPb.getDatasetReference());
+      }
+      if (datasetPb.getAccess() != null) {
+        this.acl = Lists.transform(datasetPb.getAccess(), new Function<Dataset.Access, Acl>() {
+          @Override
+          public Acl apply(Dataset.Access accessPb) {
+            return Acl.fromPb(accessPb);
+          }
+        });
+      }
+      this.creationTime = datasetPb.getCreationTime();
+      this.defaultTableLifetime = datasetPb.getDefaultTableExpirationMs();
+      this.description = datasetPb.getDescription();
+      this.etag = datasetPb.getEtag();
+      this.friendlyName = datasetPb.getFriendlyName();
+      this.generatedId = datasetPb.getId();
+      this.lastModified = datasetPb.getLastModifiedTime();
+      this.location = datasetPb.getLocation();
+      this.selfLink = datasetPb.getSelfLink();
+      this.labels = Labels.fromPb(datasetPb.getLabels());
+    }
+
+    @Override
+    public Builder setDatasetId(DatasetId datasetId) {
+      this.datasetId = checkNotNull(datasetId);
+      return this;
+    }
+
+    @Override
+    public Builder setAcl(List<Acl> acl) {
+      // Defensive copy; a null argument clears the ACL.
+      this.acl = acl != null ? ImmutableList.copyOf(acl) : null;
+      return this;
+    }
+
+    @Override
+    Builder setCreationTime(Long creationTime) {
+      this.creationTime = creationTime;
+      return this;
+    }
+
+    @Override
+    public Builder setDefaultTableLifetime(Long defaultTableLifetime) {
+      // Data.nullOf marks an explicit null so the field is cleared server-side on update.
+      this.defaultTableLifetime =
+          firstNonNull(defaultTableLifetime, Data.nullOf(Long.class));
+      return this;
+    }
+
+    @Override
+    public Builder setDescription(String description) {
+      this.description = firstNonNull(description, Data.nullOf(String.class));
+      return this;
+    }
+
+    @Override
+    Builder setEtag(String etag) {
+      this.etag = etag;
+      return this;
+    }
+
+    @Override
+    public Builder setFriendlyName(String friendlyName) {
+      this.friendlyName = firstNonNull(friendlyName, Data.nullOf(String.class));
+      return this;
+    }
+
+    @Override
+    Builder setGeneratedId(String generatedId) {
+      this.generatedId = generatedId;
+      return this;
+    }
+
+    @Override
+    Builder setLastModified(Long lastModified) {
+      this.lastModified = lastModified;
+      return this;
+    }
+
+    @Override
+    public Builder setLocation(String location) {
+      this.location = firstNonNull(location, Data.nullOf(String.class));
+      return this;
+    }
+
+    @Override
+    Builder setSelfLink(String selfLink) {
+      this.selfLink = selfLink;
+      return this;
+    }
+
+    /**
+     * Sets the labels applied to this dataset.
+     *
+     * <p>When used with {@link BigQuery#update(DatasetInfo, DatasetOption...)}, setting {@code
+     * labels} to {@code null} removes all labels; otherwise all keys that are mapped to {@code
+     * null} values are removed and other keys are updated to their respective values.
+     */
+    @Override
+    public Builder setLabels(Map<String, String> labels) {
+      this.labels = Labels.fromUser(labels);
+      return this;
+    }
+
+    @Override
+    public DatasetInfo build() {
+      return new DatasetInfo(this);
+    }
+  }
+
+  DatasetInfo(BuilderImpl builder) {
+    // datasetId is the only required field; everything else may be null.
+    datasetId = checkNotNull(builder.datasetId);
+    acl = builder.acl;
+    creationTime = builder.creationTime;
+    defaultTableLifetime = builder.defaultTableLifetime;
+    description = builder.description;
+    etag = builder.etag;
+    friendlyName = builder.friendlyName;
+    generatedId = builder.generatedId;
+    lastModified = builder.lastModified;
+    location = builder.location;
+    selfLink = builder.selfLink;
+    labels = builder.labels;
+  }
+
+  /** Returns the dataset identity. */
+  public DatasetId getDatasetId() {
+    return datasetId;
+  }
+
+
+  /**
+   * Returns the dataset's access control configuration.
+   *
+   * <p>Example of updating the ACLs for a dataset.
+   *
+   * <pre>{@code
+   * Dataset dataset = bigquery.getDataset(DatasetId.of("my_dataset"));
+   * List<Acl> beforeAcls = dataset.getAcl();
+   *
+   * // Make a copy of the ACLs so that they can be modified.
+   * ArrayList<Acl> acls = new ArrayList<>(beforeAcls);
+   * acls.add(Acl.of(new Acl.User("sample.bigquery.dev@gmail.com"), Acl.Role.READER));
+   * Dataset.Builder builder = dataset.toBuilder();
+   * builder.setAcl(acls);
+   *
+   * bigquery.update(builder.build()); // API request.
+   * }</pre>
+   *
+   * @see <a href="https://cloud.google.com/bigquery/access-control">Access Control</a>
+   */
+  public List<Acl> getAcl() {
+    return acl;
+  }
+
+
+  /** Returns the time when this dataset was created, in milliseconds since the epoch. */
+  public Long getCreationTime() {
+    return creationTime;
+  }
+
+
+  /**
+   * Returns the default lifetime of all tables in the dataset, in milliseconds. Once this property
+   * is set, all newly-created tables in the dataset will have an expirationTime property set to the
+   * creation time plus the value in this property, and changing the value will only affect new
+   * tables, not existing ones. When the expirationTime for a given table is reached, that table
+   * will be deleted automatically. If a table's expirationTime is modified or removed before the
+   * table expires, or if you provide an explicit expirationTime when creating a table, that value
+   * takes precedence over the default expiration time indicated by this property.
+   */
+  public Long getDefaultTableLifetime() {
+    return defaultTableLifetime;
+  }
+
+
+  /** Returns a user-friendly description for the dataset. */
+  public String getDescription() {
+    return description;
+  }
+
+  /** Returns the hash of the dataset resource. */
+  public String getEtag() {
+    return etag;
+  }
+
+  /** Returns a user-friendly name for the dataset. */
+  public String getFriendlyName() {
+    return friendlyName;
+  }
+
+  /** Returns the service-generated id for the dataset. */
+  public String getGeneratedId() {
+    return generatedId;
+  }
+
+  /**
+   * Returns the time when this dataset or any of its tables was last modified, in milliseconds
+   * since the epoch.
+   */
+  public Long getLastModified() {
+    return lastModified;
+  }
+
+  /**
+   * Returns the geographic location where the dataset should reside.
+   *
+   * @see <a href="https://cloud.google.com/bigquery/docs/managing_jobs_datasets_projects#dataset-location">
+   *     Dataset Location</a>
+   */
+  public String getLocation() {
+    return location;
+  }
+
+  /**
+   * Returns an URL that can be used to access the resource again. The returned URL can be used for
+   * get or update requests.
+   */
+  public String getSelfLink() {
+    return selfLink;
+  }
+
+  /**
+   * Returns a map of the labels applied to the dataset.
+   *
+   * @see <a href="https://cloud.google.com/bigquery/docs/labeling-datasets">Labeling Datasets</a>
+   */
+  public Map<String, String> getLabels() {
+    return labels.userMap();
+  }
+
+  /** Returns a builder for the dataset object. */
+  public Builder toBuilder() {
+    return new BuilderImpl(this);
+  }
+
+  @Override
+  public String toString() {
+    return MoreObjects.toStringHelper(this)
+        .add("datasetId", datasetId)
+        .add("creationTime", creationTime)
+        .add("defaultTableLifetime", defaultTableLifetime)
+        .add("description", description)
+        .add("etag", etag)
+        .add("friendlyName", friendlyName)
+        .add("generatedId", generatedId)
+        .add("lastModified", lastModified)
+        .add("location", location)
+        .add("selfLink", selfLink)
+        .add("acl", acl)
+        .add("labels", labels)
+        .toString();
+  }
+
+  @Override
+  public int hashCode() {
+    // Hashing only datasetId stays consistent with equals(): equal toPb() implies equal datasetId.
+    return Objects.hash(datasetId);
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    // Exact-class check: subclasses (e.g. Dataset) define their own equality.
+    return obj == this
+        || obj != null
+        && obj.getClass().equals(DatasetInfo.class)
+        && Objects.equals(toPb(), ((DatasetInfo) obj).toPb());
+  }
+
+  // Returns a copy of this DatasetInfo with the given project id filled into the dataset id
+  // and into any view ACL entries that do not already specify one.
+  DatasetInfo setProjectId(String projectId) {
+    Builder builder = toBuilder();
+    builder.setDatasetId(getDatasetId().setProjectId(projectId));
+    if (getAcl() != null) {
+      List<Acl> acls = Lists.newArrayListWithCapacity(getAcl().size());
+      for (Acl acl : getAcl()) {
+        if (acl.getEntity().getType() == Acl.Entity.Type.VIEW) {
+          Dataset.Access accessPb = acl.toPb();
+          TableReference viewReferencePb = accessPb.getView();
+          if (viewReferencePb.getProjectId() == null) {
+            viewReferencePb.setProjectId(projectId);
+          }
+          acls.add(Acl.of(new Acl.View(TableId.fromPb(viewReferencePb))));
+        } else {
+          acls.add(acl);
+        }
+      }
+      builder.setAcl(acls);
+    }
+    return builder.build();
+  }
+
+  // Converts this DatasetInfo to its service proto representation.
+  Dataset toPb() {
+    Dataset datasetPb = new Dataset();
+    datasetPb.setDatasetReference(datasetId.toPb());
+    datasetPb.setCreationTime(creationTime);
+    datasetPb.setDefaultTableExpirationMs(defaultTableLifetime);
+    datasetPb.setDescription(description);
+    datasetPb.setEtag(etag);
+    datasetPb.setFriendlyName(friendlyName);
+    datasetPb.setId(generatedId);
+    datasetPb.setLastModifiedTime(lastModified);
+    datasetPb.setLocation(location);
+    datasetPb.setSelfLink(selfLink);
+    if (acl != null) {
+      datasetPb.setAccess(Lists.transform(acl, new Function<Acl, Dataset.Access>() {
+        @Override
+        public Dataset.Access apply(Acl acl) {
+          return acl.toPb();
+        }
+      }));
+    }
+    datasetPb.setLabels(labels.toPb());
+    return datasetPb;
+  }
+
+
+  /** Returns a builder for a {@code DatasetInfo} object given its identity. */
+  public static Builder newBuilder(DatasetId datasetId) {
+    return new BuilderImpl().setDatasetId(datasetId);
+  }
+
+  /** Returns a builder for a {@code DatasetInfo} object given its user-defined id. */
+  public static Builder newBuilder(String datasetId) {
+    return newBuilder(DatasetId.of(datasetId));
+  }
+
+  /** Returns a builder for the DatasetInfo object given its user-defined project and dataset ids. */
+  public static Builder newBuilder(String projectId, String datasetId) {
+    return newBuilder(DatasetId.of(projectId, datasetId));
+  }
+
+  /** Returns a {@code DatasetInfo} object given its identity. */
+  public static DatasetInfo of(DatasetId datasetId) {
+    return newBuilder(datasetId).build();
+  }
+
+  /** Returns a {@code DatasetInfo} object given its user-defined id. */
+  public static DatasetInfo of(String datasetId) {
+    return newBuilder(datasetId).build();
+  }
+
+  // Creates a DatasetInfo from its service proto representation.
+  static DatasetInfo fromPb(Dataset datasetPb) {
+    return new BuilderImpl(datasetPb).build();
+  }
+}
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatastoreBackupOptions.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatastoreBackupOptions.java
new file mode 100644
index 000000000000..0f2e37cd1077
--- /dev/null
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/DatastoreBackupOptions.java
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2017 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery;
+
+import com.google.common.base.MoreObjects;
+import java.util.List;
+import java.util.Objects;
+
+/** Google BigQuery options for Cloud Datastore backup. */
+public final class DatastoreBackupOptions extends FormatOptions {
+
+  // Entity properties to load; null means all properties are loaded.
+  private final List<String> projectionFields;
+
+  private static final long serialVersionUID = -5302774763661451947L;
+
+  /** A builder for {@code DatastoreBackupOptions} objects. */
+  public static final class Builder {
+    private List<String> projectionFields;
+
+    private Builder() {}
+
+    private Builder(DatastoreBackupOptions options) {
+      projectionFields = options.projectionFields;
+    }
+
+    /**
+     * Sets which entity properties to load into BigQuery from a Cloud Datastore backup. Property
+     * names are case sensitive and must be top-level properties. If no properties are specified,
+     * BigQuery loads all properties. If any named property isn't found in the Cloud Datastore
+     * backup, an invalid error is returned in the job result.
+     */
+    public Builder setProjectionFields(List<String> projectionFields) {
+      this.projectionFields = projectionFields;
+      return this;
+    }
+
+    /** Creates a {@code DatastoreBackupOptions} object. */
+    public DatastoreBackupOptions build() {
+      return new DatastoreBackupOptions(this);
+    }
+  }
+
+  private DatastoreBackupOptions(Builder builder) {
+    super(FormatOptions.DATASTORE_BACKUP);
+    this.projectionFields = builder.projectionFields;
+  }
+
+  /**
+   * Returns the value of which entity properties to load into BigQuery from a Cloud Datastore
+   * backup.
+   */
+  public List<String> getProjectionFields() {
+    return projectionFields;
+  }
+
+  /** Returns a builder for the {@code DatastoreBackupOptions} object. */
+  public Builder toBuilder() {
+    return new Builder(this);
+  }
+
+  /** Returns a builder for a {@code DatastoreBackupOptions} object. */
+  public static Builder newBuilder() {
+    return new Builder();
+  }
+
+  @Override
+  public String toString() {
+    return MoreObjects.toStringHelper(this)
+        .add("projectionFields", projectionFields)
+        .toString();
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(getType(), projectionFields);
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    // instanceof fixes getType() to DATASTORE_BACKUP, so comparing projectionFields suffices.
+    return obj == this
+        || obj instanceof DatastoreBackupOptions
+        && Objects.equals(projectionFields, ((DatastoreBackupOptions) obj).getProjectionFields());
+  }
+}
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/EncryptionConfiguration.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/EncryptionConfiguration.java
new file mode 100644
index 000000000000..7b6e39113434
--- /dev/null
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/EncryptionConfiguration.java
@@ -0,0 +1,101 @@
+/*
+ * Copyright 2018 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery;
+
+import com.google.common.base.MoreObjects;
+import java.io.Serializable;
+import java.util.Objects;
+
+/** Customer-managed encryption configuration, identified by a Cloud KMS key name. */
+public final class EncryptionConfiguration implements Serializable {
+  private static final long serialVersionUID = -100849671239964L;
+
+  // KMS key name; presumably the fully-qualified resource name — confirm against the service docs.
+  private final String kmsKeyName;
+
+  /** Returns the KMS key name set on this configuration. */
+  public String getKmsKeyName() {
+    return this.kmsKeyName;
+  }
+
+  // Converts this configuration to its service proto representation.
+  public com.google.api.services.bigquery.model.EncryptionConfiguration toPb() {
+    com.google.api.services.bigquery.model.EncryptionConfiguration encryptionConfigurationPb
+        = new com.google.api.services.bigquery.model.EncryptionConfiguration();
+    encryptionConfigurationPb.setKmsKeyName(kmsKeyName);
+    return encryptionConfigurationPb;
+  }
+
+  /** A builder for {@code EncryptionConfiguration} objects. */
+  public static final class Builder {
+    private String kmsKeyName;
+
+    private Builder() {}
+
+    private Builder(EncryptionConfiguration options) {
+      kmsKeyName = options.kmsKeyName;
+    }
+
+    // Populates the builder from the service proto representation.
+    public Builder(com.google.api.services.bigquery.model.EncryptionConfiguration encryptionConfigurationPb) {
+      setKmsKeyName(encryptionConfigurationPb.getKmsKeyName());
+    }
+
+    /** Sets the KMS key name to use for encryption. */
+    public EncryptionConfiguration.Builder setKmsKeyName(String kmsKeyName) {
+      this.kmsKeyName = kmsKeyName;
+      return this;
+    }
+
+    /** Creates a {@code EncryptionConfiguration} object. */
+    public EncryptionConfiguration build() {
+      return new EncryptionConfiguration(this);
+    }
+  }
+
+  private EncryptionConfiguration(EncryptionConfiguration.Builder builder) {
+    this.kmsKeyName = builder.kmsKeyName;
+  }
+
+  /** Returns a builder for the {@code EncryptionConfiguration} object. */
+  public EncryptionConfiguration.Builder toBuilder() {
+    return new EncryptionConfiguration.Builder(this);
+  }
+
+  /** Returns a builder for a {@code EncryptionConfiguration} object. */
+  public static EncryptionConfiguration.Builder newBuilder() {
+    return new EncryptionConfiguration.Builder();
+  }
+
+  @Override
+  public String toString() {
+    return MoreObjects.toStringHelper(this)
+        .add("kmsKeyName", kmsKeyName)
+        .toString();
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(kmsKeyName);
+  }
+
+  @Override
+  public boolean equals(Object obj) {
+    return obj == this
+        || obj instanceof EncryptionConfiguration
+        && Objects.equals(kmsKeyName, ((EncryptionConfiguration) obj).getKmsKeyName());
+  }
+}
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExternalTableDefinition.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExternalTableDefinition.java
new file mode 100644
index 000000000000..d63918b9bb45
--- /dev/null
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExternalTableDefinition.java
@@ -0,0 +1,387 @@
+/*
+ * Copyright 2016 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery;
+
+import com.google.api.services.bigquery.model.ExternalDataConfiguration;
+import com.google.api.services.bigquery.model.Table;
+import com.google.auto.value.AutoValue;
+import com.google.common.base.Function;
+import com.google.common.collect.ImmutableList;
+import java.util.List;
+import javax.annotation.Nullable;
+
+/**
+ * Google BigQuery external table definition. BigQuery's external tables are tables whose data
+ * reside outside of BigQuery but can be queried as normal BigQuery tables. External tables are
+ * experimental and might be subject to change or removed.
+ *
+ * @see Federated Data Sources
+ *
+ */
+@AutoValue
+public abstract class ExternalTableDefinition extends TableDefinition {
+
+  // Converts the service's ExternalDataConfiguration proto into an ExternalTableDefinition.
+  static final Function<ExternalDataConfiguration, ExternalTableDefinition>
+      FROM_EXTERNAL_DATA_FUNCTION =
+          new Function<ExternalDataConfiguration, ExternalTableDefinition>() {
+            @Override
+            public ExternalTableDefinition apply(ExternalDataConfiguration pb) {
+              return ExternalTableDefinition.fromExternalDataConfiguration(pb);
+            }
+          };
+  // Converts an ExternalTableDefinition into the service's proto representation.
+  static final Function<ExternalTableDefinition, ExternalDataConfiguration>
+      TO_EXTERNAL_DATA_FUNCTION =
+          new Function<ExternalTableDefinition, ExternalDataConfiguration>() {
+            @Override
+            public ExternalDataConfiguration apply(ExternalTableDefinition tableInfo) {
+              return tableInfo.toExternalDataConfigurationPb();
+            }
+          };
+
+ private static final long serialVersionUID = -5951580238459622025L;
+
+ @AutoValue.Builder
+ public abstract static class Builder
+ extends TableDefinition.Builder {
+
+ /**
+ * Sets the fully-qualified URIs that point to your data in Google Cloud Storage (e.g.
+ * gs://bucket/path). Each URI can contain one '*' wildcard character that must come after the
+ * bucket's name. Size limits related to load jobs apply to external data sources, plus an
+ * additional limit of 10 GB maximum size across all URIs.
+ *
+ * @see Quota
+ */
+ public Builder setSourceUris(List sourceUris) {
+ return setSourceUrisImmut(ImmutableList.copyOf(sourceUris));
+ }
+
+ abstract Builder setSourceUrisImmut(ImmutableList sourceUris);
+
+ /**
+ * Sets the source format, and possibly some parsing options, of the external data. Supported
+ * formats are {@code CSV} and {@code NEWLINE_DELIMITED_JSON}.
+ *
+ *
+ * Source Format
+ */
+ public Builder setFormatOptions(FormatOptions formatOptions) {
+ return setFormatOptionsInner(formatOptions);
+ }
+
+ abstract Builder setFormatOptionsInner(FormatOptions formatOptions);
+
+ /**
+ * Sets the maximum number of bad records that BigQuery can ignore when reading data. If the
+ * number of bad records exceeds this value, an invalid error is returned in the job result. The
+ * default value is 0, which requires that all records are valid.
+ */
+ public abstract Builder setMaxBadRecords(Integer maxBadRecords);
+
+ /**
+ * Sets whether BigQuery should allow extra values that are not represented in the table schema.
+ * If true, the extra values are ignored. If false, records with extra columns are treated as
+ * bad records, and if there are too many bad records, an invalid error is returned in the job
+ * result. The default value is false. The value set with {@link
+ * #setFormatOptions(FormatOptions)} property determines what BigQuery treats as an extra value.
+ *
+ * @see
+ * Ignore Unknown Values
+ */
+ public abstract Builder setIgnoreUnknownValues(Boolean ignoreUnknownValues);
+
+ /**
+ * Sets compression type of the data source. By default no compression is assumed.
+ *
+ * @see
+ * Compression
+ */
+ public abstract Builder setCompression(String compression);
+
+ /**
+ * [Experimental] Sets detection of schema and format options automatically. Any option
+ * specified explicitly will be honored.
+ */
+ public abstract Builder setAutodetect(Boolean autodetect);
+
+ public abstract Builder setType(Type type);
+
+ /** Sets the table schema. */
+ public abstract Builder setSchema(Schema schema);
+
+ /** Creates an {@code ExternalTableDefinition} object. */
+ @Override
+ public abstract ExternalTableDefinition build();
+ }
+
+  /**
+   * Returns the compression type of the data source.
+   *
+   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/tables#externalDataConfiguration.compression">
+   *     Compression</a>
+   */
+  @Nullable
+  public abstract String getCompression();
+
+  /**
+   * Returns whether BigQuery should allow extra values that are not represented in the table
+   * schema. If true, the extra values are ignored. If false, records with extra columns are treated
+   * as bad records, and if there are too many bad records, an invalid error is returned in the job
+   * result. The default value is false. The value of {@link #getFormatOptions()} determines what
+   * BigQuery treats as an extra value.
+   *
+   * @see <a href="https://cloud.google.com/bigquery/docs/reference/v2/tables#externalDataConfiguration.ignoreUnknownValues">
+   *     Ignore Unknown Values</a>
+   */
+  @Nullable
+  public Boolean ignoreUnknownValues() {
+    return getIgnoreUnknownValues();
+  };
+
+  @Nullable
+  public abstract Boolean getIgnoreUnknownValues();
+
+  /**
+   * Returns the maximum number of bad records that BigQuery can ignore when reading data. If the
+   * number of bad records exceeds this value, an invalid error is returned in the job result.
+   */
+  @Nullable
+  public abstract Integer getMaxBadRecords();
+
+  /**
+   * Returns the fully-qualified URIs that point to your data in Google Cloud Storage. Each URI can
+   * contain one '*' wildcard character that must come after the bucket's name. Size limits related
+   * to load jobs apply to external data sources, plus an additional limit of 10 GB maximum size
+   * across all URIs.
+   *
+   * @see <a href="https://cloud.google.com/bigquery/loading-data-into-bigquery#quota">Quota</a>
+   */
+  @Nullable
+  public List<String> getSourceUris() {
+    return getSourceUrisImmut();
+  }
+
+  @Nullable
+  public abstract ImmutableList<String> getSourceUrisImmut();
+
+  /**
+   * Returns the source format, and possibly some parsing options, of the external data. Supported
+   * formats are {@code CSV} and {@code NEWLINE_DELIMITED_JSON}.
+   */
+  @SuppressWarnings("unchecked")
+  @Nullable
+  public <F extends FormatOptions> F getFormatOptions() {
+    // Unchecked cast: the caller chooses F; the stored options were set via setFormatOptions.
+    return (F) getFormatOptionsInner();
+  }
+
+  @Nullable
+  abstract FormatOptions getFormatOptionsInner();
+
+ /**
+ * [Experimental] Returns whether automatic detection of schema and format options should be
+ * performed.
+ */
+ @Nullable
+ public abstract Boolean getAutodetect();
+
+ /** Returns a builder for the {@code ExternalTableDefinition} object. */
+ public abstract Builder toBuilder();
+
+ @Override
+ com.google.api.services.bigquery.model.Table toPb() {
+ Table tablePb = super.toPb();
+ tablePb.setExternalDataConfiguration(toExternalDataConfigurationPb());
+ return tablePb;
+ }
+
+  /**
+   * Converts this definition to the API {@code ExternalDataConfiguration} proto, copying only the
+   * values that are set.
+   */
+  com.google.api.services.bigquery.model.ExternalDataConfiguration toExternalDataConfigurationPb() {
+    com.google.api.services.bigquery.model.ExternalDataConfiguration externalConfigurationPb =
+        new com.google.api.services.bigquery.model.ExternalDataConfiguration();
+    if (getCompression() != null) {
+      externalConfigurationPb.setCompression(getCompression());
+    }
+    if (ignoreUnknownValues() != null) {
+      externalConfigurationPb.setIgnoreUnknownValues(ignoreUnknownValues());
+    }
+    if (getMaxBadRecords() != null) {
+      externalConfigurationPb.setMaxBadRecords(getMaxBadRecords());
+    }
+    if (getSchema() != null) {
+      externalConfigurationPb.setSchema(getSchema().toPb());
+    }
+    if (getSourceUris() != null) {
+      externalConfigurationPb.setSourceUris(getSourceUris());
+    }
+    // Hoisted into a local: the generic accessor performs an unchecked cast on every call, and
+    // the original repeated the null check for each format-specific branch.
+    FormatOptions formatOptions = getFormatOptions();
+    if (formatOptions != null) {
+      externalConfigurationPb.setSourceFormat(formatOptions.getType());
+      // A format type is at most one of CSV or GOOGLE_SHEETS, so these branches are exclusive.
+      if (FormatOptions.CSV.equals(formatOptions.getType())) {
+        externalConfigurationPb.setCsvOptions(((CsvOptions) formatOptions).toPb());
+      } else if (FormatOptions.GOOGLE_SHEETS.equals(formatOptions.getType())) {
+        externalConfigurationPb.setGoogleSheetsOptions(((GoogleSheetsOptions) formatOptions).toPb());
+      }
+    }
+    if (getAutodetect() != null) {
+      externalConfigurationPb.setAutodetect(getAutodetect());
+    }
+    return externalConfigurationPb;
+  }
+
+  // Package-private entry point used by the public factories; pre-sets the EXTERNAL table type.
+  static Builder newBuilder() {
+    return new AutoValue_ExternalTableDefinition.Builder().setType(Type.EXTERNAL);
+  }
+
+  /**
+   * Creates a builder for an ExternalTableDefinition object.
+   *
+   * @param sourceUris the fully-qualified URIs that point to your data in Google Cloud Storage.
+   *     Each URI can contain one '*' wildcard character that must come after the bucket's name.
+   *     Size limits related to load jobs apply to external data sources, plus an additional limit
+   *     of 10 GB maximum size across all URIs.
+   * @param schema the schema for the external data
+   * @param format the source format of the external data
+   * @return a builder for an ExternalTableDefinition object given source URIs, schema and format
+   *
+   * @see <a href="https://cloud.google.com/bigquery/loading-data-into-bigquery#quota">Quota</a>
+   * @see <a
+   *     href="https://cloud.google.com/bigquery/docs/reference/v2/tables#externalDataConfiguration.sourceFormat">
+   *     Source Format</a>
+   */
+  public static Builder newBuilder(List sourceUris, Schema schema, FormatOptions format) {
+    return newBuilder().setSourceUris(sourceUris).setSchema(schema).setFormatOptions(format);
+  }
+
+
+  /**
+   * Creates a builder for an ExternalTableDefinition object.
+   *
+   * @param sourceUri a fully-qualified URI that points to your data in Google Cloud Storage. The
+   *     URI can contain one '*' wildcard character that must come after the bucket's name. Size
+   *     limits related to load jobs apply to external data sources.
+   * @param schema the schema for the external data
+   * @param format the source format of the external data
+   * @return a builder for an ExternalTableDefinition object given source URI, schema and format
+   *
+   * @see <a href="https://cloud.google.com/bigquery/loading-data-into-bigquery#quota">Quota</a>
+   * @see <a
+   *     href="https://cloud.google.com/bigquery/docs/reference/v2/tables#externalDataConfiguration.sourceFormat">
+   *     Source Format</a>
+   */
+  public static Builder newBuilder(String sourceUri, Schema schema, FormatOptions format) {
+    return newBuilder(ImmutableList.of(sourceUri), schema, format);
+  }
+
+  /**
+   * Creates an ExternalTableDefinition object.
+   *
+   * @param sourceUris the fully-qualified URIs that point to your data in Google Cloud Storage.
+   *     Each URI can contain one '*' wildcard character that must come after the bucket's name.
+   *     Size limits related to load jobs apply to external data sources, plus an additional limit
+   *     of 10 GB maximum size across all URIs.
+   * @param schema the schema for the external data
+   * @param format the source format of the external data
+   * @return an ExternalTableDefinition object given source URIs, schema and format
+   *
+   * @see <a href="https://cloud.google.com/bigquery/loading-data-into-bigquery#quota">Quota</a>
+   * @see <a
+   *     href="https://cloud.google.com/bigquery/docs/reference/v2/tables#externalDataConfiguration.sourceFormat">
+   *     Source Format</a>
+   */
+  public static ExternalTableDefinition of(List sourceUris, Schema schema,
+      FormatOptions format) {
+    return newBuilder(sourceUris, schema, format).build();
+  }
+
+  /**
+   * Creates an ExternalTableDefinition object.
+   *
+   * @param sourceUri a fully-qualified URI that points to your data in Google Cloud Storage. The
+   *     URI can contain one '*' wildcard character that must come after the bucket's name. Size
+   *     limits related to load jobs apply to external data sources.
+   * @param schema the schema for the external data
+   * @param format the source format of the external data
+   * @return an ExternalTableDefinition object given source URI, schema and format
+   *
+   * @see <a href="https://cloud.google.com/bigquery/loading-data-into-bigquery#quota">Quota</a>
+   * @see <a
+   *     href="https://cloud.google.com/bigquery/docs/reference/v2/tables#externalDataConfiguration.sourceFormat">
+   *     Source Format</a>
+   */
+  public static ExternalTableDefinition of(String sourceUri, Schema schema, FormatOptions format) {
+    return newBuilder(sourceUri, schema, format).build();
+  }
+
+  // Rebuilds an ExternalTableDefinition from its API Table proto. Unset proto fields leave the
+  // corresponding builder properties untouched.
+  @SuppressWarnings("unchecked")
+  static ExternalTableDefinition fromPb(Table tablePb) {
+    Builder builder = newBuilder().table(tablePb);
+
+    com.google.api.services.bigquery.model.ExternalDataConfiguration externalDataConfiguration =
+        tablePb.getExternalDataConfiguration();
+    if (externalDataConfiguration != null) {
+      if (externalDataConfiguration.getSourceUris() != null) {
+        builder.setSourceUris(ImmutableList.copyOf(externalDataConfiguration.getSourceUris()));
+      }
+      if (externalDataConfiguration.getSourceFormat() != null) {
+        builder.setFormatOptions(FormatOptions.of(externalDataConfiguration.getSourceFormat()));
+      }
+      builder.setCompression(externalDataConfiguration.getCompression());
+      builder.setIgnoreUnknownValues(externalDataConfiguration.getIgnoreUnknownValues());
+      // Order matters: CSV / Google Sheets options, when present, replace the generic
+      // FormatOptions derived from sourceFormat above — the later setFormatOptions call
+      // presumably wins (NOTE(review): confirm builder setter overwrite semantics).
+      if (externalDataConfiguration.getCsvOptions() != null) {
+        builder.setFormatOptions(CsvOptions.fromPb(externalDataConfiguration.getCsvOptions()));
+      }
+      if (externalDataConfiguration.getGoogleSheetsOptions() != null) {
+        builder.setFormatOptions(GoogleSheetsOptions.fromPb(externalDataConfiguration.getGoogleSheetsOptions()));
+      }
+      builder.setMaxBadRecords(externalDataConfiguration.getMaxBadRecords());
+      builder.setAutodetect(externalDataConfiguration.getAutodetect());
+    }
+    return builder.build();
+  }
+
+  // Rebuilds an ExternalTableDefinition from a bare ExternalDataConfiguration proto (no Table
+  // wrapper). Unlike fromPb above, every property is null-checked before being copied.
+  static ExternalTableDefinition fromExternalDataConfiguration(
+      ExternalDataConfiguration externalDataConfiguration) {
+    Builder builder = newBuilder();
+    if (externalDataConfiguration.getSourceUris() != null) {
+      builder.setSourceUris(externalDataConfiguration.getSourceUris());
+    }
+    if (externalDataConfiguration.getSchema() != null) {
+      builder.setSchema(Schema.fromPb(externalDataConfiguration.getSchema()));
+    }
+    if (externalDataConfiguration.getSourceFormat() != null) {
+      builder.setFormatOptions(FormatOptions.of(externalDataConfiguration.getSourceFormat()));
+    }
+    if (externalDataConfiguration.getCompression() != null) {
+      builder.setCompression(externalDataConfiguration.getCompression());
+    }
+    if (externalDataConfiguration.getIgnoreUnknownValues() != null) {
+      builder.setIgnoreUnknownValues(externalDataConfiguration.getIgnoreUnknownValues());
+    }
+    // Order matters: format-specific options, when present, replace the generic FormatOptions
+    // derived from sourceFormat above.
+    if (externalDataConfiguration.getCsvOptions() != null) {
+      builder.setFormatOptions(CsvOptions.fromPb(externalDataConfiguration.getCsvOptions()));
+    }
+    if (externalDataConfiguration.getGoogleSheetsOptions() != null) {
+      builder.setFormatOptions(GoogleSheetsOptions.fromPb(externalDataConfiguration.getGoogleSheetsOptions()));
+    }
+    if (externalDataConfiguration.getMaxBadRecords() != null) {
+      builder.setMaxBadRecords(externalDataConfiguration.getMaxBadRecords());
+    }
+    if (externalDataConfiguration.getAutodetect() != null) {
+      builder.setAutodetect(externalDataConfiguration.getAutodetect());
+    }
+    return builder.build();
+  }
+}
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExtractJobConfiguration.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExtractJobConfiguration.java
new file mode 100644
index 000000000000..76d5688ce2d1
--- /dev/null
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/ExtractJobConfiguration.java
@@ -0,0 +1,309 @@
+/*
+ * Copyright 2016 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.api.services.bigquery.model.JobConfigurationExtract;
+import com.google.common.base.MoreObjects.ToStringHelper;
+import com.google.common.collect.ImmutableList;
+
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * Google BigQuery extract job configuration. An extract job exports a BigQuery table to Google
+ * Cloud Storage. The extract destination is provided as URIs that point to objects in Google Cloud
+ * Storage. Extract job configurations have {@link JobConfiguration.Type#EXTRACT} type.
+ */
+public final class ExtractJobConfiguration extends JobConfiguration {
+
+  private static final long serialVersionUID = 4147749733166593761L;
+
+  // sourceTable and destinationUris are required (enforced in the constructor); the remaining
+  // fields are optional and may be null.
+  private final TableId sourceTable;
+  private final List destinationUris;
+  private final Boolean printHeader;
+  private final String fieldDelimiter;
+  private final String format;
+  private final String compression;
+
+  public static final class Builder
+      extends JobConfiguration.Builder {
+
+    private TableId sourceTable;
+    private List destinationUris;
+    private Boolean printHeader;
+    private String fieldDelimiter;
+    private String format;
+    private String compression;
+
+    private Builder() {
+      super(Type.EXTRACT);
+    }
+
+    // Copy constructor backing toBuilder().
+    private Builder(ExtractJobConfiguration jobInfo) {
+      this();
+      this.sourceTable = jobInfo.sourceTable;
+      this.destinationUris = jobInfo.destinationUris;
+      this.printHeader = jobInfo.printHeader;
+      this.fieldDelimiter = jobInfo.fieldDelimiter;
+      this.format = jobInfo.format;
+      this.compression = jobInfo.compression;
+    }
+
+    // Populates the builder from the API proto representation (see fromPb below).
+    private Builder(com.google.api.services.bigquery.model.JobConfiguration configurationPb) {
+      this();
+      JobConfigurationExtract extractConfigurationPb = configurationPb.getExtract();
+      this.sourceTable = TableId.fromPb(extractConfigurationPb.getSourceTable());
+      this.destinationUris = extractConfigurationPb.getDestinationUris();
+      this.printHeader = extractConfigurationPb.getPrintHeader();
+      this.fieldDelimiter = extractConfigurationPb.getFieldDelimiter();
+      this.format = extractConfigurationPb.getDestinationFormat();
+      this.compression = extractConfigurationPb.getCompression();
+    }
+
+
+    /**
+     * Sets the table to export.
+     */
+    public Builder setSourceTable(TableId sourceTable) {
+      this.sourceTable = sourceTable;
+      return this;
+    }
+
+
+    /**
+     * Sets the list of fully-qualified Google Cloud Storage URIs (e.g. gs://bucket/path) where the
+     * extracted table should be written.
+     */
+    public Builder setDestinationUris(List destinationUris) {
+      this.destinationUris = destinationUris != null ? ImmutableList.copyOf(destinationUris) : null;
+      return this;
+    }
+
+
+    /**
+     * Sets whether to print out a header row in the results. By default a header is printed.
+     */
+    public Builder setPrintHeader(Boolean printHeader) {
+      this.printHeader = printHeader;
+      return this;
+    }
+
+
+    /**
+     * Sets the delimiter to use between fields in the exported data. By default "," is used.
+     */
+    public Builder setFieldDelimiter(String fieldDelimiter) {
+      this.fieldDelimiter = fieldDelimiter;
+      return this;
+    }
+
+
+    /**
+     * Sets the exported file format. If not set table is exported in CSV format.
+     *
+     * @see <a
+     *     href="https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.extract.destinationFormat">
+     *     Destination Format</a>
+     */
+    public Builder setFormat(String format) {
+      this.format = format;
+      return this;
+    }
+
+
+    /**
+     * Sets the compression value to use for exported files. If not set exported files are not
+     * compressed.
+     *
+     * @see <a
+     *     href="https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.extract.compression">
+     *     Compression</a>
+     */
+    public Builder setCompression(String compression) {
+      this.compression = compression;
+      return this;
+    }
+
+    public ExtractJobConfiguration build() {
+      return new ExtractJobConfiguration(this);
+    }
+  }
+
+  private ExtractJobConfiguration(Builder builder) {
+    super(builder);
+    this.sourceTable = checkNotNull(builder.sourceTable);
+    this.destinationUris = checkNotNull(builder.destinationUris);
+    this.printHeader = builder.printHeader;
+    this.fieldDelimiter = builder.fieldDelimiter;
+    this.format = builder.format;
+    this.compression = builder.compression;
+  }
+
+
+  /**
+   * Returns the table to export.
+   */
+  public TableId getSourceTable() {
+    return sourceTable;
+  }
+
+
+  /**
+   * Returns the list of fully-qualified Google Cloud Storage URIs where the extracted table should
+   * be written.
+   *
+   * @see <a
+   *     href="https://cloud.google.com/bigquery/exporting-data-from-bigquery#exportingmultiple">
+   *     Exporting Data Into One or More Files</a>
+   */
+  public List getDestinationUris() {
+    return destinationUris;
+  }
+
+  /**
+   * Returns whether a header row is printed with the result.
+   */
+  public Boolean printHeader() {
+    return printHeader;
+  }
+
+
+  /**
+   * Returns the delimiter used between fields in the exported data.
+   */
+  public String getFieldDelimiter() {
+    return fieldDelimiter;
+  }
+
+
+  /**
+   * Returns the exported files format.
+   */
+  public String getFormat() {
+    return format;
+  }
+
+
+  /**
+   * Returns the compression value of exported files.
+   */
+  public String getCompression() {
+    return compression;
+  }
+
+  @Override
+  public Builder toBuilder() {
+    return new Builder(this);
+  }
+
+  @Override
+  ToStringHelper toStringHelper() {
+    return super.toStringHelper()
+        .add("sourceTable", sourceTable)
+        .add("destinationUris", destinationUris)
+        .add("format", format)
+        .add("printHeader", printHeader)
+        .add("fieldDelimiter", fieldDelimiter)
+        .add("compression", compression);
+  }
+
+  // NOTE(review): equality delegates entirely to baseEquals — confirm the superclass compares the
+  // serialized form (toPb); otherwise the subclass fields hashed below are ignored by equals.
+  @Override
+  public boolean equals(Object obj) {
+    return obj == this
+        || obj instanceof ExtractJobConfiguration
+        && baseEquals((ExtractJobConfiguration) obj);
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(baseHashCode(), sourceTable, destinationUris, printHeader, fieldDelimiter,
+        format, compression);
+  }
+
+  // Returns a copy whose source table is resolved against the given project.
+  @Override
+  ExtractJobConfiguration setProjectId(String projectId) {
+    return toBuilder().setSourceTable(getSourceTable().setProjectId(projectId)).build();
+  }
+
+  // Serializes this configuration into the API JobConfiguration proto.
+  @Override
+  com.google.api.services.bigquery.model.JobConfiguration toPb() {
+    JobConfigurationExtract extractConfigurationPb = new JobConfigurationExtract();
+    extractConfigurationPb.setDestinationUris(destinationUris);
+    extractConfigurationPb.setSourceTable(sourceTable.toPb());
+    extractConfigurationPb.setPrintHeader(printHeader);
+    extractConfigurationPb.setFieldDelimiter(fieldDelimiter);
+    extractConfigurationPb.setDestinationFormat(format);
+    extractConfigurationPb.setCompression(compression);
+    return new com.google.api.services.bigquery.model.JobConfiguration()
+        .setExtract(extractConfigurationPb);
+  }
+
+
+  /**
+   * Creates a builder for a BigQuery Extract Job configuration given source table and destination
+   * URI.
+   */
+  public static Builder newBuilder(TableId sourceTable, String destinationUri) {
+    return newBuilder(sourceTable, ImmutableList.of(checkNotNull(destinationUri)));
+  }
+
+
+  /**
+   * Creates a builder for a BigQuery Extract Job configuration given source table and destination
+   * URIs.
+   */
+  public static Builder newBuilder(TableId sourceTable, List destinationUris) {
+    return new Builder().setSourceTable(sourceTable).setDestinationUris(destinationUris);
+  }
+
+  /**
+   * Returns a BigQuery Extract Job configuration for the given source table and destination URI.
+   */
+  public static ExtractJobConfiguration of(TableId sourceTable, String destinationUri) {
+    return newBuilder(sourceTable, destinationUri).build();
+  }
+
+  /**
+   * Returns a BigQuery Extract Job configuration for the given source table and destination URIs.
+   */
+  public static ExtractJobConfiguration of(TableId sourceTable, List destinationUris) {
+    return newBuilder(sourceTable, destinationUris).build();
+  }
+
+  /**
+   * Returns a BigQuery Extract Job configuration for the given source table, format and destination
+   * URI.
+   */
+  public static ExtractJobConfiguration of(TableId sourceTable, String destinationUri,
+      String format) {
+    return newBuilder(sourceTable, destinationUri).setFormat(format).build();
+  }
+
+  /**
+   * Returns a BigQuery Extract Job configuration for the given source table, format and destination
+   * URIs.
+   */
+  public static ExtractJobConfiguration of(TableId sourceTable, List destinationUris,
+      String format) {
+    return newBuilder(sourceTable, destinationUris).setFormat(format).build();
+  }
+
+  // Rebuilds a configuration from its API proto representation.
+  @SuppressWarnings("unchecked")
+  static ExtractJobConfiguration fromPb(
+      com.google.api.services.bigquery.model.JobConfiguration confPb) {
+    return new Builder(confPb).build();
+  }
+}
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Field.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Field.java
new file mode 100644
index 000000000000..3fbad523e859
--- /dev/null
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/Field.java
@@ -0,0 +1,310 @@
+/*
+ * Copyright 2015 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery;
+
+import static com.google.common.base.MoreObjects.firstNonNull;
+import static com.google.common.base.Preconditions.checkNotNull;
+
+import com.google.api.client.util.Data;
+import com.google.api.services.bigquery.model.TableFieldSchema;
+import com.google.common.base.Function;
+import com.google.common.base.MoreObjects;
+import com.google.common.collect.Lists;
+
+import java.io.Serializable;
+import java.util.List;
+import java.util.Objects;
+
+/**
+ * Google BigQuery Table schema field. A table field has a name, a type, a mode and possibly a
+ * description.
+ */
+public final class Field implements Serializable {
+
+  // Adapters between Field and its API proto (TableFieldSchema); used for (lazy) list transforms
+  // in toPb/fromPb and by FieldList.
+  static final Function FROM_PB_FUNCTION =
+      new Function() {
+        @Override
+        public Field apply(TableFieldSchema pb) {
+          return Field.fromPb(pb);
+        }
+      };
+  static final Function TO_PB_FUNCTION =
+      new Function() {
+        @Override
+        public TableFieldSchema apply(Field field) {
+          return field.toPb();
+        }
+      };
+
+  private static final long serialVersionUID = 8827990270251118556L;
+
+  private final String name;
+  private final LegacySQLTypeName type;
+  private final FieldList subFields;
+  // mode/description are stored as Strings; an explicit JSON-null marker (Data.nullOf) may be
+  // stored instead of plain null — see the setters below.
+  private final String mode;
+  private final String description;
+
+  /**
+   * Mode for a BigQuery Table field. {@link Mode#NULLABLE} fields can be set to {@code null},
+   * {@link Mode#REQUIRED} fields must be provided. {@link Mode#REPEATED} fields can contain more
+   * than one value.
+   */
+  public enum Mode {
+    NULLABLE, REQUIRED, REPEATED
+  }
+
+  public static final class Builder {
+
+    private String name;
+    private LegacySQLTypeName type;
+    private FieldList subFields;
+    private String mode;
+    private String description;
+
+    private Builder() {}
+
+    // Copy constructor backing toBuilder().
+    private Builder(Field field) {
+      this.name = field.name;
+      this.type = field.type;
+      this.subFields = field.subFields;
+      this.mode = field.mode;
+      this.description = field.description;
+    }
+
+
+    /**
+     * Sets the field name. The name must contain only letters (a-z, A-Z), numbers (0-9), or
+     * underscores (_), and must start with a letter or underscore. The maximum length is 128
+     * characters.
+     */
+    public Builder setName(String name) {
+      this.name = checkNotNull(name);
+      return this;
+    }
+
+
+    /**
+     * Sets the type of the field.
+     *
+     * @param type BigQuery data type
+     * @param subFields nested schema fields in case if {@code type} is
+     *     {@link LegacySQLTypeName#RECORD}, empty otherwise
+     * @throws IllegalArgumentException
+     *     if {@code type == LegacySQLTypeName.RECORD && subFields.length == 0}
+     *     or if {@code type != LegacySQLTypeName.RECORD && subFields.length != 0}
+     * @see <a href="https://cloud.google.com/bigquery/preparing-data-for-bigquery#datatypes">
+     *     Data Types</a>
+     */
+    public Builder setType(LegacySQLTypeName type, Field... subFields) {
+      // An empty varargs array is normalized to null so the FieldList overload below sees
+      // "no sub-fields" rather than an empty list.
+      return setType(type, subFields.length > 0 ? FieldList.of(subFields) : null);
+    }
+
+    /**
+     * Sets the type of the field.
+     *
+     * @param type BigQuery data type
+     * @param subFields nested schema fields, in case if {@code type} is
+     *     {@link LegacySQLTypeName#RECORD}, {@code null} otherwise.
+     * @throws IllegalArgumentException
+     *     if {@code type == LegacySQLTypeName.RECORD && (subFields == null || subFields.isEmpty())}
+     *     or if {@code type != LegacySQLTypeName.RECORD && subFields != null}
+     * @see <a href="https://cloud.google.com/bigquery/preparing-data-for-bigquery#datatypes">
+     *     Data Types</a>
+     */
+
+    public Builder setType(LegacySQLTypeName type, FieldList subFields) {
+      // Sub-fields are mandatory for RECORD and forbidden for every other type.
+      if (type == LegacySQLTypeName.RECORD) {
+        if (subFields == null || subFields.isEmpty()) {
+          throw new IllegalArgumentException(
+              "The " + type + " field must have at least one sub-field");
+        }
+      } else {
+        if (subFields != null) {
+          throw new IllegalArgumentException(
+              "Only " + LegacySQLTypeName.RECORD + " fields can have sub-fields");
+        }
+      }
+      this.type = type;
+      this.subFields = subFields;
+      return this;
+    }
+
+    /**
+     * Sets the mode of the field. When not specified {@link Mode#NULLABLE} is used.
+     */
+    public Builder setMode(Mode mode) {
+      // Data.nullOf stores an explicit JSON-null marker — presumably so the wire format can
+      // distinguish "cleared" from "never set"; confirm against the API client docs.
+      this.mode = mode != null ? mode.name() : Data.nullOf(String.class);
+      return this;
+    }
+
+
+    /**
+     * Sets the field description. The maximum length is 16K characters.
+     */
+    public Builder setDescription(String description) {
+      this.description = firstNonNull(description, Data.nullOf(String.class));
+      return this;
+    }
+
+    /**
+     * Creates a {@code Field} object.
+     */
+    public Field build() {
+      return new Field(this);
+    }
+  }
+
+  private Field(Builder builder) {
+    this.name = checkNotNull(builder.name);
+    this.type = checkNotNull(builder.type);
+    this.subFields = builder.subFields;
+    this.mode = builder.mode;
+    this.description = builder.description;
+  }
+
+  /**
+   * Returns the field name.
+   */
+  public String getName() {
+    return name;
+  }
+
+
+  /**
+   * Returns the field type.
+   *
+   * @see <a href="https://cloud.google.com/bigquery/preparing-data-for-bigquery#datatypes">
+   *     Data Types</a>
+   */
+  public LegacySQLTypeName getType() {
+    return type;
+  }
+
+
+  /**
+   * Returns the field mode. By default {@link Mode#NULLABLE} is used.
+   */
+  public Mode getMode() {
+    return mode != null ? Mode.valueOf(mode) : null;
+  }
+
+
+  /**
+   * Returns the field description.
+   */
+  public String getDescription() {
+    // Filters out the explicit JSON-null marker possibly stored by the builder.
+    return Data.isNull(description) ? null : description;
+  }
+
+
+  /**
+   * Returns the list of sub-fields if {@link #getType()} is a {@link LegacySQLTypeName#RECORD}.
+   * Returns {@code null} otherwise.
+   */
+  public FieldList getSubFields() {
+    return subFields;
+  }
+
+  /**
+   * Returns a builder for the {@code Field} object.
+   */
+  public Builder toBuilder() {
+    return new Builder(this);
+  }
+
+  // NOTE(review): toString and hashCode omit subFields while equals (via toPb) includes them.
+  // The equals/hashCode contract still holds (equal objects hash equally), but RECORD fields
+  // differing only in sub-fields collide in hash and print identically.
+  @Override
+  public String toString() {
+    return MoreObjects.toStringHelper(this)
+        .add("name", name)
+        .add("type", type)
+        .add("mode", mode)
+        .add("description", description)
+        .toString();
+  }
+
+  @Override
+  public int hashCode() {
+    return Objects.hash(name, type, mode, description);
+  }
+
+  // Equality is defined by the serialized proto form, which covers all fields incl. subFields.
+  @Override
+  public boolean equals(Object obj) {
+    return obj instanceof Field && Objects.equals(toPb(), ((Field) obj).toPb());
+  }
+
+
+  /**
+   * Returns a Field object with given name and type.
+   */
+  public static Field of(String name, LegacySQLTypeName type, Field... subFields) {
+    return newBuilder(name, type, subFields).build();
+  }
+
+  /**
+   * Returns a Field object with given name and type.
+   */
+  public static Field of(String name, LegacySQLTypeName type, FieldList subFields) {
+    return newBuilder(name, type, subFields).build();
+  }
+
+  /**
+   * Returns a builder for a Field object with given name and type.
+   */
+  public static Builder newBuilder(String name, LegacySQLTypeName type, Field... subFields) {
+    return new Builder().setName(name).setType(type, subFields);
+  }
+
+  /**
+   * Returns a builder for a Field object with given name and type.
+   */
+  public static Builder newBuilder(String name, LegacySQLTypeName type, FieldList subFields) {
+    return new Builder().setName(name).setType(type, subFields);
+  }
+
+  // Serializes this field (and, recursively, any sub-fields) to its API proto.
+  TableFieldSchema toPb() {
+    TableFieldSchema fieldSchemaPb = new TableFieldSchema();
+    fieldSchemaPb.setName(name);
+    fieldSchemaPb.setType(type.name());
+    if (mode != null) {
+      fieldSchemaPb.setMode(mode);
+    }
+    if (description != null) {
+      fieldSchemaPb.setDescription(description);
+    }
+    if (getSubFields() != null) {
+      List fieldsPb = Lists.transform(getSubFields(), TO_PB_FUNCTION);
+      fieldSchemaPb.setFields(fieldsPb);
+    }
+    return fieldSchemaPb;
+  }
+
+  // Rebuilds a Field (and, recursively, any sub-fields) from its API proto.
+  static Field fromPb(TableFieldSchema fieldSchemaPb) {
+    Builder fieldBuilder = new Builder();
+    fieldBuilder.setName(fieldSchemaPb.getName());
+    if (fieldSchemaPb.getMode() != null) {
+      fieldBuilder.setMode(Mode.valueOf(fieldSchemaPb.getMode()));
+    }
+    if (fieldSchemaPb.getDescription() != null) {
+      fieldBuilder.setDescription(fieldSchemaPb.getDescription());
+    }
+    FieldList subFields = fieldSchemaPb.getFields() != null
+        ? FieldList.of(Lists.transform(fieldSchemaPb.getFields(), FROM_PB_FUNCTION))
+        : null;
+    fieldBuilder.setType(LegacySQLTypeName.valueOf(fieldSchemaPb.getType()), subFields);
+    return fieldBuilder.build();
+  }
+}
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldList.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldList.java
new file mode 100644
index 000000000000..19793ab74b69
--- /dev/null
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldList.java
@@ -0,0 +1,118 @@
+/*
+ * Copyright 2017 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery;
+
+import com.google.api.services.bigquery.model.TableFieldSchema;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
+import java.io.Serializable;
+import java.util.AbstractList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Google BigQuery Table schema fields (columns). Each field has a unique name and index. Fields
+ * with duplicate names are not allowed in BigQuery schema.
+ */
+public final class FieldList extends AbstractList<Field> implements Serializable {
+
+  private static final long serialVersionUID = 8736258375638733316L;
+
+  // Fields in declaration order, plus a name -> position lookup table built once at construction.
+  private final List<Field> fields;
+  private final Map<String, Integer> indexByName;
+
+  private FieldList(Iterable<Field> fields) {
+    this.fields = ImmutableList.copyOf(fields);
+    ImmutableMap.Builder<String, Integer> positions = ImmutableMap.builder();
+    for (int position = 0; position < this.fields.size(); position++) {
+      positions.put(this.fields.get(position).getName(), position);
+    }
+    // ImmutableMap.Builder#build rejects duplicate keys, so duplicate field names fail here.
+    this.indexByName = positions.build();
+  }
+
+  /**
+   * Get schema field by index.
+   *
+   * @param index field (column) index
+   */
+  @Override
+  public Field get(int index) {
+    return fields.get(index);
+  }
+
+  /**
+   * Get schema field by name.
+   *
+   * @param name field (column) name
+   */
+  public Field get(String name) {
+    return get(getIndex(name));
+  }
+
+  /**
+   * Get schema field's index by name.
+   *
+   * @param name field (column) name
+   * @throws IllegalArgumentException if no field with the given name exists
+   */
+  public int getIndex(String name) {
+    Integer position = indexByName.get(name);
+    if (position != null) {
+      return position;
+    }
+    throw new IllegalArgumentException("Field with name '" + name + "' was not found");
+  }
+
+  /**
+   * Total number of fields (columns) in the schema.
+   */
+  @Override
+  public int size() {
+    return fields.size();
+  }
+
+  /**
+   * Returns a new {@code FieldList} object, which contains a collection of {@code Field} objects in
+   * preserved order and represent schema columns.
+   *
+   * @param fields the schema fields
+   */
+  public static FieldList of(Field... fields) {
+    return new FieldList(ImmutableList.copyOf(fields));
+  }
+
+  /**
+   * Returns a new {@code FieldList} object, which contains a collection of {@code Field} objects in
+   * preserved order and represent schema columns.
+   *
+   * @param fields the schema fields
+   */
+  public static FieldList of(Iterable<Field> fields) {
+    return new FieldList(fields);
+  }
+
+  // Lazily converts every field to its API proto form.
+  List<TableFieldSchema> toPb() {
+    return Lists.transform(fields, Field.TO_PB_FUNCTION);
+  }
+
+  static FieldList fromPb(List<TableFieldSchema> fields) {
+    return FieldList.of(Lists.transform(fields, Field.FROM_PB_FUNCTION));
+  }
+}
diff --git a/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldValue.java b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldValue.java
new file mode 100644
index 000000000000..42d1993900a5
--- /dev/null
+++ b/google-cloud-bigquery/src/main/java/com/google/cloud/bigquery/FieldValue.java
@@ -0,0 +1,304 @@
+/*
+ * Copyright 2015 Google LLC
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.google.cloud.bigquery;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.base.Preconditions.checkState;
+
+import com.google.api.client.util.Data;
+import com.google.api.core.BetaApi;
+import com.google.common.base.MoreObjects;
+import com.google.common.io.BaseEncoding;
+import java.io.Serializable;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+/**
+ * Google BigQuery Table Field Value class. Objects of this class represent values of a BigQuery
+ * Table Field. A list of values forms a table row. Table rows can be retrieved as the result of a
+ * query or when listing table data.
+ */
+public class FieldValue implements Serializable {
+ // Microseconds per second; used by getTimestampValue() to convert the API's
+ // fractional-seconds encoding into microseconds since epoch.
+ private static final int MICROSECONDS = 1000000;
+ private static final long serialVersionUID = 469098630191710061L;
+
+ // Content type of this value; never null (enforced in the constructor).
+ private final Attribute attribute;
+ // Underlying value: a String for primitives, a List of FieldValue for repeated
+ // fields, a FieldValueList for records (see the casts in the getters), or null.
+ private final Object value;
+
+ /**
+ * The field value's attribute, giving information on the field's content type.
+ */
+ public enum Attribute {
+ /**
+ * A primitive field value. A {@code FieldValue} is primitive when the corresponding field has
+ * type {@link LegacySQLTypeName#BYTES}, {@link LegacySQLTypeName#BOOLEAN},
+ * {@link LegacySQLTypeName#STRING}, {@link LegacySQLTypeName#FLOAT},
+ * {@link LegacySQLTypeName#INTEGER}, {@link LegacySQLTypeName#TIMESTAMP} or the value is set to
+ * {@code null}. Retrieve via the typed primitive getters, e.g. {@code getStringValue()}.
+ */
+ PRIMITIVE,
+
+ /**
+ * A {@code FieldValue} for a field with {@link Field.Mode#REPEATED} mode. Retrieve via
+ * {@code getRepeatedValue()}.
+ */
+ REPEATED,
+
+ /**
+ * A {@code FieldValue} for a field of type {@link LegacySQLTypeName#RECORD}. Retrieve via
+ * {@code getRecordValue()}.
+ */
+ RECORD
+ }
+
+ /**
+ * Creates a field value. {@code attribute} must be non-null; {@code value} may be {@code null}
+ * (a SQL NULL). Instances are obtained via {@link #of(Attribute, Object)} or the package-private
+ * {@code fromPb} factories.
+ */
+ private FieldValue(Attribute attribute, Object value) {
+ this.attribute = checkNotNull(attribute);
+ this.value = value;
+ }
+
+
+ /**
+ * Returns the attribute of this Field Value.
+ *
+ * @return {@link Attribute#PRIMITIVE} if the field is a primitive type
+ * ({@link LegacySQLTypeName#BYTES}, {@link LegacySQLTypeName#BOOLEAN}, {@link LegacySQLTypeName#STRING},
+ * {@link LegacySQLTypeName#FLOAT}, {@link LegacySQLTypeName#INTEGER},
+ * {@link LegacySQLTypeName#TIMESTAMP}) or is {@code null}. Returns {@link Attribute#REPEATED} if
+ * the corresponding field has ({@link Field.Mode#REPEATED}) mode. Returns
+ * {@link Attribute#RECORD} if the corresponding field is a
+ * {@link LegacySQLTypeName#RECORD} type.
+ */
+ public Attribute getAttribute() {
+ return attribute;
+ }
+
+ /**
+ * Returns {@code true} if this field's value is {@code null} (SQL NULL), {@code false}
+ * otherwise. The typed getters throw {@link NullPointerException} when this returns
+ * {@code true}.
+ */
+ public boolean isNull() {
+ return value == null;
+ }
+
+
+ /**
+ * Returns this field's value as an {@link Object}. If {@link #isNull()} is {@code true} this
+ * method returns {@code null}. No conversion is performed; see the typed getters for
+ * type-specific access.
+ */
+ public Object getValue() {
+ return value;
+ }
+
+
+ /**
+ * Returns this field's value as a {@link String}. This method should only be used if the
+ * corresponding field has primitive type ({@link LegacySQLTypeName#BYTES},
+ * {@link LegacySQLTypeName#BOOLEAN}, {@link LegacySQLTypeName#STRING},
+ * {@link LegacySQLTypeName#FLOAT}, {@link LegacySQLTypeName#INTEGER},
+ * {@link LegacySQLTypeName#TIMESTAMP}).
+ *
+ * @throws ClassCastException if the field is not a primitive type
+ * @throws NullPointerException if {@link #isNull()} returns {@code true}
+ */
+ public String getStringValue() {
+ // The @SuppressWarnings("unchecked") that used to sit here was spurious: casting
+ // Object to String is a fully checked cast, so no unchecked warning is generated.
+ checkNotNull(value);
+ return (String) value;
+ }
+
+
+ /**
+ * Returns this field's value as a byte array. This method should only be used if the
+ * corresponding field has primitive type {@link LegacySQLTypeName#BYTES} (values of that type
+ * arrive base64-encoded).
+ *
+ * @throws ClassCastException if the field is not a primitive type
+ * @throws NullPointerException if {@link #isNull()} returns {@code true}
+ * @throws IllegalStateException if the field value is not encoded in base64
+ */
+ public byte[] getBytesValue() {
+ try {
+ return BaseEncoding.base64().decode(getStringValue());
+ } catch (IllegalArgumentException ex) {
+ throw new IllegalStateException(ex);
+ }
+ }
+
+
+ /**
+ * Returns this field's value as a {@code long}. This method should only be used if the
+ * corresponding field has {@link LegacySQLTypeName#INTEGER} type.
+ *
+ * @throws ClassCastException if the field is not a primitive type
+ * @throws NumberFormatException if the field's value could not be converted to {@code long}
+ * @throws NullPointerException if {@link #isNull()} returns {@code true}
+ */
+ public long getLongValue() {
+ // Dropped the spurious @SuppressWarnings("unchecked") (no unchecked operation here)
+ // and corrected the @throws doc: the value is parsed as a long, not an Integer.
+ return Long.parseLong(getStringValue());
+ }
+
+
+ /**
+ * Returns this field's value as a {@code double}. This method should only be used if the
+ * corresponding field has {@link LegacySQLTypeName#FLOAT} type.
+ *
+ * @throws ClassCastException if the field is not a primitive type
+ * @throws NumberFormatException if the field's value could not be converted to {@code double}
+ * @throws NullPointerException if {@link #isNull()} returns {@code true}
+ */
+ public double getDoubleValue() {
+ // Dropped the spurious @SuppressWarnings("unchecked"): there is no unchecked
+ // operation in this method.
+ return Double.parseDouble(getStringValue());
+ }
+
+
+ /**
+ * Returns this field's value as a {@code boolean}. This method should only be used if the
+ * corresponding field has {@link LegacySQLTypeName#BOOLEAN} type.
+ *
+ * @throws ClassCastException if the field is not a primitive type
+ * @throws IllegalStateException if the field's value is neither {@code "true"} nor
+ *     {@code "false"} (case-insensitive)
+ * @throws NullPointerException if {@link #isNull()} returns {@code true}
+ */
+ public boolean getBooleanValue() {
+ // Dropped the spurious @SuppressWarnings("unchecked"): there is no unchecked
+ // operation in this method. The explicit checkState guards against silently
+ // mapping arbitrary strings to false, which Boolean.parseBoolean would do.
+ String stringValue = getStringValue();
+ checkState(stringValue.equalsIgnoreCase("true") || stringValue.equalsIgnoreCase("false"),
+ "Field value is not of boolean type");
+ return Boolean.parseBoolean(stringValue);
+ }
+
+
+ /**
+ * Returns this field's value as a {@code long}, representing a timestamp in microseconds since
+ * epoch (UNIX time). This method should only be used if the corresponding field has
+ * {@link LegacySQLTypeName#TIMESTAMP} type.
+ *
+ * @throws ClassCastException if the field is not a primitive type
+ * @throws NumberFormatException if the field's value could not be parsed as a {@code double}
+ * @throws NullPointerException if {@link #isNull()} returns {@code true}
+ */
+ public long getTimestampValue() {
+ // timestamps are encoded in the format 1408452095.22 where the integer part is seconds since
+ // epoch (e.g. 1408452095.22 == 2014-08-19 07:41:35.220 -05:00)
+ // A plain primitive cast replaces the deprecated new Double(...).longValue() boxing
+ // round-trip; the narrowing conversion is identical. Also dropped the spurious
+ // @SuppressWarnings("unchecked").
+ return (long) (Double.parseDouble(getStringValue()) * MICROSECONDS);
+ }
+
+
+ /**
+ * Returns this field's value as a list of {@link FieldValue}. This method should only be used if
+ * the corresponding field has {@link Field.Mode#REPEATED} mode (i.e. {@link #getAttribute()} is
+ * {@link Attribute#REPEATED}).
+ *
+ * @throws ClassCastException if the field has not {@link Field.Mode#REPEATED} mode
+ * @throws NullPointerException if {@link #isNull()} returns {@code true}
+ */
+ @SuppressWarnings("unchecked")
+ public List<FieldValue> getRepeatedValue() {
+ // Restore the List<FieldValue> type argument (lost to a raw List in this patch; the
+ // javadoc above documents the element type). The suppression is now genuinely needed
+ // for the unchecked cast from Object.
+ checkNotNull(value);
+ return (List<FieldValue>) value;
+ }
+
+
+ /**
+ * Returns this field's value as a {@link FieldValueList} instance. This method should only be used if
+ * the corresponding field has {@link LegacySQLTypeName#RECORD} type (i.e.
+ * {@link #getAttribute()} is {@link Attribute#RECORD}).
+ *
+ * @return the nested row stored in this field
+ * @throws ClassCastException if the field is not a {@link LegacySQLTypeName#RECORD} type
+ * @throws NullPointerException if {@link #isNull()} returns {@code true}
+ */
+ public FieldValueList getRecordValue() {
+ checkNotNull(value);
+ return (FieldValueList) value;
+ }
+
+ // Debug representation, e.g. FieldValue{attribute=PRIMITIVE, value=123}; not part of
+ // the wire format.
+ @Override
+ public String toString() {
+ return MoreObjects.toStringHelper(this)
+ .add("attribute", attribute)
+ .add("value", value)
+ .toString();
+ }
+
+ // Consistent with equals(): derived from the same (attribute, value) pair.
+ @Override
+ public final int hashCode() {
+ return Objects.hash(attribute, value);
+ }
+
+ /**
+ * Strict same-class equality: {@code obj} is equal when it is exactly a {@code FieldValue}
+ * (subclasses never compare equal) with the same attribute and an equal value.
+ */
+ @Override
+ public final boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+ // Class objects are canonical per class loader, so identity comparison is
+ // equivalent to the Class.equals() call used previously.
+ if (obj == null || obj.getClass() != FieldValue.class) {
+ return false;
+ }
+ FieldValue that = (FieldValue) obj;
+ return attribute == that.attribute && Objects.equals(value, that.value);
+ }
+
+ /**
+ * Creates an instance of {@code FieldValue}, useful for testing.
+ *
+ * <p>If the {@code attribute} is {@link Attribute#PRIMITIVE}, the {@code value} should be the
+ * string representation of the underlying value, eg {@code "123"} for number {@code 123}.
+ *
+ * <p>If the {@code attribute} is {@link Attribute#REPEATED} or {@link Attribute#RECORD}, the
+ * {@code value} should be {@code List} of {@link FieldValue}s or {@link FieldValueList},
+ * respectively.
+ *
+ * <p>This method is unstable. See this discussion for more context.
+ */
+ @BetaApi
+ public static FieldValue of(Attribute attribute, Object value) {
+ return new FieldValue(attribute, value);
+ }
+
+ // Deserializes a single table cell from its API representation; delegates to
+ // fromPb(Object, Field) with no record schema.
+ static FieldValue fromPb(Object cellPb) {
+ return fromPb(cellPb, null);
+ }
+
+ @SuppressWarnings("unchecked")
+ static FieldValue fromPb(Object cellPb, Field recordSchema) {
+ if (Data.isNull(cellPb)) {
+ return FieldValue.of(Attribute.PRIMITIVE, null);
+ }
+ if (cellPb instanceof String) {
+ return FieldValue.of(Attribute.PRIMITIVE, cellPb);
+ }
+ if (cellPb instanceof List) {
+ return FieldValue.of(Attribute.REPEATED, FieldValueList.fromPb((List