5 changes: 5 additions & 0 deletions .gitignore
@@ -38,3 +38,8 @@ AdAdHocITSuite.scala

# toolchains file
toolchains.xml

# Emacs
*~
\#*\#
.\#*
11 changes: 11 additions & 0 deletions bigquery-connector-common/pom.xml
@@ -153,6 +153,17 @@
</execution>
</executions>
</plugin>
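<!-- Attach this module's test classes as a secondary "test-jar" artifact so
     downstream modules can reuse the shared test utilities (consumed by the
     spark-3.5-bigquery module below). -->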
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>test-jar</goal>
</goals>
</execution>
</executions>
</plugin>
</plugins>
</build>
</project>
bigquery-connector-common/src/main/java/com/google/cloud/bigquery/connector/common/ReadSessionCreator.java
@@ -18,6 +18,7 @@
import static com.google.cloud.bigquery.connector.common.BigQueryErrorCode.UNSUPPORTED;
import static java.lang.String.format;

import com.google.cloud.bigquery.BigQueryException;
import com.google.cloud.bigquery.TableDefinition;
import com.google.cloud.bigquery.TableId;
import com.google.cloud.bigquery.TableInfo;
@@ -47,6 +48,7 @@ public class ReadSessionCreator {
public static final int DEFAULT_MAX_PARALLELISM = 20_000;
public static final int MINIMAL_PARALLELISM = 1;
public static final int DEFAULT_MIN_PARALLELISM_FACTOR = 3;
private static final String ACCESS_DENIED_REASON = "accessDenied";

private static final Logger log = LoggerFactory.getLogger(ReadSessionCreator.class);
private static boolean initialized = false;
@@ -92,8 +94,48 @@ public ReadSessionResponse create(
TableId table, ImmutableList<String> selectedFields, Optional<String> filter) {
Instant sessionPrepStartTime = Instant.now();
TableInfo tableDetails = bigQueryClient.getTable(table);

if (tableDetails.getDefinition().getType() == TableDefinition.Type.MATERIALIZED_VIEW) {
try {
// Attempt to read the Materialized View directly. This will fail if the required
// permissions are not present.
return createReadSession(
tableDetails, tableDetails, selectedFields, filter, sessionPrepStartTime);
} catch (BigQueryException e) {
if (!isPermissionDeniedError(e)) {
// Not a permission error, so re-throw it.
throw e;
}
// This is a permission error. Log a warning and fall back to materializing the view.
log.warn(
"Failed to initiate a direct read from Materialized View '{}' due to a permission"
+ " error. The service account likely lacks 'bigquery.tables.getData'"
+ " permission. Falling back to re-executing the view's underlying query. This"
+ " will incur additional BigQuery costs and impact performance. For optimal"
+ " performance, grant the 'roles/bigquery.dataViewer' role to the principal at"
+ " the dataset or table level.",
tableDetails.getTableId().toString());
// Execution will now fall through to the getActualTable() call below.
}
}

TableInfo actualTable = getActualTable(tableDetails, selectedFields, filter);
return createReadSession(
actualTable, tableDetails, selectedFields, filter, sessionPrepStartTime);
}

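// A direct read on a materialized view without bigquery.tables.getData surfaces
// as HTTP 403 with a top-level BigQueryError whose reason is "accessDenied";
// that is exactly the shape this predicate matches.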
private boolean isPermissionDeniedError(BigQueryException e) {
return e.getCode() == java.net.HttpURLConnection.HTTP_FORBIDDEN
&& e.getError() != null
&& ACCESS_DENIED_REASON.equals(e.getError().getReason());
}

private ReadSessionResponse createReadSession(
TableInfo actualTable,
TableInfo tableDetails,
ImmutableList<String> selectedFields,
Optional<String> filter,
Instant sessionPrepStartTime) {
BigQueryReadClient bigQueryReadClient = bigQueryReadClientFactory.getBigQueryReadClient();
log.info(
"|creation a read session for table {}, parameters: "
@@ -203,7 +245,7 @@ public ReadSessionResponse create(
if (config.isReadSessionCachingEnabled()
&& getReadSessionCache().asMap().containsKey(createReadSessionRequest)) {
ReadSession readSession = getReadSessionCache().asMap().get(createReadSessionRequest);
log.info("Reusing read session: {}, for table: {}", readSession.getName(), table);
log.info("Reusing read session: {}, for table: {}", readSession.getName(), actualTable);
return new ReadSessionResponse(readSession, actualTable);
}
ReadSession readSession = bigQueryReadClient.createReadSession(createReadSessionRequest);
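The warning above directs operators to grant `roles/bigquery.dataViewer` at the dataset or table level. A minimal sketch of that table-level grant using the BigQuery IAM API follows; the project, dataset, table, and service-account names are placeholders, not part of this change:

import com.google.cloud.Identity;
import com.google.cloud.Policy;
import com.google.cloud.Role;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.TableId;

public class GrantDataViewer {
  public static void main(String[] args) {
    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
    // Placeholder coordinates; substitute the real materialized view.
    TableId mv = TableId.of("my-project", "my_dataset", "my_materialized_view");

    // Read-modify-write the table-level IAM policy to add dataViewer for the
    // principal that runs the Spark job.
    Policy policy = bigquery.getIamPolicy(mv);
    Policy updated =
        policy.toBuilder()
            .addIdentity(
                Role.of("roles/bigquery.dataViewer"),
                Identity.serviceAccount("reader-sa@my-project.iam.gserviceaccount.com"))
            .build();
    bigquery.setIamPolicy(mv, updated);
  }
}

With this grant in place, the direct read path succeeds and the fallback, with its extra query cost, is never taken.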
7 changes: 7 additions & 0 deletions spark-bigquery-dsv2/spark-3.5-bigquery/pom.xml
@@ -38,5 +38,12 @@
<version>${spark.version}</version>
<scope>test</scope>
</dependency>
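<!-- Shared test utilities published by bigquery-connector-common's test-jar
     (see the maven-jar-plugin configuration in that module's pom above). -->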
<dependency>
<groupId>com.google.cloud.spark</groupId>
<artifactId>bigquery-connector-common</artifactId>
<version>${project.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
</dependencies>
</project>
spark-bigquery-dsv2/spark-3.5-bigquery/src/test/java/com/google/cloud/spark/bigquery/integration/MaterializedViewReadIT.java
@@ -0,0 +1,139 @@
/*
* Copyright 2025 Google LLC and Contributors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.spark.bigquery.integration;

import static com.google.common.truth.Truth.assertThat;

import com.google.cloud.bigquery.JobInfo;
import com.google.cloud.bigquery.QueryJobConfiguration;
import com.google.cloud.bigquery.TableId;
import com.google.cloud.bigquery.connector.common.ReadSessionCreator;
import com.google.cloud.spark.bigquery.connector.common.integration.IntegrationTestUtils;
import com.google.cloud.spark.bigquery.connector.common.integration.SparkBigQueryIntegrationTestBase;
import java.io.StringWriter;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import java.util.stream.Collectors;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.SimpleLayout;
import org.apache.log4j.WriterAppender;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

public class MaterializedViewReadIT extends SparkBigQueryIntegrationTestBase {

private final Logger logger = Logger.getLogger(ReadSessionCreator.class);
private WriterAppender appender;
private StringWriter stringWriter;
private String testDataset;
private String mvName;
private TableId sourceTableId;
private String testServiceAccount;

@Before
public void setUp() throws InterruptedException {
// Set up a custom log appender to capture logs
stringWriter = new StringWriter();
appender = new WriterAppender(new SimpleLayout(), stringWriter);
appender.setThreshold(Level.WARN);
logger.addAppender(appender);

// Create a dedicated service account for this test
String projectId = System.getenv("GOOGLE_CLOUD_PROJECT");
assertThat(projectId).isNotNull();
testServiceAccount =
String.format(
"mv-test-%s@%s.iam.gserviceaccount.com",
UUID.randomUUID().toString().substring(0, 8), projectId);
IntegrationTestUtils.createServiceAccount(testServiceAccount);

// Create a temporary dataset and grant the new SA the BQ User role
// This provides permissions to run jobs (for the fallback) but not to read table data directly
testDataset = "mv_read_it_" + System.nanoTime();
IntegrationTestUtils.createDataset(testDataset);
IntegrationTestUtils.grantServiceAccountRole(
testServiceAccount, "roles/bigquery.user", testDataset);
IntegrationTestUtils.grantServiceAccountRole(
testServiceAccount, "roles/bigquery.metadataViewer", testDataset);

// Create a source table
sourceTableId = TableId.of(testDataset, "source_table_" + System.nanoTime());
IntegrationTestUtils.createTable(sourceTableId, "name:string, value:integer", "name,value");

// Create a Materialized View
mvName = "test_mv_" + System.nanoTime();
String createMvSql =
String.format(
"CREATE MATERIALIZED VIEW `%s.%s` AS SELECT name, value FROM `%s.%s`",
testDataset, mvName, testDataset, sourceTableId.getTable());
QueryJobConfiguration createMvJob = QueryJobConfiguration.newBuilder(createMvSql).build();
bigquery.create(JobInfo.of(createMvJob)).waitFor();
}

@After
public void tearDown() {
logger.removeAppender(appender);
try {
if (testDataset != null) {
IntegrationTestUtils.deleteDatasetAndTables(testDataset);
}
} finally {
if (testServiceAccount != null) {
IntegrationTestUtils.deleteServiceAccount(testServiceAccount);
}
}
}

@Test
public void testReadMaterializedView_lackingPermission_logsWarningAndFallsBack() {
// This test confirms that when reading a Materialized View with a service account
// that lacks `bigquery.tables.getData` permission, the connector:
// 1. Logs a specific WARN message indicating the permission issue and the fallback.
// 2. Successfully reads the data by falling back to materializing the view's query.

// Arrange: Use the dedicated service account with insufficient permissions for a direct read.
String mvToRead = testDataset + "." + mvName;

// Act: Read the materialized view, impersonating the test service account
Dataset<Row> df =
spark
.read()
.format("bigquery")
.option("viewsEnabled", "true") // Required to read any kind of view
.option("impersonationServiceAccount", testServiceAccount)
.load(mvToRead);

List<Row> result = df.collectAsList();

// Assert
// 1. Assert that the read was successful via fallback
assertThat(result).hasSize(2);
List<String> names = result.stream().map(row -> row.getString(0)).collect(Collectors.toList());
assertThat(names).containsExactlyElementsIn(Arrays.asList("name1", "name2"));

// 2. Assert that the specific warning was logged
String logOutput = stringWriter.toString();
assertThat(logOutput).contains("Failed to initiate a direct read from Materialized View");
assertThat(logOutput)
.contains("The service account likely lacks 'bigquery.tables.getData' permission");
assertThat(logOutput).contains("Falling back to re-executing the view's underlying query");
}
}
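The integration test above verifies the fallback end to end. The 403/accessDenied classification can also be checked at the unit level by constructing the error shape directly. A minimal sketch follows; the test class name is hypothetical, and it inlines the predicate because `isPermissionDeniedError` is private:

import static com.google.common.truth.Truth.assertThat;

import com.google.cloud.bigquery.BigQueryError;
import com.google.cloud.bigquery.BigQueryException;
import java.net.HttpURLConnection;
import org.junit.Test;

public class PermissionDeniedClassificationTest {

  @Test
  public void forbiddenWithAccessDeniedReason_isClassifiedAsPermissionDenied() {
    // The shape BigQuery returns when table-read permission is missing.
    BigQueryError accessDenied = new BigQueryError("accessDenied", "table", "Access Denied");
    BigQueryException e = new BigQueryException(403, "Access Denied", accessDenied);

    // Same checks as ReadSessionCreator#isPermissionDeniedError.
    boolean permissionDenied =
        e.getCode() == HttpURLConnection.HTTP_FORBIDDEN
            && e.getError() != null
            && "accessDenied".equals(e.getError().getReason());

    assertThat(permissionDenied).isTrue();
  }
}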