
Commit

reverting
davidrabinowitz committed Nov 27, 2024
1 parent 6a9640f commit 6dfce0d
Showing 1 changed file with 18 additions and 22 deletions.
@@ -86,9 +86,9 @@

public class SparkBigQueryConfig
    implements BigQueryConfig,
-       BigQueryClient.CreateTableOptions,
-       BigQueryClient.LoadDataOptions,
-       Serializable {
+       BigQueryClient.CreateTableOptions,
+       BigQueryClient.LoadDataOptions,
+       Serializable {

public static final int MAX_TRACE_ID_LENGTH = 256;

@@ -171,8 +171,6 @@ public static WriteMethod from(@Nullable String writeMethod) {
public static final String BIG_NUMERIC_DEFAULT_PRECISION = "bigNumericDefaultPrecision";
public static final String BIG_NUMERIC_DEFAULT_SCALE = "bigNumericDefaultScale";

-   private static final String DATAPROC_SYSTEM_BUCKET_CONFIGURATION = "fs.gs.system.bucket";
-
TableId tableId;
// as the config needs to be Serializable, internally it uses
// com.google.common.base.Optional<String> but externally it uses the regular java.util.Optional
@@ -400,10 +398,7 @@ public static SparkBigQueryConfig from(
.orNull();
config.defaultParallelism = defaultParallelism;
config.temporaryGcsBucket =
-       stripPrefix(getAnyOption(globalOptions, options, "temporaryGcsBucket"))
-           .or(
-               com.google.common.base.Optional.fromNullable(
-                   hadoopConfiguration.get(DATAPROC_SYSTEM_BUCKET_CONFIGURATION)));
+       stripPrefix(getAnyOption(globalOptions, options, "temporaryGcsBucket"));
config.persistentGcsBucket =
stripPrefix(getAnyOption(globalOptions, options, "persistentGcsBucket"));
config.persistentGcsPath = getOption(options, "persistentGcsPath");
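
For context on this hunk: before the revert, the connector fell back to the bucket stored under the Dataproc Hadoop property fs.gs.system.bucket whenever the user did not set temporaryGcsBucket; after the revert, only the explicitly configured option is used. A minimal standalone sketch of the two behaviors, assuming a plain Hadoop Configuration and Guava's Optional (class and variable names here are illustrative, not the connector's actual fields):

    import com.google.common.base.Optional;
    import org.apache.hadoop.conf.Configuration;

    public class TemporaryBucketFallbackSketch {
      public static void main(String[] args) {
        Configuration hadoopConfiguration = new Configuration();
        // On Dataproc this property typically points at the cluster's staging bucket.
        hadoopConfiguration.set("fs.gs.system.bucket", "example-dataproc-staging-bucket");

        // Simulates a job that did not pass the "temporaryGcsBucket" option.
        Optional<String> userBucket = Optional.absent();

        // Behavior removed by this commit: fall back to the system bucket.
        Optional<String> withFallback =
            userBucket.or(Optional.fromNullable(hadoopConfiguration.get("fs.gs.system.bucket")));

        // Behavior after the revert: only an explicitly configured bucket is used.
        Optional<String> explicitOnly = userBucket;

        System.out.println("with fallback: " + withFallback.orNull()); // example-dataproc-staging-bucket
        System.out.println("explicit only: " + explicitOnly.orNull()); // null
      }
    }

With the fallback gone, jobs that previously relied on the Dataproc system bucket presumably need to set temporaryGcsBucket (or persistentGcsBucket) explicitly.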
@@ -610,6 +605,7 @@ public static SparkBigQueryConfig from(

config.partitionOverwriteModeValue =
getAnyOption(globalOptions, options, partitionOverwriteModeProperty)
+   .transform(String::toUpperCase)
.transform(PartitionOverwriteMode::valueOf)
.or(PartitionOverwriteMode.STATIC);
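
The line restored in this hunk makes the partitionOverwriteMode option case-insensitive before it is mapped to the enum; without it, a lowercase value such as "dynamic" would make valueOf throw IllegalArgumentException. A small sketch of the same Guava Optional chain, assuming a local stand-in for the connector's PartitionOverwriteMode enum:

    import com.google.common.base.Optional;

    public class PartitionOverwriteModeSketch {
      enum PartitionOverwriteMode { STATIC, DYNAMIC }

      public static void main(String[] args) {
        // Simulates a user-supplied value in arbitrary case.
        Optional<String> rawValue = Optional.of("dynamic");

        PartitionOverwriteMode mode =
            rawValue
                .transform(String::toUpperCase)             // restored by this commit
                .transform(PartitionOverwriteMode::valueOf) // "DYNAMIC" -> enum constant
                .or(PartitionOverwriteMode.STATIC);         // default when the option is absent

        System.out.println(mode); // DYNAMIC
      }
    }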

@@ -723,19 +719,19 @@ static ImmutableMap<String, String> normalizeConf(Map<String, String> conf) {
public Credentials createCredentials() {

return new BigQueryCredentialsSupplier(
-   accessTokenProviderFQCN.toJavaUtil(),
-   accessTokenProviderConfig.toJavaUtil(),
-   accessToken.toJavaUtil(),
-   credentialsKey.toJavaUtil(),
-   credentialsFile.toJavaUtil(),
-   loggedInUserName,
-   loggedInUserGroups,
-   impersonationServiceAccountsForUsers.toJavaUtil(),
-   impersonationServiceAccountsForGroups.toJavaUtil(),
-   impersonationServiceAccount.toJavaUtil(),
-   sparkBigQueryProxyAndHttpConfig.getProxyUri(),
-   sparkBigQueryProxyAndHttpConfig.getProxyUsername(),
-   sparkBigQueryProxyAndHttpConfig.getProxyPassword())
+   accessTokenProviderFQCN.toJavaUtil(),
+   accessTokenProviderConfig.toJavaUtil(),
+   accessToken.toJavaUtil(),
+   credentialsKey.toJavaUtil(),
+   credentialsFile.toJavaUtil(),
+   loggedInUserName,
+   loggedInUserGroups,
+   impersonationServiceAccountsForUsers.toJavaUtil(),
+   impersonationServiceAccountsForGroups.toJavaUtil(),
+   impersonationServiceAccount.toJavaUtil(),
+   sparkBigQueryProxyAndHttpConfig.getProxyUri(),
+   sparkBigQueryProxyAndHttpConfig.getProxyUsername(),
+   sparkBigQueryProxyAndHttpConfig.getProxyPassword())
.getCredentials();
}

