@@ -212,17 +212,17 @@ def commonLegacyExcludeCategories = [
 ]
 
 def commonRunnerV2ExcludeCategories = [
-    'org.apache.beam.sdk.testing.UsesExternalService',
-    'org.apache.beam.sdk.testing.UsesGaugeMetrics',
-    'org.apache.beam.sdk.testing.UsesSetState',
-    'org.apache.beam.sdk.testing.UsesMapState',
-    'org.apache.beam.sdk.testing.UsesMultimapState',
-    'org.apache.beam.sdk.testing.UsesMetricsPusher',
-    'org.apache.beam.sdk.testing.UsesOrderedListState',
-    'org.apache.beam.sdk.testing.UsesTestStream',
-    'org.apache.beam.sdk.testing.UsesTestStreamWithProcessingTime',
-    'org.apache.beam.sdk.testing.UsesRequiresTimeSortedInput',
-    'org.apache.beam.sdk.testing.UsesBoundedTrieMetrics', // Dataflow QM as of now does not support returning back BoundedTrie in metric result.
+  'org.apache.beam.sdk.testing.UsesExternalService',
+  'org.apache.beam.sdk.testing.UsesGaugeMetrics',
+  'org.apache.beam.sdk.testing.UsesSetState',
+  'org.apache.beam.sdk.testing.UsesMapState',
+  'org.apache.beam.sdk.testing.UsesMultimapState',
+  'org.apache.beam.sdk.testing.UsesMetricsPusher',
+  'org.apache.beam.sdk.testing.UsesOrderedListState',
+  'org.apache.beam.sdk.testing.UsesTestStream',
+  'org.apache.beam.sdk.testing.UsesTestStreamWithProcessingTime',
+  'org.apache.beam.sdk.testing.UsesRequiresTimeSortedInput',
+  'org.apache.beam.sdk.testing.UsesBoundedTrieMetrics', // Dataflow QM as of now does not support returning back BoundedTrie in metric result.
 ]
 
 def createLegacyWorkerValidatesRunnerTest = { Map args ->
@@ -241,7 +241,7 @@ def createLegacyWorkerValidatesRunnerTest = { Map args ->
   maxParallelForks Integer.MAX_VALUE
   classpath = configurations.validatesRunner
   testClassesDirs = files(project(":sdks:java:core").sourceSets.test.output.classesDirs) +
-      files(project(project.path).sourceSets.test.output.classesDirs)
+    files(project(project.path).sourceSets.test.output.classesDirs)
   useJUnit {
     includeCategories 'org.apache.beam.sdk.testing.ValidatesRunner'
     commonLegacyExcludeCategories.each {
@@ -274,7 +274,7 @@ def createRunnerV2ValidatesRunnerTest = { Map args ->
   maxParallelForks Integer.MAX_VALUE
   classpath = configurations.validatesRunner
   testClassesDirs = files(project(":sdks:java:core").sourceSets.test.output.classesDirs) +
-      files(project(project.path).sourceSets.test.output.classesDirs)
+    files(project(project.path).sourceSets.test.output.classesDirs)
   useJUnit {
     includeCategories 'org.apache.beam.sdk.testing.ValidatesRunner'
     commonRunnerV2ExcludeCategories.each {
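
Both factory closures above register a Test task whose classpath comes from the validatesRunner configuration and whose test classes combine :sdks:java:core with this runner's own tests; they differ mainly in which common exclude list feeds excludeCategories. A minimal sketch of how such a factory might be invoked, assuming it accepts name and pipelineOptions keys (these keys and values are illustrative, not taken from this diff):

// Hypothetical invocation; the real call sites and argument keys may differ.
def validatesRunner = createLegacyWorkerValidatesRunnerTest(
  name: 'validatesRunner',
  pipelineOptions: [
    "--runner=TestDataflowRunner",
    "--project=${gcpProject}",
    "--region=${gcpRegion}",
    "--tempRoot=${dataflowPostCommitTempRoot}"
  ])
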
@@ -306,9 +306,9 @@ def buildAndPushDockerJavaContainer = tasks.register("buildAndPushDockerJavaCont
 
   dependsOn ":sdks:java:container:${javaVer}:docker"
   def defaultDockerImageName = containerImageName(
-      name: "${project.docker_image_default_repo_prefix}${javaVer}_sdk",
-      root: "apache",
-      tag: project.sdk_version)
+    name: "${project.docker_image_default_repo_prefix}${javaVer}_sdk",
+    root: "apache",
+    tag: project.sdk_version)
   doLast {
     exec {
       commandLine "docker", "tag", "${defaultDockerImageName}", "${dockerJavaImageName}"
@@ -368,14 +368,37 @@ def buildAndPushDockerPythonContainer = tasks.create("buildAndPushDockerPythonCo
   def pythonVer = project.project(':sdks:python').pythonVersion
   dependsOn ":sdks:python:container:py" + pythonVer.replace('.', '') + ":docker"
   def defaultDockerImageName = containerImageName(
-      name: "${project.docker_image_default_repo_prefix}python${pythonVer}_sdk",
-      root: "apache",
-      tag: project.sdk_version)
+    name: "${project.docker_image_default_repo_prefix}python${pythonVer}_sdk",
+    root: "apache",
+    tag: project.sdk_version)
+  doFirst {
+    def cloudsdkConfig = System.getenv("CLOUDSDK_CONFIG")
+    if (cloudsdkConfig == null || !new File(cloudsdkConfig).canWrite()) {
+      cloudsdkConfig = "/tmp/gcloud"
+    }
+    if (cloudsdkConfig == "/tmp/gcloud") {
+      def tmpGcloudDir = new File(cloudsdkConfig)
+      tmpGcloudDir.mkdirs()
+      System.setProperty("CLOUDSDK_CONFIG", cloudsdkConfig)
+    }
+    exec {
+      environment "CLOUDSDK_CONFIG", cloudsdkConfig
+      commandLine "gcloud", "--quiet", "auth", "configure-docker", "us.gcr.io"
+      ignoreExitValue = false
+    }
+    exec {
+      environment "CLOUDSDK_CONFIG", cloudsdkConfig
+      commandLine "gcloud", "--quiet", "auth", "configure-docker", "gcr.io"
+      ignoreExitValue = false
+    }
+  }
   doLast {
     exec {
       commandLine "docker", "tag", "${defaultDockerImageName}", "${dockerPythonImageName}"
     }
+    def cloudsdkConfig = System.getenv("CLOUDSDK_CONFIG") ?: System.getProperty("CLOUDSDK_CONFIG") ?: "/tmp/gcloud"
     exec {
+      environment "CLOUDSDK_CONFIG", cloudsdkConfig
       commandLine "gcloud", "docker", "--", "push", "${dockerPythonImageName}"
     }
   }
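
The new doFirst block authenticates Docker against us.gcr.io and gcr.io via gcloud auth configure-docker before the tag and push steps in doLast, falling back to a writable /tmp/gcloud config directory when CLOUDSDK_CONFIG is unset or not writable. That fallback is then re-derived inside doLast; a minimal sketch of a shared helper (the helper is hypothetical, not part of this change):

// Hypothetical helper; not part of this build file.
def resolveCloudSdkConfig = {
  def fromEnv = System.getenv("CLOUDSDK_CONFIG")
  if (fromEnv != null && new File(fromEnv).canWrite()) {
    return fromEnv
  }
  def fallback = new File("/tmp/gcloud")
  fallback.mkdirs()  // make sure gcloud has a writable config directory
  return fallback.absolutePath
}

Both the doFirst and doLast blocks could then call resolveCloudSdkConfig() once instead of repeating the environment-variable check.
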
@@ -594,13 +617,13 @@ task googleCloudPlatformLegacyWorkerIntegrationTest(type: Test, dependsOn: copyG
   group = "Verification"
   dependsOn ":runners:google-cloud-dataflow-java:worker:shadowJar"
   systemProperty "beamTestPipelineOptions", JsonOutput.toJson([
-      "--runner=TestDataflowRunner",
-      "--project=${gcpProject}",
-      "--region=${gcpRegion}",
-      "--tempRoot=${dataflowPostCommitTempRoot}",
-      "--dataflowWorkerJar=${dataflowLegacyWorkerJar}",
-      "--workerHarnessContainerImage=",
-      "--firestoreDb=${firestoreDb}",
+    "--runner=TestDataflowRunner",
+    "--project=${gcpProject}",
+    "--region=${gcpRegion}",
+    "--tempRoot=${dataflowPostCommitTempRoot}",
+    "--dataflowWorkerJar=${dataflowLegacyWorkerJar}",
+    "--workerHarnessContainerImage=",
+    "--firestoreDb=${firestoreDb}",
   ])
 
   include '**/*IT.class'
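
The --runner, --project, --region, --dataflowWorkerJar, and --workerHarnessContainerImage flags recur, with small variations, across the integration-test tasks in this file. As a sketch only (this helper does not exist in the build file), the shared flags could be built once and extended per task:

// Hypothetical helper for the recurring TestDataflowRunner flags.
def commonDataflowTestOptions = { Map extra = [:] ->
  def opts = [
    "--runner=TestDataflowRunner",
    "--project=${gcpProject}",
    "--region=${gcpRegion}",
    "--dataflowWorkerJar=${dataflowLegacyWorkerJar}",
    "--workerHarnessContainerImage="
  ]
  extra.each { k, v -> opts << "--${k}=${v}" }
  return opts
}

A task would then pass JsonOutput.toJson(commonDataflowTestOptions(tempRoot: dataflowPostCommitTempRoot, firestoreDb: firestoreDb)) instead of repeating the full list.
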
@@ -633,14 +656,14 @@ task googleCloudPlatformLegacyWorkerKmsIntegrationTest(type: Test) {
   group = "Verification"
   dependsOn ":runners:google-cloud-dataflow-java:worker:shadowJar"
   systemProperty "beamTestPipelineOptions", JsonOutput.toJson([
-      "--runner=TestDataflowRunner",
-      "--project=${gcpProject}",
-      "--region=${gcpRegion}",
-      "--tempRoot=${dataflowPostCommitTempRootKms}",
-      "--dataflowWorkerJar=${dataflowLegacyWorkerJar}",
-      "--workerHarnessContainerImage=",
-      "--dataflowKmsKey=${dataflowKmsKey}",
-      "--firestoreDb=${firestoreDb}",
+    "--runner=TestDataflowRunner",
+    "--project=${gcpProject}",
+    "--region=${gcpRegion}",
+    "--tempRoot=${dataflowPostCommitTempRootKms}",
+    "--dataflowWorkerJar=${dataflowLegacyWorkerJar}",
+    "--workerHarnessContainerImage=",
+    "--dataflowKmsKey=${dataflowKmsKey}",
+    "--firestoreDb=${firestoreDb}",
   ])
 
   include '**/*IT.class'
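
The KMS variant above differs from the previous task only by the KMS-scoped temp root and the --dataflowKmsKey flag. For reference, a Cloud KMS key is addressed by its full resource name; the wiring below is a hypothetical sketch with placeholder key-ring and key names, not values from this build:

// Hypothetical property wiring; the key ring and key names are placeholders.
def dataflowKmsKey = project.findProperty('dataflowKmsKey') ?:
    "projects/${gcpProject}/locations/global/keyRings/<key-ring>/cryptoKeys/<key>"
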
@@ -738,12 +761,12 @@ task coreSDKJavaLegacyWorkerIntegrationTest(type: Test) {
   dependsOn ":runners:google-cloud-dataflow-java:worker:shadowJar"
 
   systemProperty "beamTestPipelineOptions", JsonOutput.toJson([
-      "--runner=TestDataflowRunner",
-      "--project=${gcpProject}",
-      "--region=${gcpRegion}",
-      "--tempRoot=${dataflowPostCommitTempRoot}",
-      "--dataflowWorkerJar=${dataflowLegacyWorkerJar}",
-      "--workerHarnessContainerImage=",
+    "--runner=TestDataflowRunner",
+    "--project=${gcpProject}",
+    "--region=${gcpRegion}",
+    "--tempRoot=${dataflowPostCommitTempRoot}",
+    "--dataflowWorkerJar=${dataflowLegacyWorkerJar}",
+    "--workerHarnessContainerImage=",
   ])
 
   include '**/*IT.class'
@@ -843,17 +866,17 @@ createJavaExamplesArchetypeValidationTask(type: 'MobileGaming',
 
 // Generates :runners:google-cloud-dataflow-java:runMobileGamingJavaDataflowBom
 createJavaExamplesArchetypeValidationTask(type: 'MobileGaming',
-    runner: 'DataflowBom',
-    gcpProject: gcpProject,
-    gcpRegion: gcpRegion,
-    gcsBucket: gcsBucket,
-    bqDataset: bqDataset,
-    pubsubTopic: pubsubTopic)
+  runner: 'DataflowBom',
+  gcpProject: gcpProject,
+  gcpRegion: gcpRegion,
+  gcsBucket: gcsBucket,
+  bqDataset: bqDataset,
+  pubsubTopic: pubsubTopic)
 
 // Standalone task for testing GCS upload, use with -PfilesToStage and -PgcpTempRoot.
 task GCSUpload(type: JavaExec) {
   mainClass = 'org.apache.beam.runners.dataflow.util.GCSUploadMain'
   classpath = sourceSets.test.runtimeClasspath
   args "--stagingLocation=${dataflowUploadTemp}/staging",
-      "--filesToStage=${testFilesToStage}"
+    "--filesToStage=${testFilesToStage}"
 }
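
The comment above GCSUpload notes that it is driven by -PfilesToStage and -PgcpTempRoot. A hypothetical sketch of how those properties could feed the dataflowUploadTemp and testFilesToStage variables used by the task (the defaults are placeholders, not taken from this file):

// Hypothetical wiring from -P properties to the variables GCSUpload uses.
def dataflowUploadTemp = project.findProperty('gcpTempRoot') ?: 'gs://<temp-bucket>/tmp'
def testFilesToStage = project.findProperty('filesToStage') ?: 'build.gradle'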