
Commit 101dd32

WX-1595 GCP Batch cleanup (#7428)
- GcpBatchAsyncBackendJobExecutionActor -> pollBackOff had its maxInterval hardcoded instead of using the config entry.
- GcpBatchTestConfig was still referencing PAPI instead of Batch.
- Rename PipelinesApiEmptyMountedDisk to BatchApiEmptyMountedDisk.
- GcpBatchAsyncBackendJobExecutionActorSpec was still referencing Pipelines instead of Batch.
- Localization was referencing PAPI instead of Batch.
- RunnableUtils had unused definitions, which are now deleted.
1 parent 7cacb8f commit 101dd32

File tree

8 files changed (+15 −22 lines)

supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActor.scala

Lines changed: 5 additions & 1 deletion
@@ -987,7 +987,11 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
       Future.successful(handle)
     }
 
-  override lazy val pollBackOff: SimpleExponentialBackoff = SimpleExponentialBackoff(5.second, 5.minutes, 1.1)
+  override lazy val pollBackOff: SimpleExponentialBackoff = SimpleExponentialBackoff(
+    initialInterval = 5.second,
+    maxInterval = batchAttributes.maxPollingInterval.seconds,
+    multiplier = 1.1
+  )
 
   override lazy val executeOrRecoverBackOff: SimpleExponentialBackoff =
     SimpleExponentialBackoff(initialInterval = 5.seconds, maxInterval = 20.seconds, multiplier = 1.1)

supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/io/GcpBatchAttachedDisk.scala

Lines changed: 2 additions & 3 deletions
@@ -25,7 +25,7 @@ object GcpBatchAttachedDisk {
       }
     case MountedDiskPattern(mountPoint, sizeGb, diskType) =>
       (sizeGbValidation(sizeGb), diskTypeValidation(diskType)) mapN { (s, dt) =>
-        PipelinesApiEmptyMountedDisk(dt, s, DefaultPathBuilder.get(mountPoint))
+        BatchApiEmptyMountedDisk(dt, s, DefaultPathBuilder.get(mountPoint))
       }
     case _ =>
       s"Disk strings should be of the format 'local-disk SIZE TYPE' or '/mount/point SIZE TYPE' but got: '$s'".invalidNel
@@ -65,8 +65,7 @@ trait GcpBatchAttachedDisk {
   def mountPoint: Path
 }
 
-case class PipelinesApiEmptyMountedDisk(diskType: DiskType, sizeGb: Int, mountPoint: Path)
-    extends GcpBatchAttachedDisk {
+case class BatchApiEmptyMountedDisk(diskType: DiskType, sizeGb: Int, mountPoint: Path) extends GcpBatchAttachedDisk {
   val name = s"d-${mountPoint.pathAsString.md5Sum}"
 
   override def toString: String = s"$mountPoint $sizeGb ${diskType.diskTypeName}"
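
The error message in this hunk documents the two accepted disk-string shapes. A simplified, self-contained sketch of that parsing idea follows; the DiskSpec, WorkingDisk, and EmptyMountedDisk names, the plain-string disk types, and the Either-based result are illustrative stand-ins for the real cats-validated GcpBatchAttachedDisk parsing.

// Illustrative sketch of the two accepted disk-string formats:
//   "local-disk SIZE TYPE"    -> a working disk mounted at the default location
//   "/mount/point SIZE TYPE"  -> an empty disk mounted at an explicit path
sealed trait DiskSpec
final case class WorkingDisk(sizeGb: Int, diskType: String) extends DiskSpec
final case class EmptyMountedDisk(mountPoint: String, sizeGb: Int, diskType: String) extends DiskSpec

object DiskSpec {
  private val Working = """^\s*local-disk\s+(\d+)\s+(\S+)\s*$""".r
  private val Mounted = """^\s*(/\S+)\s+(\d+)\s+(\S+)\s*$""".r

  def parse(s: String): Either[String, DiskSpec] = s match {
    case Working(size, tpe)        => Right(WorkingDisk(size.toInt, tpe))
    case Mounted(mount, size, tpe) => Right(EmptyMountedDisk(mount, size.toInt, tpe))
    case other =>
      Left(s"Disk strings should be of the format 'local-disk SIZE TYPE' or '/mount/point SIZE TYPE' but got: '$other'")
  }
}

// Example: DiskSpec.parse("/mnt 3 SSD") == Right(EmptyMountedDisk("/mnt", 3, "SSD"))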

supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/Localization.scala

Lines changed: 1 addition & 1 deletion
@@ -71,7 +71,7 @@ trait Localization {
       List(localizeDrsLocalizationManifest, runDrsLocalization)
     } else List[Runnable.Builder]()
 
-    // Any "classic" PAPI v2 one-at-a-time localizations for non-GCS inputs.
+    // Any "classic" Batch one-at-a-time localizations for non-GCS inputs.
     val singletonLocalizations =
       createParameters.inputOutputParameters.fileInputParameters.flatMap(_.toRunnables(volumes))
 
supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/runnable/RunnableUtils.scala

Lines changed: 0 additions & 9 deletions
@@ -7,15 +7,6 @@ import org.apache.commons.text.StringEscapeUtils
 
 object RunnableUtils {
 
-  /** Image to use for ssh access. */
-  val sshImage = "gcr.io/cloud-genomics-pipelines/tools"
-
-  /** Entry point on the ssh image. */
-  val sshEntryPoint = "ssh-server"
-
-  /** Port mappings for the ssh container. */
-  val sshPortMappings = Map("22" -> Int.box(22))
-
   private val config = ConfigFactory.load().getConfig("google")
 
   /**

supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActorSpec.scala

Lines changed: 3 additions & 4 deletions
@@ -415,7 +415,7 @@ class GcpBatchAsyncBackendJobExecutionActorSpec
 
     val drsReadInterpreter: DrsReadInterpreter = (_, _) =>
       throw new UnsupportedOperationException(
-        "PipelinesApiAsyncBackendJobExecutionActorSpec doesn't need to use drs read interpreter."
+        "GcpBatchAsyncBackendJobExecutionActorSpec doesn't need to use drs read interpreter."
       )
 
     DrsPathBuilder(
@@ -1018,13 +1018,12 @@ class GcpBatchAsyncBackendJobExecutionActorSpec
       "strs" -> WomArray(WomArrayType(WomStringType), Seq("A", "B", "C").map(WomString))
     )
 
-    class TestPipelinesApiExpressionFunctions
-        extends BatchExpressionFunctions(TestableStandardExpressionFunctionsParams) {
+    class TestBatchApiExpressionFunctions extends BatchExpressionFunctions(TestableStandardExpressionFunctionsParams) {
       override def writeFile(path: String, content: String): Future[WomSingleFile] =
        Future.fromTry(Success(WomSingleFile(s"gs://some/path/file.txt")))
     }
 
-    val functions = new TestPipelinesApiExpressionFunctions
+    val functions = new TestBatchApiExpressionFunctions
     val jesBackend = makeJesActorRef(SampleWdl.ArrayIO, Map.empty, "serialize", inputs, functions).underlyingActor
     val jobDescriptor = jesBackend.jobDescriptor
     val jesInputs = jesBackend.generateInputs(jobDescriptor)

supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/io/GcpBatchAttachedDiskSpec.scala

Lines changed: 2 additions & 2 deletions
@@ -13,8 +13,8 @@ import scala.util.Failure
 class GcpBatchAttachedDiskSpec extends AnyFlatSpec with CromwellTimeoutSpec with Matchers with TryValues {
   val validTable = Table(
     ("unparsed", "parsed"),
-    ("/mnt 3 SSD", PipelinesApiEmptyMountedDisk(DiskType.SSD, 3, DefaultPathBuilder.get("/mnt"))),
-    ("/mnt/my_path 10 HDD", PipelinesApiEmptyMountedDisk(DiskType.HDD, 10, DefaultPathBuilder.get("/mnt/my_path"))),
+    ("/mnt 3 SSD", BatchApiEmptyMountedDisk(DiskType.SSD, 3, DefaultPathBuilder.get("/mnt"))),
+    ("/mnt/my_path 10 HDD", BatchApiEmptyMountedDisk(DiskType.HDD, 10, DefaultPathBuilder.get("/mnt/my_path"))),
     ("local-disk 100 SSD", GcpBatchWorkingDisk(DiskType.SSD, 100)),
     ("local-disk 100 LOCAL", GcpBatchWorkingDisk(DiskType.LOCAL, 100))
   )

supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchConfigurationSpec.scala

Lines changed: 1 addition & 1 deletion
@@ -111,7 +111,7 @@ class GcpBatchConfigurationSpec
     forAll(configs) { (backend, global) =>
       an[Exception] shouldBe thrownBy {
         val failingGoogleConf = GoogleConfiguration(global)
-        val failingAttributes = GcpBatchConfigurationAttributes(failingGoogleConf, backend, "papi")
+        val failingAttributes = GcpBatchConfigurationAttributes(failingGoogleConf, backend, "batch")
         new GcpBatchConfiguration(BackendConfigurationDescriptor(backend, global), failingGoogleConf, failingAttributes)
       }
     }

supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchTestConfig.scala

Lines changed: 1 addition & 1 deletion
@@ -85,7 +85,7 @@ object GcpBatchTestConfig {
     |  default = "batch"
     |  providers {
     |    batch {
-    |      actor-factory = "cromwell.backend.google.pipelines.batch.GcpBatchBackendLifecycleActorFactory"
+    |      actor-factory = "cromwell.backend.google.batch.GcpBatchBackendLifecycleActorFactory"
     |      config {
     |        $BatchBackendConfigString
     |      }
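
The corrected value matters because actor-factory is a fully qualified class name that is loaded reflectively from the HOCON configuration, so a stale pipelines-era package path only fails once the class is looked up. Below is a hedged, self-contained sketch of that lookup pattern; the config snippet, the ActorFactoryLookupDemo object, and the loadFactoryClass helper are illustrative and not Cromwell's actual wiring.

import com.typesafe.config.ConfigFactory
import scala.util.Try

object ActorFactoryLookupDemo extends App {
  // Minimal stand-in for the test backend configuration shown above.
  private val config = ConfigFactory.parseString(
    """
      |backend {
      |  providers {
      |    batch {
      |      actor-factory = "cromwell.backend.google.batch.GcpBatchBackendLifecycleActorFactory"
      |    }
      |  }
      |}
      |""".stripMargin
  )

  // Reads the fully qualified class name and attempts to load it. An outdated
  // package path (e.g. the old "cromwell.backend.google.pipelines.batch..." value)
  // surfaces here as a ClassNotFoundException, assuming the correct Cromwell
  // classes are otherwise on the classpath.
  def loadFactoryClass(): Either[Throwable, Class[_]] =
    Try(Class.forName(config.getString("backend.providers.batch.actor-factory"))).toEither

  println(loadFactoryClass())
}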
