@@ -46,6 +46,8 @@ import java.time.Instant
46
46
import scala .concurrent .ExecutionContext .Implicits .global
47
47
import scala .concurrent .Future
48
48
import scala .concurrent .duration ._
49
+ import org .scalatest .prop .TableDrivenPropertyChecks .Table
50
+ import org .scalatest .prop .TableDrivenPropertyChecks .forAll
49
51
50
52
class DataprocInterpreterSpec
51
53
extends TestKit (ActorSystem (" leonardotest" ))
@@ -214,7 +216,7 @@ class DataprocInterpreterSpec
214
216
)
215
217
216
218
val runtimeConfig = RuntimeConfig .DataprocConfig (0 ,
217
- MachineTypeName (" n1 -standard-4" ),
219
+ MachineTypeName (" n2 -standard-4" ),
218
220
DiskSize (500 ),
219
221
None ,
220
222
None ,
@@ -251,9 +253,71 @@ class DataprocInterpreterSpec
251
253
252
254
}
253
255
256
// Table-driven check that resource constraints and the derived dataproc software
// config agree for two n1 machine families served by the same fake machine type
// (104 GB memory, 4 vCPUs regardless of the requested machine type name).
it should "calculate cluster resource constraints and software config correctly for n1-standard and n1-highmem machine types" in isolatedDbTest {
  // Compute service stub: always reports a 104 GB / 4 vCPU machine.
  val fourCpuHighMemCompute = new FakeGoogleComputeService {
    override def getMachineType(project: GoogleProject, zone: ZoneName, machineTypeName: MachineTypeName)(implicit
      ev: Ask[IO, TraceId]
    ): IO[Option[MachineType]] = {
      val machine = MachineType
        .newBuilder()
        .setName("pass")
        .setMemoryMb(104 * 1024)
        .setGuestCpus(4)
        .build()
      IO.pure(Some(machine))
    }
  }

  // Interpreter wired to the stubbed compute service; remaining collaborators default to mocks.
  def dataprocInterpHighMem(computeService: GoogleComputeService[IO] = fourCpuHighMemCompute,
                            dataprocCluster: GoogleDataprocService[IO] = MockGoogleDataprocService,
                            googleDirectoryDao: GoogleDirectoryDAO = mockGoogleDirectoryDAO
  ) =
    new DataprocInterpreter[IO](
      Config.dataprocInterpreterConfig,
      bucketHelper,
      vpcInterp,
      dataprocCluster,
      computeService,
      MockGoogleDiskService,
      googleDirectoryDao,
      mockGoogleIamDAO,
      mockGoogleResourceService,
      MockWelderDAO
    )

  val machineTypes = Table("machineType", MachineTypeName("n1-standard-4"), MachineTypeName("n1-highmem-64"))
  forAll(machineTypes) { machineType: MachineTypeName =>
    val runtimeConfig = RuntimeConfig.DataprocConfig(
      0,
      machineType,
      DiskSize(500),
      None,
      None,
      None,
      None,
      Map.empty[String, String],
      RegionName("us-central1"),
      true,
      false
    )

    val resourceConstraints = dataprocInterpHighMem()
      .getDataprocRuntimeResourceContraints(testClusterClusterProjectAndName,
                                            runtimeConfig.machineType,
                                            RegionName("us-central1")
      )
      .unsafeRunSync()(cats.effect.unsafe.IORuntime.global)

    val dataProcSoftwareConfig = dataprocInterp().getSoftwareConfig(
      GoogleProject("MyGoogleProject"),
      RuntimeName("MyRuntimeName"),
      runtimeConfig,
      resourceConstraints
    )

    // Expected spark driver memory: (104 GB - reservedGb) * 0.9, expressed in MB.
    // reservedGb is 7 for n1-standard-4 and 11 for n1-highmem-64 — presumably the
    // per-family memory Leonardo holds back before sizing Spark; verify against
    // getDataprocRuntimeResourceContraints if these constants change.
    val reservedGb = if (machineType == MachineTypeName("n1-standard-4")) 7 else 11
    val expectedMemory = (104 - reservedGb) * 0.9 * 1024

    val propertyMap = dataProcSoftwareConfig.getPropertiesMap()
    propertyMap.get("spark:spark.driver.memory") shouldBe s"${expectedMemory.toInt}m"
  }
}
317
+
254
318
it should " create correct softwareConfig - minimum runtime memory 4gb" in isolatedDbTest {
255
319
val runtimeConfig = RuntimeConfig .DataprocConfig (0 ,
256
- MachineTypeName (" n1 -highmem-64" ),
320
+ MachineTypeName (" n2 -highmem-64" ),
257
321
DiskSize (500 ),
258
322
None ,
259
323
None ,
0 commit comments