Skip to content
This repository has been archived by the owner on May 31, 2024. It is now read-only.

Commit

Permalink
Allow The Compute Environment Only to EFS Security Group
Browse files Browse the repository at this point in the history
Why
---
- By default, AGC allows all IPv4 traffic to the EFS security group.
- Executing the API through a VPC endpoint for API Gateway yields the following error:
```
HTTP 403 Forbidden error when connecting to my API Gateway APIs from a VPC
```

How
---
- Use `ComputeEnvironment` instead of `IComputeEnvironment` from `@aws-cdk/aws-batch-alpha`.
- The `ComputeEnvironment` type has a `connections` property, which allows the `engineBatch` & `workerBatch` compute environments to be used as the sources for the EFS security group's inbound rule.
- Change the endpoint type for API Gateway from `Regional` to `Private`.
  • Loading branch information
souravskr committed Apr 10, 2023
1 parent da596d0 commit 1e95903
Show file tree
Hide file tree
Showing 7 changed files with 713 additions and 380 deletions.
2 changes: 1 addition & 1 deletion packages/cdk/lib/constructs/api-proxy.ts
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ export class ApiProxy extends Construct {
this.accessLogGroup = new LogGroup(this, "AccessLogGroup");
this.restApi = new RestApi(this, "Resource", {
restApiName: props.apiName,
endpointTypes: [EndpointType.REGIONAL],
endpointTypes: [EndpointType.PRIVATE],
description: "API proxy endpoint for a service",
apiKeySourceType: ApiKeySourceType.HEADER,
deployOptions: {
Expand Down
6 changes: 3 additions & 3 deletions packages/cdk/lib/constructs/batch.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import { Fn, Names, Stack } from "aws-cdk-lib";
import { ComputeEnvironment, ComputeResourceType, IComputeEnvironment, IJobQueue, JobQueue } from "@aws-cdk/aws-batch-alpha";
import { ComputeEnvironment, ComputeResourceType, IJobQueue, JobQueue } from "@aws-cdk/aws-batch-alpha";
import { CfnLaunchTemplate, IMachineImage, InstanceType, IVpc, SubnetSelection } from "aws-cdk-lib/aws-ec2";
import {
CfnInstanceProfile,
Expand Down Expand Up @@ -105,7 +105,7 @@ const defaultComputeType = ComputeResourceType.ON_DEMAND;
export class Batch extends Construct {
// This is the role that the backing instances use, not the role that batch jobs run as.
public readonly role: IRole;
public readonly computeEnvironment: IComputeEnvironment;
public readonly computeEnvironment: ComputeEnvironment;
public readonly jobQueue: IJobQueue;

constructor(scope: Construct, id: string, props: BatchProps) {
Expand Down Expand Up @@ -188,7 +188,7 @@ export class Batch extends Construct {
});
}

private renderComputeEnvironment(options: ComputeOptions): IComputeEnvironment {
private renderComputeEnvironment(options: ComputeOptions): ComputeEnvironment {
const computeType = options.computeType || defaultComputeType;
if (computeType == ComputeResourceType.FARGATE || computeType == ComputeResourceType.FARGATE_SPOT) {
return new ComputeEnvironment(this, "ComputeEnvironment", {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ import { Batch } from "../../batch";
import { FargatePlatformVersion } from "aws-cdk-lib/aws-ecs";
import { AccessPoint, FileSystem } from "aws-cdk-lib/aws-efs";
import { Size } from "aws-cdk-lib";
import { SecurityGroup } from "aws-cdk-lib/aws-ec2";

export interface SnakemakeEngineProps extends EngineProps {
readonly engineBatch: Batch;
Expand All @@ -22,6 +23,7 @@ export class SnakemakeEngine extends Engine {
private readonly engineMemoryMiB = 4096;
public readonly fsap: AccessPoint;
public readonly fileSystem: FileSystem;
private readonly securityGroup: SecurityGroup;

constructor(scope: Construct, id: string, props: SnakemakeEngineProps) {
super(scope, id);
Expand All @@ -33,8 +35,8 @@ export class SnakemakeEngine extends Engine {
this.fileSystem = this.createFileSystemIOPS(vpc, subnets, iops);
}
this.fsap = this.createAccessPoint(this.fileSystem);

this.fileSystem.connections.allowDefaultPortFromAnyIpv4();
this.fileSystem.connections.allowDefaultPortFrom(engineBatch.computeEnvironment.connections);
this.fileSystem.connections.allowDefaultPortFrom(workerBatch.computeEnvironment.connections);
this.fileSystem.grant(engineBatch.role, "elasticfilesystem:DescribeMountTargets", "elasticfilesystem:DescribeFileSystems");
this.fileSystem.grant(workerBatch.role, "elasticfilesystem:DescribeMountTargets", "elasticfilesystem:DescribeFileSystems");
this.headJobDefinition = new EngineJobDefinition(this, "SnakemakeHeadJobDef", {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,11 +37,10 @@ export class SnakemakeEngineConstruct extends EngineConstruct {

// Generate the engine that will run snakemake on batch
this.snakemakeEngine = this.createSnakemakeEngine(props, this.batchHead, this.batchWorkers);

// Adds necessary policies to our snakemake batch engine
this.attachAdditionalBatchPolicies();

// Generate the role the Wes lambda will use + add additonal policies
// Generate the role the Wes lambda will use + add additional policies
const adapterRole = this.createAdapterRole();
this.outputBucket = Bucket.fromBucketName(this, "OutputBucket", params.outputBucketName);
this.outputBucket.grantRead(adapterRole);
Expand Down
66 changes: 24 additions & 42 deletions packages/cdk/npm-shrinkwrap.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

46 changes: 23 additions & 23 deletions packages/wes_adapter/amazon_genomics/wes/adapters/BatchAdapter.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,11 +32,11 @@ class BatchAdapter(AbstractWESAdapter):
"""

def __init__(
self,
job_queue: str,
job_definition: str,
aws_batch: BatchClient = None,
logger=None,
self,
job_queue: str,
job_definition: str,
aws_batch: BatchClient = None,
logger=None,
):
super().__init__(logger)
self.job_queue = job_queue
Expand All @@ -49,14 +49,14 @@ def __init__(

@abstractmethod
def command(
self,
workflow_params=None,
workflow_type=None,
workflow_type_version=None,
tags=None,
workflow_engine_parameters=None,
workflow_url=None,
workflow_attachment=None,
self,
workflow_params=None,
workflow_type=None,
workflow_type_version=None,
tags=None,
workflow_engine_parameters=None,
workflow_url=None,
workflow_attachment=None,
):
pass

Expand Down Expand Up @@ -141,14 +141,14 @@ def list_runs(self, page_size=None, page_token=None) -> RunListResponse:
return RunListResponse(runs=runs, next_page_token=next_token)

def run_workflow(
self,
workflow_params=None,
workflow_type=None,
workflow_type_version=None,
tags=None,
workflow_engine_parameters=None,
workflow_url=None,
workflow_attachment=None,
self,
workflow_params=None,
workflow_type=None,
workflow_type_version=None,
tags=None,
workflow_engine_parameters=None,
workflow_url=None,
workflow_attachment=None,
) -> RunId:
"""
Submit "workflow job" based on given configuration details; return the Batch job uuid
Expand Down Expand Up @@ -205,7 +205,7 @@ def describe_jobs(self, job_ids: typing.List[str]) -> typing.List[JobDetailTypeD

@abstractmethod
def get_child_tasks(
self, head_job: JobDetailTypeDef
self, head_job: JobDetailTypeDef
) -> typing.List[JobDetailTypeDef]:
pass

Expand Down Expand Up @@ -271,4 +271,4 @@ def to_iso(epoch: Optional[int]) -> Optional[str]:
def chunks(l: list, n: int) -> Iterable[list]:
"""split list l into chunks of size n"""
for i in range(0, len(l), n):
yield l[i : i + n]
yield l[i: i + n]
Loading

0 comments on commit 1e95903

Please sign in to comment.