feat(ecs): Add ECS cluster and task to CDK #3

Open · wants to merge 5 commits into base: feat/spotfleet-mgmt-ui
Changes from 2 commits
1 change: 1 addition & 0 deletions .gitignore
@@ -0,0 +1 @@
+env
40 changes: 17 additions & 23 deletions DeadlineStack/package/config.py
@@ -13,17 +13,20 @@
    Mapping,
)


class AppConfig:
    """
    Configuration values for the sample app.

    TODO: Fill these in with your own values.
    """

    def __init__(self):
        # A map of regions to Deadline Client Linux AMIs. As an example, the base Linux Deadline 10.1.19.4 AMI ID
        # from us-west-2 is filled in. It can be used as-is, added to, or replaced. Ideally the version here
        # should match the one used for staging the render queue and usage based licensing recipes.
-        self.deadline_client_linux_ami_map: Mapping[str, str] = {'us-west-2': 'ami-067d780e98fe3b09f'}
+        self.deadline_client_linux_ami_map: Mapping[str, str] = {
+            'eu-west-3': 'ami-08afbca41a57b6e72'}

        # Whether the DeadlineResourceTrackerAccessRole IAM role required by Deadline's Resource Tracker should be created in this CDK app.
        #
@@ -33,17 +36,17 @@ def __init__(self):
        # Note: Deadline's Resource Tracker only supports being used by a single Deadline Repository per AWS account.
        self.create_resource_tracker_role: bool = True
        # AWS region deadline is deployed into (ex: "us-west-2")
-        self.aws_region:str = ""
+        self.aws_region: str = "eu-west-3"
        # Deadline VPC CIDR required (ex:"172.0.0.0/16")
-        self.vpc_cidr: str = ""
+        self.vpc_cidr: str = "10.2.0.0/16"
        # Bucket for workers script
-        self.s3_bucket_workers: str = ""
+        self.s3_bucket_workers: str = "deadline-workers-scripts-test"
        # S3 bucket worker region (verify this in the S3 service)
-        self.s3_bucket_workers_region: str = ""
+        self.s3_bucket_workers_region: str = "eu-west-3"
        # EC2 test instance AMI
-        self.custom_ami_id: str = ""
+        self.custom_ami_id: str = "ami-0c75d0e0e7489cfbe"
        # EC2 test instance key pair
-        self.ec2_key_pair_name: str = ""
+        self.ec2_key_pair_name: str = "deadlinetest"

        # Spot instance fleet configuration
        # For each fleet, use these parameters:
@@ -56,23 +59,14 @@ def __init__(self):
# "user_data_script" expecting filename (sh for Linux, ps1 for Windows) is an additional script file you uploaded to the worker S3 bucket
        self.fleet_config: dict = {
            "fleet1": {
-                "name":"Blender",
-                "is_linux":1,
+                "name": "Blender",
+                "is_linux": 1,
                # "instance_types":[InstanceType.of(InstanceClass.BURSTABLE3, InstanceSize.LARGE)],
-                "instance_types":["m5.large","m5.2xlarge"],
-                "worker_machine_image":"",
-                "max_capacity":1,
-                "allocation_strategy":SpotFleetAllocationStrategy.CAPACITY_OPTIMIZED,
-                "user_data_script":""
-            },
-            "fleet2": {
-                "name":"Maya",
-                "is_linux":1,
-                "instance_types":["m5.large","m5.2xlarge"],
-                "worker_machine_image":"",
-                "max_capacity":1,
-                "allocation_strategy":SpotFleetAllocationStrategy.CAPACITY_OPTIMIZED,
-                "user_data_script":""
+                "instance_types": ["m5.large", "m5.2xlarge"],
+                "worker_machine_image": "ami-08afbca41a57b6e72",
+                "max_capacity": 1,
+                "allocation_strategy": SpotFleetAllocationStrategy.CAPACITY_OPTIMIZED,
+                "user_data_script": ""
            }
        }

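Review note (not part of this PR): the values hard-coded above have to stay mutually consistent, e.g. `aws_region` should have an entry in `deadline_client_linux_ami_map` and `vpc_cidr` must be a valid CIDR block. A small stand-alone sanity check along the following lines could catch mismatches before `cdk deploy`. This is only a sketch; the `package.config` import path mirrors the repo layout and is an assumption.

```python
# Hedged sketch: validates the hand-edited AppConfig values before deployment.
# Assumption: run from the DeadlineStack directory so package.config is importable.
import ipaddress

from package.config import AppConfig


def check_config(config: AppConfig) -> None:
    # The worker AMI map must cover the region the stack is deployed into.
    if config.aws_region not in config.deadline_client_linux_ami_map:
        raise ValueError(
            f"No Deadline client AMI mapped for region {config.aws_region!r}")

    # vpc_cidr must be a parseable IPv4 network, e.g. "10.2.0.0/16".
    ipaddress.ip_network(config.vpc_cidr)

    # Every string setting the stack relies on should be filled in.
    for attr in ("s3_bucket_workers", "s3_bucket_workers_region",
                 "custom_ami_id", "ec2_key_pair_name"):
        if not getattr(config, attr):
            raise ValueError(f"AppConfig.{attr} is empty")


if __name__ == "__main__":
    check_config(AppConfig())
    print("AppConfig looks consistent")
```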
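The same idea can be extended to `fleet_config`: the comments above list the parameters each fleet entry is expected to carry (name, is_linux, instance_types, worker_machine_image, max_capacity, allocation_strategy, user_data_script). A minimal check that each fleet supplies them, sketched below under the same import assumption, would have flagged the now-removed fleet2 entry's empty worker AMI.

```python
# Hedged sketch: checks each fleet entry against the parameter list documented
# in the config comments. Key names come from the config; the check is illustrative.
from package.config import AppConfig  # assumption: same import path as above

REQUIRED_FLEET_KEYS = {
    "name", "is_linux", "instance_types", "worker_machine_image",
    "max_capacity", "allocation_strategy", "user_data_script",
}


def check_fleets(fleet_config: dict) -> None:
    for fleet_id, fleet in fleet_config.items():
        missing = REQUIRED_FLEET_KEYS - fleet.keys()
        if missing:
            raise ValueError(f"{fleet_id} is missing keys: {sorted(missing)}")
        if not fleet["worker_machine_image"]:
            raise ValueError(f"{fleet_id} has no worker_machine_image AMI set")
        if not fleet["instance_types"]:
            raise ValueError(f"{fleet_id} lists no instance types")


if __name__ == "__main__":
    check_fleets(AppConfig().fleet_config)
    print("fleet_config entries look complete")
```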
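Finally, the commits shown here only touch configuration and .gitignore, while the PR title says an ECS cluster and task are added to the CDK app. For reviewers unfamiliar with those constructs, a minimal CDK sketch (Python, CDK v2-style imports; adjust for CDK v1 if that is what the repo pins) of what such a stack typically looks like is below. The stack name, construct IDs, and container image are placeholders, not the PR's actual code.

```python
# Hedged sketch of an ECS cluster plus Fargate task definition in CDK.
# Assumption: names and the sample container image are illustrative only.
from aws_cdk import Stack, aws_ec2 as ec2, aws_ecs as ecs
from constructs import Construct


class ManagementUiStack(Stack):
    """Hypothetical stack illustrating the ECS pieces named in the PR title."""

    def __init__(self, scope: Construct, construct_id: str, **kwargs) -> None:
        super().__init__(scope, construct_id, **kwargs)

        # VPC for the cluster; an existing VPC could be looked up instead.
        vpc = ec2.Vpc(self, "Vpc", max_azs=2)

        # ECS cluster that will host the management UI task.
        cluster = ecs.Cluster(self, "Cluster", vpc=vpc)

        # Minimal Fargate task definition with a single container.
        task_def = ecs.FargateTaskDefinition(
            self, "TaskDef", cpu=256, memory_limit_mib=512)
        task_def.add_container(
            "app",
            image=ecs.ContainerImage.from_registry("amazon/amazon-ecs-sample"),
            port_mappings=[ecs.PortMapping(container_port=80)],
        )
```

A follow-up commit (or the collapsed parts of this diff) would presumably wire the cluster into a service and hook it up to the spot fleet management UI the base branch name refers to; that wiring is not visible in the two commits shown here.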