
Mass rename of shard -> split
matburt authored and AlanCoding committed Oct 31, 2018
1 parent 475a701 commit 46d6dce
Showing 21 changed files with 86 additions and 86 deletions.
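
The rename touches a handful of externally visible names; as a quick reference, the mapping below is collected from the diff that follows (the dict form is only for illustration):

# Externally visible renames in this commit, collected from the diff below.
RENAMED = {
    'job_shard_count': 'job_split_count',                      # JobTemplate field exposed by the API
    'sharded_jobs': 'split_jobs',                              # related name and /job_templates/<pk>/ sub-endpoint
    '_prevent_sharding': '_prevent_splitting',                 # internal create_unified_job kwarg
    'shardNofM': 'splitNofM',                                  # ?subset= query string and internal_limit syntax
    'JobTemplateShardedJobsList': 'JobTemplateSplitJobsList',  # API view class
}
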
10 changes: 5 additions & 5 deletions awx/api/serializers.py
@@ -3008,7 +3008,7 @@ class Meta:
fields = ('*', 'host_config_key', 'ask_diff_mode_on_launch', 'ask_variables_on_launch', 'ask_limit_on_launch', 'ask_tags_on_launch',
'ask_skip_tags_on_launch', 'ask_job_type_on_launch', 'ask_verbosity_on_launch', 'ask_inventory_on_launch',
'ask_credential_on_launch', 'survey_enabled', 'become_enabled', 'diff_mode',
'allow_simultaneous', 'custom_virtualenv', 'job_shard_count')
'allow_simultaneous', 'custom_virtualenv', 'job_split_count')

def get_related(self, obj):
res = super(JobTemplateSerializer, self).get_related(obj)
@@ -3025,7 +3025,7 @@ def get_related(self, obj):
labels = self.reverse('api:job_template_label_list', kwargs={'pk': obj.pk}),
object_roles = self.reverse('api:job_template_object_roles_list', kwargs={'pk': obj.pk}),
instance_groups = self.reverse('api:job_template_instance_groups_list', kwargs={'pk': obj.pk}),
sharded_jobs = self.reverse('api:job_template_sharded_jobs_list', kwargs={'pk': obj.pk}),
split_jobs = self.reverse('api:job_template_split_jobs_list', kwargs={'pk': obj.pk}),
))
if self.version > 1:
res['copy'] = self.reverse('api:job_template_copy', kwargs={'pk': obj.pk})
@@ -3201,9 +3201,9 @@ def get_summary_fields(self, obj):
summary_fields = super(JobSerializer, self).get_summary_fields(obj)
if obj.internal_limit:
summary_fields['internal_limit'] = {}
if obj.internal_limit.startswith('shard'):
offset, step = Inventory.parse_shard_params(obj.internal_limit)
summary_fields['internal_limit']['shard'] = {'offset': offset, 'step': step}
if obj.internal_limit.startswith('split'):
offset, step = Inventory.parse_split_params(obj.internal_limit)
summary_fields['internal_limit']['split'] = {'offset': offset, 'step': step}
else:
summary_fields['internal_limit']['unknown'] = obj.internal_limit
all_creds = []
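
The get_summary_fields hunk above turns a job's internal_limit of the form splitNofM into a structured summary field. A minimal sketch of the resulting fragment for a job whose internal_limit is 'split1of3' (the values are illustrative):

# Illustrative shape of the summary_fields fragment produced above for a job
# whose internal_limit is 'split1of3'.
summary_fields = {
    'internal_limit': {
        'split': {'offset': 1, 'step': 3},
    },
}
# The UI reads this as summary_fields.internal_limit.split; see the
# jobsList.controller.js and details.component.js hunks later in this diff.
assert summary_fields['internal_limit']['split'] == {'offset': 1, 'step': 3}
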
2 changes: 1 addition & 1 deletion awx/api/templates/api/inventory_script_view.md
@@ -26,7 +26,7 @@ string of `?all=1` to return all hosts, including disabled ones.
Specify a query string of `?towervars=1` to add variables
to the hostvars of each host that specifies its enabled state and database ID.

Specify a query string of `?subset=shard2of5` to produce an inventory that
Specify a query string of `?subset=split2of5` to produce an inventory that
has a restricted number of hosts according to the rules of job splitting.

To apply multiple query strings, join them with the `&` character, like `?hostvars=1&all=1`.
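
As a usage sketch of the query strings documented above, the call below requests the second-of-five split from the inventory script view. The host, token, and inventory ID are illustrative assumptions, and the /api/v2/inventories/<id>/script/ path is assumed to be where this view is mounted.

# Hypothetical client call against the inventory script view described above;
# the host, token, and inventory ID (7) are illustrative assumptions.
import requests

resp = requests.get(
    'https://awx.example.com/api/v2/inventories/7/script/',
    params={'subset': 'split2of5'},             # combine with hostvars=1, all=1, etc. as needed
    headers={'Authorization': 'Bearer TOKEN'},  # placeholder credential
)
resp.raise_for_status()
print(resp.json().get('all', {}).get('hosts', []))  # every fifth host, starting at offset 2
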
4 changes: 2 additions & 2 deletions awx/api/urls/job_template.py
@@ -8,7 +8,7 @@
JobTemplateDetail,
JobTemplateLaunch,
JobTemplateJobsList,
JobTemplateShardedJobsList,
JobTemplateSplitJobsList,
JobTemplateCallback,
JobTemplateSchedulesList,
JobTemplateSurveySpec,
@@ -29,7 +29,7 @@
url(r'^(?P<pk>[0-9]+)/$', JobTemplateDetail.as_view(), name='job_template_detail'),
url(r'^(?P<pk>[0-9]+)/launch/$', JobTemplateLaunch.as_view(), name='job_template_launch'),
url(r'^(?P<pk>[0-9]+)/jobs/$', JobTemplateJobsList.as_view(), name='job_template_jobs_list'),
url(r'^(?P<pk>[0-9]+)/sharded_jobs/$', JobTemplateShardedJobsList.as_view(), name='job_template_sharded_jobs_list'),
url(r'^(?P<pk>[0-9]+)/split_jobs/$', JobTemplateSplitJobsList.as_view(), name='job_template_split_jobs_list'),
url(r'^(?P<pk>[0-9]+)/callback/$', JobTemplateCallback.as_view(), name='job_template_callback'),
url(r'^(?P<pk>[0-9]+)/schedules/$', JobTemplateSchedulesList.as_view(), name='job_template_schedules_list'),
url(r'^(?P<pk>[0-9]+)/survey_spec/$', JobTemplateSurveySpec.as_view(), name='job_template_survey_spec'),
6 changes: 3 additions & 3 deletions awx/api/views/__init__.py
@@ -3369,7 +3369,7 @@ def post(self, request, *args, **kwargs):
if extra_vars is not None and job_template.ask_variables_on_launch:
extra_vars_redacted, removed = extract_ansible_vars(extra_vars)
kv['extra_vars'] = extra_vars_redacted
kv['_prevent_sharding'] = True # will only run against 1 host, so no point
kv['_prevent_splitting'] = True # will only run against 1 host, so no point
with transaction.atomic():
job = job_template.create_job(**kv)

@@ -3401,12 +3401,12 @@ def allowed_methods(self):
return methods


class JobTemplateShardedJobsList(SubListCreateAPIView):
class JobTemplateSplitJobsList(SubListCreateAPIView):

model = WorkflowJob
serializer_class = WorkflowJobListSerializer
parent_model = JobTemplate
relationship = 'sharded_jobs'
relationship = 'split_jobs'
parent_key = 'job_template'


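
Together with the route added in awx/api/urls/job_template.py above, JobTemplateSplitJobsList exposes the workflow jobs spawned by a split job template at /job_templates/<pk>/split_jobs/. A hedged client sketch, with the host, token, and template ID as assumptions:

# Hypothetical listing of the workflow jobs spawned by a split job template;
# the host, token, and template ID (42) are illustrative assumptions.
import requests

resp = requests.get(
    'https://awx.example.com/api/v2/job_templates/42/split_jobs/',
    headers={'Authorization': 'Bearer TOKEN'},  # placeholder credential
)
resp.raise_for_status()
for workflow_job in resp.json().get('results', []):
    print(workflow_job['id'], workflow_job.get('status'))
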
4 changes: 2 additions & 2 deletions awx/main/migrations/0050_v340_split_jobs.py
@@ -16,13 +16,13 @@ class Migration(migrations.Migration):
operations = [
migrations.AddField(
model_name='jobtemplate',
name='job_shard_count',
name='job_split_count',
field=models.IntegerField(blank=True, default=0, help_text='The number of jobs to split into at runtime. Will cause the Job Template to launch a workflow if value is non-zero.'),
),
migrations.AddField(
model_name='workflowjob',
name='job_template',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='sharded_jobs', to='main.JobTemplate'),
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='split_jobs', to='main.JobTemplate'),
),
migrations.AlterField(
model_name='unifiedjob',
12 changes: 6 additions & 6 deletions awx/main/models/inventory.py
@@ -221,14 +221,14 @@ def get_group_children_map(self):
return group_children_map

@staticmethod
def parse_shard_params(shard_str):
m = re.match(r"shard(?P<offset>\d+)of(?P<step>\d+)", shard_str)
def parse_split_params(split_str):
m = re.match(r"split(?P<offset>\d+)of(?P<step>\d+)", split_str)
if not m:
raise ParseError(_('Could not parse subset as shard specification.'))
raise ParseError(_('Could not parse subset as split specification.'))
offset = int(m.group('offset'))
step = int(m.group('step'))
if offset > step:
raise ParseError(_('Shard offset must be greater than total number of shards.'))
raise ParseError(_('Split offset cannot be greater than the total number of splits.'))
return (offset, step)

def get_script_data(self, hostvars=False, towervars=False, show_all=False, subset=None):
@@ -242,8 +242,8 @@ def get_script_data(self, hostvars=False, towervars=False, show_all=False, subset=None):
if subset:
if not isinstance(subset, six.string_types):
raise ParseError(_('Inventory subset argument must be a string.'))
if subset.startswith('shard'):
offset, step = Inventory.parse_shard_params(subset)
if subset.startswith('split'):
offset, step = Inventory.parse_split_params(subset)
hosts = hosts[offset::step]
else:
raise ParseError(_('Subset does not use any supported syntax.'))
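
A standalone sketch of the subset semantics implemented above: parse_split_params turns 'splitNofM' into (offset, step), and get_script_data then keeps every step-th host starting at that offset. Host names here are illustrative.

# Standalone illustration of the splitNofM handling above; host names are
# illustrative and the regex mirrors Inventory.parse_split_params.
import re

def parse_split_params(split_str):
    m = re.match(r"split(?P<offset>\d+)of(?P<step>\d+)", split_str)
    if not m:
        raise ValueError('Could not parse subset as split specification.')
    offset, step = int(m.group('offset')), int(m.group('step'))
    if offset > step:
        raise ValueError('Split offset cannot be greater than the total number of splits.')
    return offset, step

hosts = ['host0', 'host1', 'host2', 'host3', 'host4', 'host5']
offset, step = parse_split_params('split1of3')
print(hosts[offset::step])  # ['host1', 'host4']
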
14 changes: 7 additions & 7 deletions awx/main/models/jobs.py
@@ -277,7 +277,7 @@ class Meta:
default=False,
allows_field='credentials'
)
job_shard_count = models.IntegerField(
job_split_count = models.IntegerField(
blank=True,
default=0,
help_text=_("The number of jobs to split into at runtime. "
@@ -328,10 +328,10 @@ def create_job(self, **kwargs):
return self.create_unified_job(**kwargs)

def create_unified_job(self, **kwargs):
prevent_sharding = kwargs.pop('_prevent_sharding', False)
split_event = bool(self.job_shard_count > 1 and (not prevent_sharding))
prevent_splitting = kwargs.pop('_prevent_splitting', False)
split_event = bool(self.job_split_count > 1 and (not prevent_splitting))
if split_event:
# A sharded Job Template will generate a WorkflowJob rather than a Job
# A Split Job Template will generate a WorkflowJob rather than a Job
from awx.main.models.workflow import WorkflowJobTemplate, WorkflowJobNode
kwargs['_unified_job_class'] = WorkflowJobTemplate._get_unified_job_class()
kwargs['_parent_field_name'] = "job_template"
@@ -342,11 +342,11 @@ def create_unified_job(self, **kwargs):
except JobLaunchConfig.DoesNotExist:
wj_config = JobLaunchConfig()
actual_inventory = wj_config.inventory if wj_config.inventory else self.inventory
for idx in xrange(min(self.job_shard_count,
for idx in xrange(min(self.job_split_count,
actual_inventory.hosts.count())):
create_kwargs = dict(workflow_job=job,
unified_job_template=self,
ancestor_artifacts=dict(job_shard=idx))
ancestor_artifacts=dict(job_split=idx))
WorkflowJobNode.objects.create(**create_kwargs)
return job

@@ -580,7 +580,7 @@ def event_class(self):
return JobEvent

def copy_unified_job(self, **new_prompts):
new_prompts['_prevent_sharding'] = True
new_prompts['_prevent_splitting'] = True
if self.internal_limit:
new_prompts.setdefault('_eager_fields', {})
new_prompts['_eager_fields']['internal_limit'] = self.internal_limit # oddball, not from JT or prompts
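
The create_unified_job hunks above gate splitting on the configured count and then cap the number of workflow nodes at the inventory's host count. A compact restatement of that rule; the function name and inputs are hypothetical stand-ins:

# Rough restatement of the node-count rule in JobTemplate.create_unified_job;
# the function name and inputs are hypothetical stand-ins.
def planned_split_nodes(job_split_count, host_count, prevent_splitting=False):
    # Splitting only happens for a count above 1 and when not suppressed
    # (provisioning callbacks and copy_unified_job pass _prevent_splitting=True).
    if prevent_splitting or job_split_count <= 1:
        return 0  # an ordinary Job is created instead of a WorkflowJob
    # Never create more workflow nodes than there are hosts in the inventory.
    return min(job_split_count, host_count)

assert planned_split_nodes(5, 3) == 3
assert planned_split_nodes(1, 100) == 0
assert planned_split_nodes(4, 100, prevent_splitting=True) == 0
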
4 changes: 2 additions & 2 deletions awx/main/models/unified_jobs.py
@@ -334,7 +334,7 @@ def create_unified_job(self, **kwargs):
parent_field_name = None
if "_unified_job_class" in kwargs:
# Special case where spawned job is different type than usual
# Only used for sharded jobs
# Only used for split jobs
unified_job_class = kwargs.pop("_unified_job_class")
fields = unified_job_class._get_unified_job_field_names() & fields
parent_field_name = kwargs.pop('_parent_field_name')
@@ -354,7 +354,7 @@ def create_unified_job(self, **kwargs):
for fd, val in eager_fields.items():
setattr(unified_job, fd, val)

# NOTE: sharded workflow jobs _get_parent_field_name method
# NOTE: split workflow jobs _get_parent_field_name method
# is not correct until this is set
if not parent_field_name:
parent_field_name = unified_job._get_parent_field_name()
20 changes: 10 additions & 10 deletions awx/main/models/workflow.py
@@ -251,19 +251,19 @@ def get_job_kwargs(self):
data['extra_vars'] = extra_vars
# ensure that unified jobs created by WorkflowJobs are marked
data['_eager_fields'] = {'launch_type': 'workflow'}
# Extra processing in the case that this is a sharded job
if 'job_shard' in self.ancestor_artifacts:
shard_str = six.text_type(self.ancestor_artifacts['job_shard'] + 1)
# Extra processing in the case that this is a split job
if 'job_split' in self.ancestor_artifacts:
split_str = six.text_type(self.ancestor_artifacts['job_split'] + 1)
data['_eager_fields']['name'] = six.text_type("{} - {}").format(
self.unified_job_template.name[:512 - len(shard_str) - len(' - ')],
shard_str
self.unified_job_template.name[:512 - len(split_str) - len(' - ')],
split_str
)
data['_eager_fields']['allow_simultaneous'] = True
data['_eager_fields']['internal_limit'] = 'shard{0}of{1}'.format(
self.ancestor_artifacts['job_shard'],
data['_eager_fields']['internal_limit'] = 'split{0}of{1}'.format(
self.ancestor_artifacts['job_split'],
self.workflow_job.workflow_job_nodes.count()
)
data['_prevent_sharding'] = True
data['_prevent_splitting'] = True
return data


@@ -459,7 +459,7 @@ class Meta:
)
job_template = models.ForeignKey(
'JobTemplate',
related_name='sharded_jobs',
related_name='split_jobs',
blank=True,
null=True,
default=None,
@@ -472,7 +472,7 @@ def workflow_nodes(self):

def _get_parent_field_name(self):
if self.job_template_id:
# This is a workflow job which is a container for sharded jobs
# This is a workflow job which is a container for split jobs
return 'job_template'
return 'workflow_job_template'

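
To make the per-node bookkeeping in get_job_kwargs above concrete, here is the name and internal_limit each node would produce for a three-way split of a template named 'Deploy app' (the template name is illustrative; the original code routes strings through six.text_type for Python 2 compatibility):

# Illustration of the per-node name and internal_limit built in
# get_job_kwargs above for a three-way split; 'Deploy app' is illustrative.
template_name = 'Deploy app'
node_count = 3
for job_split in range(node_count):            # stored as ancestor_artifacts['job_split']
    split_str = str(job_split + 1)             # 1-based suffix for the display name
    name = '{} - {}'.format(
        template_name[:512 - len(split_str) - len(' - ')],
        split_str,
    )
    internal_limit = 'split{0}of{1}'.format(job_split, node_count)  # 0-based offset
    print(name, internal_limit)
# Deploy app - 1 split0of3
# Deploy app - 2 split1of3
# Deploy app - 3 split2of3
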
8 changes: 4 additions & 4 deletions awx/main/tests/functional/api/test_job.py
@@ -123,11 +123,11 @@ def test_job_relaunch_on_failed_hosts(post, inventory, project, machine_credenti


@pytest.mark.django_db
def test_shard_jt_recent_jobs(shard_job_factory, admin_user, get):
workflow_job = shard_job_factory(3, spawn=True)
shard_jt = workflow_job.job_template
def test_split_jt_recent_jobs(split_job_factory, admin_user, get):
workflow_job = split_job_factory(3, spawn=True)
split_jt = workflow_job.job_template
r = get(
url=shard_jt.get_absolute_url(),
url=split_jt.get_absolute_url(),
user=admin_user,
expect=200
)
16 changes: 8 additions & 8 deletions awx/main/tests/functional/conftest.py
@@ -779,34 +779,34 @@ def disable_database_settings(mocker):


@pytest.fixture
def shard_jt_factory(inventory):
def split_jt_factory(inventory):
def r(N, jt_kwargs=None):
for i in range(N):
inventory.hosts.create(name='foo{}'.format(i))
if not jt_kwargs:
jt_kwargs = {}
return JobTemplate.objects.create(
name='shard-jt-from-factory',
job_shard_count=N,
name='split-jt-from-factory',
job_split_count=N,
inventory=inventory,
**jt_kwargs
)
return r


@pytest.fixture
def shard_job_factory(shard_jt_factory):
def split_job_factory(split_jt_factory):
def r(N, jt_kwargs=None, prompts=None, spawn=False):
shard_jt = shard_jt_factory(N, jt_kwargs=jt_kwargs)
split_jt = split_jt_factory(N, jt_kwargs=jt_kwargs)
if not prompts:
prompts = {}
shard_job = shard_jt.create_unified_job(**prompts)
split_job = split_jt.create_unified_job(**prompts)
if spawn:
for node in shard_job.workflow_nodes.all():
for node in split_job.workflow_nodes.all():
# does what the task manager does for spawning workflow jobs
kv = node.get_job_kwargs()
job = node.unified_job_template.create_unified_job(**kv)
node.job = job
node.save()
return shard_job
return split_job
return r
4 changes: 2 additions & 2 deletions awx/main/tests/functional/models/test_inventory.py
@@ -38,11 +38,11 @@ def test_towervars(self, inventory):
'remote_tower_id': host.id
}

def test_shard_subset(self, inventory):
def test_split_subset(self, inventory):
for i in range(3):
inventory.hosts.create(name='host{}'.format(i))
for i in range(3):
assert inventory.get_script_data(subset='shard{}of3'.format(i)) == {
assert inventory.get_script_data(subset='split{}of3'.format(i)) == {
'all': {'hosts': ['host{}'.format(i)]}
}

16 changes: 8 additions & 8 deletions awx/main/tests/functional/models/test_job.py
@@ -84,18 +84,18 @@ def test_job_host_summary_representation(host):


@pytest.mark.django_db
class TestShardingModels:
class TestSplittingModels:

def test_shard_workflow_spawn(self, shard_jt_factory):
shard_jt = shard_jt_factory(3)
job = shard_jt.create_unified_job()
def test_split_workflow_spawn(self, split_jt_factory):
split_jt = split_jt_factory(3)
job = split_jt.create_unified_job()
assert isinstance(job, WorkflowJob)
assert job.job_template == shard_jt
assert job.unified_job_template == shard_jt
assert job.job_template == split_jt
assert job.unified_job_template == split_jt
assert job.workflow_nodes.count() == 3

def test_shards_with_JT_and_prompts(self, shard_job_factory):
job = shard_job_factory(3, jt_kwargs={'ask_limit_on_launch': True}, prompts={'limit': 'foobar'}, spawn=True)
def test_splits_with_JT_and_prompts(self, split_job_factory):
job = split_job_factory(3, jt_kwargs={'ask_limit_on_launch': True}, prompts={'limit': 'foobar'}, spawn=True)
assert job.launch_config.prompts_dict() == {'limit': 'foobar'}
for node in job.workflow_nodes.all():
assert node.limit is None # data not saved in node prompts
8 changes: 4 additions & 4 deletions awx/main/tests/functional/test_rbac_job_start.py
@@ -49,18 +49,18 @@ def test_inventory_use_access(inventory, user):


@pytest.mark.django_db
def test_sharded_job(shard_job_factory, rando):
workflow_job = shard_job_factory(2, jt_kwargs={'created_by': rando}, spawn=True)
def test_split_job(split_job_factory, rando):
workflow_job = split_job_factory(2, jt_kwargs={'created_by': rando}, spawn=True)
workflow_job.job_template.execute_role.members.add(rando)

# Abilities of user with execute_role for shard workflow job container
# Abilities of user with execute_role for split workflow job container
assert WorkflowJobAccess(rando).can_start(workflow_job) # relaunch allowed
for access_cls in (UnifiedJobAccess, WorkflowJobAccess):
access = access_cls(rando)
assert access.can_read(workflow_job)
assert workflow_job in access.get_queryset()

# Abilities of user with execute_role for all the shards of the job
# Abilities of user with execute_role for all the splits of the job
for node in workflow_job.workflow_nodes.all():
access = WorkflowJobNodeAccess(rando)
assert access.can_read(node)
2 changes: 1 addition & 1 deletion awx/ui/client/features/jobs/jobsList.controller.js
@@ -83,7 +83,7 @@ function ListJobsController (
return null;
}

const splitJobDetails = internalLimitDetails.shard;
const splitJobDetails = internalLimitDetails.split;

if (!splitJobDetails) {
return null;
2 changes: 1 addition & 1 deletion awx/ui/client/features/output/details.component.js
@@ -133,7 +133,7 @@ function getSplitJobDetails () {
return null;
}

const splitJobDetails = resource.model.get('summary_fields.internal_limit.shard');
const splitJobDetails = resource.model.get('summary_fields.internal_limit.split');

if (!splitJobDetails) {
return null;
@@ -257,7 +257,7 @@ function(NotificationsList, i18n) {
dataPlacement: 'right',
control: '<instance-groups-multiselect instance-groups="instance_groups" field-is-disabled="!(job_template_obj.summary_fields.user_capabilities.edit || canAddJobTemplate)"></instance-groups-multiselect>',
},
job_shard_count: {
job_split_count: {
label: i18n._('Job Splitting'),
type: 'number',
integer: true,