fix bug when skipping workload mapping

yangdsh 2020-02-21 20:26:32 +00:00 committed by Dana Van Aken
parent 07425da6e8
commit 2293fac4d3
2 changed files with 7 additions and 8 deletions


@@ -242,7 +242,7 @@ CELERYD_MAX_TASKS_PER_CHILD = 20
 # Late ack means the task messages will be acknowledged after
 # the task has been executed, not just before
-CELERY_ACKS_LATE = True
+CELERY_ACKS_LATE = False
 djcelery.setup_loader()
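The change above turns off late acknowledgment for the Celery workers. A minimal sketch of what the two modes mean, using a standalone Celery app rather than this project's djcelery-based settings (the app name, broker URL, and task names are placeholders):

# Sketch only: illustrates acks_late semantics, not this repository's config.
from celery import Celery

app = Celery('sketch', broker='amqp://localhost//')  # placeholder broker URL

@app.task(acks_late=False)
def ack_early():
    # Default behavior: the message is acknowledged as soon as a worker
    # receives it, so a crash mid-task loses the task but never reruns it.
    ...

@app.task(acks_late=True)
def ack_late():
    # The message is acknowledged only after the task body returns, so a
    # crash mid-task causes the broker to redeliver (and possibly rerun) it.
    ...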


@@ -857,12 +857,6 @@ def map_workload(map_workload_input):
     pruned_metric_idxs = None
     unique_workloads = pipeline_data.values_list('workload', flat=True).distinct()
-    if unique_workloads == 0:
-        # The background task that aggregates the data has not finished running yet
-        target_data.update(mapped_workload=None, scores=None)
-        LOG.debug('%s: Skipping workload mapping because there is no workload.\n',
-                  AlgorithmType.name(algorithm))
-        return target_data, algorithm
     workload_data = {}
     # Compute workload mapping data for each unique workload
@@ -918,7 +912,12 @@ def map_workload(map_workload_input):
             'rowlabels': rowlabels,
         }
-    assert len(workload_data) > 0
+    if len(workload_data) == 0:
+        # The background task that aggregates the data has not finished running yet
+        target_data.update(mapped_workload=None, scores=None)
+        LOG.debug('%s: Skipping workload mapping because there is no parsed workload.\n',
+                  AlgorithmType.name(algorithm))
+        return target_data, algorithm
     # Stack all X & y matrices for preprocessing
     Xs = np.vstack([entry['X_matrix'] for entry in list(workload_data.values())])
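For reference, the reason the removed guard never fired: unique_workloads is a Django QuerySet, and comparing a QuerySet to the integer 0 is always False, even when the QuerySet is empty. The replacement instead checks the dictionary that was actually built from the parsed workloads and skips the mapping gracefully rather than tripping the assert. A minimal standalone sketch of the two checks, using plain-Python stand-ins instead of Django objects:

# Sketch only: plain-Python stand-ins for the QuerySet and the results dict.
unique_workloads = []        # pretend this is an empty QuerySet of workload ids
workload_data = {}           # nothing has been aggregated yet

print(unique_workloads == 0)    # False -- a collection is never equal to the int 0,
                                # so the old guard never took the skip branch
print(len(workload_data) == 0)  # True  -- the new check detects the empty case
                                # and returns early instead of asserting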