@inproceedings{0d0c2da8ad8e484fb7a82d83746ad7b4,
title = "Optimizing task assignment in hadoop using an efficient job size-based scheduler",
abstract = "Hadoop is a distributed master-slave platform that comprises of two main components viz. Hadoop Distributed File System (HDFS) and MapReduce. HDFS provides distributed storage whereas MapReduce is useful for computational processing. A MapReduce cluster when receives multiple jobs simultaneously, the whole system performance might seriously deteriorate because of poor job response time. Thus, a real challenging issue in the MapReduce world is the efficient scheduling of jobs. Nevertheless, we see that traditional scheduling algorithms that work with Hadoop does not always assure significant average job-response times under distinct workloads. In order to address this problem, we put forward an efficient Hadoop scheduler that collects the information of workload patterns and distributes the jobs according to our hybrid scheduling technique. The experimental results exhibit that our scheduler enhances the average job-response time for MapReduce systems with different workload patterns.",
keywords = "different workloads, hadoop, MapReduce, scheduling, user priority",
author = "Masarrat Mirza and Nagori, {M. B.}",
note = "Publisher Copyright: {\textcopyright} 2017 IEEE.; 2017 International Conference on Intelligent Computing and Control Systems, ICICCS 2017 ; Conference date: 15-06-2017 Through 16-06-2017",
year = "2017",
month = jul,
day = "1",
doi = "10.1109/ICCONS.2017.8250676",
language = "English",
series = "Proceedings of the 2017 International Conference on Intelligent Computing and Control Systems, ICICCS 2017",
publisher = "Institute of Electrical and Electronics Engineers Inc.",
pages = "1287--1292",
booktitle = "Proceedings of the 2017 International Conference on Intelligent Computing and Control Systems, ICICCS 2017",
}