I am a Research Associate working with Prof. M. Hanmandlu, developing human-centered robotic systems that learn efficiently and act safely in dynamic environments. My research lies at the intersection of learning, perception, and control, with the goal of building robots that can adapt to uncertainty, handle latency, and remain stable in real-world human settings.
Earlier, at the I3D Lab, Indian Institute of Science (IISc), Bangalore, under Prof. Pradipta Biswas, I developed an eye-gaze-controlled assistive robotic arm for individuals with severe speech and motor impairments and contributed to a LiDAR–camera fusion system for autonomous rover navigation.
I earned my Bachelor’s degree in Electronics and Communication Engineering from Rajiv Gandhi Institute of Technology, Kottayam (RIT). As an undergraduate researcher at the Centre for Advanced Signal Processing (CASP Lab) under Dr. Manju Manuel, I worked on CNN accelerator design and built an interactive 3D holographic display prototype using the Pepper’s Ghost technique.
@misc{h2025ulaguncertaintyawarelagadaptivegoal,
  author        = {J H, Anamika and Muraleedharan, Anujith},
  title         = {{U-LAG}: Uncertainty-Aware, Lag-Adaptive Goal Retargeting for Robotic Manipulation},
  year          = {2025},
  eprint        = {2510.02526},
  archiveprefix = {arXiv},
  primaryclass  = {cs.RO},
  url           = {https://arxiv.org/abs/2510.02526},
}
@misc{muraleedharan2025selectiveprogressawarequeryinghumanintheloop,
  author        = {Muraleedharan, Anujith and J H, Anamika},
  title         = {Selective Progress-Aware Querying for Human-in-the-Loop Reinforcement Learning},
  year          = {2025},
  eprint        = {2509.20541},
  archiveprefix = {arXiv},
  primaryclass  = {cs.RO},
  url           = {https://arxiv.org/abs/2509.20541},
}
@inproceedings{10.1145/3640544.3645236,
  author    = {Muraleedharan, Anujith and J H, Anamika and Vishwakarma, Himanshu and Kashyap, Kudrat and Biswas, Pradipta},
  title     = {Eye-Gaze-Enabled Assistive Robotic Stamp Printing System for Individuals with Severe Speech and Motor Impairment},
  year      = {2024},
  isbn      = {9798400705090},
  publisher = {Association for Computing Machinery},
  address   = {New York, NY, USA},
  url       = {https://doi.org/10.1145/3640544.3645236},
  doi       = {10.1145/3640544.3645236},
  abstract  = {Robotics is a trailblazing technology that has found extensive applications in the field of assistive aids for individuals with severe speech and motor impairment (SSMI). This article describes the design and development of an eye gaze-controlled user interface to manipulate the robotic arm. User studies were reported to engage users through eye gaze input to select stamps from the two designs and select the stamping location on cards using three designated boxes present in the User Interface. The entire process, from stamp selection to stamping location selection, is controlled by eye movements. The user interface contains the print button to initiate the robotic arm that enables the user to independently create personalized stamped cards. Extensive user interface trials revealed that individuals with severe speech and motor impairment showed improvements with a 33.2\% reduction in the average time taken and a 42.8\% reduction in the standard deviation for the completion of the task. This suggests the effectiveness and potential to enhance the autonomy and creativity of individuals with SSMI, contributing to the development of inclusive assistive technologies.},
  booktitle = {Companion Proceedings of the 29th International Conference on Intelligent User Interfaces},
  pages     = {24--29},
  numpages  = {6},
  keywords  = {Assistive technology, Eye gaze tracking, Human-Robot Interaction},
  location  = {Greenville, SC, USA},
  series    = {IUI '24 Companion}
}