@article{ZHANG2024107518,
  author        = {Zhang, Mengxi and Liu, Huaxiao and Song, Shenning and Chen, Chunyang and Huang, Pei and Zhao, Jian},
  title         = {Are Your Apps Accessible? A {GCN}-Based Accessibility Checker for Low Vision Users},
  journal       = {Information and Software Technology},
  volume        = {174},
  pages         = {107518},
  year          = {2024},
  issn          = {0950-5849},
  doi           = {10.1016/j.infsof.2024.107518},
  url           = {https://www.sciencedirect.com/science/article/pii/S095058492400123X},
  keywords      = {GUI, Accessibility, Graph convolutional neural networks, Low vision users},
  abstract      = {Context: Accessibility issues (e.g., small size and narrow interval) in mobile applications (apps) lead to obstacles for billions of low vision users in interacting with Graphical User Interfaces (GUIs). Although GUI accessibility scanning tools exist, most of them perform rule-based check relying on complex GUI hierarchies. This might make them detect invisible redundant information, cannot handle small deviations, omit similar components, and is hard to extend. Objective: In this paper, we propose a novel approach, named ALVIN (Accessibility Checker for Low Vision), which represents the GUI as a graph and adopts the Graph Convolutional Neural Networks (GCN) to label inaccessible components. Method: ALVIN removes invisible views to prevent detecting redundancy and uses annotations from low vision users to handle small deviations. Also, the GCN model could consider the relations between GUI components, connecting similar components and reducing the possibility of omission. ALVIN only requires users to annotate the relevant dataset when detecting new kinds of issues. Results: Our experiments on 48 apps demonstrate the effectiveness of ALVIN, with precision of 83.5 Conclusion: To summarize, our proposed approach can effectively detect accessibility issues in GUIs for low vision users, thereby guiding developers in fixing them efficiently.},
  internal-note = {NOTE(review): abstract appears truncated after "precision of 83.5" -- percent signs and the remaining metrics were likely stripped during export; verify against the publisher page before relying on it.},
}