<?xml version="1.0" encoding="UTF-8"?><xml><records><record><source-app name="Biblio" version="7.x">Drupal-Biblio</source-app><ref-type>17</ref-type><contributors><authors><author><style face="normal" font="default" size="100%">Dylan R. Fox</style></author><author><style face="normal" font="default" size="100%">Ahmad Ahmadzada</style></author><author><style face="normal" font="default" size="100%">Clara Tenia Wang</style></author><author><style face="normal" font="default" size="100%">Shiri Azenkot</style></author><author><style face="normal" font="default" size="100%">Marlena A. Chu</style></author><author><style face="normal" font="default" size="100%">Roberto Manduchi</style></author><author><style face="normal" font="default" size="100%">Emily A. Cooper</style></author></authors></contributors><titles><title><style face="normal" font="default" size="100%">Using augmented reality to cue obstacles for people with low vision</style></title><secondary-title><style face="normal" font="default" size="100%">Opt. Express</style></secondary-title></titles><keywords><keyword><style  face="normal" font="default" size="100%">Contact lenses</style></keyword><keyword><style  face="normal" font="default" size="100%">Head mounted displays</style></keyword><keyword><style  face="normal" font="default" size="100%">Low vision</style></keyword><keyword><style  face="normal" font="default" size="100%">Peripheral vision</style></keyword><keyword><style  face="normal" font="default" size="100%">Vision correction</style></keyword><keyword><style  face="normal" font="default" size="100%">Visual acuity</style></keyword></keywords><dates><year><style  face="normal" font="default" size="100%">2023</style></year><pub-dates><date><style  face="normal" font="default" size="100%">Feb</style></date></pub-dates></dates><urls><web-urls><url><style face="normal" font="default" size="100%">https://opg.optica.org/oe/abstract.cfm?URI=oe-31-4-6827</style></url></web-urls></urls><volume><style face="normal" font="default" size="100%">31</style></volume><pages><style face="normal" font="default" size="100%">6827–6848</style></pages><language><style face="normal" font="default" size="100%">eng</style></language><abstract><style face="normal" font="default" size="100%">&lt;p&gt;Detecting and avoiding obstacles while navigating can pose a challenge for people with low vision, but augmented reality (AR) has the potential to assist by enhancing obstacle visibility. Perceptual and user experience research is needed to understand how to craft effective AR visuals for this purpose. We developed a prototype AR application capable of displaying multiple kinds of visual cues for obstacles on an optical see-through head-mounted display. We assessed the usability of these cues via a study in which participants with low vision navigated an obstacle course. 
The results suggest that 3D world-locked AR cues were superior to directional heads-up cues for most participants during this activity.&lt;/p&gt;</style></abstract></record></records></xml>