<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.0 20040830//EN" "journalpublishing.dtd"><article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" dtd-version="2.0" xml:lang="en" article-type="research-article"><front><journal-meta><journal-id journal-id-type="nlm-ta">JMIR Aging</journal-id><journal-id journal-id-type="publisher-id">aging</journal-id><journal-id journal-id-type="index">31</journal-id><journal-title>JMIR Aging</journal-title><abbrev-journal-title>JMIR Aging</abbrev-journal-title><issn pub-type="epub">2561-7605</issn></journal-meta><article-meta><article-id pub-id-type="publisher-id">52443</article-id><article-id pub-id-type="doi">10.2196/52443</article-id><title-group><article-title>Positive Emotional Responses to Socially Assistive Robots in People With Dementia: Pilot Study</article-title></title-group><contrib-group><contrib contrib-type="author" corresp="yes"><name name-style="western"><surname>Otaka</surname><given-names>Eri</given-names></name><degrees>MD, PhD</degrees><xref ref-type="aff" rid="aff1">1</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Osawa</surname><given-names>Aiko</given-names></name><degrees>MD, PhD</degrees><xref ref-type="aff" rid="aff2">2</xref><xref ref-type="aff" rid="aff3">3</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Kato</surname><given-names>Kenji</given-names></name><degrees>PhD</degrees><xref ref-type="aff" rid="aff4">4</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Obayashi</surname><given-names>Yota</given-names></name><degrees>OT, PhD</degrees><xref ref-type="aff" rid="aff5">5</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Uehara</surname><given-names>Shintaro</given-names></name><degrees>PT, PhD</degrees><xref ref-type="aff" rid="aff6">6</xref></contrib><contrib 
contrib-type="author"><name name-style="western"><surname>Kamiya</surname><given-names>Masaki</given-names></name><degrees>OT, MS</degrees><xref ref-type="aff" rid="aff2">2</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Mizuno</surname><given-names>Katsuhiro</given-names></name><degrees>MD, PhD</degrees><xref ref-type="aff" rid="aff7">7</xref><xref ref-type="aff" rid="aff8">8</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Hashide</surname><given-names>Shusei</given-names></name><degrees>MD</degrees><xref ref-type="aff" rid="aff7">7</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Kondo</surname><given-names>Izumi</given-names></name><degrees>MD, PhD</degrees><xref ref-type="aff" rid="aff9">9</xref></contrib></contrib-group><aff id="aff1"><institution>Laboratory of Practical Technology in Community, Assistive Robot Center, National Center for Geriatrics and Gerontology Research Institute</institution>, <addr-line>Obu, Aichi</addr-line>, <country>Japan</country></aff><aff id="aff2"><institution>Department of Rehabilitation Medicine, National Center for Geriatrics and Gerontology</institution>, <addr-line>Obu, Aichi</addr-line>, <country>Japan</country></aff><aff id="aff3"><institution>Laboratory of Cognitive Rehabilitation and Robotics, Assistive Robot Center, National Center for Geriatrics and Gerontology Research Institute</institution>, <addr-line>Obu, Aichi</addr-line>, <country>Japan</country></aff><aff id="aff4"><institution>Laboratory of Clinical Evaluation with Robotics, Assistive Robot Center, National Center for Geriatrics and Gerontology Research Institute</institution>, <addr-line>Obu, Aichi</addr-line>, <country>Japan</country></aff><aff id="aff5"><institution>Department of Rehabilitation, Fujita Health University Hospital</institution>, <addr-line>Toyoake, Aichi</addr-line>, <country>Japan</country></aff><aff id="aff6"><institution>Faculty of 
Rehabilitation, Fujita Health University School of Health Sciences</institution>, <addr-line>Toyoake, Aichi</addr-line>, <country>Japan</country></aff><aff id="aff7"><institution>Department of Physical Rehabilitation, National Center Hospital, National Center of Neurology and Psychiatry</institution>, <addr-line>Kodaira, Tokyo</addr-line>, <country>Japan</country></aff><aff id="aff8"><institution>Department of Rehabilitation Medicine, Tokai University School of Medicine</institution>, <addr-line>Isehara, Kanagawa</addr-line>, <country>Japan</country></aff><aff id="aff9"><institution>Assistive Robot Center, National Center for Geriatrics and Gerontology Research Institute</institution>, <addr-line>Obu, Aichi</addr-line>, <country>Japan</country></aff><contrib-group><contrib contrib-type="editor"><name name-style="western"><surname>Bjarnadottir</surname><given-names>Ragnhildur</given-names></name></contrib></contrib-group><contrib-group><contrib contrib-type="reviewer"><name name-style="western"><surname>Magistro</surname><given-names>Daniele</given-names></name></contrib><contrib contrib-type="reviewer"><name name-style="western"><surname>Jeong</surname><given-names>Sooyeon</given-names></name></contrib></contrib-group><author-notes><corresp>Correspondence to Eri Otaka, MD, PhD<email>eotaka@ncgg.go.jp</email></corresp></author-notes><pub-date pub-type="collection"><year>2024</year></pub-date><pub-date pub-type="epub"><day>11</day><month>4</month><year>2024</year></pub-date><volume>7</volume><elocation-id>e52443</elocation-id><history><date date-type="received"><day>04</day><month>09</month><year>2023</year></date><date date-type="rev-recd"><day>06</day><month>03</month><year>2024</year></date><date date-type="accepted"><day>07</day><month>03</month><year>2024</year></date></history><copyright-statement>&#x00A9; Eri Otaka, Aiko Osawa, Kenji Kato, Yota Obayashi, Shintaro Uehara, Masaki Kamiya, Katsuhiro Mizuno, Shusei Hashide, Izumi Kondo. 
Originally published in JMIR Aging (<ext-link ext-link-type="uri" xlink:href="https://aging.jmir.org">https://aging.jmir.org</ext-link>), 11.4.2024. </copyright-statement><copyright-year>2024</copyright-year><license license-type="open-access" xlink:href="https://creativecommons.org/licenses/by/4.0/"><p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (<ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">https://creativecommons.org/licenses/by/4.0/</ext-link>), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work, first published in JMIR Aging, is properly cited. The complete bibliographic information, a link to the original publication on <ext-link ext-link-type="uri" xlink:href="https://aging.jmir.org">https://aging.jmir.org</ext-link>, as well as this copyright and license information must be included.</p></license><self-uri xlink:type="simple" xlink:href="https://aging.jmir.org/2024/1/e52443"/><abstract><sec><title>Background</title><p>Interventions and care that can evoke positive emotions and reduce apathy or agitation are important for people with dementia. In recent years, socially assistive robots used for better dementia care have been found to be feasible. However, the immediate responses of people with dementia when they are given multiple sensory modalities from socially assistive robots have not yet been sufficiently elucidated.</p></sec><sec><title>Objective</title><p>This study aimed to quantitatively examine the immediate emotional responses of people with dementia to stimuli presented by socially assistive robots using facial expression analysis in order to determine whether they elicited positive emotions.</p></sec><sec sec-type="methods"><title>Methods</title><p>This pilot study adopted a single-arm interventional design. 
Socially assistive robots were presented to nursing home residents in a 3-step procedure: (1) the robot was placed in front of participants (visual stimulus), (2) the robot was manipulated to produce sound (visual and auditory stimuli), and (3) participants held the robot in their hands (visual, auditory, and tactile stimuli). Expression intensity values for &#x201C;happy,&#x201D; &#x201C;sad,&#x201D; &#x201C;angry,&#x201D; &#x201C;surprised,&#x201D; &#x201C;scared,&#x201D; and &#x201C;disgusted&#x201D; were calculated continuously using facial expression analysis with FaceReader. Additionally, self-reported feelings were assessed using a 5-point Likert scale. In addition to the comparison between the subjective and objective emotional assessments, expression intensity values were compared across the aforementioned 3 stimulus patterns within each session. Finally, the expression intensity value for &#x201C;happy&#x201D; was compared between the different types of robots.</p></sec><sec sec-type="results"><title>Results</title><p>A total of 29 participants (mean age 88.7, SD 6.2 years; n=27 female; Japanese version of Mini-Mental State Examination mean score 18.2, SD 5.1) were recruited. The expression intensity value for &#x201C;happy&#x201D; was the largest in both the subjective and objective assessments and increased significantly when all sensory modalities (visual, auditory, and tactile) were presented (median expression intensity 0.21, IQR 0.09-0.35) compared to the other 2 patterns (visual alone: median expression intensity 0.10, IQR 0.03-0.22; <italic>P</italic>&#x003C;.001; visual and auditory: median expression intensity 0.10, IQR 0.04-0.23; <italic>P</italic>&#x003C;.001). 
The comparison of different types of robots revealed a significant increase when all stimuli were presented by doll-type and animal-type robots, but not humanoid-type robots.</p></sec><sec sec-type="conclusions"><title>Conclusions</title><p>By quantifying the emotional responses of people with dementia, this study highlighted that socially assistive robots may be more effective in eliciting positive emotions when multiple sensory stimuli, including tactile stimuli, are involved. More studies, including randomized controlled trials, are required to further explore the effectiveness of using socially assistive robots in dementia care.</p></sec><sec><title>Trial Registration</title><p>UMIN Clinical Trials Registry UMIN000046256; https://tinyurl.com/yw37auan</p></sec></abstract><kwd-group><kwd>dementia care</kwd><kwd>robotics</kwd><kwd>emotion</kwd><kwd>facial expression</kwd><kwd>expression intensity</kwd><kwd>long-term care</kwd><kwd>sensory modality</kwd><kwd>gerontology</kwd><kwd>gerontechnology</kwd></kwd-group></article-meta></front><body><sec id="s1" sec-type="intro"><title>Introduction</title><p>The number of people with dementia is increasing globally, and it is estimated that it will reach 152 million by 2050 [<xref ref-type="bibr" rid="ref1">1</xref>]. The provision of adequate social care for people with dementia is a major public health concern in many countries. The neurodegenerative nature of dementia affects memory, cognitive function, and more, resulting in a range of noncognitive symptoms, including changes in behavior, emotion, and social functioning. The most frequent changes include agitation, depression, and apathy. These emotional changes are reported as the most challenging aspect of dementia care by many caregivers [<xref ref-type="bibr" rid="ref2">2</xref>]. 
On the other hand, positive emotions such as joy or comfort are relatively preserved until the terminal stage of the disease [<xref ref-type="bibr" rid="ref3">3</xref>,<xref ref-type="bibr" rid="ref4">4</xref>]. Previous studies suggest that the arousal of positive emotions may enhance cognitive function, presumably through amygdala activation; therefore, stimuli with a positive valence may enhance the effect of the rehabilitative approach for patients with dementia [<xref ref-type="bibr" rid="ref3">3</xref>,<xref ref-type="bibr" rid="ref5">5</xref>]. Considering these facts, interventions and caregiver involvement that can evoke positive emotions and suppress negative psychological responses are important strategies that should be actively implemented in long-term care for people with dementia to maintain residual functions and alleviate the burden of care.</p><p>As examples of emotion-related interventions, music therapy [<xref ref-type="bibr" rid="ref6">6</xref>,<xref ref-type="bibr" rid="ref7">7</xref>] and occupational therapy [<xref ref-type="bibr" rid="ref8">8</xref>,<xref ref-type="bibr" rid="ref9">9</xref>] have been shown to be effective in terms of emotional control. However, due to the rapid increase in the number of people with dementia and the shortage of dementia caregivers [<xref ref-type="bibr" rid="ref1">1</xref>,<xref ref-type="bibr" rid="ref10">10</xref>,<xref ref-type="bibr" rid="ref11">11</xref>], there is a lack of staffing power to provide such nondrug therapies broadly and equally. In recent years, clinical applications of socially assistive robots have been used to provide high-quality emotional support and companionship [<xref ref-type="bibr" rid="ref12">12</xref>-<xref ref-type="bibr" rid="ref14">14</xref>]. 
Socially assistive robots are machines designed to provide assistance in the caregiving process through social rather than physical means and are equipped with a social interface to enable interaction with the user [<xref ref-type="bibr" rid="ref15">15</xref>-<xref ref-type="bibr" rid="ref17">17</xref>]. One systematic review and meta-analysis, as well as one scoping review, found that Paro, a baby seal&#x2013;shaped socially assistive robot, has significant effects on agitation and depression [<xref ref-type="bibr" rid="ref12">12</xref>,<xref ref-type="bibr" rid="ref18">18</xref>], while another systematic review and meta-analysis concluded that there is little evidence that people with dementia derive benefits from socially assistive robots for cognition or neuropsychiatric symptoms when considering various types of robots, although they are feasible and acceptable [<xref ref-type="bibr" rid="ref11">11</xref>]. On the other hand, one small between-groups comparison study reported that a certain type of socially assistive robot showed a negative effect in participants with cognitive decline, based on an examination of immediate neurophysiological changes [<xref ref-type="bibr" rid="ref19">19</xref>].</p><p>When using socially assistive robots in clinical practice, one important aspect to consider is the immediate response of persons with dementia. In general, when confronted with a new robot or technology, a relatively positive immediate response known as the novelty effect [<xref ref-type="bibr" rid="ref20">20</xref>] tends to be observed. In contrast, in people with cognitive decline, the immediate response to robots is reported to be somewhat stressful rather than positive [<xref ref-type="bibr" rid="ref19">19</xref>]. These findings suggest that people with dementia, or those with memory and other cognitive impairments, may have a different immediate response compared to the general public. 
For example, people with dementia have a reduced ability to process multiple sensory stimuli [<xref ref-type="bibr" rid="ref21">21</xref>]; therefore, they might have difficulty accepting and integrating multiple unfamiliar stimuli (eg, shapes, lights, sounds, and touch) provided simultaneously by the robot in the first interaction. Since they are prone to mental stress when they do not understand a situation [<xref ref-type="bibr" rid="ref22">22</xref>], these stimuli from the robots could cause a tense or negative response. Nonetheless, no studies have verified how socially assistive robots are perceived by people with dementia from the perspective of having to process multiple sensory modalities. Moreover, an immediate response from the person with dementia is crucial in clinical settings because it helps care providers confirm the effectiveness of the robot on the spot and make precise decisions about whether to continue using the robot. Therefore, it is beneficial for clinical applications to focus on understanding the immediate responses of people with dementia when they are given multiple sensory modalities from socially assistive robots.</p><p>For an objective and better understanding of these issues, the signs of emotional responses should be quantified using appropriate techniques. Given that verbal skills tend to be impaired in people with dementia [<xref ref-type="bibr" rid="ref23">23</xref>,<xref ref-type="bibr" rid="ref24">24</xref>], it is important to use not only self-reported outcomes but also objective measures that can be obtained with minimal burden. For example, in the field of psychology, facial expression is considered a differentiated indicator of inner emotions [<xref ref-type="bibr" rid="ref25">25</xref>,<xref ref-type="bibr" rid="ref26">26</xref>]. 
According to recent reports, analysis of facial imaging using facial expression analysis software is able to quantify facial expressions and estimate emotions with good validity [<xref ref-type="bibr" rid="ref27">27</xref>,<xref ref-type="bibr" rid="ref28">28</xref>]. Applying these technologies to the investigation of the use of socially assistive robots among people with dementia will enable detailed and empirical verification of their effects, such as responses to the different sensory stimuli mentioned above, which are difficult to detect with subjective scales.</p><p>In this context, this study aimed to quantitatively evaluate the psychological and emotional reactions evoked in people with dementia to stimuli derived from socially assistive robots using facial expression analysis of facial video clips. In particular, we investigated how immediate responses changed as the modalities of sensory stimulation provided by the robot increased. Furthermore, from the perspective of eliciting positive emotions, which are beneficial to dementia care, this study also examined the differences in the emotion of joy elicited by the different types of robots.</p></sec><sec id="s2" sec-type="methods"><title>Methods</title><sec id="s2-1"><title>Ethical Considerations</title><p>The study protocol was approved by the institutional ethics committee of the National Center for Geriatrics and Gerontology (1539) and prospectively registered in the UMIN Clinical Trial Registry (UMIN000046256). All participants with a Clinical Dementia Rating (CDR) scale [<xref ref-type="bibr" rid="ref29">29</xref>] score of 0 or 1 provided informed consent themselves in accordance with the Declaration of Helsinki. For those who were considered to have an insufficient capacity to consent due to cognitive decline equivalent to CDR 2 and 3, informed consent was obtained from their family members, and the procedures were explained to the participants in plain language to obtain their approval. 
To comply with ethical principles, all data collected were anonymized and stored in a locked file or on a password-protected computer.</p></sec><sec id="s2-2"><title>Study Design and Setting</title><p>This pilot study was conducted as a single-arm, self-controlled, interventional study. Two local nursing care facilities that had no previous experience implementing socially assistive robots were selected as the experiment sites.</p></sec><sec id="s2-3"><title>Participants</title><p>Participants were recruited among the residents of the 2 nursing homes. The inclusion criteria were as follows: a significant decline in cognitive function interfering with independence in the performance of everyday activities, the ability to maintain a sitting position for 15 minutes or more, the ability to communicate using simple words, and the ability to follow 2-step instructions. These criteria were first assessed by nursing home staff members through assessments performed as part of daily nursing care procedures. Regarding cognitive decline, candidates either had a previous formal diagnosis of dementia from their physician or received a diagnosis from one of the researchers (EO, a physician); they also had confirmed evidence that cognitive decline was present and that the decline was not due to delirium or other mental disorders. Ultimately, all the participants met the diagnostic criteria for dementia in the <italic>Diagnostic and Statistical Manual of Mental Disorders, Fifth Edition</italic> (<italic>DSM-5</italic>). Those with unstable physical or mental conditions or evident higher cognitive dysfunction due to causes other than dementia were excluded.</p></sec><sec id="s2-4"><title>Procedures</title><p>The participants were taken to a private room or a place with minimum environmental noise, and their faces were recorded in a resting state for 30 seconds. This was referenced as the control image for calibrating the facial expression analysis (to be described below). 
Next, the socially assistive robots were presented to the participant by a familiar staff member in 1 session using a predetermined 3-step procedure. The staff explained in advance that they wanted the participants to share how they felt after experiencing the robots. In the first step, the robot was placed on a desk in front of the participant (visual stimulus). In the second step, the robot was manipulated to produce a gentle voice or meow (visual and auditory stimuli). In the third step, the participant was encouraged to touch the robot (visual, auditory, and tactile stimuli) and was able to handle it freely, including petting and holding. The 3 patterns of sensory stimulus produced by the robots were presented for approximately 30 seconds in the context of assessing the participant&#x2019;s immediate responses unless the participant refused (<xref ref-type="fig" rid="figure1">Figure 1</xref>).</p><fig position="float" id="figure1"><label>Figure 1.</label><caption><p>The flow of robot presentation to the participant. V: visual stimulus only; V+A: visual and auditory stimuli; V+A+T: visual, auditory, and tactile stimuli.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="aging_v7i1e52443_fig01.png"/></fig><p>Each participant was presented with 3 types of commercially available robot with typical characteristics&#x2014;a humanoid-type robot capable of voice communication (RoBoHoN; Sharp Corp), a doll-type robot with the appearance of a stuffed toy and a voice recognition and reproduction system (Chapit; RayTron Inc), and an animal-type (cat-shaped) robot that can meow, move its tail, and recognize sound (Amaenbou-Nekochan; Digirect Co, Ltd)&#x2014;resulting in 3 sessions per participant. 
The order of presentation, which was determined in advance using a random number table, differed for each participant (<xref ref-type="fig" rid="figure2">Figure 2</xref>).</p><fig position="float" id="figure2"><label>Figure 2.</label><caption><p>Socially assistive robots used in this study. Each participant was presented with these 3 types of robots with typical characteristics, resulting in 3 sessions per participant. The order of presentation was determined in advance using a random number table. (<bold>A</bold>) Humanoid-type, (<bold>B</bold>) doll-type, and (<bold>C</bold>) animal-type robots; (<bold>D</bold>) video recording while the participant is touching the robot.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="aging_v7i1e52443_fig02.png"/></fig></sec><sec id="s2-5"><title>Assessments of Emotional Responses</title><sec id="s2-5-1"><title>Subjective Assessment</title><p>At the end of each robot presentation session, the degree of subjective feelings (happy, sad, angry, surprised, scared, and disgusted) was recorded using a 5-point Likert scale (1: not at all; 2: very little; 3: so-so; 4: somewhat; 5: very much). The participants were shown a scale with words corresponding to each of the 5 points of the scale and were asked to select the point that best described their current feelings. This was assessed a total of 3 times (ie, once at the end of each robot presentation).</p></sec><sec id="s2-5-2"><title>Video Recording and Facial Expression Analysis</title><p>For the study procedures, a video camera (Logicool StreamCam; Logitech Inc) was configured on a desk to capture the participants&#x2019; faces from the front. 
In case this camera&#x2019;s view was blocked by arm movements or the robot, another camera (Handycam HDR-CX470, Sony Corp) was fixed on a tripod positioned 30 degrees diagonally in front of the participant.</p><p>The videos were recorded continuously during the session in full HD (1920 &#x00D7; 1080 pixels) at 60 frames per second. To optimize the sensitivity and accuracy of the facial expression analysis, we segmented the video clips such that each clip contained 1 sensory stimulation pattern (visual only; visual and auditory; or visual, auditory, and tactile), and then cropped them to a suitable resolution that focused on the facial region. If necessary, the brightness of the video clips was minimally adjusted.</p><p>We used commercially available software for quantifying facial expressions (FaceReader; version 7; Noldus Information Technology Inc). This software was developed based on a quantitative evaluation method called the Facial Action Coding System [<xref ref-type="bibr" rid="ref30">30</xref>], which describes visually identifiable facial muscle movements as &#x201C;action units,&#x201D; identifies the intensity of a basic emotional state, and outputs time-series data sets comprising expression intensity values from 0 to 1 for each of the 7 facial expression elements (ie, neutral, happy, sad, angry, surprised, scared, and disgusted) on a continuous scale with high accuracy [<xref ref-type="bibr" rid="ref27">27</xref>,<xref ref-type="bibr" rid="ref31">31</xref>,<xref ref-type="bibr" rid="ref32">32</xref>]. This method is advantageous in that it can quantitatively evaluate facial expressions conveniently with good reproducibility, unlike subjective evaluations, as discussed previously [<xref ref-type="bibr" rid="ref33">33</xref>]. The software provides 5 face models (General, General61, Children, East Asians, and Elderly) that correspond to the data sets used in the algorithm training. 
We used the East Asian face model according to the software specifications.</p><p>Additionally, we used the calibration function provided by the software to minimize person-specific biases due to facial wrinkles or light effects. The reference manual of FaceReader explains that this function removes biases in the 7 facial expression elements but does not increase the intensity. For each participant, the resting facial image in the first part of recording was used as the calibration image for all the video clips of the participant. In cases where certain facial expression elements were detected in the neutral control image, those expression elements were corrected in the images to be analyzed. The degree of successful face recognition was evaluated for every video clip, and the session was excluded from further analysis if both images from the 2 cameras had a low proportion of successful frames (&#x003C;20%), with reference to previous studies [<xref ref-type="bibr" rid="ref32">32</xref>,<xref ref-type="bibr" rid="ref34">34</xref>].</p></sec><sec id="s2-5-3"><title>Clinical Assessments</title><p>In addition to basic characteristics, overall cognitive function was assessed using the Japanese version of the Mini-Mental State Examination (MMSE-J) [<xref ref-type="bibr" rid="ref35">35</xref>] and the Japanese version of the Montreal Cognitive Assessment (MoCA-J) [<xref ref-type="bibr" rid="ref36">36</xref>]. The Barthel index was used to assess performance on 10 basic activities of daily living (ADL), which tend to deteriorate in people with dementia. The total score ranges from 0 (worst; all dependent) to 100 (best; all independent). The Dementia Behavior Disturbance Scale (DBDS) [<xref ref-type="bibr" rid="ref37">37</xref>,<xref ref-type="bibr" rid="ref38">38</xref>] was used to assess the severity of neuropsychiatric symptoms. 
The scale evaluates a total of 28 items on a 5-point scale from 0 (not at all) to 4 (always) in terms of the frequency of the behavioral disturbances typically seen in persons with dementia, such as wandering, agitation, and aggression, and is scored from 0 (best) to 112 (worst). The questionnaire format allowed caregivers to answer the questions easily. Finally, to assess hearing disability, which may affect the response to auditory stimulation, 10 items from the Questionnaire on Hearing [<xref ref-type="bibr" rid="ref39">39</xref>] were surveyed to score the severity of hearing loss in daily life. These 10 questions set up specific situations of hearing speech or environmental sounds in daily life and were to be answered on a 5-point scale from 1 (always able to hear) to 5 (never able to hear). The total score ranges from 10 (best) to 50 (worst).</p><p>The MMSE-J and MoCA-J were administered by skilled occupational therapists on different days. The Barthel index, DBDS, and the Questionnaire on Hearing were scored by nursing home staff members who were sufficiently familiar with the participants.</p></sec></sec><sec id="s2-6"><title>Statistical Analyses</title><p>The averages of the emotions expressed in response to the robot (ie, happy, sad, angry, surprised, scared, and disgusted) were compared to each other using the nonparametric Wilcoxon signed-rank test, with the <italic>P</italic> values multiplied by the number of tests according to the Bonferroni method. The correlations between subjective and objective emotional assessments were also examined using Spearman correlation coefficients. The average expression intensity values during the 30 seconds for each sensory stimulus obtained by facial expression analysis were compared for all 3 patterns (visual only; visual and auditory; or visual, auditory, and tactile) using the Friedman test with the Wilcoxon signed-rank test as a post hoc test. 
The average of the 3 robot presentation sessions was used in this part of the analysis. Additionally, focusing on positive emotions, the expression intensity values for &#x201C;happy&#x201D; were compared between the different types of robots using the Friedman test with the Wilcoxon signed-rank test as a post hoc test. Statistical analyses were performed using STATA/SE (version 13.1; StataCorp). Any <italic>P</italic> value less than .05 was considered statistically significant.</p></sec></sec><sec id="s3" sec-type="results"><title>Results</title><p><xref ref-type="table" rid="table1">Table 1</xref> presents the demographic characteristics of the participants. Eleven participants (38%) had been diagnosed with Alzheimer disease by their physician, 5 (17%) had dementia with Lewy bodies, and 13 (45%) met the criteria for major neurocognitive disorder in the <italic>DSM-5</italic>, but the etiology was not specified. All the participants had cognitive decline when compared with the cutoff value of 26 on the MoCA-J [<xref ref-type="bibr" rid="ref36">36</xref>].</p><table-wrap id="t1" position="float"><label>Table 1.</label><caption><p>Overall participant characteristics (N=29).</p></caption><table id="table1" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom" colspan="2">Characteristics</td><td align="left" valign="bottom">Values</td></tr></thead><tbody><tr><td align="left" valign="top" colspan="2">Age (years), mean (SD; range)</td><td align="left" valign="top">88.7 (6.2; 71-98)</td></tr><tr><td align="left" valign="top" colspan="2"><bold>Gender, n</bold></td><td align="left" valign="top"/></tr><tr><td align="left" valign="top"/><td align="left" valign="top">Male</td><td align="char" char="." valign="top">2</td></tr><tr><td align="left" valign="top"/><td align="left" valign="top">Female</td><td align="char" char="." 
valign="top">27</td></tr><tr><td align="left" valign="top" colspan="3"><bold>Type of disease, n</bold></td></tr><tr><td align="left" valign="top"/><td align="left" valign="top">Alzheimer disease<sup><xref ref-type="table-fn" rid="table1fn1">a</xref></sup></td><td align="left" valign="top">11</td></tr><tr><td align="left" valign="top"/><td align="left" valign="top">Dementia with Lewy bodies<sup><xref ref-type="table-fn" rid="table1fn2">b</xref></sup></td><td align="left" valign="top">5</td></tr><tr><td align="left" valign="top"/><td align="left" valign="top">Not specified</td><td align="left" valign="top">13</td></tr><tr><td align="left" valign="top" colspan="2">Years of education, mean (SD; range)</td><td align="left" valign="top">9.7 (2.2; 6-13)</td></tr><tr><td align="left" valign="top" colspan="2">MMSE-J<sup><xref ref-type="table-fn" rid="table1fn3">c</xref></sup>, mean score (SD; range)</td><td align="left" valign="top">18.2 (5.1; 11-28)</td></tr><tr><td align="left" valign="top" colspan="2">MoCA-J<sup><xref ref-type="table-fn" rid="table1fn4">d</xref></sup>, mean score (SD; range)</td><td align="left" valign="top">11.8 (4.9; 2-24)</td></tr><tr><td align="left" valign="top" colspan="2">Barthel index, mean score (SD; range)</td><td align="left" valign="top">66.0 (24.8; 10-95)</td></tr><tr><td align="left" valign="top" colspan="2">10 items from the Questionnaire on Hearing, mean score (SD; range)</td><td align="left" valign="top">26.6 (8.6; 14-50)</td></tr><tr><td align="left" valign="top" colspan="2">DBDS<sup><xref ref-type="table-fn" rid="table1fn5">e</xref></sup>, mean score (SD; range)</td><td align="left" valign="top">13.4 (9.6; 0-40)</td></tr></tbody></table><table-wrap-foot><fn id="table1fn1"><p><sup>a</sup>A total of 4 patients were taking medication for dementia.</p></fn><fn id="table1fn2"><p><sup>b</sup>A total of 2 patients were taking medication for dementia.</p></fn><fn id="table1fn3"><p><sup>c</sup>MMSE-J: Japanese version of Mini-Mental State 
Examination.</p></fn><fn id="table1fn4"><p><sup>d</sup>MoCA-J: Japanese version of Montreal Cognitive Assessment.</p></fn><fn id="table1fn5"><p><sup>e</sup>DBDS: Dementia Behavior Disturbance Scale.</p></fn></table-wrap-foot></table-wrap><p>The average subjective emotional assessments (5-point Likert scale) and objective expression intensity values for the expression elements across all 3 types of robots are shown in <xref ref-type="table" rid="table2">Table 2</xref>. Facial analysis failed to detect any action units or emotional elements in 1 of the 29 participants. Also, 1 video clip was excluded from the analyses because of a low proportion of successful frames (participant 13; doll-type robot; visual, auditory, and tactile stimuli presented). After excluding these video clips, the overall percentage of the video frames analyzable by the software was 81.4%. Among the self-reported emotions, &#x201C;happy&#x201D; was significantly the most common (happy vs surprised: <italic>P</italic>=.01; happy vs sad, angry, scared, and disgusted: <italic>P</italic>&#x003C;.001), and facial analysis&#x2013;detected emotions showed the same trend in that the values of &#x201C;happy&#x201D; were significantly the highest among the 6 emotional elements assessed (happy vs all others: <italic>P</italic>&#x003C;.001). Additionally, the correlations between subjective and objective emotional assessments were significant for &#x201C;happy,&#x201D; &#x201C;sad,&#x201D; and &#x201C;surprised,&#x201D; though the correlation coefficients were interpreted as slight or low. 
Taking the value of &#x201C;happy&#x201D; as an example, as shown in <xref ref-type="fig" rid="figure3">Figure 3</xref>, there were cases where the objective value was detected as high even when the subjective feelings were reported as low.</p><table-wrap id="t2" position="float"><label>Table 2.</label><caption><p>The relationship between subjective and objective emotional assessments.</p></caption><table id="table2" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom"/><td align="left" valign="bottom" colspan="2">Subjective (5-point Likert scale; n=29)</td><td align="left" valign="bottom" colspan="2">Objective (expression intensity values; n=28)</td><td align="left" valign="bottom">&#x03C1;</td><td align="left" valign="bottom"><italic>P</italic> value</td></tr><tr><td align="left" valign="bottom"/><td align="left" valign="bottom">Mean (SD)</td><td align="left" valign="bottom">Range</td><td align="left" valign="bottom">Mean (SD)</td><td align="left" valign="bottom">Range</td><td align="left" valign="bottom"/><td align="left" valign="bottom"/></tr></thead><tbody><tr><td align="left" valign="top">Happy</td><td align="left" valign="top">3.6 (1.0)</td><td align="char" char="hyphen" valign="top">1-5</td><td align="left" valign="top">0.18 (0.16)</td><td align="char" char="." valign="top">0-0.73</td><td align="left" valign="top">0.21</td><td align="left" valign="top">&#x003C;.001</td></tr><tr><td align="left" valign="top">Sad</td><td align="left" valign="top">2.1 (0.8)</td><td align="char" char="hyphen" valign="top">1-5</td><td align="left" valign="top">0.07 (0.09)</td><td align="char" char="." valign="top">0-0.61</td><td align="left" valign="top">0.14</td><td align="left" valign="top">.03</td></tr><tr><td align="left" valign="top">Angry</td><td align="left" valign="top">1.9 (0.8)</td><td align="char" char="hyphen" valign="top">1-4</td><td align="left" valign="top">0.07 (0.11)</td><td align="char" char="." 
valign="top">0-0.77</td><td align="left" valign="top">0.06</td><td align="left" valign="top">.31</td></tr><tr><td align="left" valign="top">Surprised</td><td align="left" valign="top">3.1 (1.1)</td><td align="char" char="hyphen" valign="top">1-5</td><td align="left" valign="top">0.09 (0.11)</td><td align="char" char="." valign="top">0-0.62</td><td align="left" valign="top">0.29</td><td align="left" valign="top">&#x003C;.001</td></tr><tr><td align="left" valign="top">Scared</td><td align="left" valign="top">1.9 (0.7)</td><td align="char" char="hyphen" valign="top">1-5</td><td align="left" valign="top">0.03 (0.05)</td><td align="char" char="." valign="top">0-0.36</td><td align="left" valign="top">0.05</td><td align="left" valign="top">.42</td></tr><tr><td align="left" valign="top">Disgusted</td><td align="left" valign="top">2.0 (1.0)</td><td align="char" char="hyphen" valign="top">1-5</td><td align="left" valign="top">0.05 (0.06)</td><td align="char" char="." valign="top">0-0.34</td><td align="left" valign="top">0.08</td><td align="left" valign="top">.21</td></tr><tr><td align="left" valign="top">Neutral</td><td align="left" valign="top">N/A<sup><xref ref-type="table-fn" rid="table2fn1">a</xref></sup></td><td align="left" valign="top">N/A</td><td align="left" valign="top">0.46 (0.13)</td><td align="char" char="." valign="top">0.14-0.85</td><td align="left" valign="top">N/A</td><td align="left" valign="top">N/A</td></tr></tbody></table><table-wrap-foot><fn id="table2fn1"><p><sup>a</sup>N/A: not applicable.</p></fn></table-wrap-foot></table-wrap><fig position="float" id="figure3"><label>Figure 3.</label><caption><p>Comparison of the objective expression intensity for &#x201C;happy&#x201D; at each grade of subjective emotional assessment (5-point Likert scale). 
Error bars indicate SDs, the lines within the boxes indicate medians, and the dots indicate outliers that were more than a quarter of the range &#x00D7; 1.5 away from the first or third quartile.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="aging_v7i1e52443_fig03.png"/></fig><p><xref ref-type="fig" rid="figure4">Figure 4</xref> shows a comparison of 3 different patterns of sensory stimuli among all participants. There were significant differences in the values for &#x201C;neutral&#x201D; (<italic>P</italic>&#x003C;.001), &#x201C;happy&#x201D; (<italic>P</italic>&#x003C;.001), &#x201C;sad&#x201D; (<italic>P</italic>=.001), &#x201C;scared&#x201D; (<italic>P</italic>=.04), and &#x201C;disgusted&#x201D; (<italic>P</italic>&#x003C;.001) among the 3 patterns. Post hoc analyses revealed that the values for &#x201C;happy&#x201D; significantly increased in the pattern with visual, auditory, and tactile stimuli (median score 0.21, IQR 0.09-0.35) compared to the patterns with visual stimulus only (median score 0.10, IQR 0.03-0.22; <italic>P</italic>&#x003C;.001) and with both visual and auditory stimuli (median score 0.10, IQR 0.04-0.23; <italic>P</italic>&#x003C;.001). The values for &#x201C;sad&#x201D; (with visual, auditory, and tactile stimuli: median score 0.05, IQR 0.01-0.11) and &#x201C;disgusted&#x201D; (with visual, auditory, and tactile stimuli: median score 0.04, IQR 0.02-0.10) exhibited the same trend, though both of these values were significantly smaller than those for &#x201C;happy&#x201D; (<italic>P</italic>&#x003C;.001). In contrast, the values for &#x201C;neutral&#x201D; and &#x201C;scared&#x201D; significantly decreased in the pattern with visual, auditory, and tactile stimuli compared with the other 2 patterns. 
However, the intensity of each emotional element did not change linearly over time.</p><fig position="float" id="figure4"><label>Figure 4.</label><caption><p>Comparison of all the expression intensities under the 3 different patterns of sensory stimulation. Error bars indicate SDs, the lines within the boxes indicate medians, and the dots indicate outliers that are more than a quarter range &#x00D7; 1.5 away from the first or third quartile. The values in the upper right of each group represent the overall <italic>P</italic> values in the Friedman test. The values above the boxplots for the nonresponder group represent the <italic>P</italic> values in the post hoc test (the Wilcoxon signed-rank test). <italic>P</italic> values less than .05 are denoted in bold. V: visual stimulus only; V+A: visual and auditory stimuli; V+A+T: visual, auditory, and tactile stimuli.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="aging_v7i1e52443_fig04.png"/></fig><p>When comparing the expression intensity values for &#x201C;happy&#x201D; between the different types of robots for the same participants and the same sensory stimuli, no statistical differences were found, as depicted in <xref ref-type="fig" rid="figure5">Figure 5A</xref>. 
When comparing the expression intensity values for &#x201C;happy&#x201D; between the different sensory stimuli for the same robot type, no statistically significant differences were found for robot A (humanoid-type; <italic>P</italic>=.48), while robot B (doll-type; <italic>P</italic>&#x003C;.001) and robot C (animal-type; <italic>P</italic>=.03) had significantly larger values in the pattern with visual, auditory, and tactile stimuli compared with the other 2 patterns, as shown in <xref ref-type="fig" rid="figure5">Figure 5B</xref>.</p><fig position="float" id="figure5"><label>Figure 5.</label><caption><p>Comparison of the expression intensity for &#x201C;happy&#x201D; between the different types of robots. (A) Comparison between 3 types of robots with the same pattern of sensory stimulation. (B) Comparison within each robot type using the 3 different patterns of sensory stimulation. Error bars indicate SDs, the lines within the boxes indicate medians, and the dots indicate outliers that are more than a quarter of the range &#x00D7; 1.5 away from the first or third quartile. The values in the upper right of each group represent the overall <italic>P</italic> values from the Friedman test. The values above the boxplots for robots B and C represent the <italic>P</italic> values from the post hoc test (the Wilcoxon signed-rank test). <italic>P</italic> values less than .05 are denoted in bold.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="aging_v7i1e52443_fig05.png"/></fig></sec><sec id="s4" sec-type="discussion"><title>Discussion</title><sec id="s4-1"><title>Principal Results</title><p>This study aimed to quantitatively evaluate the psychological and emotional responses of people with dementia to complex sensory stimuli provided by socially assistive robots. 
To summarize, the values for &#x201C;happy&#x201D; were found to be the largest in the overall response to the robots among the emotional elements in both the self-recorded assessment and objective facial expression analysis. However, correlations between the subjective and objective emotional assessments were found to be relatively low or insignificant. When comparing the 3 different patterns of sensory stimulation, there was a significant increase in some of the expression intensity values when all stimuli (visual, auditory, and tactile) were presented. In the comparison between different types of robots, focusing on &#x201C;happy,&#x201D; there was a significant increase when all stimuli were presented by a doll-type robot and by an animal-type robot but not by a humanoid-type robot.</p><p>The emotional responses revealed by the objective facial expression analysis showed the same trend as the participants&#x2019; subjective assessment, with happy emotions having the largest values in both assessments. This result suggests that the facial expression analysis system can successfully detect the expressions that emerge in people with dementia, which is consistent with a previous study [<xref ref-type="bibr" rid="ref40">40</xref>].</p><p>However, considering that the facial expression intensity was detected as relatively high in some cases where the subjective rating was moderate, the facial expression analysis may be able to capture emotions that are not sufficiently represented by a subjective 5-point Likert scale. In fact, a previous study supports the appropriateness of facial expression analysis as a nonverbal pain assessment for people with dementia when they lack the ability to self-report [<xref ref-type="bibr" rid="ref41">41</xref>]. 
Therefore, facial expression analysis may also be useful as an accurate nonverbal assessment of emotions for people with dementia.</p><p>Although some studies have already demonstrated positive responses in people with dementia when presented with socially assistive robots [<xref ref-type="bibr" rid="ref42">42</xref>,<xref ref-type="bibr" rid="ref43">43</xref>], this study is the first to objectively and quantitatively describe emotional responses using facial expression analysis. Additionally, these results are inconsistent with those of Goda et al [<xref ref-type="bibr" rid="ref19">19</xref>], who found that a 5-minute talk session with a socially assistive robot caused stress in people with dementia. In contrast to this study, the socially assistive robot&#x2019;s interaction in the study by Goda et al [<xref ref-type="bibr" rid="ref19">19</xref>] was mainly through verbal communication; thus, it was inferred that these stimuli were burdensome for people with dementia, who tend to have a decline in verbal communication skills. The positive expressions shown in this study are considered to have been observed as responses to sensory stimuli and not verbal stimuli.</p><p>Regarding the relationship between the type of sensory stimulus and emotional responses, the expression intensity values for &#x201C;happy,&#x201D; &#x201C;sad,&#x201D; and &#x201C;disgusted&#x201D; significantly increased with increasing sensory stimuli. Particularly, happy emotions were revealed to be elicited most strongly in persons with dementia when different varieties of sensory stimuli were presented. With respect to the values for &#x201C;sad&#x201D; and &#x201C;disgusted,&#x201D; we consider it natural that participants became somewhat careful or timid toward unknown experiences when interacting closely with robots. 
However, these emotional elements in automated analysis should be interpreted with caution, as they can occasionally include other negative emotions, such as fear [<xref ref-type="bibr" rid="ref44">44</xref>], especially when accompanied by facial movements around the lips or jaws [<xref ref-type="bibr" rid="ref31">31</xref>]. Nevertheless, the values for these negative emotions remain small compared to the value for &#x201C;happy,&#x201D; showing that positive emotions were dominant when visual, auditory, and tactile sensory stimuli were offered.</p><p>Notably, this increase in positive emotional responses may include changes over time, because the sensory stimuli were added over time in this study. Given the procedures in this study, the results could have been influenced by the novelty effect [<xref ref-type="bibr" rid="ref20">20</xref>], which is derived from curiosity toward a new experience. However, the intensity of each emotional element did not change linearly when sensory stimuli were added over time. Moreover, the addition of an auditory stimulus did not significantly increase the expression intensity value, whereas the addition of a tactile stimulus did. These findings suggest the importance of adding tactile stimuli to visual and auditory stimuli. This is plausible considering that tactile information connects through several pathways to the insular cortex, which evokes emotional responses [<xref ref-type="bibr" rid="ref45">45</xref>,<xref ref-type="bibr" rid="ref46">46</xref>]. The finding that tactile stimuli evoked positive feelings is supported by previous studies that showed that haptic or tangible input was effective in helping people with dementia understand and adapt to their surroundings [<xref ref-type="bibr" rid="ref47">47</xref>,<xref ref-type="bibr" rid="ref48">48</xref>]. 
Regarding auditory stimuli, the results of the Questionnaire on Hearing did not indicate that the participants had very good hearing, suggesting that the effects of auditory stimuli on emotion elicitation may have been relatively small. Since hearing loss is associated with the risk of developing dementia [<xref ref-type="bibr" rid="ref49">49</xref>], and a high percentage of people with dementia actually have hearing loss [<xref ref-type="bibr" rid="ref50">50</xref>,<xref ref-type="bibr" rid="ref51">51</xref>], this issue may also be of great clinical importance.</p><p>Furthermore, it is noteworthy that a significant increase in expression intensity values for &#x201C;happy&#x201D; with increased sensory stimuli was observed for doll-type robots and animal-type robots, but not for humanoid-type robots. One of the distinctive characteristics of the doll-type and animal robots used in this study was that they were covered by soft, fur-like materials. The importance of soft materials is commonly discussed in the field of soft robotics for medical use or human assistance [<xref ref-type="bibr" rid="ref52">52</xref>,<xref ref-type="bibr" rid="ref53">53</xref>]. Softness is considered effective not only in terms of safety for the human body but also in terms of the imitation of reality or the creation of familiarity [<xref ref-type="bibr" rid="ref54">54</xref>] and emotional processing [<xref ref-type="bibr" rid="ref55">55</xref>]. However, the emotional effects of various tactile sensations in people with dementia have not yet been studied; consequently, given the findings of this study, the effectiveness of soft tactile stimuli in dementia care may be worth exploring in future research.</p></sec><sec id="s4-2"><title>Limitations</title><p>This study has a few limitations. First, it included a small number of participants and a single experimental group. Changes in facial expressions were reliably detected by using the participants&#x2019; resting states as controls. 
However, another study design, such as a randomized controlled trial, is required to confirm these effects more clearly. Second, the generalizability of our findings may be limited, as most of the participants in this study were female. Previous studies report that there are gender differences in emotional responses to some types of sounds [<xref ref-type="bibr" rid="ref56">56</xref>], emotion expression [<xref ref-type="bibr" rid="ref57">57</xref>], and emotion regulation [<xref ref-type="bibr" rid="ref58">58</xref>], although gender and facial expression have been reported to have no significant correlation [<xref ref-type="bibr" rid="ref40">40</xref>]. Further research with male participants will be needed to reveal possible gender differences in responses toward social robots. Third, since there is no prior literature that has identified a minimum detectable change or a minimal clinically important difference for expression intensity, the clinical significance of the changes in expression intensity values demonstrated in this study needs to be explored further. Finally, this study only investigated the immediate responses to socially assistive robots, with patients allowed to interact with each robot for only 1.5 minutes in total. However, in real clinical settings, people with dementia might express more diverse patterns of responses, using these robots as they would like. Moreover, any enthusiasm resulting from the novelty effect may diminish over time. Thus, further investigation is required to reveal the long-term emotional effects of socially assistive robots on people with dementia, including variability in positive responses over several hours or days of use and the effects of these robots on their neuropsychiatric symptoms.</p></sec><sec id="s4-3"><title>Conclusions</title><p>This study quantitatively examined the emotional reactions of people with dementia to socially assistive robots. 
The expression intensity values, especially the values for &#x201C;happy,&#x201D; significantly increased with multiple sensory stimuli, including visual, auditory, and tactile stimuli. Therefore, this study shows that socially assistive robots may be more effective in arousing positive emotions when multiple sensory stimuli are involved. Further studies, including randomized controlled trials, are required to further explore the effectiveness of and the optimal methods for using socially assistive robots in dementia care.</p></sec></sec></body><back><ack><p>EO, AO, KK, KM, and SH conceptualized and designed the study. EO collected and analyzed data and drafted the manuscript. MK participated in data collection, data interpretation, and manuscript editing. AO, KK, YO, and SU participated in data interpretation and manuscript editing. IK participated in data interpretation and critical revisions of the manuscript. All authors have read and approved the final manuscript. This study was supported by the Japan Health Research Promotion Bureau Research Fund for Young Investigators (JH2021-Y-11). We sincerely thank the staff of Wakyokai Nagomi-no-Sato and Jinshikai Ruminasu Obu (nursing homes for older people) and the Wellness Valley Promotion Office (Obu City) for their cooperation. 
We also thank Naho Hashimoto, Ai Sugiyama, Michiko Chiso, and Ayumi Ogura for their technical support.</p></ack><fn-group><fn fn-type="conflict"><p>None declared.</p></fn></fn-group><glossary><title>Abbreviations</title><def-list><def-item><term id="abb1">ADL</term><def><p>activities of daily living</p></def></def-item><def-item><term id="abb2">CDR</term><def><p>Clinical Dementia Rating</p></def></def-item><def-item><term id="abb3">DBDS</term><def><p>Dementia Behavior Disturbance Scale</p></def></def-item><def-item><term id="abb4"><italic>DSM-5</italic></term><def><p><italic>Diagnostic and Statistical Manual of Mental Disorders, Fifth Edition</italic></p></def></def-item><def-item><term id="abb5">MMSE-J</term><def><p>Japanese version of Mini-Mental State Examination</p></def></def-item><def-item><term id="abb6">MoCA-J</term><def><p>Japanese version of Montreal Cognitive Assessment</p></def></def-item></def-list></glossary><ref-list><title>References</title><ref id="ref1"><label>1</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><collab>GBD 2019 Dementia Forecasting Collaborators</collab></person-group><article-title>Estimation of the global prevalence of dementia in 2019 and forecasted prevalence in 2050: an analysis for the Global Burden of Disease Study 2019</article-title><source>Lancet Public Health</source><year>2022</year><month>02</month><volume>7</volume><issue>2</issue><fpage>e105</fpage><lpage>e125</lpage><pub-id pub-id-type="doi">10.1016/S2468-2667(21)00249-8</pub-id><pub-id pub-id-type="medline">34998485</pub-id></nlm-citation></ref><ref id="ref2"><label>2</label><nlm-citation citation-type="book"><person-group person-group-type="author"><name name-style="western"><surname>Teng</surname><given-names>E</given-names></name><name name-style="western"><surname>Marshall</surname><given-names>GA</given-names></name><name 
name-style="western"><surname>Cummings</surname><given-names>JL</given-names></name></person-group><person-group person-group-type="editor"><name name-style="western"><surname>Boeve</surname><given-names>BF</given-names></name><name name-style="western"><surname>Miller</surname><given-names>BL</given-names></name></person-group><article-title>Neuropsychiatric features of dementia</article-title><source>The Behavioral Neurology of Dementia</source><year>2009</year><publisher-name>Cambridge University Press</publisher-name><fpage>85</fpage><lpage>100</lpage><pub-id pub-id-type="doi">10.1017/CBO9780511581410</pub-id></nlm-citation></ref><ref id="ref3"><label>3</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Nashiro</surname><given-names>K</given-names></name><name name-style="western"><surname>Sakaki</surname><given-names>M</given-names></name><name name-style="western"><surname>Mather</surname><given-names>M</given-names></name></person-group><article-title>Age differences in brain activity during emotion processing: reflections of age-related decline or increased emotion regulation?</article-title><source>Gerontology</source><year>2012</year><month>02</month><volume>58</volume><issue>2</issue><fpage>156</fpage><lpage>163</lpage><pub-id pub-id-type="doi">10.1159/000328465</pub-id><pub-id pub-id-type="medline">21691052</pub-id></nlm-citation></ref><ref id="ref4"><label>4</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Chaudhary</surname><given-names>S</given-names></name><name name-style="western"><surname>Zhornitsky</surname><given-names>S</given-names></name><name name-style="western"><surname>Chao</surname><given-names>HH</given-names></name><name name-style="western"><surname>van Dyck</surname><given-names>CH</given-names></name><name 
name-style="western"><surname>Li</surname><given-names>CS</given-names></name></person-group><article-title>Emotion processing dysfunction in Alzheimer's disease: an overview of behavioral findings, systems neural correlates, and underlying neural biology</article-title><source>Am J Alzheimers Dis Other Demen</source><year>2022</year><month>01</month><volume>37</volume><fpage>15333175221082834</fpage><pub-id pub-id-type="doi">10.1177/15333175221082834</pub-id><pub-id pub-id-type="medline">35357236</pub-id></nlm-citation></ref><ref id="ref5"><label>5</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Broster</surname><given-names>LS</given-names></name><name name-style="western"><surname>Blonder</surname><given-names>LX</given-names></name><name name-style="western"><surname>Jiang</surname><given-names>Y</given-names></name></person-group><article-title>Does emotional memory enhancement assist the memory-impaired?</article-title><source>Front Aging Neurosci</source><year>2012</year><month>03</month><volume>4</volume><fpage>2</fpage><pub-id pub-id-type="doi">10.3389/fnagi.2012.00002</pub-id><pub-id pub-id-type="medline">22479245</pub-id></nlm-citation></ref><ref id="ref6"><label>6</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>van der Steen</surname><given-names>JT</given-names></name><name name-style="western"><surname>Smaling</surname><given-names>HJ</given-names></name><name name-style="western"><surname>van der Wouden</surname><given-names>JC</given-names></name><name name-style="western"><surname>Bruinsma</surname><given-names>MS</given-names></name><name name-style="western"><surname>Scholten</surname><given-names>RJ</given-names></name><name name-style="western"><surname>Vink</surname><given-names>AC</given-names></name></person-group><article-title>Music-based therapeutic interventions for people with 
dementia</article-title><source>Cochrane Database Syst Rev</source><year>2018</year><month>07</month><day>23</day><volume>7</volume><issue>7</issue><fpage>CD003477</fpage><pub-id pub-id-type="doi">10.1002/14651858.CD003477.pub4</pub-id><pub-id pub-id-type="medline">30033623</pub-id></nlm-citation></ref><ref id="ref7"><label>7</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Sihvonen</surname><given-names>AJ</given-names></name><name name-style="western"><surname>S&#x00E4;rk&#x00E4;m&#x00F6;</surname><given-names>T</given-names></name><name name-style="western"><surname>Leo</surname><given-names>V</given-names></name><name name-style="western"><surname>Tervaniemi</surname><given-names>M</given-names></name><name name-style="western"><surname>Altenm&#x00FC;ller</surname><given-names>E</given-names></name><name name-style="western"><surname>Soinila</surname><given-names>S</given-names></name></person-group><article-title>Music-based interventions in neurological rehabilitation</article-title><source>Lancet Neurol</source><year>2017</year><month>08</month><volume>16</volume><issue>8</issue><fpage>648</fpage><lpage>660</lpage><pub-id pub-id-type="doi">10.1016/S1474-4422(17)30168-0</pub-id><pub-id pub-id-type="medline">28663005</pub-id></nlm-citation></ref><ref id="ref8"><label>8</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Dooley</surname><given-names>NR</given-names></name><name name-style="western"><surname>Hinojosa</surname><given-names>J</given-names></name></person-group><article-title>Improving quality of life for persons with Alzheimer&#x2019;s disease and their family caregivers: brief occupational therapy intervention</article-title><source>Am J Occup Ther</source><year>2004</year><month>09</month><volume>58</volume><issue>5</issue><fpage>561</fpage><lpage>569</lpage><pub-id 
pub-id-type="doi">10.5014/ajot.58.5.561</pub-id><pub-id pub-id-type="medline">15481783</pub-id></nlm-citation></ref><ref id="ref9"><label>9</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Gitlin</surname><given-names>LN</given-names></name><name name-style="western"><surname>Winter</surname><given-names>L</given-names></name><name name-style="western"><surname>Burke</surname><given-names>J</given-names></name><name name-style="western"><surname>Chernett</surname><given-names>N</given-names></name><name name-style="western"><surname>Dennis</surname><given-names>MP</given-names></name><name name-style="western"><surname>Hauck</surname><given-names>WW</given-names></name></person-group><article-title>Tailored activities to manage neuropsychiatric behaviors in persons with dementia and reduce caregiver burden: a randomized pilot study</article-title><source>Am J Geriatr Psychiatry</source><year>2008</year><month>03</month><volume>16</volume><issue>3</issue><fpage>229</fpage><lpage>239</lpage><pub-id pub-id-type="doi">10.1097/JGP.0b013e318160da72</pub-id><pub-id pub-id-type="medline">18310553</pub-id></nlm-citation></ref><ref id="ref10"><label>10</label><nlm-citation citation-type="other"><person-group person-group-type="author"><name name-style="western"><surname>Prince</surname><given-names>M</given-names></name><name name-style="western"><surname>Wimo</surname><given-names>A</given-names></name><name name-style="western"><surname>Guerchet</surname><given-names>M</given-names></name><name name-style="western"><surname>Ali</surname><given-names>GC</given-names></name><name name-style="western"><surname>Wu</surname><given-names>YT</given-names></name><name name-style="western"><surname>Prina</surname><given-names>M</given-names></name></person-group><article-title>World Alzheimer report 2015: the global impact of dementia, an analysis of prevalence, incidence, cost and 
trends</article-title><year>2015</year><access-date>2024-03-29</access-date><publisher-name>Alzheimer&#x2019;s Disease International</publisher-name><comment><ext-link ext-link-type="uri" xlink:href="https://www.alzint.org/u/WorldAlzheimerReport2015.pdf">https://www.alzint.org/u/WorldAlzheimerReport2015.pdf</ext-link></comment></nlm-citation></ref><ref id="ref11"><label>11</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Yu</surname><given-names>C</given-names></name><name name-style="western"><surname>Sommerlad</surname><given-names>A</given-names></name><name name-style="western"><surname>Sakure</surname><given-names>L</given-names></name><name name-style="western"><surname>Livingston</surname><given-names>G</given-names></name></person-group><article-title>Socially assistive robots for people with dementia: systematic review and meta-analysis of feasibility, acceptability and the effect on cognition, neuropsychiatric symptoms and quality of life</article-title><source>Ageing Res Rev</source><year>2022</year><month>06</month><volume>78</volume><fpage>101633</fpage><pub-id pub-id-type="doi">10.1016/j.arr.2022.101633</pub-id><pub-id pub-id-type="medline">35462001</pub-id></nlm-citation></ref><ref id="ref12"><label>12</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Leng</surname><given-names>M</given-names></name><name name-style="western"><surname>Liu</surname><given-names>P</given-names></name><name name-style="western"><surname>Zhang</surname><given-names>P</given-names></name><etal/></person-group><article-title>Pet robot intervention for people with dementia: a systematic review and meta-analysis of randomized controlled trials</article-title><source>Psychiatry Res</source><year>2019</year><month>01</month><volume>271</volume><fpage>516</fpage><lpage>525</lpage><pub-id 
pub-id-type="doi">10.1016/j.psychres.2018.12.032</pub-id><pub-id pub-id-type="medline">30553098</pub-id></nlm-citation></ref><ref id="ref13"><label>13</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Takayanagi</surname><given-names>K</given-names></name><name name-style="western"><surname>Kirita</surname><given-names>T</given-names></name><name name-style="western"><surname>Shibata</surname><given-names>T</given-names></name></person-group><article-title>Comparison of verbal and emotional responses of elderly people with mild/moderate dementia and those with severe dementia in responses to seal robot, PARO</article-title><source>Front Aging Neurosci</source><year>2014</year><month>09</month><volume>6</volume><fpage>257</fpage><pub-id pub-id-type="doi">10.3389/fnagi.2014.00257</pub-id><pub-id pub-id-type="medline">25309434</pub-id></nlm-citation></ref><ref id="ref14"><label>14</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Banks</surname><given-names>MR</given-names></name><name name-style="western"><surname>Willoughby</surname><given-names>LM</given-names></name><name name-style="western"><surname>Banks</surname><given-names>WA</given-names></name></person-group><article-title>Animal-assisted therapy and loneliness in nursing homes: use of robotic versus living dogs</article-title><source>J Am Med Dir Assoc</source><year>2008</year><month>03</month><volume>9</volume><issue>3</issue><fpage>173</fpage><lpage>177</lpage><pub-id pub-id-type="doi">10.1016/j.jamda.2007.11.007</pub-id><pub-id pub-id-type="medline">18294600</pub-id></nlm-citation></ref><ref id="ref15"><label>15</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Feil-Seifer</surname><given-names>D</given-names></name><name 
name-style="western"><surname>Mataric</surname><given-names>MJ</given-names></name></person-group><article-title>Defining socially assistive robotics</article-title><conf-name>9th International Conference on Rehabilitation Robotics, 2005. ICORR 2005</conf-name><conf-date>Jun 28 to Jul 1, 2005</conf-date><conf-loc>Chicago, IL</conf-loc><fpage>465</fpage><lpage>468</lpage><pub-id pub-id-type="doi">10.1109/ICORR.2005.1501143</pub-id></nlm-citation></ref><ref id="ref16"><label>16</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Kachouie</surname><given-names>R</given-names></name><name name-style="western"><surname>Sedighadeli</surname><given-names>S</given-names></name><name name-style="western"><surname>Khosla</surname><given-names>R</given-names></name><name name-style="western"><surname>Chu</surname><given-names>MT</given-names></name></person-group><article-title>Socially assistive robots in elderly care: a mixed-method systematic literature review</article-title><source>Int J Hum Comput Interact</source><year>2014</year><month>05</month><day>4</day><volume>30</volume><issue>5</issue><fpage>369</fpage><lpage>393</lpage><pub-id pub-id-type="doi">10.1080/10447318.2013.873278</pub-id></nlm-citation></ref><ref id="ref17"><label>17</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Matari&#x0107;</surname><given-names>MJ</given-names></name></person-group><article-title>Socially assistive robotics: human augmentation versus automation</article-title><source>Sci Robot</source><year>2017</year><month>03</month><day>15</day><volume>2</volume><issue>4</issue><fpage>eaam5410</fpage><pub-id pub-id-type="doi">10.1126/scirobotics.aam5410</pub-id><pub-id pub-id-type="medline">33157869</pub-id></nlm-citation></ref><ref id="ref18"><label>18</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name 
name-style="western"><surname>Hung</surname><given-names>L</given-names></name><name name-style="western"><surname>Liu</surname><given-names>C</given-names></name><name name-style="western"><surname>Woldum</surname><given-names>E</given-names></name><etal/></person-group><article-title>The benefits of and barriers to using a social robot PARO in care settings: a scoping review</article-title><source>BMC Geriatr</source><year>2019</year><month>08</month><day>23</day><volume>19</volume><issue>1</issue><fpage>232</fpage><pub-id pub-id-type="doi">10.1186/s12877-019-1244-6</pub-id><pub-id pub-id-type="medline">31443636</pub-id></nlm-citation></ref><ref id="ref19"><label>19</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Goda</surname><given-names>A</given-names></name><name name-style="western"><surname>Shimura</surname><given-names>T</given-names></name><name name-style="western"><surname>Murata</surname><given-names>S</given-names></name><name name-style="western"><surname>Kodama</surname><given-names>T</given-names></name><name name-style="western"><surname>Nakano</surname><given-names>H</given-names></name><name name-style="western"><surname>Ohsugi</surname><given-names>H</given-names></name></person-group><article-title>Psychological and neurophysiological effects of robot assisted activity in elderly people with cognitive decline</article-title><source>Gerontol Geriatr Med</source><year>2020</year><month>01</month><volume>6</volume><fpage>2333721420969601</fpage><pub-id pub-id-type="doi">10.1177/2333721420969601</pub-id><pub-id pub-id-type="medline">33241078</pub-id></nlm-citation></ref><ref id="ref20"><label>20</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Smedegaard</surname><given-names>CV</given-names></name></person-group><article-title>Why a proper investigation of novelty effects within SHRI should begin by 
addressing the scientific plurality of the field</article-title><source>Front Robot AI</source><year>2022</year><month>05</month><volume>9</volume><fpage>741478</fpage><pub-id pub-id-type="doi">10.3389/frobt.2022.741478</pub-id><pub-id pub-id-type="medline">35719207</pub-id></nlm-citation></ref><ref id="ref21"><label>21</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Tun</surname><given-names>PA</given-names></name><name name-style="western"><surname>McCoy</surname><given-names>S</given-names></name><name name-style="western"><surname>Wingfield</surname><given-names>A</given-names></name></person-group><article-title>Aging, hearing acuity, and the attentional costs of effortful listening</article-title><source>Psychol Aging</source><year>2009</year><month>09</month><volume>24</volume><issue>3</issue><fpage>761</fpage><lpage>766</lpage><pub-id pub-id-type="doi">10.1037/a0014802</pub-id><pub-id pub-id-type="medline">19739934</pub-id></nlm-citation></ref><ref id="ref22"><label>22</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Sharp</surname><given-names>BK</given-names></name></person-group><article-title>Stress as experienced by people with dementia: an interpretative phenomenological analysis</article-title><source>Dementia (London)</source><year>2019</year><month>05</month><volume>18</volume><issue>4</issue><fpage>1427</fpage><lpage>1445</lpage><pub-id pub-id-type="doi">10.1177/1471301217713877</pub-id><pub-id pub-id-type="medline">28599594</pub-id></nlm-citation></ref><ref id="ref23"><label>23</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Welland</surname><given-names>RJ</given-names></name><name name-style="western"><surname>Lubinski</surname><given-names>R</given-names></name><name 
name-style="western"><surname>Higginbotham</surname><given-names>DJ</given-names></name></person-group><article-title>Discourse comprehension test performance of elders with dementia of the Alzheimer type</article-title><source>J Speech Lang Hear Res</source><year>2002</year><month>12</month><volume>45</volume><issue>6</issue><fpage>1175</fpage><lpage>1187</lpage><pub-id pub-id-type="doi">10.1044/1092-4388(2002/095)</pub-id><pub-id pub-id-type="medline">12546486</pub-id></nlm-citation></ref><ref id="ref24"><label>24</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Ripich</surname><given-names>DN</given-names></name><name name-style="western"><surname>Carpenter</surname><given-names>BD</given-names></name><name name-style="western"><surname>Ziol</surname><given-names>EW</given-names></name></person-group><article-title>Conversational cohesion patterns in men and women with Alzheimer&#x2019;s disease: a longitudinal study</article-title><source>Int J Lang Commun Disord</source><year>2000</year><month>01</month><volume>35</volume><issue>1</issue><fpage>49</fpage><lpage>64</lpage><pub-id pub-id-type="doi">10.1080/136828200247241</pub-id><pub-id pub-id-type="medline">10824224</pub-id></nlm-citation></ref><ref id="ref25"><label>25</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Ellgring</surname><given-names>H</given-names></name></person-group><article-title>Facial expression as a behavioral indicator of emotional states</article-title><source>Pharmacopsychiatry</source><year>1989</year><month>02</month><volume>22</volume><issue>Suppl 1</issue><fpage>23</fpage><lpage>28</lpage><pub-id pub-id-type="doi">10.1055/s-2007-1014620</pub-id><pub-id pub-id-type="medline">2654968</pub-id></nlm-citation></ref><ref id="ref26"><label>26</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name 
name-style="western"><surname>Ekman</surname><given-names>P</given-names></name><name name-style="western"><surname>Cordaro</surname><given-names>D</given-names></name></person-group><article-title>What is meant by calling emotions basic</article-title><source>Emotion Review</source><year>2011</year><month>10</month><volume>3</volume><issue>4</issue><fpage>364</fpage><lpage>370</lpage><pub-id pub-id-type="doi">10.1177/1754073911410740</pub-id></nlm-citation></ref><ref id="ref27"><label>27</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Lewinski</surname><given-names>P</given-names></name><name name-style="western"><surname>den Uyl</surname><given-names>TM</given-names></name><name name-style="western"><surname>Butler</surname><given-names>C</given-names></name></person-group><article-title>Automated facial coding: validation of basic emotions and FACS AUs in FaceReader</article-title><source>J Neurosci Psychol Econ</source><year>2014</year><month>12</month><volume>7</volume><issue>4</issue><fpage>227</fpage><lpage>236</lpage><pub-id pub-id-type="doi">10.1037/npe0000028</pub-id></nlm-citation></ref><ref id="ref28"><label>28</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Kulke</surname><given-names>L</given-names></name><name name-style="western"><surname>Feyerabend</surname><given-names>D</given-names></name><name name-style="western"><surname>Schacht</surname><given-names>A</given-names></name></person-group><article-title>A comparison of the Affectiva iMotions facial expression analysis software with EMG for identifying facial expressions of emotion</article-title><source>Front Psychol</source><year>2020</year><month>02</month><volume>11</volume><fpage>329</fpage><pub-id pub-id-type="doi">10.3389/fpsyg.2020.00329</pub-id><pub-id pub-id-type="medline">32184749</pub-id></nlm-citation></ref><ref 
id="ref29"><label>29</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Hughes</surname><given-names>CP</given-names></name><name name-style="western"><surname>Berg</surname><given-names>L</given-names></name><name name-style="western"><surname>Danziger</surname><given-names>WL</given-names></name><name name-style="western"><surname>Coben</surname><given-names>LA</given-names></name><name name-style="western"><surname>Martin</surname><given-names>RL</given-names></name></person-group><article-title>A new clinical scale for the staging of dementia</article-title><source>Br J Psychiatry</source><year>1982</year><month>06</month><volume>140</volume><fpage>566</fpage><lpage>572</lpage><pub-id pub-id-type="doi">10.1192/bjp.140.6.566</pub-id><pub-id pub-id-type="medline">7104545</pub-id></nlm-citation></ref><ref id="ref30"><label>30</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Ekman</surname><given-names>P</given-names></name><name name-style="western"><surname>Friesen</surname><given-names>WV</given-names></name></person-group><article-title>Measuring facial movement</article-title><source>J Nonverbal Behav</source><year>1976</year><volume>1</volume><issue>1</issue><fpage>56</fpage><lpage>75</lpage><pub-id pub-id-type="doi">10.1007/BF01115465</pub-id></nlm-citation></ref><ref id="ref31"><label>31</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Skiendziel</surname><given-names>T</given-names></name><name name-style="western"><surname>R&#x00F6;sch</surname><given-names>AG</given-names></name><name name-style="western"><surname>Schultheiss</surname><given-names>OC</given-names></name></person-group><article-title>Assessing the convergent validity between the automated emotion recognition software Noldus FaceReader 7 and Facial Action Coding System 
scoring</article-title><source>PLoS One</source><year>2019</year><month>10</month><volume>14</volume><issue>10</issue><fpage>e0223905</fpage><pub-id pub-id-type="doi">10.1371/journal.pone.0223905</pub-id><pub-id pub-id-type="medline">31622426</pub-id></nlm-citation></ref><ref id="ref32"><label>32</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Cohen</surname><given-names>AS</given-names></name><name name-style="western"><surname>Morrison</surname><given-names>SC</given-names></name><name name-style="western"><surname>Callaway</surname><given-names>DA</given-names></name></person-group><article-title>Computerized facial analysis for understanding constricted/blunted affect: initial feasibility, reliability, and validity data</article-title><source>Schizophr Res</source><year>2013</year><month>08</month><volume>148</volume><issue>1-3</issue><fpage>111</fpage><lpage>116</lpage><pub-id pub-id-type="doi">10.1016/j.schres.2013.05.003</pub-id><pub-id pub-id-type="medline">23726720</pub-id></nlm-citation></ref><ref id="ref33"><label>33</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Obayashi</surname><given-names>Y</given-names></name><name name-style="western"><surname>Uehara</surname><given-names>S</given-names></name><name name-style="western"><surname>Kokuwa</surname><given-names>R</given-names></name><name name-style="western"><surname>Otaka</surname><given-names>Y</given-names></name></person-group><article-title>Quantitative evaluation of facial expression in a patient with minimally conscious state after severe traumatic brain injury</article-title><source>J Head Trauma Rehabil</source><year>2021</year><month>09</month><volume>36</volume><issue>5</issue><fpage>E337</fpage><lpage>E344</lpage><pub-id pub-id-type="doi">10.1097/HTR.0000000000000666</pub-id><pub-id 
pub-id-type="medline">33741824</pub-id></nlm-citation></ref><ref id="ref34"><label>34</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Owada</surname><given-names>K</given-names></name><name name-style="western"><surname>Kojima</surname><given-names>M</given-names></name><name name-style="western"><surname>Yassin</surname><given-names>W</given-names></name><etal/></person-group><article-title>Computer-analyzed facial expression as a surrogate marker for autism spectrum social core symptoms</article-title><source>PLoS One</source><year>2018</year><month>01</month><volume>13</volume><issue>1</issue><fpage>e0190442</fpage><pub-id pub-id-type="doi">10.1371/journal.pone.0190442</pub-id><pub-id pub-id-type="medline">29293598</pub-id></nlm-citation></ref><ref id="ref35"><label>35</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Sugishita</surname><given-names>M</given-names></name><name name-style="western"><surname>Hemmi</surname><given-names>I</given-names></name><name name-style="western"><surname>Takeuchi</surname><given-names>T</given-names></name></person-group><article-title>Reexamination of the validity and reliability of the Japanese version of the Mini-Mental State Examination (MMSE-J)</article-title><source>Japanese J Cognit Neurosci</source><year>2016</year><volume>18</volume><issue>3+4</issue><fpage>168</fpage><lpage>183</lpage><pub-id pub-id-type="doi">10.11253/ninchishinkeikagaku.18.168</pub-id></nlm-citation></ref><ref id="ref36"><label>36</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Fujiwara</surname><given-names>Y</given-names></name><name name-style="western"><surname>Suzuki</surname><given-names>H</given-names></name><name 
name-style="western"><surname>Yasunaga</surname><given-names>M</given-names></name><etal/></person-group><article-title>Brief screening tool for mild cognitive impairment in older Japanese: validation of the Japanese version of the Montreal Cognitive Assessment</article-title><source>Geriatr Gerontol Int</source><year>2010</year><month>07</month><volume>10</volume><issue>3</issue><fpage>225</fpage><lpage>232</lpage><pub-id pub-id-type="doi">10.1111/j.1447-0594.2010.00585.x</pub-id><pub-id pub-id-type="medline">20141536</pub-id></nlm-citation></ref><ref id="ref37"><label>37</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Baumgarten</surname><given-names>M</given-names></name><name name-style="western"><surname>Becker</surname><given-names>R</given-names></name><name name-style="western"><surname>Gauthier</surname><given-names>S</given-names></name></person-group><article-title>Validity and reliability of the Dementia Behavior Disturbance Scale</article-title><source>J Am Geriatr Soc</source><year>1990</year><month>03</month><volume>38</volume><issue>3</issue><fpage>221</fpage><lpage>226</lpage><pub-id pub-id-type="doi">10.1111/j.1532-5415.1990.tb03495.x</pub-id><pub-id pub-id-type="medline">2313003</pub-id></nlm-citation></ref><ref id="ref38"><label>38</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Mizoguchi</surname><given-names>T</given-names></name><name name-style="western"><surname>Iijima</surname><given-names>S</given-names></name><name name-style="western"><surname>Eto</surname><given-names>F</given-names></name><name name-style="western"><surname>Ishizuka</surname><given-names>A</given-names></name><name name-style="western"><surname>Orimo</surname><given-names>H</given-names></name></person-group><article-title>Reliability and validity of a Japanese version of the Dementia Behavior Disturbance 
Scale</article-title><source>Nihon Ronen Igakkai Zasshi</source><year>1993</year><month>10</month><volume>30</volume><issue>10</issue><fpage>835</fpage><lpage>840</lpage><pub-id pub-id-type="doi">10.3143/geriatrics.30.835</pub-id><pub-id pub-id-type="medline">8301854</pub-id></nlm-citation></ref><ref id="ref39"><label>39</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Suzuki</surname><given-names>K</given-names></name><name name-style="western"><surname>Okamoto</surname><given-names>M</given-names></name><name name-style="western"><surname>Suzuki</surname><given-names>M</given-names></name><etal/></person-group><article-title>A study on the application of &#x201C;the Questionnaire on Hearing 2002&#x201D; as a tool for subjective validation of hearing aid fitting</article-title><source>Audiology Japan</source><year>2009</year><month>01</month><volume>52</volume><issue>6</issue><fpage>588</fpage><lpage>595</lpage><pub-id pub-id-type="doi">10.4295/audiology.52.588</pub-id></nlm-citation></ref><ref id="ref40"><label>40</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Liu</surname><given-names>Y</given-names></name><name name-style="western"><surname>Wang</surname><given-names>Z</given-names></name><name name-style="western"><surname>Yu</surname><given-names>G</given-names></name></person-group><article-title>The effectiveness of facial expression recognition in detecting emotional responses to sound interventions in older adults with dementia</article-title><source>Front Psychol</source><year>2021</year><volume>12</volume><fpage>707809</fpage><pub-id pub-id-type="doi">10.3389/fpsyg.2021.707809</pub-id><pub-id pub-id-type="medline">34512466</pub-id></nlm-citation></ref><ref id="ref41"><label>41</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name 
name-style="western"><surname>Atee</surname><given-names>M</given-names></name><name name-style="western"><surname>Hoti</surname><given-names>K</given-names></name><name name-style="western"><surname>Parsons</surname><given-names>R</given-names></name><name name-style="western"><surname>Hughes</surname><given-names>JD</given-names></name></person-group><article-title>A novel pain assessment tool incorporating automated facial analysis: interrater reliability in advanced dementia</article-title><source>Clin Interv Aging</source><year>2018</year><month>07</month><volume>13</volume><fpage>1245</fpage><lpage>1258</lpage><pub-id pub-id-type="doi">10.2147/CIA.S168024</pub-id><pub-id pub-id-type="medline">30038491</pub-id></nlm-citation></ref><ref id="ref42"><label>42</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Liang</surname><given-names>A</given-names></name><name name-style="western"><surname>Piroth</surname><given-names>I</given-names></name><name name-style="western"><surname>Robinson</surname><given-names>H</given-names></name><etal/></person-group><article-title>A pilot randomized trial of a companion robot for people with dementia living in the community</article-title><source>J Am Med Dir Assoc</source><year>2017</year><month>10</month><day>1</day><volume>18</volume><issue>10</issue><fpage>871</fpage><lpage>878</lpage><pub-id pub-id-type="doi">10.1016/j.jamda.2017.05.019</pub-id><pub-id pub-id-type="medline">28668664</pub-id></nlm-citation></ref><ref id="ref43"><label>43</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Inoue</surname><given-names>T</given-names></name><name name-style="western"><surname>Nihei</surname><given-names>M</given-names></name><name name-style="western"><surname>Narita</surname><given-names>T</given-names></name><etal/></person-group><article-title>Field-based development of an information 
support robot for persons with dementia</article-title><source>Technol Disabil</source><year>2012</year><month>12</month><volume>24</volume><issue>4</issue><fpage>263</fpage><lpage>271</lpage><pub-id pub-id-type="doi">10.3233/TAD-120357</pub-id></nlm-citation></ref><ref id="ref44"><label>44</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Sato</surname><given-names>W</given-names></name><name name-style="western"><surname>Hyniewska</surname><given-names>S</given-names></name><name name-style="western"><surname>Minemoto</surname><given-names>K</given-names></name><name name-style="western"><surname>Yoshikawa</surname><given-names>S</given-names></name></person-group><article-title>Facial expressions of basic emotions in Japanese laypeople</article-title><source>Front Psychol</source><year>2019</year><month>02</month><volume>10</volume><fpage>259</fpage><pub-id pub-id-type="doi">10.3389/fpsyg.2019.00259</pub-id><pub-id pub-id-type="medline">30809180</pub-id></nlm-citation></ref><ref id="ref45"><label>45</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Koelsch</surname><given-names>S</given-names></name><name name-style="western"><surname>Cheung</surname><given-names>VKM</given-names></name><name name-style="western"><surname>Jentschke</surname><given-names>S</given-names></name><name name-style="western"><surname>Haynes</surname><given-names>JD</given-names></name></person-group><article-title>Neocortical substrates of feelings evoked with music in the ACC, insula, and somatosensory cortex</article-title><source>Sci Rep</source><year>2021</year><month>05</month><day>12</day><volume>11</volume><issue>1</issue><fpage>10119</fpage><pub-id pub-id-type="doi">10.1038/s41598-021-89405-y</pub-id><pub-id pub-id-type="medline">33980876</pub-id></nlm-citation></ref><ref id="ref46"><label>46</label><nlm-citation 
citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Olausson</surname><given-names>H</given-names></name><name name-style="western"><surname>Lamarre</surname><given-names>Y</given-names></name><name name-style="western"><surname>Backlund</surname><given-names>H</given-names></name><etal/></person-group><article-title>Unmyelinated tactile afferents signal touch and project to insular cortex</article-title><source>Nat Neurosci</source><year>2002</year><month>09</month><volume>5</volume><issue>9</issue><fpage>900</fpage><lpage>904</lpage><pub-id pub-id-type="doi">10.1038/nn896</pub-id><pub-id pub-id-type="medline">12145636</pub-id></nlm-citation></ref><ref id="ref47"><label>47</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Dixon</surname><given-names>E</given-names></name><name name-style="western"><surname>Lazar</surname><given-names>A</given-names></name></person-group><article-title>The role of sensory changes in everyday technology use by people with mild to moderate dementia</article-title><source>ASSETS</source><year>2020</year><month>10</month><volume>2020</volume><fpage>41</fpage><pub-id pub-id-type="doi">10.1145/3373625.3417000</pub-id><pub-id pub-id-type="medline">34308427</pub-id></nlm-citation></ref><ref id="ref48"><label>48</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Che Me</surname><given-names>R</given-names></name><name name-style="western"><surname>Biamonti</surname><given-names>A</given-names></name><name name-style="western"><surname>Mohd Saad</surname><given-names>MR</given-names></name></person-group><article-title>Conceptual design of haptic-feedback navigation device for individuals with Alzheimer's disease</article-title><source>Stud Health Technol Inform</source><year>2015</year><volume>217</volume><fpage>195</fpage><lpage>203</lpage><pub-id 
pub-id-type="doi">10.3233/978-1-61499-566-1-195</pub-id><pub-id pub-id-type="medline">26294473</pub-id></nlm-citation></ref><ref id="ref49"><label>49</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Livingston</surname><given-names>G</given-names></name><name name-style="western"><surname>Huntley</surname><given-names>J</given-names></name><name name-style="western"><surname>Sommerlad</surname><given-names>A</given-names></name><etal/></person-group><article-title>Dementia prevention, intervention, and care: 2020 report of the Lancet Commission</article-title><source>Lancet</source><year>2020</year><month>08</month><day>8</day><volume>396</volume><issue>10248</issue><fpage>413</fpage><lpage>446</lpage><pub-id pub-id-type="doi">10.1016/S0140-6736(20)30367-6</pub-id><pub-id pub-id-type="medline">32738937</pub-id></nlm-citation></ref><ref id="ref50"><label>50</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Mamo</surname><given-names>SK</given-names></name><name name-style="western"><surname>Nirmalasari</surname><given-names>O</given-names></name><name name-style="western"><surname>Nieman</surname><given-names>CL</given-names></name><etal/></person-group><article-title>Hearing care intervention for persons with dementia: a pilot study</article-title><source>Am J Geriatr Psychiatry</source><year>2017</year><month>01</month><volume>25</volume><issue>1</issue><fpage>91</fpage><lpage>101</lpage><pub-id pub-id-type="doi">10.1016/j.jagp.2016.08.019</pub-id><pub-id pub-id-type="medline">27890543</pub-id></nlm-citation></ref><ref id="ref51"><label>51</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Nguyen</surname><given-names>MF</given-names></name><name name-style="western"><surname>Bonnefoy</surname><given-names>M</given-names></name><name 
name-style="western"><surname>Adrait</surname><given-names>A</given-names></name><etal/></person-group><article-title>Efficacy of hearing aids on the cognitive status of patients with Alzheimer&#x2019;s disease and hearing loss: a multicenter controlled randomized trial</article-title><source>J Alzheimers Dis</source><year>2017</year><month>04</month><volume>58</volume><issue>1</issue><fpage>123</fpage><lpage>137</lpage><pub-id pub-id-type="doi">10.3233/JAD-160793</pub-id><pub-id pub-id-type="medline">28387664</pub-id></nlm-citation></ref><ref id="ref52"><label>52</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Kim</surname><given-names>S</given-names></name><name name-style="western"><surname>Laschi</surname><given-names>C</given-names></name><name name-style="western"><surname>Trimmer</surname><given-names>B</given-names></name></person-group><article-title>Soft robotics: a bioinspired evolution in robotics</article-title><source>Trends Biotechnol</source><year>2013</year><month>05</month><volume>31</volume><issue>5</issue><fpage>287</fpage><lpage>294</lpage><pub-id pub-id-type="doi">10.1016/j.tibtech.2013.03.002</pub-id><pub-id pub-id-type="medline">23582470</pub-id></nlm-citation></ref><ref id="ref53"><label>53</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Trimmer</surname><given-names>B</given-names></name></person-group><article-title>Soft robots</article-title><source>Curr Biol</source><year>2013</year><month>08</month><day>5</day><volume>23</volume><issue>15</issue><fpage>R639</fpage><lpage>R641</lpage><pub-id pub-id-type="doi">10.1016/j.cub.2013.04.070</pub-id><pub-id pub-id-type="medline">23928077</pub-id></nlm-citation></ref><ref id="ref54"><label>54</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name 
name-style="western"><surname>Cabibihan</surname><given-names>JJ</given-names></name><name name-style="western"><surname>Joshi</surname><given-names>D</given-names></name><name name-style="western"><surname>Srinivasa</surname><given-names>YM</given-names></name><name name-style="western"><surname>Chan</surname><given-names>MA</given-names></name><name name-style="western"><surname>Muruganantham</surname><given-names>A</given-names></name></person-group><article-title>Illusory sense of human touch from a warm and soft artificial hand</article-title><source>IEEE Trans Neural Syst Rehabil Eng</source><year>2015</year><month>05</month><volume>23</volume><issue>3</issue><fpage>517</fpage><lpage>527</lpage><pub-id pub-id-type="doi">10.1109/TNSRE.2014.2360533</pub-id><pub-id pub-id-type="medline">25291795</pub-id></nlm-citation></ref><ref id="ref55"><label>55</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Kress</surname><given-names>IU</given-names></name><name name-style="western"><surname>Minati</surname><given-names>L</given-names></name><name name-style="western"><surname>Ferraro</surname><given-names>S</given-names></name><name name-style="western"><surname>Critchley</surname><given-names>HD</given-names></name></person-group><article-title>Direct skin-to-skin versus indirect touch modulates neural responses to stroking versus tapping</article-title><source>Neuroreport</source><year>2011</year><month>09</month><day>14</day><volume>22</volume><issue>13</issue><fpage>646</fpage><lpage>651</lpage><pub-id pub-id-type="doi">10.1097/WNR.0b013e328349d166</pub-id><pub-id pub-id-type="medline">21817928</pub-id></nlm-citation></ref><ref id="ref56"><label>56</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Meng</surname><given-names>Q</given-names></name><name 
name-style="western"><surname>Hu</surname><given-names>X</given-names></name><name name-style="western"><surname>Kang</surname><given-names>J</given-names></name><name name-style="western"><surname>Wu</surname><given-names>Y</given-names></name></person-group><article-title>On the effectiveness of facial expression recognition for evaluation of urban sound perception</article-title><source>Sci Total Environ</source><year>2020</year><month>03</month><day>25</day><volume>710</volume><fpage>135484</fpage><pub-id pub-id-type="doi">10.1016/j.scitotenv.2019.135484</pub-id><pub-id pub-id-type="medline">31780160</pub-id></nlm-citation></ref><ref id="ref57"><label>57</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Safdar</surname><given-names>S</given-names></name><name name-style="western"><surname>Friedlmeier</surname><given-names>W</given-names></name><name name-style="western"><surname>Matsumoto</surname><given-names>D</given-names></name><etal/></person-group><article-title>Variations of emotional display rules within and across cultures: a comparison between Canada, USA, and Japan</article-title><source>Can J Behav Sci</source><year>2009</year><volume>41</volume><issue>1</issue><fpage>1</fpage><lpage>10</lpage><pub-id pub-id-type="doi">10.1037/a0014387</pub-id></nlm-citation></ref><ref id="ref58"><label>58</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Masumoto</surname><given-names>K</given-names></name><name name-style="western"><surname>Taishi</surname><given-names>N</given-names></name><name name-style="western"><surname>Shiozaki</surname><given-names>M</given-names></name></person-group><article-title>Age and gender differences in relationships among emotion regulation, mood, and mental health</article-title><source>Gerontol Geriatr Med</source><year>2016</year><volume>2</volume><fpage>2333721416637022</fpage><pub-id 
pub-id-type="doi">10.1177/2333721416637022</pub-id><pub-id pub-id-type="medline">28138490</pub-id></nlm-citation></ref></ref-list></back></article>