<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.0 20040830//EN" "http://dtd.nlm.nih.gov/publishing/2.0/journalpublishing.dtd">
<article article-type="research-article" dtd-version="2.0" xmlns:xlink="http://www.w3.org/1999/xlink">
  <front>
    <journal-meta>
      <journal-id journal-id-type="publisher-id">JMI</journal-id>
      <journal-id journal-id-type="nlm-ta">JMIR Med Inform</journal-id>
      <journal-title>JMIR Medical Informatics</journal-title>
      <issn pub-type="epub">2291-9694</issn>
      <publisher>
        <publisher-name>JMIR Publications</publisher-name>
        <publisher-loc>Toronto, Canada</publisher-loc>
      </publisher>
    </journal-meta>
    <article-meta>
      <article-id pub-id-type="publisher-id">v12i1e57097</article-id>
      <article-id pub-id-type="pmid">39121473</article-id>
      <article-id pub-id-type="doi">10.2196/57097</article-id>
      <article-categories>
        <subj-group subj-group-type="heading">
          <subject>Original Paper</subject>
        </subj-group>
        <subj-group subj-group-type="article-type">
          <subject>Original Paper</subject>
        </subj-group>
      </article-categories>
      <title-group>
        <article-title>Recognition of Daily Activities in Adults With Wearable Inertial Sensors: Deep Learning Methods Study</article-title>
      </title-group>
      <contrib-group>
        <contrib contrib-type="editor">
          <name>
            <surname>Lovis</surname>
            <given-names>Christian</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Poulose</surname>
            <given-names>Alwin</given-names>
          </name>
        </contrib>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Maximiano</surname>
            <given-names>Marisa</given-names>
          </name>
        </contrib>
        <contrib contrib-type="reviewer">
          <name>
            <surname>Colado Sanchez</surname>
            <given-names>Juan</given-names>
          </name>
        </contrib>
      </contrib-group>
      <contrib-group>
        <contrib id="contrib1" contrib-type="author">
          <name name-style="western">
            <surname>De Ramón Fernández</surname>
            <given-names>Alberto</given-names>
          </name>
          <degrees>PhD</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-0401-670X</ext-link>
        </contrib>
        <contrib id="contrib2" contrib-type="author" corresp="yes">
          <name name-style="western">
            <surname>Ruiz Fernández</surname>
            <given-names>Daniel</given-names>
          </name>
          <degrees>PhD</degrees>
          <xref rid="aff1" ref-type="aff">1</xref>
          <address>
            <institution>Department of Computer Technology</institution>
            <institution>University of Alicante</institution>
            <addr-line>Carretera San Vicente del Raspeig s/n</addr-line>
            <addr-line>San Vicente del Raspeig, 03690</addr-line>
            <country>Spain</country>
            <phone>34 965 90 9656 ext 3331</phone>
            <email>druiz@dtic.ua.es</email>
          </address>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-8919-8863</ext-link>
        </contrib>
        <contrib id="contrib3" contrib-type="author">
          <name name-style="western">
            <surname>García Jaén</surname>
            <given-names>Miguel</given-names>
          </name>
          <degrees>PhD</degrees>
          <xref rid="aff2" ref-type="aff">2</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0002-4658-1432</ext-link>
        </contrib>
        <contrib id="contrib4" contrib-type="author">
          <name name-style="western">
            <surname>Cortell-Tormo</surname>
            <given-names>Juan M.</given-names>
          </name>
          <degrees>PhD</degrees>
          <xref rid="aff2" ref-type="aff">2</xref>
          <ext-link ext-link-type="orcid">https://orcid.org/0000-0001-7818-8806</ext-link>
        </contrib>
      </contrib-group>
      <aff id="aff1">
        <label>1</label>
        <institution>Department of Computer Technology</institution>
        <institution>University of Alicante</institution>
        <addr-line>San Vicente del Raspeig</addr-line>
        <country>Spain</country>
      </aff>
      <aff id="aff2">
        <label>2</label>
        <institution>Department of General Didactics and Specific Didactics</institution>
        <institution>University of Alicante</institution>
        <addr-line>San Vicente del Raspeig</addr-line>
        <country>Spain</country>
      </aff>
      <author-notes>
        <corresp>Corresponding Author: Daniel Ruiz Fernández <email>druiz@dtic.ua.es</email></corresp>
      </author-notes>
      <pub-date pub-type="collection">
        <year>2024</year>
      </pub-date>
      <pub-date pub-type="epub">
        <day>9</day>
        <month>8</month>
        <year>2024</year>
      </pub-date>
      <volume>12</volume>
      <elocation-id>e57097</elocation-id>
      <history>
        <date date-type="received">
          <day>5</day>
          <month>2</month>
          <year>2024</year>
        </date>
        <date date-type="rev-request">
          <day>1</day>
          <month>3</month>
          <year>2024</year>
        </date>
        <date date-type="rev-recd">
          <day>27</day>
          <month>3</month>
          <year>2024</year>
        </date>
        <date date-type="accepted">
          <day>30</day>
          <month>6</month>
          <year>2024</year>
        </date>
      </history>
      <copyright-statement>©Alberto De Ramón Fernández, Daniel Ruiz Fernández, Miguel García Jaén, Juan M. Cortell-Tormo. Originally published in JMIR Medical Informatics (https://medinform.jmir.org), 09.08.2024.</copyright-statement>
      <copyright-year>2024</copyright-year>
      <license license-type="open-access" xlink:href="https://creativecommons.org/licenses/by/4.0/">
        <p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (https://creativecommons.org/licenses/by/4.0/), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work, first published in JMIR Medical Informatics, is properly cited. The complete bibliographic information, a link to the original publication on https://medinform.jmir.org/, as well as this copyright and license information must be included.</p>
      </license>
      <self-uri xlink:href="https://medinform.jmir.org/2024/1/e57097" xlink:type="simple"/>
      <abstract>
        <sec sec-type="background">
          <title>Background</title>
          <p>Activities of daily living (ADL) are essential for independence and personal well-being, reflecting an individual’s functional status. Impairment in executing these tasks can limit autonomy and negatively affect quality of life. The assessment of physical function during ADL is crucial for the prevention and rehabilitation of movement limitations. Still, its traditional evaluation based on subjective observation has limitations in precision and objectivity.</p>
        </sec>
        <sec sec-type="objective">
          <title>Objective</title>
          <p>The primary objective of this study is to use innovative technology, specifically wearable inertial sensors combined with artificial intelligence techniques, to objectively and accurately evaluate human performance in ADL. It is proposed to overcome the limitations of traditional methods by implementing systems that allow dynamic and noninvasive monitoring of movements during daily activities. The approach seeks to provide an effective tool for the early detection of dysfunctions and the personalization of treatment and rehabilitation plans, thus promoting an improvement in the quality of life of individuals.</p>
        </sec>
        <sec sec-type="methods">
          <title>Methods</title>
          <p>To monitor movements, wearable inertial sensors were developed, which include accelerometers and triaxial gyroscopes. The developed sensors were used to create a proprietary database with 6 movements related to the shoulder and 3 related to the back. We registered 53,165 activity records in the database (consisting of accelerometer and gyroscope measurements), which were reduced to 52,600 after processing to remove null or abnormal values. Finally, 4 deep learning (DL) models were created by combining various processing layers to explore different approaches in ADL recognition.</p>
        </sec>
        <sec sec-type="results">
          <title>Results</title>
          <p>The results revealed high performance of the 4 proposed models, with levels of accuracy, precision, recall, and <italic>F</italic><sub>1</sub>-score ranging between 95% and 97% for all classes and an average loss of 0.10. These results indicate the great capacity of the models to accurately identify a variety of activities, with a good balance between precision and recall. Both the convolutional and bidirectional approaches achieved slightly superior results, although the bidirectional model reached convergence in a smaller number of epochs.</p>
        </sec>
        <sec sec-type="conclusions">
          <title>Conclusions</title>
          <p>The DL models implemented have demonstrated solid performance, indicating an effective ability to identify and classify various daily activities related to the shoulder and lumbar region. These results were achieved with minimal sensorization—being noninvasive and practically imperceptible to the user—which does not affect their daily routine and promotes acceptance and adherence to continuous monitoring, thus improving the reliability of the data collected. This research has the potential to have a significant impact on the clinical evaluation and rehabilitation of patients with movement limitations, by providing an objective and advanced tool to detect key movement patterns and joint dysfunctions.</p>
        </sec>
      </abstract>
      <kwd-group>
        <kwd>activities of daily living</kwd>
        <kwd>ADL</kwd>
        <kwd>ADLs</kwd>
        <kwd>deep learning</kwd>
        <kwd>deep learning models</kwd>
        <kwd>wearable inertial sensors</kwd>
        <kwd>clinical evaluation</kwd>
        <kwd>patient’s rehabilitation</kwd>
        <kwd>rehabilitation</kwd>
        <kwd>movement</kwd>
        <kwd>accelerometers</kwd>
        <kwd>accelerometer</kwd>
        <kwd>accelerometry</kwd>
        <kwd>wearable</kwd>
        <kwd>wearables</kwd>
        <kwd>sensor</kwd>
        <kwd>sensors</kwd>
        <kwd>gyroscopes</kwd>
        <kwd>gyroscope</kwd>
        <kwd>monitor</kwd>
        <kwd>monitoring</kwd>
      </kwd-group>
    </article-meta>
  </front>
  <body>
    <sec sec-type="introduction">
      <title>Introduction</title>
      <p>Activities of daily living (ADL) are the most basic tasks of the person, as they enable them to function with a minimum of autonomy. ADL are crucial for maintaining quality of life and personal well-being, serving as indicators of functional status [<xref ref-type="bibr" rid="ref1">1</xref>-<xref ref-type="bibr" rid="ref3">3</xref>]. ADL are an indicator of a person’s functional status and include basic physical tasks such as moving, eating, dressing, maintaining personal hygiene, and grooming, as well as more complex and instrumental activities such as working, shopping, cleaning, exercising, and participating in recreational activities [<xref ref-type="bibr" rid="ref2">2</xref>-<xref ref-type="bibr" rid="ref4">4</xref>]. Impaired physical function can limit the execution of these tasks, affecting personal goals and independent living. This condition can affect the individual’s ability to achieve personal goals and maintain an independent quality of life [<xref ref-type="bibr" rid="ref2">2</xref>,<xref ref-type="bibr" rid="ref5">5</xref>,<xref ref-type="bibr" rid="ref6">6</xref>]. Therefore, it is necessary to assess this deterioration during the execution of ADL in different preventive, clinical, or rehabilitation contexts [<xref ref-type="bibr" rid="ref6">6</xref>-<xref ref-type="bibr" rid="ref8">8</xref>].</p>
      <p>The functional assessment of ADL is complex, so it is advisable to approach it based on the evaluation of fundamental movement patterns on which these ADL are developed [<xref ref-type="bibr" rid="ref9">9</xref>-<xref ref-type="bibr" rid="ref11">11</xref>]. The shoulder and lumbar region are key joint complexes in this regard. Specifically, the shoulder joint is essential in many basic ADL, providing the mobility and stability necessary to perform actions in all planes of movement. It is essential to position the hand in space in a way that allows one to reach objects, eat, button a shirt, unbutton a bra, or comb one’s hair [<xref ref-type="bibr" rid="ref9">9</xref>,<xref ref-type="bibr" rid="ref12">12</xref>-<xref ref-type="bibr" rid="ref14">14</xref>]. The movement patterns most used in its assessment are scapula-humeral elevation in the sagittal and frontal plane and rotations at different elevation angles [<xref ref-type="bibr" rid="ref9">9</xref>,<xref ref-type="bibr" rid="ref13">13</xref>,<xref ref-type="bibr" rid="ref14">14</xref>]. Similarly, the lumbar region is a joint complex that has a close relationship with basic movement patterns such as flexion and extension of the trunk in the sagittal plane but also in extremely important actions such as sitting and standing up [<xref ref-type="bibr" rid="ref10">10</xref>,<xref ref-type="bibr" rid="ref15">15</xref>-<xref ref-type="bibr" rid="ref18">18</xref>]. Various ADL derive from this fundamental movement pattern, the most studied being the gestures of sitting and getting up from a chair, bending or crouching, and lifting an object or weight [<xref ref-type="bibr" rid="ref15">15</xref>-<xref ref-type="bibr" rid="ref17">17</xref>,<xref ref-type="bibr" rid="ref19">19</xref>].</p>
      <p>The precise evaluation, control, and monitoring of ADL performance are fundamental tasks, although not simple, in the development of effective intervention tools in these clinical and rehabilitation contexts. Traditionally, the assessment of ADL has been based on direct observation and subjective evaluation by therapists, which entails biases, errors, and lack of precision in the results [<xref ref-type="bibr" rid="ref6">6</xref>,<xref ref-type="bibr" rid="ref20">20</xref>-<xref ref-type="bibr" rid="ref22">22</xref>]. In contrast, recent advancements in technology, including wearable health monitoring devices, smart clothing sensors, and mobility assistance devices, enable the objective assessment and quantification of personal performance during ADL [<xref ref-type="bibr" rid="ref23">23</xref>-<xref ref-type="bibr" rid="ref27">27</xref>]. This technology includes wearable devices, motion sensors, and 2D or 3D motion capture systems, which allow complex movements and functionality of key joints, such as the shoulder or lumbar region, to be accurately recorded and analyzed during the performance of ADL [<xref ref-type="bibr" rid="ref4">4</xref>,<xref ref-type="bibr" rid="ref9">9</xref>,<xref ref-type="bibr" rid="ref15">15</xref>]. However, limitations such as its high acquisition and implementation cost, its specialized technical knowledge, its lack of transparency and complexity, or its lack of validation and reliability hinder its applicability in the specific clinical or rehabilitation context [<xref ref-type="bibr" rid="ref4">4</xref>,<xref ref-type="bibr" rid="ref9">9</xref>,<xref ref-type="bibr" rid="ref24">24</xref>,<xref ref-type="bibr" rid="ref25">25</xref>].</p>
      <p>A promising solution to overcome the aforementioned limitations is the use of wearable inertial sensors [<xref ref-type="bibr" rid="ref28">28</xref>-<xref ref-type="bibr" rid="ref34">34</xref>]. These have been gaining substantial scientific interest due to their potential to provide real-time information on kinematic aspects of human movement through continuous, dynamic, and minimally invasive monitoring. In the clinical and rehabilitation field, this technology has emerged as a simple and low-cost alternative to obtain precise information on accelerations, angular velocities, and trajectories in the different planes of movement during the execution of different basic ADL. This technology offers several advantages. It allows for a more accurate and objective assessment of the functionality of key joint complexes, identifying specific areas of weakness or limitation in movement during ADL and providing quantitative data on the person’s progress over time [<xref ref-type="bibr" rid="ref28">28</xref>,<xref ref-type="bibr" rid="ref35">35</xref>,<xref ref-type="bibr" rid="ref36">36</xref>]. On the other hand, it favors the motivation of patients, by being able to visualize their evolution, thus improving treatment adherence [<xref ref-type="bibr" rid="ref28">28</xref>,<xref ref-type="bibr" rid="ref31">31</xref>,<xref ref-type="bibr" rid="ref37">37</xref>].</p>
      <p>However, inertial sensors have some limitations. Despite being light and small, these devices may not be entirely transparent for users, especially due to the high number of sensors that, in many cases, must be used to obtain data that accurately interpret human movement [<xref ref-type="bibr" rid="ref30">30</xref>,<xref ref-type="bibr" rid="ref38">38</xref>,<xref ref-type="bibr" rid="ref39">39</xref>].</p>
      <p>Compared with the traditional approach of most studies that only use wearable inertial sensors to monitor kinematic aspects of human movement, the use of artificial intelligence (AI) techniques has been gaining popularity, by helping to improve the process of assessing and supervising different body movements using inertial sensors, in addition to reducing the number of sensors necessary for this [<xref ref-type="bibr" rid="ref40">40</xref>-<xref ref-type="bibr" rid="ref42">42</xref>].</p>
      <p>In Yen et al [<xref ref-type="bibr" rid="ref40">40</xref>], a wearable device consisting of a microcontroller and an inertial sensor placed on the participant’s waist is presented. The signals collected by the accelerometer and gyroscope were used to train a 1D convolutional neural network–based feature learning model, enabling the identification of 6 ADL. The results demonstrated high accuracy in both external and study data, validating the effectiveness of the proposed method.</p>
      <p>The study by Huynh-The et al [<xref ref-type="bibr" rid="ref41">41</xref>] introduces an innovative method for recognizing ADL- and sports-related activities using wearable sensors. This method involves converting inertial data into color images, facilitating the learning of highly discriminative features using convolutional neural networks. Experimental results showed recognition accuracy of over 95%, outperforming other deep learning (DL)–based approaches for human activity recognition (HAR).</p>
      <p>In Ronald et al [<xref ref-type="bibr" rid="ref43">43</xref>], a novel DL model inspired by the Inception-ResNet architecture is presented for HAR tasks. The proposed model, trained on data collected from smartphones and inertial sensors capturing accelerometer, gyroscope, magnetometer, GPS, temperature, and heart rate signals, achieved remarkable performance across different data sets, demonstrating its flexibility and adaptability to varying signal types and quantities.</p>
      <p>Meanwhile, Poulose et al [<xref ref-type="bibr" rid="ref44">44</xref>] address the challenges of HAR in health care systems by proposing an approach based on a human image threshing machine using smartphone camera images. The human image threshing system uses mask region–based convolutional neural networks for human detection and a DL model for activity classification, achieving a precision of 98.53% and surpassing conventional sensor-based HAR approaches.</p>
      <p>This study is based on the combination of accelerometer and gyroscope signals with AI techniques for the assessment of the shoulder and lumbar spine. AI algorithms can process the data captured by inertial sensors and perform sophisticated analyses to detect patterns, identify alterations in movement, and provide relevant clinical information. This facilitates a more complete and accurate evaluation of the joint movement of the shoulder or lower back, allowing a better understanding of dysfunctions and personalization of treatment and rehabilitation plans. The key contributions made by this study are summarized as follows:</p>
      <list list-type="order">
        <list-item>
          <p>Accelerometer and gyroscope signals with AI integration for enhanced ADL assessment: This combination shows great potential for the assessment of shoulder and lumbar region motion in basic ADL performance, providing an objective and advanced perspective in clinical evaluation and rehabilitation. However, validly and reliably demonstrating its use as a control and evaluation tool for ADL performance, in gestures such as eating, combing hair, dressing, sitting, or standing, still appears as an unresolved research challenge. Therefore, in this study, we aim to address the automatic detection and monitoring, using AI techniques, of the patient’s basic ADL related to the shoulder and back.</p>
        </list-item>
        <list-item>
          <p>Enhanced activity recognition precision: Our study relies on direct capture of inertial sensor signals, potentially offering a more precise and less image quality–dependent solution.</p>
        </list-item>
        <list-item>
          <p>Efficient sensors use: For signal capture, only 2 sensors are used. Furthermore, it is intended to achieve this objective through minimal, noninvasive, and practically transparent sensorization for the user, improving adherence to the monitoring process and facilitating the integration of technology into the individual’s daily life at a low cost.</p>
        </list-item>
        <list-item>
          <p>Direct inertial data approach: Our study focuses on the direct use of accelerometer and gyroscope data without requiring additional conversion for model training.</p>
        </list-item>
        <list-item>
          <p>Broad scope and versatility: It covers a wide range of activities, showcasing its versatility and adaptability.</p>
        </list-item>
      </list>
      <p>We believe that this novel approach will make a significant contribution to this field of research, as it can be used in the prevention, clinical, or rehabilitation contexts of the shoulder and lumbar region.</p>
      <p>The remainder of the paper is organized as follows: the <italic>Methods</italic> section addresses how the database was generated, the processing layers used, and the architecture of the 4 developed DL models, as well as the parameters selected for their training and optimization. In the <italic>Results</italic> section, the evaluation outcomes obtained by the 4 DL models are presented, analyzed, and compared. Finally, the <italic>Discussion</italic> section presents a discussion of the principal findings and conclusions regarding our study.</p>
    </sec>
    <sec sec-type="methods">
      <title>Methods</title>
      <sec>
        <title>Overview</title>
        <p>This research work focuses on the detection and automatic monitoring of ADL using AI models (DL models) and wearable inertial sensors to prevent or diagnose injuries, as well as supervise rehabilitation processes. <xref rid="figure1" ref-type="fig">Figure 1</xref> presents an overview of the methodology proposed. In the following subsections, each step is explained in depth.</p>
        <fig id="figure1" position="float">
          <label>Figure 1</label>
          <caption>
            <p>System overview. HDAR: Human Daily Activities Recognition; IMU: inertial measurement unit.</p>
          </caption>
          <graphic xlink:href="medinform_v12i1e57097_fig1.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
      </sec>
      <sec>
        <title>Ethical Considerations</title>
        <p>This study has been conducted in strict accordance with the ethical principles outlined in the Declaration of Helsinki. Approval for this research was obtained from the Ethics Committee of the University of Alicante (protocol code UA-2023-11-16).</p>
        <p>Prior to commencement, participants provided written informed consent. Respect for participants, including their autonomy, confidentiality, and well-being, has been ensured.</p>
        <p>All collected data have undergone a rigorous anonymization process, safeguarding the privacy of the individuals involved in the research. Protective measures were implemented in accordance with institutional guidelines to ensure the security of participant information throughout the study.</p>
        <p>Participants involved in human subjects research were not provided with any form of compensation. This decision was made to uphold transparency and fairness in the research process and to minimize potential biases associated with compensation.</p>
      </sec>
      <sec>
        <title>Data Collection</title>
        <p>A total of 9 ADLs were included in the study, 6 of them related to the shoulder (eating [E], combing hair [CH], fastening the bra [FB], opening the door [OD], reaching for an object [RO], and buttoning up [BU]) and 3 related to the back (sitting [S], standing up [SU], and half squat [HS]). <xref rid="figure2" ref-type="fig">Figure 2</xref> graphically shows these movements.</p>
        <fig id="figure2" position="float">
          <label>Figure 2</label>
          <caption>
            <p>Graphic description of activities of daily living movements. In the top row (from left to right): eating, combing hair, and fastening the bra. In the middle row (from left to right): opening the door, reaching for an object, and buttoning up. Bottom row (left to right): sitting, standing up, and half squat.</p>
          </caption>
          <graphic xlink:href="medinform_v12i1e57097_fig2.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
        <p>To monitor movements, we used 2 self-developed inertial measurement unit ERGOtex model sensors [<xref ref-type="bibr" rid="ref45">45</xref>,<xref ref-type="bibr" rid="ref46">46</xref>]. This inertial measurement unit ERGOtex sensor comprises 3 triaxial accelerometers (±2 g, controlled noise at 100 µg/√Hz), triaxial gyroscopes (±1000 deg/s, sensitivity error within ±1%, and low noise level, at ±4 mdeg/s/√Hz), and magnetometers, encapsulated in a device (weight=8 g, dimensions=23×21×10 mm). The ICM-20602 MEMS MotionTracking (TDK Corp) device was selected for its high-performance specifications, critical for the reliability of the device. The incorporation of a 1K-byte FIFO buffer reduces serial bus congestion, enhancing measurement consistency and optimizing device power use. It operates at a sampling rate of 20 Hz, has an autonomy of 8 hours, and can be attached to the skin using double-sided tape or secured elsewhere using an elastic strap. These enhancements guarantee reliable response times and sensitivity levels, crucial for maintaining data accuracy (<xref rid="figure3" ref-type="fig">Figure 3</xref>).</p>
        <fig id="figure3" position="float">
          <label>Figure 3</label>
          <caption>
            <p>ERGOtex inertial measurement unit sensors were developed for movement identification.</p>
          </caption>
          <graphic xlink:href="medinform_v12i1e57097_fig3.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
        <p>The inertial sensors were attached to the skin over the sacrum (S1) and the distal part of the upper extremity (close to the wrist). Primarily designed for monitoring spine posture and arm, this device records acceleration data across all 3 axes. Internal integration of the acceleration signal occurs within the device, transmitting data instantly via Bluetooth (frequency=2.4 GHz) to a smartphone or tablet equipped with the preinstalled app. This application enables immediate data visualization and facilitates export to a spreadsheet in comma-separated text format (CSV).</p>
        <p>The database generated initially had 53,165 records of all activities. The records were grouped into batches of time series (of different lengths) that represented the different movements. Each record was made up of 12 attributes or numerical variables, corresponding to the value obtained by the accelerometer and gyroscope of each sensor during the execution of the movement according to its 3 axes (Acx, Acy, Acz, Gyx, Gyy, Gyz). After the processing stage, where null, missing, and abnormal values were eliminated, the database was reduced to 52,600 records (RO: n=6423, FB: n=6956, E: n=6216, OD: n=6472, SU: n=3678, CH: n=6010, BU: n=5915, HS: n=6630, and S: n=4300).</p>
      </sec>
      <sec>
        <title>DL Models</title>
        <sec>
          <title>Processing Layers</title>
          <p>To create the DL models, different processing layers that perform the transformation, regularization, feature extraction, and dependency capture operations were combined. The basics of each of them are presented below.</p>
        </sec>
        <sec>
          <title>1D Convolution Layer for Feature Extraction</title>
          <p>A 1D convolutional layer is specifically designed to process data that follows a 1D structure, such as time series or text sequences. In the case of a 1D time series, the 1D convolution operation follows a similar process as a standard convolutional layer but is performed along 1 dimension instead of 2 [<xref ref-type="bibr" rid="ref47">47</xref>]. The convolution operation is the key component of this type of layer. During the 1D convolution operation, a filter (kernel) of defined size slides along the time series, multiplying the filter values by the corresponding values in the time series and summing them to produce a single value at the output. This process is repeated for each filter position throughout the time series, thus generating a feature map that highlights relevant patterns in the data sequences. The 1D convolutional layer is essential for the automatic identification of patterns in time series, allowing efficient extraction of important features during the training process. By reducing the number of parameters and avoiding overfitting, 1D convolution helps capture the temporal structure of data and improve model performance in time series prediction or classification tasks [<xref ref-type="bibr" rid="ref48">48</xref>]. Given an input 1D time series <italic>X</italic> and a set of filters <italic>F</italic>, the convolution operation is performed as follows (equation 1):</p>
          <graphic xlink:href="medinform_v12i1e57097_fig11.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          <p>where <italic>Y<sub>i</sub></italic> is the output value at the feature map position <italic>i</italic>, <italic>X<sub>i+j</sub></italic> is the time series value at position <italic>i+j</italic>, * denotes the convolution operation, <italic>F<sub>j</sub></italic> is the filter value at position <italic>j</italic>, <italic>b<sub>i</sub></italic> is the bias associated with the output, and <italic>m</italic> is the filter size.</p>
        </sec>
        <sec>
          <title>Long Short-Term Memory Layer for Modeling Temporal Dependencies</title>
          <p>Long short-term memory (LSTM) layers are a type of recurrent layer designed to overcome the limitations of traditional recurrent neural networks in capturing long-term dependencies in temporal sequences [<xref ref-type="bibr" rid="ref49">49</xref>]. Its design is based on the idea of using internal memory structures controlled by gates to manage information over time and make decisions about what information to retain and discard. In an LSTM, 3 main gates are introduced: the forget gate, which decides what information should be discarded from the previous memory; the input gate, which decides what new information should be stored in memory; and the output gate, which determines what memory information should be used to generate the output of the layer. These gates are controlled by activation functions and adjustable weights during training.</p>
          <p>An overview of the fundamental equations of an LSTM cell is presented below, which describe how an LSTM cell manages information and gates to process and retain relevant information over time in a temporal sequence, given one input at a time step <italic>t</italic>, denoted as <italic>x<sub>t</sub></italic>, and the outputs of the previous time step [<italic>h<sub>t</sub></italic><sub>–1</sub>] (LSTM cell output) and <italic>C<sub>t</sub></italic><sub>–1</sub> (LSTM cell state).</p>
          <p>Forget gate (<italic>f<sub>t</sub></italic>): decides what information should be discarded or forgotten from the cell state (equation 2)</p>
          <graphic xlink:href="medinform_v12i1e57097_fig12.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          <p>Input gate (<italic>i<sub>t</sub></italic>): decides what new information to store in the cell state (equations 3 and 4)</p>
          <graphic xlink:href="medinform_v12i1e57097_fig13.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          <graphic xlink:href="medinform_v12i1e57097_fig14.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          <p>The forgotten information and new information are then combined to update the state of the cell (equation 5).</p>
          <graphic xlink:href="medinform_v12i1e57097_fig15.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          <p>Output gate (<italic>o<sub>t</sub></italic>): finally, the activation at the current position (<italic>h<sub>t</sub></italic>) is calculated with the output gate, which regulates the amount of information to be output (equations 6 and 7)</p>
          <graphic xlink:href="medinform_v12i1e57097_fig16.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          <graphic xlink:href="medinform_v12i1e57097_fig17.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          <p>where σ is the sigmoid function; <italic>tanh</italic> is the hyperbolic tangent function; <italic>W<sub>f</sub></italic>, <italic>W<sub>i</sub></italic>, <italic>W<sub>C</sub></italic>, and <italic>W<sub>o</sub></italic> are weight matrices that are learned during training; and <italic>b<sub>f</sub>, b<sub>i</sub>, b<sub>c</sub>,</italic> and <italic>b<sub>o</sub></italic> are biases. [<italic>h<sub>t–</sub></italic><sub>1</sub>,<italic>x<sub>t</sub></italic>] denotes the concatenation of <italic>h<sub>t–</sub></italic><sub>1</sub> and <italic>x<sub>t</sub></italic> before applying the linear operation.</p>
        </sec>
        <sec>
          <title>Dropout Regularization Layer</title>
          <p>The Dropout layer is a regularization strategy that prevents overfitting by introducing variability into the network during training [<xref ref-type="bibr" rid="ref50">50</xref>]. This technique randomly turns off a percentage of units in each iteration, temporarily removing them and forcing the network to learn more robust representations. Based on the assembly concept, it simulates the presence or absence of units, improving effectiveness and reducing dependence on specific units. In addition to its impact on generalization, the Dropout layer acts as an effective regularization mechanism, improving modeling efficiency and performance by preventing overoptimization and facilitating generalization to unseen data [<xref ref-type="bibr" rid="ref51">51</xref>,<xref ref-type="bibr" rid="ref52">52</xref>].</p>
        </sec>
        <sec>
          <title>Flatten and Fully Connected (Dense) Transformation Layers</title>
          <p>The Flatten layer aims to transform 2D or 3D data into a 1D format, allowing for a more manageable representation and facilitating the transition from convolutional layers to dense layers [<xref ref-type="bibr" rid="ref53">53</xref>]. Given a 3D input matrix where <italic>m</italic>, <italic>n</italic>, and <italic>p</italic> are the spatial dimensions, the Flatten layer converts this matrix into a 1D vector <italic>X’</italic> of size <italic>m</italic> * <italic>n * p.</italic></p>
          <p>The fully connected (FC) or Dense layer connects all neurons in 1 layer to all neurons in the next layer [<xref ref-type="bibr" rid="ref48">48</xref>]. It performs linear transformations on the data followed by nonlinear activation functions, allowing complex representations to be learned. If <italic>X</italic> is the input of the Dense layer, <italic>W</italic> is the weight matrix, and <italic>b</italic> is the bias vector, the output <italic>Y</italic> is calculated as (equation 8):</p>
          <graphic xlink:href="medinform_v12i1e57097_fig18.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          <p>where σ is the activation function.</p>
        </sec>
        <sec>
          <title>1D MaxPooling Layer for Feature Reduction</title>
          <p>The 1D MaxPooling layer is a technique used in neural networks to reduce the spatial dimensionality of data by retaining only the maximum values in specific regions [<xref ref-type="bibr" rid="ref54">54</xref>,<xref ref-type="bibr" rid="ref55">55</xref>]. In the context of 1D time series, 1D MaxPooling is used to summarize the most relevant information and reduce the computational cost by decreasing the number of parameters in the network. Given a 1D input data set <italic>X</italic> with elements and a pooling window of size <italic>p</italic>, the output <italic>Y</italic> is calculated by taking the maximum value in each window. Mathematically, this can be expressed as (equation 9):</p>
          <graphic xlink:href="medinform_v12i1e57097_fig19.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          <p>where <italic>i</italic> is the index of the pooling window. This process is repeated until the entire length of the entry is covered.</p>
        </sec>
        <sec>
          <title>Proposed Architecture</title>
        </sec>
        <sec>
          <title>Overview</title>
          <p>The processing layers described above were combined to create 4 DL models of different complexity. Each model was designed to explore and exploit different approaches in data processing for the ADL recognition task. The architectures and distinctive features of each of these models are detailed below.</p>
        </sec>
        <sec>
          <title>Convolutional Approach</title>
          <p>The first proposed architecture uses a convolutional approach. It is composed of 3 main layers: a 1D convolutional layer, a pooling layer, and an FC layer (<xref rid="figure4" ref-type="fig">Figure 4</xref>). The convolutional layer, with 64 filters and a kernel size of 5, performs local feature extraction. Next, the pooling layer with pool size 2 is applied to reduce the dimensionality and preserve the most relevant features. Subsequently, a Flatten layer is used to convert the output into a 1D vector before connecting it to an FC layer with 128 neurons and a rectified linear unit (ReLU) activation function. ReLU is a nonlinear activation function commonly used in neural networks to introduce nonlinearities and aid in model convergence [<xref ref-type="bibr" rid="ref56">56</xref>]. Finally, a Dropout layer with a rate of 40% is incorporated to prevent overfitting. The output layer uses the Softmax function, which is commonly used in multiclass classification tasks to compute the probabilities of each class outcome and facilitate decision-making based on the highest probability class [<xref ref-type="bibr" rid="ref57">57</xref>].</p>
          <fig id="figure4" position="float">
            <label>Figure 4</label>
            <caption>
              <p>Model architecture based on a convolutional approach.</p>
            </caption>
            <graphic xlink:href="medinform_v12i1e57097_fig4.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          </fig>
        </sec>
        <sec>
          <title>Deep LSTM Approach</title>
          <p>The second architecture is based on a deep LSTM networking approach. It includes 2 LSTM layers, both with 64 units, followed by a Flatten layer. Then, 2 FC layers, with 64 and 32 neurons, respectively, and ReLU activation function are incorporated. The output layer uses the Softmax function for multiclass classification (<xref rid="figure5" ref-type="fig">Figure 5</xref>). This architecture deepens into the LSTM network with multiple layers, allowing more complex temporal patterns to be learned. The complexity increases compared with the convolutional model due to the deepening of the LSTM layers and the increase in FC connections. This approach seeks to capture more elaborate temporal dependencies in time series data.</p>
          <fig id="figure5" position="float">
            <label>Figure 5</label>
            <caption>
              <p>Model architecture based on a deep LSTM approach. LSTM: long short-term memory.</p>
            </caption>
            <graphic xlink:href="medinform_v12i1e57097_fig5.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          </fig>
        </sec>
        <sec>
          <title>Hybrid Approach: 1D Convolutional + LSTM</title>
          <p>The third architecture adopts a hybrid approach combining convolutional layers and LSTM networks (<xref rid="figure6" ref-type="fig">Figure 6</xref>). It starts with a 1D convolutional layer with 32 filters and kernel size 3, followed by an LSTM layer with 64 units. Subsequently, a pooling layer and a Flatten layer are applied. A Dropout layer (30%) is introduced to prevent overfitting before connecting to an FC layer with 64 neurons and ReLU activation. The output layer uses Softmax for multiclass classification. This architecture seeks to take advantage of the ability of convolutional layers to extract local features and the ability of LSTMs to model long-term temporal dependencies, offering a combination of both capabilities. Its complexity lies in the integration of 2 different approaches to improve the representation and understanding of time series data.</p>
          <fig id="figure6" position="float">
            <label>Figure 6</label>
            <caption>
              <p>Model architecture based on a hybrid approach (convolutional + LSTM). LSTM: long short-term memory.</p>
            </caption>
            <graphic xlink:href="medinform_v12i1e57097_fig6.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          </fig>
        </sec>
        <sec>
          <title>Bidirectional LSTM Approach</title>
          <p>The fourth architecture adopts a bidirectional approach using LSTM layers (<xref rid="figure7" ref-type="fig">Figure 7</xref>). It starts with a bidirectional LSTM layer with 64 units to capture temporal patterns in both directions. Then, a Flatten layer is applied before connecting with 2 FC layers of 64 and 32 neurons, respectively, with ReLU activation function. The output layer uses Softmax for multiclass classification. This architecture represents a more sophisticated and complex model by taking advantage of the ability of bidirectional LSTMs to capture both forward and backward temporal dependencies.</p>
          <fig id="figure7" position="float">
            <label>Figure 7</label>
            <caption>
              <p>Model architecture based on a bidirectional LSTM approach. LSTM: long short-term memory.</p>
            </caption>
            <graphic xlink:href="medinform_v12i1e57097_fig7.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          </fig>
        </sec>
        <sec>
          <title>Selection of Parameters for Training and Optimization of Models</title>
          <p>For a better understanding of the data and selection of the hyperparameters of the AI model, the accelerometry and gyroscope values of each movement were analyzed separately (<xref rid="figure8" ref-type="fig">Figure 8</xref>). Based on this, the temporal sequences were divided into windows of 100 records with a 10-record overlap between adjacent windows.</p>
          <fig id="figure8" position="float">
            <label>Figure 8</label>
            <caption>
              <p>Time series of eating activity: (A) accelerometer and (B) gyroscope.</p>
            </caption>
            <graphic xlink:href="medinform_v12i1e57097_fig8.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
          </fig>
          <p>Each model was trained over 150 epochs, representing a complete iteration through the training data. The model weights were updated every 1024 records (batch size), and training was stopped if the validation accuracy did not improve for 15 consecutive epochs (early stopping) to prevent overfitting.</p>
          <p>As the optimization algorithm during training, Adam was used. Its primary goal is to adjust the network’s weights and biases so that the model’s loss function is minimized. Adam enhances the standard gradient descent technique by adjusting the learning rate for each parameter individually, potentially leading to faster convergence and better model performance.</p>
          <p>Additionally, an L2 regularization term with a strength of 0.0015 was also applied to mitigate overfitting. This term controls the excessive growth of weights during training by adding a penalty term to the model’s loss function. The regularization strength determines how much large weights are penalized. By penalizing large weights, L2 regularization helps smooth out the model’s decisions and prevents it from fitting too closely to the training data.</p>
          <p>Categorical cross-entropy was used as the loss function. This function measures the discrepancy between the probability distributions predicted by the model and the actual distributions. The primary evaluation metric was accuracy, indicating the proportion of the model’s predictions in the test set that were correct.</p>
        </sec>
      </sec>
    </sec>
    <sec sec-type="results">
      <title>Results</title>
      <sec>
        <title>Evaluation Metrics</title>
        <p>The experiment was performed on a personal computer with Microsoft Windows 10, an Intel(R) Core(TM) i5-4210U CPU @ 1.70GHz-2.40 GHz, 6 GB RAM, and no GPU. All software was implemented using the Python programming language and the TensorFlow library in the Spyder development environment. After preparing the data, the DL models were trained using 70% of the data, and the remaining 30% was used to evaluate their performance. For this, popular evaluation metrics were used in classification problems, including precision, recall, <italic>F</italic><sub>1</sub>-score, and accuracy.</p>
        <p>Accuracy (equation 10) refers to the proportion of correct predictions, true positives (TPs) and true negatives (TNs) in relation to the total predictions made by the model, which include false positives (FPs) and false negatives (FNs).</p>
        <graphic xlink:href="medinform_v12i1e57097_fig20.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        <p>Precision (equation 11) represents the proportion of positive predictions that were correct. It is calculated as the number of TPs divided by the sum of TPs and FPs.</p>
        <graphic xlink:href="medinform_v12i1e57097_fig21.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        <p>Recall (equation 12) refers to the proportion of TP cases that were correctly identified by the model, calculated as the number of TPs divided by the sum of TPs and FNs.</p>
        <graphic xlink:href="medinform_v12i1e57097_fig22.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        <p><italic>F</italic><sub>1</sub>-score (equation 13) is a measure that combines precision and recall. It is calculated as the harmonic mean between precision and recall and provides a more balanced assessment of model performance, particularly useful when there is an imbalance in the class distribution in the data.</p>
        <graphic xlink:href="medinform_v12i1e57097_fig23.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
      </sec>
      <sec>
        <title>Evaluation Outcomes</title>
        <p>The obtained results show the high performance of the 4 proposed models, with accuracy, precision, recall, and <italic>F</italic><sub>1</sub>-score ranging between 95% and 97% for all cases (<xref ref-type="table" rid="table1">Table 1</xref>), while the loss function indicates an error rate of approximately 0.10 for the models. The high accuracy, precision, and recall suggest an ability to accurately identify multiple classes of activities, while the high <italic>F</italic><sub>1</sub>-score indicates a good balance between precision and recall. These results suggest that the models have effectively learned the relationships in the training, enabling them to identify patterns and generalize effectively to data they have not encountered during training, demonstrating strong and reliable predictive capabilities.</p>
        <table-wrap position="float" id="table1">
          <label>Table 1</label>
          <caption>
            <p>Evaluation metrics of the designed deep learning models.</p>
          </caption>
          <table width="1000" cellpadding="5" cellspacing="0" border="1" rules="groups" frame="hsides">
            <col width="200"/>
            <col width="200"/>
            <col width="200"/>
            <col width="200"/>
            <col width="200"/>
            <thead>
              <tr valign="top">
                <td>Models</td>
                <td>Accuracy (%)</td>
                <td>Precision (%)</td>
                <td>Recall (%)</td>
                <td><italic>F</italic><sub>1</sub>-score (%)</td>
              </tr>
            </thead>
            <tbody>
              <tr valign="top">
                <td>CNN<sup>a</sup></td>
                <td>97.11</td>
                <td>97.19</td>
                <td>97.14</td>
                <td>97.14</td>
              </tr>
              <tr valign="top">
                <td>Deep LSTM<sup>b</sup></td>
                <td>95.52</td>
                <td>95.64</td>
                <td>95.58</td>
                <td>95.54</td>
              </tr>
              <tr valign="top">
                <td>CNN+LSTM</td>
                <td>96.19</td>
                <td>96.19</td>
                <td>96.14</td>
                <td>96.14</td>
              </tr>
              <tr valign="top">
                <td>Bidirectional LSTM</td>
                <td>97.56</td>
                <td>97.51</td>
                <td>97.52</td>
                <td>97.51</td>
              </tr>
            </tbody>
          </table>
          <table-wrap-foot>
            <fn id="table1fn1">
              <p><sup>a</sup>CNN: convolutional neural network.</p>
            </fn>
            <fn id="table1fn2">
              <p><sup>b</sup>LSTM: long short-term memory.</p>
            </fn>
          </table-wrap-foot>
        </table-wrap>
        <p>When comparing different modeling approaches, it is evident that both the convolutional and bidirectional methods yield similar results across all evaluated metrics. This suggests that, despite the bidirectional approach’s inherent complexity in processing sequences in both directions, it does not offer a significant improvement over the simpler convolutional method. The convolutional model may have struck an optimal balance between learnability and generalization, enabling it to match or even surpass more complex models in terms of accuracy. However, it is worth noting that the bidirectional model achieved convergence in a smaller number of epochs (n=30; <xref rid="figure9" ref-type="fig">Figure 9</xref>), which is particularly valuable when rapid training and model responsiveness are required.</p>
        <p>It is also noteworthy that more complex models, such as deep LSTM and the hybrid approach, exhibit slightly inferior results compared with the convolutional approach. This observation may stem from several factors. First, the generalization ability of these models may be compromised due to the inherent complexity of their architectures and sensitivity to weight initialization. Additionally, the nature of the data and the suitability of different modeling approaches to capture the relevant characteristics of the time series should be considered. The activities represented in the data may benefit more from a simpler, more straightforward approach, such as convolutional, rather than more complex methods that may be prone to capturing irrelevant features or noise in the data.</p>
        <p>At the activity or class level, the confusion matrix provides a detailed breakdown of the model predictions for each class compared with the real class. Referring to the confusion matrix of the model with the best performance (<xref rid="figure10" ref-type="fig">Figure 10</xref>), it is observed that the majority of the predictions align with the main diagonal of the matrix, indicating that, for the most part, the classes are classified correctly. However, the activity of eating exhibits the most erroneous predictions, primarily being confused with the activities of opening a door and combing one’s hair. This confusion may arise due to overlapping movements and shared characteristics, such as acceleration and rotation patterns, making it challenging for the model to distinguish between them. Moreover, variations in the sequence of movements and the context in which these activities are performed may lead to different interpretations by the model. Variability in the execution of activities and differences in movements between individuals can also contribute to confusion among these classes.</p>
        <fig id="figure9" position="float">
          <label>Figure 9</label>
          <caption>
            <p>Training sessions progress over iterations.</p>
          </caption>
          <graphic xlink:href="medinform_v12i1e57097_fig9.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
        <fig id="figure10" position="float">
          <label>Figure 10</label>
          <caption>
            <p>Confusion matrix of the winning model based on the bidirectional approach: (left) standard and (right) normalized. BU: buttoning up; CH: combing hair; E: eating; FB: fastening the bra; HS: half squat; OD: opening the door; RO: reaching for an object; S: sitting; SU: standing up.</p>
          </caption>
          <graphic xlink:href="medinform_v12i1e57097_fig10.png" alt-version="no" mimetype="image" position="float" xlink:type="simple"/>
        </fig>
      </sec>
    </sec>
    <sec sec-type="discussion">
      <title>Discussion</title>
      <sec>
        <title>Background</title>
        <p>ADL are fundamental tasks that enable individuals to function with a minimum of autonomy and maintain their quality of life. Precise evaluation of ADL, especially in clinical and rehabilitation contexts, is crucial for understanding individuals’ functional status and designing effective interventions. Traditionally, the assessment of ADL has relied on direct observation and subjective evaluation by therapists, which can lead to biases and errors. Innovative technology, including wearable inertial sensors and AI, offers new opportunities for objective and quantitative evaluation of ADL performance.</p>
      </sec>
      <sec>
        <title>Principal Findings</title>
        <p>This study presents an innovative initiative by combining wearable inertial sensors with AI techniques to evaluate human movement in ADL. The implemented AI models have demonstrated solid performance, exhibiting high accuracy, precision, recall, and <italic>F</italic><sub>1</sub>-score (ranging between 95% and 97%), indicating an effective ability to identify and classify a variety of daily activities related to the shoulder and lumbar region. Furthermore, these results have been achieved through minimal sensorization, which is noninvasive and practically imperceptible to the user, thus minimizing interference with their daily life. This feature is crucial as it promotes user acceptance and adherence to continuous monitoring, contributing to the reliability of the collected data.</p>
      </sec>
      <sec>
        <title>Comparison to Prior Work</title>
          <p>This study presents significant improvements in the identification and monitoring of ADL compared with other existing methods. Unlike most previous approaches that primarily focus on activities involving the lumbar region (sitting, lying down, standing up, etc), our proposal allows for the precise identification of complex movements involving both the lumbar region and the shoulder. This is achieved using only 2 low-cost inertial sensors, contrasting with other solutions that require a higher degree of sensorization or bulkier devices. This minimally invasive monitoring enables individuals to perform daily activities naturally, promoting a more authentic representation of movement.</p>
        <p>The information provided by the sensors is used by DL algorithms for movement identification, without requiring additional processing. This enables immediate analysis of movement patterns during the performance of everyday activities, avoiding the delay associated with data processing needed in image-based motion capture systems, which tend to be more expensive and complex to set up and maintain.</p>
        <p>Furthermore, the use of inertial sensors offers versatility and adaptability, making them suitable for monitoring a wide range of ADL in different environments and contexts. They provide valuable information on movement patterns and functional abilities that may not be effectively captured or may be more difficult to capture by traditional 2D and 3D motion capture systems, which are more limited by factors such as image quality, potential obstructions in the line of sight between the camera and the person, or the need to use a greater number of cameras or sensors to capture all movement details.</p>
      </sec>
      <sec>
        <title>Limitations and Strengths of This Study</title>
        <p>This study demonstrates notable strengths in its methodology and approach. It uses the integration of inertial sensors and AI to improve the assessment of shoulder and lumbar motion during basic ADL performance, providing an objective and advanced perspective for clinical evaluation and rehabilitation. Although challenges persist in validating its use across various ADL gestures, such as eating or dressing, our focus on automatic detection and monitoring using AI techniques addresses this gap. Furthermore, by directly capturing inertial sensor signals and using only 2 sensors, our approach ensures enhanced activity recognition precision and efficiency. This strategy facilitates seamless integration into individuals’ daily lives at a low cost, promoting improved adherence to monitoring. Additionally, our study’s direct use of accelerometer and gyroscope data without conversion for model training emphasizes its versatility and broad scope, highlighting its adaptability across a wide range of activities.</p>
        <p>However, it is essential to acknowledge potential limitations to encourage further research and refinement. One limitation lies in the scope of activities monitored, which primarily focuses on specific muscle groups. Future research should aim to expand the scope of using AI and wearable inertial sensors beyond the assessment of shoulder and lumbar motion, broadening the range of monitored ADL. Given this limitation, it would be interesting to conduct in the future more extensive studies that encompass a broader range of ADL and other more distal body segments. For instance, investigations could explore the application of these technologies in assessing motion patterns related to limb motion (ie, elbow and wrist, or knee and ankle movements), offering valuable insights into biomechanical segmentary dynamics and enhancing our understanding of musculoskeletal movement patterns through AI approaches. Despite this limitation, the study sets a solid foundation for future endeavors in this field, showcasing its potential for advancement and application in clinical and rehabilitative settings.</p>
      </sec>
      <sec>
        <title>Conclusions</title>
        <p>This research has the potential to significantly impact the clinical evaluation and rehabilitation of patients with movement limitations, offering an objective and advanced tool to detect key movement patterns and joint dysfunctions. Such information can assist professionals in tailoring treatment plans to be more precise and personalized, addressing specific areas of weakness, and designing interventions to improve the patient’s functionality and quality of life.</p>
      </sec>
    </sec>
  </body>
  <back>
    <app-group/>
    <glossary>
      <title>Abbreviations</title>
      <def-list>
        <def-item>
          <term id="abb1">ADL</term>
          <def>
            <p>activities of daily living</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb2">AI</term>
          <def>
            <p>artificial intelligence</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb3">BU</term>
          <def>
            <p>buttoning up</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb4">CH</term>
          <def>
            <p>combing hair</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb5">DL</term>
          <def>
            <p>deep learning</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb6">E</term>
          <def>
            <p>eating</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb7">FB</term>
          <def>
            <p>fastening the bra</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb8">FC</term>
          <def>
            <p>fully connected</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb9">FN</term>
          <def>
            <p>false negative</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb10">FP</term>
          <def>
            <p>false positive</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb11">HAR</term>
          <def>
            <p>human activity recognition</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb12">HS</term>
          <def>
            <p>half squat</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb13">OD</term>
          <def>
            <p>opening the door</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb14">LSTM</term>
          <def>
            <p>long short-term memory</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb15">RO</term>
          <def>
            <p>reaching for an object</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb16">ReLU</term>
          <def>
            <p>rectified linear unit</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb17">S</term>
          <def>
            <p>sitting</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb18">SU</term>
          <def>
            <p>standing up</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb19">TN</term>
          <def>
            <p>true negative</p>
          </def>
        </def-item>
        <def-item>
          <term id="abb20">TP</term>
          <def>
            <p>true positive</p>
          </def>
        </def-item>
      </def-list>
    </glossary>
    <ack>
      <p>This research was funded by the Valencian Innovation Agency of Spain (grant INNVA1/2020/81).</p>
    </ack>
    <notes>
      <sec>
        <title>Data Availability</title>
        <p>The datasets generated during this study are not publicly available due to ethical restrictions but are available from the corresponding author on reasonable request.</p>
      </sec>
    </notes>
    <fn-group>
      <fn fn-type="con">
        <p>DRF and JCT designed the research. JCT and MGJ collected and supervised the data. ADR analyzed the data and developed and evaluated the deep learning models. All authors drafted the manuscript. DRF and JCT critically reviewed the manuscript. DRF had primary responsibility for the final content. All authors read and approved the final manuscript.</p>
      </fn>
      <fn fn-type="conflict">
        <p>None declared.</p>
      </fn>
    </fn-group>
    <ref-list>
      <ref id="ref1">
        <label>1</label>
        <nlm-citation citation-type="book">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Edemekong</surname>
              <given-names>PF</given-names>
            </name>
            <name name-style="western">
              <surname>Bomgaars</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Sukumaran</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Levy</surname>
              <given-names>SB</given-names>
            </name>
          </person-group>
          <source>Activities of Daily Living</source>
          <year>2019</year>
          <publisher-loc>Treasure Island, FL</publisher-loc>
          <publisher-name>StatPearls Publishing LLC</publisher-name>
        </nlm-citation>
      </ref>
      <ref id="ref2">
        <label>2</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Merrilees</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Activities of daily living</article-title>
          <source>Encyclopedia of the Neurological Sciences</source>
          <year>2014</year>
          <pub-id pub-id-type="medline">29261878</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref3">
        <label>3</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Katz</surname>
              <given-names>S</given-names>
            </name>
          </person-group>
          <article-title>Assessing self-maintenance: activities of daily living, mobility, and instrumental activities of daily living</article-title>
          <source>J Am Geriatr Soc</source>
          <year>1983</year>
          <volume>31</volume>
          <issue>12</issue>
          <fpage>721</fpage>
          <lpage>727</lpage>
          <pub-id pub-id-type="doi">10.1111/j.1532-5415.1983.tb03391.x</pub-id>
          <pub-id pub-id-type="medline">6418786</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref4">
        <label>4</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Kang</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Lee</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Kang</surname>
              <given-names>SJ</given-names>
            </name>
          </person-group>
          <article-title>A smart device for non-invasive ADL estimation through multi-environmental sensor fusion</article-title>
          <source>Sci Rep</source>
          <year>2023</year>
          <volume>13</volume>
          <issue>1</issue>
          <fpage>17246</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1038/s41598-023-44436-5"/>
          </comment>
          <pub-id pub-id-type="doi">10.1038/s41598-023-44436-5</pub-id>
          <pub-id pub-id-type="medline">37821665</pub-id>
          <pub-id pub-id-type="pii">10.1038/s41598-023-44436-5</pub-id>
          <pub-id pub-id-type="pmcid">PMC10567750</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref5">
        <label>5</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Chan</surname>
              <given-names>CS</given-names>
            </name>
            <name name-style="western">
              <surname>Slaughter</surname>
              <given-names>SE</given-names>
            </name>
            <name name-style="western">
              <surname>Jones</surname>
              <given-names>CA</given-names>
            </name>
            <name name-style="western">
              <surname>Wagg</surname>
              <given-names>AS</given-names>
            </name>
          </person-group>
          <article-title>Greater independence in activities of daily living is associated with higher health-related quality of life scores in nursing home residents with dementia</article-title>
          <source>Healthcare (Basel)</source>
          <year>2015</year>
          <volume>3</volume>
          <issue>3</issue>
          <fpage>503</fpage>
          <lpage>518</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=healthcare3030503"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/healthcare3030503</pub-id>
          <pub-id pub-id-type="medline">27417776</pub-id>
          <pub-id pub-id-type="pii">healthcare3030503</pub-id>
          <pub-id pub-id-type="pmcid">PMC4939554</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref6">
        <label>6</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Giebel</surname>
              <given-names>CM</given-names>
            </name>
            <name name-style="western">
              <surname>Sutcliffe</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Challis</surname>
              <given-names>D</given-names>
            </name>
          </person-group>
          <article-title>Activities of daily living and quality of life across different stages of dementia: a UK study</article-title>
          <source>Aging Ment Health</source>
          <year>2015</year>
          <volume>19</volume>
          <issue>1</issue>
          <fpage>63</fpage>
          <lpage>71</lpage>
          <pub-id pub-id-type="doi">10.1080/13607863.2014.915920</pub-id>
          <pub-id pub-id-type="medline">24831511</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref7">
        <label>7</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Herero</surname>
              <given-names>VG</given-names>
            </name>
            <name name-style="western">
              <surname>Extremera</surname>
              <given-names>N</given-names>
            </name>
          </person-group>
          <article-title>Daily life activities as mediators of the relationship between personality variables and subjective well-being among older adults</article-title>
          <source>Pers Individ Differ</source>
          <year>2010</year>
          <volume>49</volume>
          <issue>2</issue>
          <fpage>124</fpage>
          <lpage>129</lpage>
          <pub-id pub-id-type="doi">10.1016/j.paid.2010.03.019</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref8">
        <label>8</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Osborne</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Rizzo</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Chapter 106 - neurorehabilitation</article-title>
          <source>Neurol Clin Neurosci</source>
          <year>2007</year>
          <fpage>1423</fpage>
          <lpage>1432</lpage>
          <pub-id pub-id-type="doi">10.1016/b978-0-323-03354-1.50110-3</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref9">
        <label>9</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Klemt</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Prinold</surname>
              <given-names>JA</given-names>
            </name>
            <name name-style="western">
              <surname>Morgans</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Smith</surname>
              <given-names>SHL</given-names>
            </name>
            <name name-style="western">
              <surname>Nolte</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Reilly</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Bull</surname>
              <given-names>AMJ</given-names>
            </name>
          </person-group>
          <article-title>Analysis of shoulder compressive and shear forces during functional activities of daily life</article-title>
          <source>Clin Biomech (Bristol, Avon)</source>
          <year>2018</year>
          <volume>54</volume>
          <fpage>34</fpage>
          <lpage>41</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://linkinghub.elsevier.com/retrieve/pii/S0268-0033(18)30214-6"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.clinbiomech.2018.03.006</pub-id>
          <pub-id pub-id-type="medline">29550641</pub-id>
          <pub-id pub-id-type="pii">S0268-0033(18)30214-6</pub-id>
          <pub-id pub-id-type="pmcid">PMC6405441</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref10">
        <label>10</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Vaisy</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Gizzi</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Petzke</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Consmüller</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Pfingsten</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Falla</surname>
              <given-names>D</given-names>
            </name>
          </person-group>
          <article-title>Measurement of lumbar spine functional movement in low back pain</article-title>
          <source>Clin J Pain</source>
          <year>2015</year>
          <volume>31</volume>
          <issue>10</issue>
          <fpage>876</fpage>
          <lpage>885</lpage>
          <pub-id pub-id-type="doi">10.1097/AJP.0000000000000190</pub-id>
          <pub-id pub-id-type="medline">25503596</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref11">
        <label>11</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Kaljić</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Pašalić</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Katana</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Mačak Hadžiomerović</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Bojičić</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Jaganjac</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Salkić</surname>
              <given-names>N</given-names>
            </name>
          </person-group>
          <article-title>Influence of motion therapy on daily life activities of people with lumbar pain syndrome</article-title>
          <source>J Health Sci</source>
          <year>2022</year>
          <volume>12</volume>
          <fpage>213</fpage>
          <lpage>222</lpage>
          <pub-id pub-id-type="doi">10.17532/jhsci.2022.1975</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref12">
        <label>12</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Poppen</surname>
              <given-names>NK</given-names>
            </name>
            <name name-style="western">
              <surname>Walker</surname>
              <given-names>PS</given-names>
            </name>
          </person-group>
          <article-title>Normal and abnormal motion of the shoulder</article-title>
          <source>J Bone Joint Surg Am</source>
          <year>1976</year>
          <month>03</month>
          <volume>58</volume>
          <issue>2</issue>
          <fpage>195</fpage>
          <lpage>201</lpage>
          <pub-id pub-id-type="medline">1254624</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref13">
        <label>13</label>
        <nlm-citation citation-type="web">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Magda</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Cáceres</surname>
              <given-names>L</given-names>
            </name>
          </person-group>
          <article-title>Doctoral Thesis</article-title>
          <source>University of Valencia</source>
          <year>2019</year>
          <access-date>2024-07-26</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="http://hdl.handle.net/10251/133994">http://hdl.handle.net/10251/133994</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref14">
        <label>14</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Michiels</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Grevenstein</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Kinematics of shoulder abduction in the scapular plane. on the influence of abduction velocity and external load</article-title>
          <source>Clin Biomech (Bristol, Avon)</source>
          <year>1995</year>
          <volume>10</volume>
          <issue>3</issue>
          <fpage>137</fpage>
          <lpage>143</lpage>
          <pub-id pub-id-type="doi">10.1016/0268-0033(95)93703-v</pub-id>
          <pub-id pub-id-type="medline">11415544</pub-id>
          <pub-id pub-id-type="pii">026800339593703V</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref15">
        <label>15</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Sánchez-Zuriaga</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>López-Pascual</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Garrido-Jaén</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>de Moya</surname>
              <given-names>MFP</given-names>
            </name>
            <name name-style="western">
              <surname>Prat-Pastor</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Reliability and validity of a new objective tool for low back pain functional assessment</article-title>
          <source>Spine (Phila Pa 1976)</source>
          <year>2011</year>
          <volume>36</volume>
          <issue>16</issue>
          <fpage>1279</fpage>
          <lpage>1288</lpage>
          <pub-id pub-id-type="doi">10.1097/BRS.0b013e3181f471d8</pub-id>
          <pub-id pub-id-type="medline">21240051</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref16">
        <label>16</label>
        <nlm-citation citation-type="web">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Artacho</surname>
              <given-names>PCA</given-names>
            </name>
            <name name-style="western">
              <surname>Andrea</surname>
              <given-names>C</given-names>
            </name>
          </person-group>
          <article-title>Biomechanical assessment of the spine based on functional analysis of various activities of daily living</article-title>
          <source>University of Valencia</source>
          <year>2018</year>
          <access-date>2018-12-14</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://roderic.uv.es/handle/10550/68281#.ZA1QDVfRrrs.mendeley">https://roderic.uv.es/handle/10550/68281#.ZA1QDVfRrrs.mendeley</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref17">
        <label>17</label>
        <nlm-citation citation-type="web">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Fuster Ortí</surname>
              <given-names>MA</given-names>
            </name>
          </person-group>
          <article-title>Effects of a manual spinal traction technique on the lumbo-pelvic movement pattern and activation of the erector spinae during trunk flexion-extension in patients with low back pain</article-title>
          <source>University of Valencia</source>
          <year>2021</year>
          <access-date>2024-07-09</access-date>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://dialnet.unirioja.es/servlet/tesis?codigo=311244">https://dialnet.unirioja.es/servlet/tesis?codigo=311244</ext-link>
          </comment>
        </nlm-citation>
      </ref>
      <ref id="ref18">
        <label>18</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Lehman</surname>
              <given-names>GJ</given-names>
            </name>
          </person-group>
          <article-title>Biomechanical assessments of lumbar spinal function. how low back pain sufferers differ from normals. implications for outcome measures research. part i: kinematic assessments of lumbar function</article-title>
          <source>J Manipulative Physiol Ther</source>
          <year>2004</year>
          <volume>27</volume>
          <issue>1</issue>
          <fpage>57</fpage>
          <lpage>62</lpage>
          <pub-id pub-id-type="doi">10.1016/j.jmpt.2003.11.007</pub-id>
          <pub-id pub-id-type="medline">14739876</pub-id>
          <pub-id pub-id-type="pii">S0161475403001866</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref19">
        <label>19</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Arguisuelas</surname>
              <given-names>MD</given-names>
            </name>
            <name name-style="western">
              <surname>Lisón</surname>
              <given-names>JF</given-names>
            </name>
            <name name-style="western">
              <surname>Doménech-Fernández</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Martínez-Hurtado</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Salvador Coloma</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Sánchez-Zuriaga</surname>
              <given-names>D</given-names>
            </name>
          </person-group>
          <article-title>Effects of myofascial release in erector spinae myoelectric activity and lumbar spine kinematics in non-specific chronic low back pain: randomized controlled trial</article-title>
          <source>Clin Biomech (Bristol, Avon)</source>
          <year>2019</year>
          <volume>63</volume>
          <fpage>27</fpage>
          <lpage>33</lpage>
          <pub-id pub-id-type="doi">10.1016/j.clinbiomech.2019.02.009</pub-id>
          <pub-id pub-id-type="medline">30784788</pub-id>
          <pub-id pub-id-type="pii">S0268-0033(18)30033-0</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref20">
        <label>20</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Katz</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Ford</surname>
              <given-names>AB</given-names>
            </name>
            <name name-style="western">
              <surname>Moskowitz</surname>
              <given-names>RB</given-names>
            </name>
            <name name-style="western">
              <surname>Jackson</surname>
              <given-names>BA</given-names>
            </name>
            <name name-style="western">
              <surname>Jaffe</surname>
              <given-names>MW</given-names>
            </name>
          </person-group>
          <article-title>Studies of illness in the aged: the index of ADL: a standardized measure of biological and psychosocial function</article-title>
          <source>JAMA J Am Med Assoc</source>
          <year>1963</year>
          <volume>185</volume>
          <fpage>914</fpage>
          <lpage>919</lpage>
          <pub-id pub-id-type="doi">10.1001/jama.1963.03060120024016</pub-id>
          <pub-id pub-id-type="medline">14044222</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref21">
        <label>21</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Morris</surname>
              <given-names>JN</given-names>
            </name>
            <name name-style="western">
              <surname>Fries</surname>
              <given-names>BE</given-names>
            </name>
            <name name-style="western">
              <surname>Morris</surname>
              <given-names>SA</given-names>
            </name>
          </person-group>
          <article-title>Scaling ADLs within the MDS</article-title>
          <source>J Gerontol A Biol Sci Med Sci</source>
          <year>1999</year>
          <volume>54</volume>
          <issue>11</issue>
          <fpage>M546</fpage>
          <lpage>M553</lpage>
          <pub-id pub-id-type="doi">10.1093/gerona/54.11.m546</pub-id>
          <pub-id pub-id-type="medline">10619316</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref22">
        <label>22</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Graf</surname>
              <given-names>C</given-names>
            </name>
            <collab>Hartford Institute for Geriatric Nursing</collab>
          </person-group>
          <article-title>The Lawton instrumental activities of daily living (IADL) scale</article-title>
          <source>Medsurg Nurs</source>
          <year>2008</year>
          <volume>17</volume>
          <issue>5</issue>
          <fpage>343</fpage>
          <lpage>344</lpage>
          <pub-id pub-id-type="medline">19051984</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref23">
        <label>23</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Sánchez-Zuriaga</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Artacho-Pérez</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Biviá-Roig</surname>
              <given-names>G</given-names>
            </name>
          </person-group>
          <article-title>Lumbopelvic flexibility modulates neuromuscular responses during trunk flexion-extension</article-title>
          <source>J Electromyogr Kinesiol</source>
          <year>2016</year>
          <volume>28</volume>
          <fpage>152</fpage>
          <lpage>157</lpage>
          <pub-id pub-id-type="doi">10.1016/j.jelekin.2016.04.007</pub-id>
          <pub-id pub-id-type="medline">27155332</pub-id>
          <pub-id pub-id-type="pii">S1050-6411(16)30028-1</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref24">
        <label>24</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Pashmdarfard</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Azad</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Assessment tools to evaluate activities of daily living (ADL) and instrumental activities of daily living (IADL) in older adults: a systematic review</article-title>
          <source>Med J Islam Repub Iran</source>
          <year>2020</year>
          <volume>34</volume>
          <fpage>33</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/32617272"/>
          </comment>
          <pub-id pub-id-type="doi">10.34171/mjiri.34.33</pub-id>
          <pub-id pub-id-type="medline">32617272</pub-id>
          <pub-id pub-id-type="pmcid">PMC7320974</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref25">
        <label>25</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Jekel</surname>
              <given-names>K</given-names>
            </name>
            <name name-style="western">
              <surname>Damian</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Storf</surname>
              <given-names>H</given-names>
            </name>
            <name name-style="western">
              <surname>Hausner</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Frölich</surname>
              <given-names>L</given-names>
            </name>
          </person-group>
          <article-title>Development of a proxy-free objective assessment tool of instrumental activities of daily living in mild cognitive impairment using smart home technologies</article-title>
          <source>J Alzheimers Dis</source>
          <year>2016</year>
          <volume>52</volume>
          <issue>2</issue>
          <fpage>509</fpage>
          <lpage>517</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/27031479"/>
          </comment>
          <pub-id pub-id-type="doi">10.3233/JAD-151054</pub-id>
          <pub-id pub-id-type="medline">27031479</pub-id>
          <pub-id pub-id-type="pii">JAD151054</pub-id>
          <pub-id pub-id-type="pmcid">PMC4927882</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref26">
        <label>26</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Amaral Gomes</surname>
              <given-names>ES</given-names>
            </name>
            <name name-style="western">
              <surname>Ramsey</surname>
              <given-names>KA</given-names>
            </name>
            <name name-style="western">
              <surname>Rojer</surname>
              <given-names>AGM</given-names>
            </name>
            <name name-style="western">
              <surname>Reijnierse</surname>
              <given-names>EM</given-names>
            </name>
            <name name-style="western">
              <surname>Maier</surname>
              <given-names>AB</given-names>
            </name>
          </person-group>
          <article-title>The association of objectively measured physical activity and sedentary behavior with (instrumental) activities of daily living in community-dwelling older adults: a systematic review</article-title>
          <source>Clin Interv Aging</source>
          <year>2021</year>
          <volume>16</volume>
          <fpage>1877</fpage>
          <lpage>1915</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/34737555"/>
          </comment>
          <pub-id pub-id-type="doi">10.2147/CIA.S326686</pub-id>
          <pub-id pub-id-type="medline">34737555</pub-id>
          <pub-id pub-id-type="pii">326686</pub-id>
          <pub-id pub-id-type="pmcid">PMC8560073</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref27">
        <label>27</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Goverover</surname>
              <given-names>Y</given-names>
            </name>
            <name name-style="western">
              <surname>Kalmar</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Gaudino-Goering</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Shawaryn</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Moore</surname>
              <given-names>NB</given-names>
            </name>
            <name name-style="western">
              <surname>Halper</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>DeLuca</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>The relation between subjective and objective measures of everyday life activities in persons with multiple sclerosis</article-title>
          <source>Arch Phys Med Rehabil</source>
          <year>2005</year>
          <volume>86</volume>
          <issue>12</issue>
          <fpage>2303</fpage>
          <lpage>2308</lpage>
          <pub-id pub-id-type="doi">10.1016/j.apmr.2005.05.016</pub-id>
          <pub-id pub-id-type="medline">16344027</pub-id>
          <pub-id pub-id-type="pii">S0003-9993(05)00522-8</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref28">
        <label>28</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Bonato</surname>
              <given-names>P</given-names>
            </name>
          </person-group>
          <article-title>Advances in wearable technology and applications in physical medicine and rehabilitation</article-title>
          <source>J Neuroeng Rehabil</source>
          <year>2005</year>
          <volume>2</volume>
          <issue>1</issue>
          <fpage>2</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://jneuroengrehab.biomedcentral.com/articles/10.1186/1743-0003-2-2"/>
          </comment>
          <pub-id pub-id-type="doi">10.1186/1743-0003-2-2</pub-id>
          <pub-id pub-id-type="medline">15733322</pub-id>
          <pub-id pub-id-type="pii">1743-0003-2-2</pub-id>
          <pub-id pub-id-type="pmcid">PMC552335</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref29">
        <label>29</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Kim</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Campbell</surname>
              <given-names>AS</given-names>
            </name>
            <name name-style="western">
              <surname>de Ávila</surname>
              <given-names>BEF</given-names>
            </name>
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Wearable biosensors for healthcare monitoring</article-title>
          <source>Nat Biotechnol</source>
          <year>2019</year>
          <volume>37</volume>
          <issue>4</issue>
          <fpage>389</fpage>
          <lpage>406</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/30804534"/>
          </comment>
          <pub-id pub-id-type="doi">10.1038/s41587-019-0045-y</pub-id>
          <pub-id pub-id-type="medline">30804534</pub-id>
          <pub-id pub-id-type="pii">10.1038/s41587-019-0045-y</pub-id>
          <pub-id pub-id-type="pmcid">PMC8183422</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref30">
        <label>30</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Rodgers</surname>
              <given-names>MM</given-names>
            </name>
            <name name-style="western">
              <surname>Alon</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Pai</surname>
              <given-names>VM</given-names>
            </name>
            <name name-style="western">
              <surname>Conroy</surname>
              <given-names>RS</given-names>
            </name>
          </person-group>
          <article-title>Wearable technologies for active living and rehabilitation: current research challenges and future opportunities</article-title>
          <source>J Rehabil Assist Technol Eng</source>
          <year>2019</year>
          <volume>6</volume>
          <fpage>2055668319839607</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://journals.sagepub.com/doi/abs/10.1177/2055668319839607?url_ver=Z39.88-2003&amp;rfr_id=ori:rid:crossref.org&amp;rfr_dat=cr_pub%20%200pubmed"/>
          </comment>
          <pub-id pub-id-type="doi">10.1177/2055668319839607</pub-id>
          <pub-id pub-id-type="medline">31245033</pub-id>
          <pub-id pub-id-type="pii">10.1177_2055668319839607</pub-id>
          <pub-id pub-id-type="pmcid">PMC6582279</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref31">
        <label>31</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Lang</surname>
              <given-names>CE</given-names>
            </name>
            <name name-style="western">
              <surname>Barth</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Holleran</surname>
              <given-names>CL</given-names>
            </name>
            <name name-style="western">
              <surname>Konrad</surname>
              <given-names>JD</given-names>
            </name>
            <name name-style="western">
              <surname>Bland</surname>
              <given-names>MD</given-names>
            </name>
          </person-group>
          <article-title>Implementation of wearable sensing technology for movement: pushing forward into the routine physical rehabilitation care field</article-title>
          <source>Sensors (Basel)</source>
          <year>2020</year>
          <volume>20</volume>
          <issue>20</issue>
          <fpage>5744</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=s20205744"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/s20205744</pub-id>
          <pub-id pub-id-type="medline">33050368</pub-id>
          <pub-id pub-id-type="pii">s20205744</pub-id>
          <pub-id pub-id-type="pmcid">PMC7601835</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref32">
        <label>32</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Porciuncula</surname>
              <given-names>F</given-names>
            </name>
            <name name-style="western">
              <surname>Roto</surname>
              <given-names>AV</given-names>
            </name>
            <name name-style="western">
              <surname>Kumar</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Davis</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Roy</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Walsh</surname>
              <given-names>CJ</given-names>
            </name>
            <name name-style="western">
              <surname>Awad</surname>
              <given-names>LN</given-names>
            </name>
          </person-group>
          <article-title>Wearable movement sensors for rehabilitation: a focused review of technological and clinical advances</article-title>
          <source>PM R</source>
          <year>2018</year>
          <volume>10</volume>
          <issue>9 Suppl 2</issue>
          <fpage>S220</fpage>
          <lpage>S232</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://europepmc.org/abstract/MED/30269807"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.pmrj.2018.06.013</pub-id>
          <pub-id pub-id-type="medline">30269807</pub-id>
          <pub-id pub-id-type="pii">S1934-1482(18)30363-0</pub-id>
          <pub-id pub-id-type="pmcid">PMC6700726</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref33">
        <label>33</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Jalloul</surname>
              <given-names>N</given-names>
            </name>
          </person-group>
          <article-title>Wearable sensors for the monitoring of movement disorders</article-title>
          <source>Biomed J</source>
          <year>2018</year>
          <volume>41</volume>
          <issue>4</issue>
          <fpage>249</fpage>
          <lpage>253</lpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://linkinghub.elsevier.com/retrieve/pii/S2319-4170(17)30408-0"/>
          </comment>
          <pub-id pub-id-type="doi">10.1016/j.bj.2018.06.003</pub-id>
          <pub-id pub-id-type="medline">30348268</pub-id>
          <pub-id pub-id-type="pii">S2319-4170(17)30408-0</pub-id>
          <pub-id pub-id-type="pmcid">PMC6198019</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref34">
        <label>34</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Wu</surname>
              <given-names>W</given-names>
            </name>
            <name name-style="western">
              <surname>Dasgupta</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Ramirez</surname>
              <given-names>EE</given-names>
            </name>
            <name name-style="western">
              <surname>Peterson</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Norman</surname>
              <given-names>GJ</given-names>
            </name>
          </person-group>
          <article-title>Classification accuracies of physical activities using smartphone motion sensors</article-title>
          <source>J Med Internet Res</source>
          <year>2012</year>
          <volume>14</volume>
          <issue>5</issue>
          <fpage>e130</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.jmir.org/2012/5/e130/"/>
          </comment>
          <pub-id pub-id-type="doi">10.2196/jmir.2208</pub-id>
          <pub-id pub-id-type="medline">23041431</pub-id>
          <pub-id pub-id-type="pii">v14i5e130</pub-id>
          <pub-id pub-id-type="pmcid">PMC3510774</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref35">
        <label>35</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Rast</surname>
              <given-names>FM</given-names>
            </name>
            <name name-style="western">
              <surname>Labruyère</surname>
              <given-names>R</given-names>
            </name>
          </person-group>
          <article-title>Systematic review on the application of wearable inertial sensors to quantify everyday life motor activity in people with mobility impairments</article-title>
          <source>J Neuroeng Rehabil</source>
          <year>2020</year>
          <volume>17</volume>
          <issue>1</issue>
          <fpage>148</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://jneuroengrehab.biomedcentral.com/articles/10.1186/s12984-020-00779-y"/>
          </comment>
          <pub-id pub-id-type="doi">10.1186/s12984-020-00779-y</pub-id>
          <pub-id pub-id-type="medline">33148315</pub-id>
          <pub-id pub-id-type="pii">10.1186/s12984-020-00779-y</pub-id>
          <pub-id pub-id-type="pmcid">PMC7640711</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref36">
        <label>36</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Kristoffersson</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Lindén</surname>
              <given-names>M</given-names>
            </name>
          </person-group>
          <article-title>A systematic review of wearable sensors for monitoring physical activity</article-title>
          <source>Sensors (Basel)</source>
          <year>2022</year>
          <volume>22</volume>
          <issue>2</issue>
          <fpage>573</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=s22020573"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/s22020573</pub-id>
          <pub-id pub-id-type="medline">35062531</pub-id>
          <pub-id pub-id-type="pii">s22020573</pub-id>
          <pub-id pub-id-type="pmcid">PMC8778538</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref37">
        <label>37</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Camomilla</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Bergamini</surname>
              <given-names>E</given-names>
            </name>
            <name name-style="western">
              <surname>Fantozzi</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Vannozzi</surname>
              <given-names>G</given-names>
            </name>
          </person-group>
          <article-title>Trends supporting the in-field use of wearable inertial sensors for sport performance evaluation: a systematic review</article-title>
          <source>Sensors (Basel)</source>
          <year>2018</year>
          <volume>18</volume>
          <issue>3</issue>
          <fpage>873</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=s18030873"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/s18030873</pub-id>
          <pub-id pub-id-type="medline">29543747</pub-id>
          <pub-id pub-id-type="pii">s18030873</pub-id>
          <pub-id pub-id-type="pmcid">PMC5877384</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref38">
        <label>38</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Picerno</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Iosa</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>D'Souza</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Benedetti</surname>
              <given-names>MG</given-names>
            </name>
            <name name-style="western">
              <surname>Paolucci</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Morone</surname>
              <given-names>G</given-names>
            </name>
          </person-group>
          <article-title>Wearable inertial sensors for human movement analysis: a five-year update</article-title>
          <source>Expert Rev Med Devices</source>
          <year>2021</year>
          <volume>18</volume>
          <issue>sup1</issue>
          <fpage>79</fpage>
          <lpage>94</lpage>
          <pub-id pub-id-type="doi">10.1080/17434440.2021.1988849</pub-id>
          <pub-id pub-id-type="medline">34601995</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref39">
        <label>39</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Iosa</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Picerno</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Paolucci</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Morone</surname>
              <given-names>G</given-names>
            </name>
          </person-group>
          <article-title>Wearable inertial sensors for human movement analysis</article-title>
          <source>Expert Rev Med Devices</source>
          <year>2016</year>
          <volume>13</volume>
          <issue>7</issue>
          <fpage>641</fpage>
          <lpage>659</lpage>
          <pub-id pub-id-type="doi">10.1080/17434440.2016.1198694</pub-id>
          <pub-id pub-id-type="medline">27309490</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref40">
        <label>40</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Yen</surname>
              <given-names>CT</given-names>
            </name>
            <name name-style="western">
              <surname>Liao</surname>
              <given-names>JX</given-names>
            </name>
            <name name-style="western">
              <surname>Huang</surname>
              <given-names>YK</given-names>
            </name>
          </person-group>
          <article-title>Human daily activity recognition performed using wearable inertial sensors combined with deep learning algorithms</article-title>
          <source>IEEE Access</source>
          <year>2020</year>
          <volume>8</volume>
          <fpage>174105</fpage>
          <lpage>174114</lpage>
          <pub-id pub-id-type="doi">10.1109/access.2020.3025938</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref41">
        <label>41</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Huynh-The</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Hua</surname>
              <given-names>CH</given-names>
            </name>
            <name name-style="western">
              <surname>Kim</surname>
              <given-names>DS</given-names>
            </name>
          </person-group>
          <article-title>Visualizing inertial data for wearable sensor based daily life activity recognition using convolutional neural network</article-title>
          <source>Annu Int Conf IEEE Eng Med Biol Soc</source>
          <year>2019</year>
          <volume>2019</volume>
          <fpage>2478</fpage>
          <lpage>2481</lpage>
          <pub-id pub-id-type="doi">10.1109/EMBC.2019.8857366</pub-id>
          <pub-id pub-id-type="medline">31946400</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref42">
        <label>42</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Mustafa</surname>
              <given-names>Z</given-names>
            </name>
          </person-group>
          <article-title>A study of machine learning techniques based on human daily living activities via inertial sensors</article-title>
          <year>2023</year>
          <conf-name>International Conference on IT Innovation and Knowledge Discovery, ITIKD</conf-name>
          <conf-date>08 March 2023</conf-date>
          <conf-loc>Manama, Bahrain</conf-loc>
          <pub-id pub-id-type="doi">10.1109/itikd56332.2023.10099820</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref43">
        <label>43</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Ronald</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Poulose</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Han</surname>
              <given-names>DS</given-names>
            </name>
          </person-group>
          <article-title>iSPLInception: an Inception-ResNet deep learning architecture for human activity recognition</article-title>
          <source>IEEE Access</source>
          <year>2021</year>
          <volume>9</volume>
          <fpage>68985</fpage>
          <lpage>69001</lpage>
          <pub-id pub-id-type="doi">10.1109/access.2021.3078184</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref44">
        <label>44</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Poulose</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Kim</surname>
              <given-names>JH</given-names>
            </name>
            <name name-style="western">
              <surname>Han</surname>
              <given-names>DS</given-names>
            </name>
          </person-group>
          <article-title>HIT HAR: human image threshing machine for human activity recognition using deep learning models</article-title>
          <source>Comput Intell Neurosci</source>
          <year>2022</year>
          <volume>2022</volume>
          <fpage>1808990</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://doi.org/10.1155/2022/1808990"/>
          </comment>
          <pub-id pub-id-type="doi">10.1155/2022/1808990</pub-id>
          <pub-id pub-id-type="medline">36248917</pub-id>
          <pub-id pub-id-type="pmcid">PMC9560851</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref45">
        <label>45</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>García-Luna</surname>
              <given-names>MA</given-names>
            </name>
            <name name-style="western">
              <surname>Jimenez-Olmedo</surname>
              <given-names>JM</given-names>
            </name>
            <name name-style="western">
              <surname>Pueo</surname>
              <given-names>B</given-names>
            </name>
            <name name-style="western">
              <surname>Manchado</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Cortell-Tormo</surname>
              <given-names>JM</given-names>
            </name>
          </person-group>
          <article-title>Concurrent validity of the ergotex device for measuring low back posture</article-title>
          <source>Bioengineering (Basel)</source>
          <year>2024</year>
          <volume>11</volume>
          <issue>1</issue>
          <fpage>98</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=bioengineering11010098"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/bioengineering11010098</pub-id>
          <pub-id pub-id-type="medline">38275578</pub-id>
          <pub-id pub-id-type="pii">bioengineering11010098</pub-id>
          <pub-id pub-id-type="pmcid">PMC10812927</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref46">
        <label>46</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Jimenez-Olmedo</surname>
              <given-names>JM</given-names>
            </name>
            <name name-style="western">
              <surname>Tortosa-Martínez</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Cortell-Tormo</surname>
              <given-names>JM</given-names>
            </name>
            <name name-style="western">
              <surname>Pueo</surname>
              <given-names>B</given-names>
            </name>
          </person-group>
          <article-title>Assessing the validity of the ergotex IMU in joint angle measurement: a comparative study with optical tracking systems</article-title>
          <source>Sensors (Basel)</source>
          <year>2024</year>
          <volume>24</volume>
          <issue>6</issue>
          <fpage>1903</fpage>
          <comment>
            <ext-link ext-link-type="uri" xlink:type="simple" xlink:href="https://www.mdpi.com/resolver?pii=s24061903"/>
          </comment>
          <pub-id pub-id-type="doi">10.3390/s24061903</pub-id>
          <pub-id pub-id-type="medline">38544165</pub-id>
          <pub-id pub-id-type="pii">s24061903</pub-id>
          <pub-id pub-id-type="pmcid">PMC10974527</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref47">
        <label>47</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Kiranyaz</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Avci</surname>
              <given-names>O</given-names>
            </name>
            <name name-style="western">
              <surname>Abdeljaber</surname>
              <given-names>O</given-names>
            </name>
            <name name-style="western">
              <surname>Ince</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Gabbouj</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Inman</surname>
              <given-names>DJ</given-names>
            </name>
          </person-group>
          <article-title>1D convolutional neural networks and applications: a survey</article-title>
          <source>Mech Syst Signal Process</source>
          <year>2021</year>
          <volume>151</volume>
          <fpage>107398</fpage>
          <pub-id pub-id-type="doi">10.1016/j.ymssp.2020.107398</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref48">
        <label>48</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Heaton</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Ian Goodfellow, Yoshua Bengio, and Aaron Courville: deep learning</article-title>
          <source>Genet Program Evolvable Mach</source>
          <year>2018</year>
          <volume>19</volume>
          <issue>1-2</issue>
          <fpage>305</fpage>
          <lpage>307</lpage>
          <pub-id pub-id-type="doi">10.1007/s10710-017-9314-z</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref49">
        <label>49</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Hochreiter</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Schmidhuber</surname>
              <given-names>J</given-names>
            </name>
          </person-group>
          <article-title>Long short-term memory</article-title>
          <source>Neural Comput</source>
          <year>1997</year>
          <volume>9</volume>
          <issue>8</issue>
          <fpage>1735</fpage>
          <lpage>1780</lpage>
          <pub-id pub-id-type="doi">10.1162/neco.1997.9.8.1735</pub-id>
          <pub-id pub-id-type="medline">9377276</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref50">
        <label>50</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Park</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Kwak</surname>
              <given-names>N</given-names>
            </name>
          </person-group>
          <article-title>Analysis on the dropout effect in convolutional neural networks</article-title>
          <source>Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics)</source>
          <year>2017</year>
          <conf-name>Asian Conference on Computer Vision</conf-name>
          <conf-date>10 March 2017</conf-date>
          <conf-loc>Springer, Cham</conf-loc>
          <fpage>189</fpage>
          <lpage>204</lpage>
          <pub-id pub-id-type="doi">10.1007/978-3-319-54184-6_12</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref51">
        <label>51</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Srivastava</surname>
              <given-names>N</given-names>
            </name>
            <name name-style="western">
              <surname>Hinton</surname>
              <given-names>G</given-names>
            </name>
            <name name-style="western">
              <surname>Krizhevsky</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Sutskever</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Salakhutdinov</surname>
              <given-names>R</given-names>
            </name>
          </person-group>
          <article-title>Dropout: a simple way to prevent neural networks from overfitting</article-title>
          <source>JMLR</source>
          <year>2014</year>
          <volume>15</volume>
          <issue>56</issue>
          <fpage>1929</fpage>
          <lpage>1958</lpage>
          <pub-id pub-id-type="doi">10.5555/2627435.2670313</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref52">
        <label>52</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Salehin</surname>
              <given-names>I</given-names>
            </name>
            <name name-style="western">
              <surname>Kang</surname>
              <given-names>DK</given-names>
            </name>
          </person-group>
          <article-title>A review on dropout regularization approaches for deep neural networks within the scholarly domain</article-title>
          <source>Electronics (Switzerland)</source>
          <year>2023</year>
          <volume>12</volume>
          <issue>14</issue>
          <fpage>3106</fpage>
          <pub-id pub-id-type="doi">10.3390/electronics12143106</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref53">
        <label>53</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Jin</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Dundar</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Culurciello</surname>
              <given-names>E</given-names>
            </name>
          </person-group>
          <article-title>Flattened convolutional neural networks for feedforward acceleration</article-title>
          <year>2015</year>
          <conf-name>3rd International Conference on Learning Representations, ICLR 2015 - Workshop Track Proceedings</conf-name>
          <conf-date>20 November 2015</conf-date>
          <conf-loc>USA</conf-loc>
        </nlm-citation>
      </ref>
      <ref id="ref54">
        <label>54</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Christlein</surname>
              <given-names>V</given-names>
            </name>
            <name name-style="western">
              <surname>Spranger</surname>
              <given-names>L</given-names>
            </name>
            <name name-style="western">
              <surname>Seuret</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Nicolaou</surname>
              <given-names>A</given-names>
            </name>
            <name name-style="western">
              <surname>Kral</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Maier</surname>
              <given-names>A</given-names>
            </name>
          </person-group>
          <article-title>Deep generalized max pooling</article-title>
          <year>2019</year>
          <conf-name>Proceedings of the International Conference on Document Analysis and Recognition, ICDAR</conf-name>
          <conf-date>20-25 September 2019</conf-date>
          <conf-loc>Australia</conf-loc>
          <pub-id pub-id-type="doi">10.1109/icdar.2019.00177</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref55">
        <label>55</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Lee</surname>
              <given-names>CY</given-names>
            </name>
            <name name-style="western">
              <surname>Gallagher</surname>
              <given-names>P</given-names>
            </name>
            <name name-style="western">
              <surname>Tu</surname>
              <given-names>Z</given-names>
            </name>
          </person-group>
          <article-title>Generalizing pooling functions in CNNs: mixed, gated, and tree</article-title>
          <source>IEEE Trans Pattern Anal Mach Intell</source>
          <year>2018</year>
          <volume>40</volume>
          <issue>4</issue>
          <fpage>863</fpage>
          <lpage>875</lpage>
          <pub-id pub-id-type="doi">10.1109/TPAMI.2017.2703082</pub-id>
          <pub-id pub-id-type="medline">28504932</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref56">
        <label>56</label>
        <nlm-citation citation-type="confproc">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Banerjee</surname>
              <given-names>C</given-names>
            </name>
            <name name-style="western">
              <surname>Mukherjee</surname>
              <given-names>T</given-names>
            </name>
            <name name-style="western">
              <surname>Pasiliao</surname>
              <given-names>E</given-names>
            </name>
          </person-group>
          <article-title>An empirical study on generalizations of the ReLU activation function</article-title>
          <year>2019</year>
          <conf-name>ACMSE</conf-name>
          <conf-date>18 April 2019</conf-date>
          <conf-loc>New York, NY</conf-loc>
          <pub-id pub-id-type="doi">10.1145/3299815.3314450</pub-id>
        </nlm-citation>
      </ref>
      <ref id="ref57">
        <label>57</label>
        <nlm-citation citation-type="journal">
          <person-group person-group-type="author">
            <name name-style="western">
              <surname>Zhu</surname>
              <given-names>D</given-names>
            </name>
            <name name-style="western">
              <surname>Lu</surname>
              <given-names>S</given-names>
            </name>
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>M</given-names>
            </name>
            <name name-style="western">
              <surname>Lin</surname>
              <given-names>J</given-names>
            </name>
            <name name-style="western">
              <surname>Wang</surname>
              <given-names>Z</given-names>
            </name>
          </person-group>
          <article-title>Efficient precision-adjustable architecture for softmax function in deep learning</article-title>
          <source>IEEE Trans Circuits Syst II Express Briefs</source>
          <year>2020</year>
          <volume>67</volume>
          <issue>12</issue>
          <fpage>3382</fpage>
          <lpage>3386</lpage>
          <pub-id pub-id-type="doi">10.1109/tcsii.2020.3002564</pub-id>
        </nlm-citation>
      </ref>
    </ref-list>
  </back>
</article>
