<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE article PUBLIC "-//NLM//DTD Journal Publishing DTD v2.0 20040830//EN" "journalpublishing.dtd"><article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" dtd-version="2.0" xml:lang="en" article-type="research-article"><front><journal-meta><journal-id journal-id-type="nlm-ta">JMIR Nursing</journal-id><journal-id journal-id-type="publisher-id">nursing</journal-id><journal-id journal-id-type="index">33</journal-id><journal-title>JMIR Nursing</journal-title><abbrev-journal-title>JMIR Nursing</abbrev-journal-title><issn pub-type="epub">2562-7600</issn><publisher><publisher-name>JMIR Publications</publisher-name><publisher-loc>Toronto, Canada</publisher-loc></publisher></journal-meta><article-meta><article-id pub-id-type="publisher-id">v7i1e58094</article-id><article-id pub-id-type="doi">10.2196/58094</article-id><article-categories><subj-group subj-group-type="heading"><subject>Original Paper</subject></subj-group></article-categories><title-group><article-title>Unobtrusive Nighttime Movement Monitoring to Support Nursing Home Continence Care: Algorithm Development and Validation Study</article-title></title-group><contrib-group><contrib contrib-type="author" corresp="yes"><name name-style="western"><surname>Strauven</surname><given-names>Hannelore</given-names></name><degrees>PhD</degrees><xref ref-type="aff" rid="aff1">1</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Wang</surname><given-names>Chunzhuo</given-names></name><degrees>PhD</degrees><xref ref-type="aff" rid="aff1">1</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Hallez</surname><given-names>Hans</given-names></name><degrees>Prof Dr</degrees><xref ref-type="aff" rid="aff2">2</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Vanden Abeele</surname><given-names>Vero</given-names></name><degrees>Prof Dr</degrees><xref 
ref-type="aff" rid="aff3">3</xref></contrib><contrib contrib-type="author"><name name-style="western"><surname>Vanrumste</surname><given-names>Bart</given-names></name><degrees>Prof Dr</degrees><xref ref-type="aff" rid="aff1">1</xref></contrib></contrib-group><aff id="aff1"><institution>e-Media Research Lab/STADIUS, Department of Electrical Engineering, KU Leuven</institution><addr-line>Andreas Vesaliusstraat 13</addr-line><addr-line>Leuven</addr-line><country>Belgium</country></aff><aff id="aff2"><institution>Research Group M-Group/DistriNet, Department of Computer Science, KU Leuven</institution><addr-line>Brugge</addr-line><country>Belgium</country></aff><aff id="aff3"><institution>e-Media Research Lab/Human-Computer Interaction, Department of Computer Science, KU Leuven</institution><addr-line>Leuven</addr-line><country>Belgium</country></aff><contrib-group><contrib contrib-type="editor"><name name-style="western"><surname>Borycki</surname><given-names>Elizabeth</given-names></name></contrib></contrib-group><contrib-group><contrib contrib-type="reviewer"><name name-style="western"><surname>Wagg</surname><given-names>Adrian</given-names></name></contrib><contrib contrib-type="reviewer"><name name-style="western"><surname>Mao</surname><given-names>Siqi</given-names></name></contrib></contrib-group><author-notes><corresp>Correspondence to Hannelore Strauven, PhD, e-Media Research Lab/STADIUS, Department of Electrical Engineering, KU Leuven, Andreas Vesaliusstraat 13, Leuven, 3000, Belgium, +32 16377662; <email>hannelore.strauven@kuleuven.be</email></corresp></author-notes><pub-date pub-type="collection"><year>2024</year></pub-date><pub-date pub-type="epub"><day>24</day><month>12</month><year>2024</year></pub-date><volume>7</volume><elocation-id>e58094</elocation-id><history><date date-type="received"><day>06</day><month>03</month><year>2024</year></date><date date-type="rev-recd"><day>09</day><month>09</month><year>2024</year></date><date 
date-type="accepted"><day>23</day><month>09</month><year>2024</year></date></history><copyright-statement>&#x00A9; Hannelore Strauven, Chunzhuo Wang, Hans Hallez, Vero Vanden Abeele, Bart Vanrumste. Originally published in JMIR Nursing (<ext-link ext-link-type="uri" xlink:href="https://nursing.jmir.org">https://nursing.jmir.org</ext-link>), 24.12.2024. </copyright-statement><copyright-year>2024</copyright-year><license license-type="open-access" xlink:href="https://creativecommons.org/licenses/by/4.0/"><p>This is an open-access article distributed under the terms of the Creative Commons Attribution License (<ext-link ext-link-type="uri" xlink:href="https://creativecommons.org/licenses/by/4.0/">https://creativecommons.org/licenses/by/4.0/</ext-link>), which permits unrestricted use, distribution, and reproduction in any medium, provided the original work, first published in JMIR Nursing, is properly cited. The complete bibliographic information, a link to the original publication on <ext-link ext-link-type="uri" xlink:href="https://nursing.jmir.org/">https://nursing.jmir.org/</ext-link>, as well as this copyright and license information must be included.</p></license><self-uri xlink:type="simple" xlink:href="https://nursing.jmir.org/2024/1/e58094"/><abstract><sec><title>Background</title><p>The rising prevalence of urinary incontinence (UI) among older adults, particularly those living in nursing homes (NHs), underscores the need for innovative continence care solutions. 
The implementation of an unobtrusive sensor system may support nighttime monitoring of NH residents&#x2019; movements and, more specifically, the agitation possibly associated with voiding events.</p></sec><sec><title>Objective</title><p>This study aims to explore the application of an unobtrusive sensor system to monitor nighttime movement, integrated into a care bed with accelerometer sensors connected to a pressure-redistributing care mattress.</p></sec><sec sec-type="methods"><title>Methods</title><p>A total of 6 participants followed a 7-step protocol. The obtained dataset was segmented into 20-second windows with a 50% overlap. Each window was labeled with 1 of the 4 chosen activity classes: in bed, agitation, turn, and out of bed. A total of 1416 features were selected and analyzed with an XGBoost algorithm. At last, the model was validated using leave one subject out cross-validation (LOSOCV).</p></sec><sec sec-type="results"><title>Results</title><p>The trained model attained a trustworthy overall <italic>F</italic><sub>1</sub>-score of 79.56% for all classes and, more specifically, an <italic>F</italic><sub>1</sub>-score of 79.67% for the class &#x201C;Agitation.&#x201D;</p></sec><sec sec-type="conclusions"><title>Conclusions</title><p>The results from this study provide promising insights into unobtrusive nighttime movement monitoring. 
The study underscores the potential to enhance the quality of care for NH residents through a machine learning model based on data from accelerometers connected to a viscoelastic care mattress, thereby driving progress in the field of continence care and artificial intelligence&#x2013;supported health care for older adults.</p></sec></abstract><kwd-group><kwd>nursing home</kwd><kwd>agitation</kwd><kwd>incontinence</kwd><kwd>accelerometer</kwd><kwd>unobtrusive</kwd><kwd>enuresis</kwd><kwd>sensor technology</kwd></kwd-group></article-meta></front><body><sec id="s1" sec-type="intro"><title>Introduction</title><sec id="s1-1"><title>Background</title><p>With the increase in life expectancy, there is a corresponding rise in the prevalence of urinary incontinence (UI), a common health problem among older adults [<xref ref-type="bibr" rid="ref1">1</xref>]. Studies have indicated that UI affects over 50% of older adults residing in nursing homes (NHs) [<xref ref-type="bibr" rid="ref2">2</xref>-<xref ref-type="bibr" rid="ref4">4</xref>]. Current care practices for managing UI in NHs involve incontinence wear (ie, disposable absorbent products), with or without scheduled toilet visits (voiding) [<xref ref-type="bibr" rid="ref5">5</xref>]. Unfortunately, these practices often lead to redundant checks and delayed interventions, thereby triggering undesirable consequences, such as disturbed sleep patterns [<xref ref-type="bibr" rid="ref6">6</xref>]. 
Despite its significant impact on residents&#x2019; health and overall quality of life, along with the increased burden it places on care personnel and noteworthy financial implications, UI remains underdiagnosed and its management underreported [<xref ref-type="bibr" rid="ref7">7</xref>].</p><p>In a recent scoping review conducted by Omotunde and Wagg [<xref ref-type="bibr" rid="ref8">8</xref>], authors found encouraging outcomes regarding technology-driven continence care with a majority of solutions incorporating sensor technology integrated into body-worn disposable absorbent products (colloquially referred to as &#x201C;smart diapers&#x201D;). These smart diapers offer feedback regarding saturation levels and are capable of identifying instances of leakage [<xref ref-type="bibr" rid="ref9">9</xref>]. Complementary software apps are used to collate these data and generate insightful information and reminders intended for care personnel.</p><p>The introduction of technology-based continence care within NHs, facilitated by the use of smart diapers, holds the potential to monitor voiding processes and allows for timely product changes [<xref ref-type="bibr" rid="ref10">10</xref>]. By embracing such advancements, NHs could potentially improve the quality of care delivered to residents with UI.</p><p>The technology, however, comes with significant expenses, with an initial installation and training cost up to US $3300, combined with an additional charge of up to US $3.50 per diaper for the incontinence wear [<xref ref-type="bibr" rid="ref11">11</xref>]. Consequently, smart diapers are mainly used for a short period of time (ie, 3 days) to establish a personalized continence care plan of the resident [<xref ref-type="bibr" rid="ref10">10</xref>,<xref ref-type="bibr" rid="ref12">12</xref>]. 
Furthermore, the use of smart diapers is intrinsically linked to the use of continence wear, limiting the ability to monitor UI patterns exclusively during periods when these absorbent products are worn. This contrasts with the research findings of Ostaszkiewicz et al [<xref ref-type="bibr" rid="ref13">13</xref>], which emphasized the urgent need for independent resources, for example, technology, to inform decision-making regarding continence wear.</p><p>Moreover, previous research [<xref ref-type="bibr" rid="ref14">14</xref>] underlined the significance of developing technology solutions that exhibit sensitivity toward issues of intimacy, stigma, and taboo inherent in continence care to preserve the NH residents&#x2019; dignity and overall quality of life. In light of this perspective, the design of a monitoring system for NHs should prioritize unobtrusiveness, discreteness, and compatibility with appropriate care equipment.</p><p>Prior research has explored monitoring nighttime movement and identifying sleep-related disorders or sleep stages via the use of unobtrusive sensor systems equipped with accelerometer or pressure sensors, connected to beds [<xref ref-type="bibr" rid="ref15">15</xref>-<xref ref-type="bibr" rid="ref18">18</xref>]. However, only a limited number of researchers have directed the focus of nighttime movement monitoring with accelerometer sensors connected to the bed toward the exploration of detecting nighttime movement to support NH continence care. 
These few studies [<xref ref-type="bibr" rid="ref19">19</xref>-<xref ref-type="bibr" rid="ref21">21</xref>] are listed in <xref ref-type="table" rid="table1">Table 1</xref> and are further detailed in the section <italic>Prior Work</italic>.</p><table-wrap id="t1" position="float"><label>Table 1.</label><caption><p>Overview of unobtrusive accelerometer sensor systems evaluated to monitor nighttime agitation with a relation to continence care, summarizing the number of participants (p) and location of the study setup, the sensor position on the mattress, and the algorithm deployed for data analysis.</p></caption><table id="table1" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom">Authors and study</td><td align="left" valign="bottom">Study setup</td><td align="left" valign="bottom">Sensor position (mattress)</td><td align="left" valign="bottom">Algorithm</td></tr></thead><tbody><tr><td align="left" valign="top">Gong et al [<xref ref-type="bibr" rid="ref19">19</xref>]</td><td align="left" valign="top">12 p at home</td><td align="left" valign="top">Top and bottom + wristbands</td><td align="left" valign="top">Cole&#x2018;s actigraphy [<xref ref-type="bibr" rid="ref22">22</xref>] and STFT<sup><xref ref-type="table-fn" rid="table1fn1">a</xref></sup> [<xref ref-type="bibr" rid="ref23">23</xref>]</td></tr><tr><td align="left" valign="top">T&#x2019;Jonck et al [<xref ref-type="bibr" rid="ref20">20</xref>]</td><td align="left" valign="top">4 p at home</td><td align="left" valign="top">Top</td><td align="left" valign="top">CNN<sup><xref ref-type="table-fn" rid="table1fn2">b</xref></sup></td></tr><tr><td align="left" valign="top">T&#x2019;Jonck et al [<xref ref-type="bibr" rid="ref21">21</xref>]</td><td align="left" valign="top">1 p in lab</td><td align="left" valign="top">Bottom</td><td align="left" valign="top">FFT<sup><xref ref-type="table-fn" rid="table1fn3">c</xref></sup> and CNN</td></tr></tbody></table><table-wrap-foot><fn 
id="table1fn1"><p><sup>a</sup>STFT: short-time Fourier transform.</p></fn><fn id="table1fn2"><p><sup>b</sup>CNN: convolutional neural network.</p></fn><fn id="table1fn3"><p><sup>c</sup>FFT: fast Fourier transform.</p></fn></table-wrap-foot></table-wrap></sec><sec id="s1-2"><title>Prior Work</title><p>Gong et al [<xref ref-type="bibr" rid="ref19">19</xref>] monitored nighttime movement and incontinence in patients with Alzheimer disease. Their study encompassed 12 participants in a home environment. Wetness events were monitored via the wireless bed-wetting alarm system DryBuddy [<xref ref-type="bibr" rid="ref24">24</xref>]. The system used two triaxial accelerometer sensors, positioned on the upper and lower sides of the mattress. They applied Cole&#x2019;s actigraphy algorithm [<xref ref-type="bibr" rid="ref22">22</xref>] on the sensor data to estimate wake and sleep periods. Two additional accelerometer sensors [<xref ref-type="bibr" rid="ref25">25</xref>] were strapped to both wrists of participants to monitor hand movements. For the nighttime sleep agitation assessment, they calculated a short-time Fourier transform [<xref ref-type="bibr" rid="ref23">23</xref>], based on a combined dataset from the bed sensors data and wrists&#x2019; nodes data, to indicate agitation.</p><p>The authors established that almost half (49%) of the sleep agitation events occurred before a voiding event, supporting the observation that a need to void can trigger agitation. However, authors did not provide evaluation metrics for the used algorithm, nor differentiated multiple nighttime activities.</p><p>In another study, T&#x2019;Jonck et al [<xref ref-type="bibr" rid="ref20">20</xref>] deployed a smartphone-integrated triaxial accelerometer, which was placed in 4 different positions on the mattress. 
Their study encompassed 4 participants within a home setting, using a convolutional neural network (CNN) approach for nighttime activity tracking (ie, none, sit down, lay down, sit up, and stand up). When including all sensor positions in the model, an accuracy (ie, the ratio of correct predictions to the total number of predictions [<xref ref-type="bibr" rid="ref26">26</xref>]) of 92% was reached. Unfortunately, authors did not further elaborate on these different positions.</p><p>In a subsequent study of T&#x2019;Jonck et al [<xref ref-type="bibr" rid="ref21">21</xref>], the smartphone was substituted with a triaxial accelerometer sensor, positioned on the bottom surface of the mattress. This evaluation was conducted with one participant within a laboratory environment, and activity tracking (ie, none, in bed, out of bed, changing position, and agitation) was accomplished via a fast Fourier transform (FFT) model in addition to a CNN-based model. For the model that combines FFT and CNN, an accuracy of 88.96% was achieved, showing the applicability of unobtrusive monitoring of nighttime movement via accelerometer sensors.</p><p>Both studies yield outcomes that suggest promise for the detection of agitation and, hence, monitoring nighttime movement. However, Gong et al&#x2019;s [<xref ref-type="bibr" rid="ref19">19</xref>] system design involved the use of nodes strapped on the participants&#x2019; wrists, which could be perceived as obtrusive. Furthermore, their study did not include classification metrics to evaluate the used algorithm, hindering a meaningful comparison with alternative system designs.</p><p>Conversely, T&#x2019;Jonck et al [<xref ref-type="bibr" rid="ref20">20</xref>,<xref ref-type="bibr" rid="ref21">21</xref>] prioritize an unobtrusive system design in their research, incorporating thorough evaluation metrics for their developed algorithms. 
Consequently, in their subsequent study, they substitute the smartphone app with a mattress-attached sensor. However, this modified setup is only validated using data from a single participant.</p><p>Both studies were conducted in a home environment or explicitly specified the use of a standard bed and mattress, without considering the pressure-redistributing features of care mattresses for care beds. Such a care mattress is composed of a temperature-sensitive cell structure that softens by the heat from an individual&#x2019;s body and molds around the body to distribute pressure efficiently [<xref ref-type="bibr" rid="ref27">27</xref>]. This means that the individual&#x2019;s weight can spread over a much wider area compared with a conventional mattress. Such a care mattress is frequently used in NHs, as it reduces the risk of pressure ulcers and, thus, is recommended for use among individuals at high risk of developing pressure ulcers [<xref ref-type="bibr" rid="ref28">28</xref>-<xref ref-type="bibr" rid="ref30">30</xref>].</p></sec><sec id="s1-3"><title>Goal of This Study</title><p>In this study, we extend the investigation of unobtrusive monitoring with accelerometer sensors positioned on the bottom surface of a pressure-redistributing care mattress. This exploration aims to monitor nighttime movement and detect large body movements, a symptom of nocturnal agitation [<xref ref-type="bibr" rid="ref31">31</xref>], simulated by 6 adult participants in an experimental setup. Notably, our approach incorporates a care bed with a viscoelastic mattress as used in NH settings, for the purpose of tracking 4 activities: in bed, turn, agitation, and out of bed. 
Through this methodology, we endeavor to enhance the understanding of the potential of using artificial intelligence tools in advancing the field of NH continence care.</p></sec></sec><sec id="s2" sec-type="methods"><title>Methods</title><sec id="s2-1"><title>Movement Monitoring System</title><p>The movement of the participants was monitored using the Byteflies Kit [<xref ref-type="bibr" rid="ref32">32</xref>], a medically certified motion monitoring device, with sensor dots. A single sensor dot can record triaxial accelerometer and triaxial gyroscope signals, sampled at 100 Hz. Sensor dots can last 24 hours and are charged via a docking station.</p><p>In this measurement, the researchers opted to attach two sensor dots to the bottom side of the mattress on an NH care bed (<xref ref-type="fig" rid="figure1">Figure 1</xref>) on the left and right side. The placement of the dots on the mattress aligned with the positioning in a previous study [<xref ref-type="bibr" rid="ref33">33</xref>]. If a participant lay down on the care bed, the sensor dots were located beneath their back.</p><p>The pressure-redistributing care mattress is a Tempur-Med viscoelastic mattress with a width of 14 cm, as commonly used in NHs to reduce pressure ulcers [<xref ref-type="bibr" rid="ref27">27</xref>].</p><fig position="float" id="figure1"><label>Figure 1.</label><caption><p>Photograph of the movement monitoring system attached to the bottom side of the mattress on a nursing home care bed. The position of the two Byteflies sensor dots is circled by a dotted line, with dot 1 positioned on top and dot 2 on the bottom.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="nursing_v7i1e58094_fig01.png"/></fig></sec><sec id="s2-2"><title>Recruitment</title><p>The data acquisition was carried out during the COVID-19 pandemic, from October 2020 until December 2020. 
NH residents are a frail population and were greatly affected by the adverse health effects of the pandemic. Therefore, we recruited university colleagues who were allowed to travel to campus. Ethical approval to conduct the research was obtained from the KU Leuven Social and Societal Ethics Committee with protocol number G-2020&#x2010;2214. Safety measures as mandated by the national government were applied at all times. Inclusion criteria for participants were individuals 18 years or older, living in Belgium, and being able to participate independently, understanding the purpose and involvement and providing consent. In total, 6 colleagues volunteered their time with a mean age of 29 (SD=4) years, a mean height of 177 (SD=9) cm, and a mean weight of 74 (SD=20) kg. Among the participants, 2 were female, and 4 were male.</p></sec><sec id="s2-3"><title>Measurement Protocol</title><p>The participants were instructed to follow a 7-step protocol outlined in <xref ref-type="fig" rid="figure2">Figure 2</xref> to simulate nighttime movement, including (nocturnal) agitation. To start with step 1, the participants entered the care bed on their back, lying down for 60 seconds. Subsequently, for step 2, they turned onto their left side and waited for 30 seconds. This sequence continued with step 3, involving a return to their back and a 30-second wait, followed by step 4, requiring a turn to their right side with another 30-second interval. In step 5, participants engaged in large body movements for 30 seconds, with the execution left to the participants&#x2019; interpretation, because the authors could not find a standardized definition or duration for nocturnal agitation in older adults correlated to incontinence. Step 5 was succeeded by step 6, involving lying on their back for 60 seconds. Finally, step 7 required participants to leave the bed for 60 seconds before repeating the entire protocol. Each participant completed the protocol 5 times. 
A Garmin Venue SQ smartwatch [<xref ref-type="bibr" rid="ref34">34</xref>] guided the participants through the protocol, providing vibration notifications to prompt transitions between steps.</p><fig position="float" id="figure2"><label>Figure 2.</label><caption><p>Illustration of the measurement protocol followed by the participants to monitor their movement.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="nursing_v7i1e58094_fig02.png"/></fig></sec><sec id="s2-4"><title>Data Collection and Analysis</title><p>First, relevant settings and hyperparameters to start the data analysis and training process were tuned on the basis of the obtained results. After an exploration to identify the optimal settings, the following configurations were selected.</p><sec id="s2-4-1"><title>Signal Preprocessing and Annotation</title><p><xref ref-type="fig" rid="figure3">Figure 3</xref> displays the unfiltered triaxial accelerometer data gathered from the 2 Byteflies dots for participant 2. These data were annotated based on the protocol&#x2019;s time interval. In this preprocessing, the data were also scaled to the unit variance and band-pass filtered. Starting from the filter settings initially outlined by Razjouyan et al [<xref ref-type="bibr" rid="ref35">35</xref>] and further fine-tuned for our dataset, a fourth-order Butterworth filter was applied with cutoff frequencies at 2 Hz and 10 Hz. At last, the data were categorized into 4 activity classes that are relevant for monitoring with regard to NH (continence) care management:</p><list list-type="order"><list-item><p>In bed: The bed is occupied, and the participant lies either on their back (steps 1, 3, and 6), left side (step 2), or right side (step 4). For NH residents, activity within this class is considered regular nighttime behavior.</p></list-item><list-item><p>Agitation: The bed is occupied, and the participant acts agitated by moving their arms and legs (step 5). 
For NH residents who need continence care, this agitated movement can be triggered by a voiding event.</p></list-item><list-item><p>Turn: The bed is occupied, and the participant transitions from the current step in the protocol to the following step (eg, from turning on the left side in step 2 to turning on the back in step 3). On each occasion in the measurement protocol, the last 5 seconds of the current step and the first 5 seconds of the following step are categorized as a turn. For NH residents, turning is considered to be an effective way of preventing pressure ulcers [<xref ref-type="bibr" rid="ref36">36</xref>].</p></list-item><list-item><p>Out of bed (unoccupied): The bed is unoccupied, as the participant left the bed. This is regular daytime behavior for NH residents.</p></list-item></list><fig position="float" id="figure3"><label>Figure 3.</label><caption><p>The unfiltered triaxial acceleration (Ax, Ay, and Az) in gravity (g) in time of two Byteflies dots for participant 2 for a complete measurement protocol. Beneath this sensor data, the signal is categorized into 4 activity classes.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="nursing_v7i1e58094_fig03.png"/></fig></sec><sec id="s2-4-2"><title>Sliding Window</title><p>A sliding window was applied on the dataset and the selected window size was 20 seconds with 50% overlapping for a sampling frequency of 100 Hz. If there were multiple classes present within one window, the window was annotated as the majority class.</p></sec><sec id="s2-4-3"><title>Feature Extraction and Selection</title><p>To extract features from the segmented time series dataset, the Time Series Feature Extraction Library Python package was selected [<xref ref-type="bibr" rid="ref37">37</xref>]. 
Time Series Feature Extraction Library is an automated process of feature extraction, designed to accelerate the time consuming and complex exploratory analysis of multidimensional time series. The library computes over 60 different parameters across temporal, statistical, and spectral domains. Out of the computed set of 4668 features, a refined subset of 1416 features was derived after eliminating correlated and zero-variance features. Subsequently, this selected subset was scaled to unit variance.</p></sec><sec id="s2-4-4"><title>Machine Learning Algorithm</title><p>On the basis of the selected features and obtained results in the training process, the scalable end-to-end tree extreme gradient boosting system, XGBoost, was used to train the model for the classification task at hand [<xref ref-type="bibr" rid="ref38">38</xref>]. It is an open-source Python package that implements gradient boosting and tree learning paralleling, effective in applications with limited data and in human activity recognition, including older adults [<xref ref-type="bibr" rid="ref38">38</xref>-<xref ref-type="bibr" rid="ref41">41</xref>]. The hyperparameters of the XGBoost algorithm were optimally adapted to obtain a robust and accurate model. SHAP (Shapley additive explanations) TreeExplainer was used as the explanation method for the model&#x2019;s output, providing fast local explanations with guaranteed consistency [<xref ref-type="bibr" rid="ref42">42</xref>].</p></sec><sec id="s2-4-5"><title>Model Training and Evaluation</title><p>To evaluate the model, leave one subject out cross-validation (LOSOCV) was used. This statistical technique divides the original dataset into a training and validation set, alternating between them in successive rounds and ensuring each data point undergoes validation [<xref ref-type="bibr" rid="ref43">43</xref>]. 
Gholamiangonabadi et al [<xref ref-type="bibr" rid="ref44">44</xref>] demonstrated that LOSOCV serves as a rigid criterion for evaluating models of time series accelerometer data in human activity recognition.</p><p>In this study, the process involved 6 iterations and, for each iteration, data from 1 out of the 6 participants was left out as the validation set to train the XGBoost classification model.</p></sec></sec><sec id="s2-5"><title>Ethical Considerations</title><p>Ethical approval to conduct the research was obtained from the KU Leuven Social and Societal Ethics Committee with protocol number G-2020&#x2010;2214. All participants were invited to participate voluntarily and received verbal and written information about the study in advance. Each participant signed an informed consent form and was assigned a unique identifier for data processing. The first author kept the names and unique identifiers separately from the obtained study data.</p></sec></sec><sec id="s3" sec-type="results"><title>Results</title><sec id="s3-1"><title>Algorithm Performance</title><p>Upon acquisition of data from all participants, the data were processed to assess the effectiveness of the XGBoost model for the task at hand. The validation outcomes are represented in <xref ref-type="table" rid="table2">Tables 2</xref> and <xref ref-type="table" rid="table3">3</xref>, with the results for the total dataset from two Byteflies dots at the top and the results split per Byteflies dot at the bottom. 
In total, the dataset encompasses 898 windows, of which 481 windows (53.56%) were attributed to the class &#x201C;In bed,&#x201D; 60 windows (6.68%) to &#x201C;Agitation,&#x201D; 207 windows (23.05%) to &#x201C;Turn,&#x201D; and 150 windows (16.70%) to &#x201C;Out of bed.&#x201D; Because the number of windows per class is not proportional, the dataset can be considered imbalanced.</p><table-wrap id="t2" position="float"><label>Table 2.</label><caption><p>Overview of the distribution of windows (n and %) per class for the total dataset from two Byteflies dots and the classification metrics precision, recall, and <italic>F</italic><sub>1</sub>-score (%) per class for the leave one subject out cross-validation of the XGBoost model.</p></caption><table id="table2" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom"/><td align="left" valign="bottom">In bed</td><td align="left" valign="bottom">Agitation</td><td align="left" valign="bottom">Turn</td><td align="left" valign="bottom">Out of bed</td></tr></thead><tbody><tr><td align="left" valign="bottom">Windows, n (%)</td><td align="left" valign="bottom">481 (53.6)</td><td align="left" valign="bottom">60 (6.7)</td><td align="left" valign="bottom">207 (23)</td><td align="left" valign="bottom">150 (16.7)</td></tr><tr><td align="left" valign="bottom">Precision (%)</td><td align="left" valign="bottom">84.32</td><td align="left" valign="bottom">77.78</td><td align="left" valign="bottom">69.67</td><td align="left" valign="bottom">78.95</td></tr><tr><td align="left" valign="bottom">Recall (%)</td><td align="left" valign="bottom">86.07</td><td align="left" valign="bottom">81.67</td><td align="left" valign="bottom">71.01</td><td align="left" valign="bottom">70</td></tr><tr><td align="left" valign="bottom"><italic>F</italic><sub>1</sub>-score (%)</td><td align="left" valign="bottom">85.19</td><td align="left" valign="bottom">79.67</td><td align="left" valign="bottom">70.33</td><td align="left" 
valign="bottom">74.24</td></tr></tbody></table></table-wrap><table-wrap id="t3" position="float"><label>Table 3.</label><caption><p>Overview of the classification metrics precision, recall, and F1-score (%) per Byteflies dot (d1 and d2) for the leave one subject out cross-validation of the XGBoost model.</p></caption><table id="table3" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom"/><td align="left" valign="bottom" colspan="2">In bed</td><td align="left" valign="bottom" colspan="2">Agitation</td><td align="left" valign="bottom" colspan="2">Turn</td><td align="left" valign="bottom" colspan="2">Out of bed</td></tr><tr><td align="left" valign="bottom"/><td align="left" valign="bottom">d1</td><td align="left" valign="bottom">d2</td><td align="left" valign="bottom">d1</td><td align="left" valign="bottom">d2</td><td align="left" valign="bottom">d1</td><td align="left" valign="bottom">d2</td><td align="left" valign="bottom">d1</td><td align="left" valign="bottom">d2</td></tr></thead><tbody><tr><td align="left" valign="bottom">Precision (%)</td><td align="char" char="." valign="bottom">81.75</td><td align="char" char="." valign="bottom">77.99</td><td align="char" char="." valign="bottom">84.13</td><td align="char" char="." valign="bottom">68.06</td><td align="char" char="." valign="bottom">71.83</td><td align="char" char="." valign="bottom">59.46</td><td align="char" char="." valign="bottom">77.97</td><td align="char" char="." valign="bottom">80.70</td></tr><tr><td align="left" valign="bottom">Recall (%)</td><td align="char" char="." valign="bottom">85.65</td><td align="char" char="." valign="bottom">85.45</td><td align="char" char="." valign="bottom">88.33</td><td align="char" char="." valign="bottom">81.67</td><td align="char" char="." valign="bottom">73.56</td><td align="char" char="." valign="bottom">52.88</td><td align="char" char="." valign="bottom">61.74</td><td align="char" char="." 
valign="bottom">61.74</td></tr><tr><td align="left" valign="bottom">F<sub>1</sub>-score (%)</td><td align="char" char="." valign="bottom">83.65</td><td align="char" char="." valign="bottom">81.55</td><td align="char" char="." valign="bottom">86.18</td><td align="char" char="." valign="bottom">74.24</td><td align="char" char="." valign="bottom">72.68</td><td align="char" char="." valign="bottom">55.98</td><td align="char" char="." valign="bottom">68.91</td><td align="char" char="." valign="bottom">69.96</td></tr></tbody></table></table-wrap><p>The classification metrics (ie, precision, recall, and <italic>F</italic><sub>1</sub>-score) listed in <xref ref-type="table" rid="table2">Tables 2</xref> and <xref ref-type="table" rid="table3">3</xref> provide a performance assessment of the deployed algorithm. The confusion matrix depicted in <xref ref-type="fig" rid="figure4">Figure 4</xref> illustrates the distribution of the number of windows predicted per class for the total cross-validation dataset from two Byteflies dots. Precision is the measure to tell how many correct positive predictions the model made [<xref ref-type="bibr" rid="ref26">26</xref>]. It is calculated as the ratio of true positive predictions to the total positive predictions (true and false positive). The class &#x201C;In bed&#x201D; attained the highest precision (84.32%) and the model interpreted a few of the windows from the classes &#x201C;Turn&#x201D; (41) and &#x201C;Out of bed&#x201D; (36) as &#x201C;In bed.&#x201D; These are called false positives. The precision for &#x201C;Agitation&#x201D; was 77.78% as the model predicted some windows from the class &#x201C;Turn&#x201D; as &#x201C;Agitation&#x201D; too.</p><p>Recall is another classification metric to measure the ratio of correct positive predictions to all actual positives [<xref ref-type="bibr" rid="ref26">26</xref>]. 
The recall for the class &#x201C;In bed&#x201D; scored the highest (86.07%), and the misclassified windows were predicted as &#x201C;Turn&#x201D; (44) or &#x201C;Out of bed&#x201D; (23) (false negatives). Also for the &#x201C;Agitation&#x201D; class, it was observed that 11 of the incorrectly predicted windows were labeled as &#x201C;Turn.&#x201D; Conversely, the incorrect predictions for &#x201C;Turn&#x201D; also had a majority of 41 windows in &#x201C;In bed.&#x201D; For the class &#x201C;Out of bed,&#x201D; the most incorrectly predicted windows were observed in &#x201C;In bed&#x201D; (36).</p><p>Finally, the <italic>F</italic><sub>1</sub>-score is the harmonic mean or weighted average of precision and recall for a classification problem and especially useful with an imbalanced dataset [<xref ref-type="bibr" rid="ref26">26</xref>]. The class &#x201C;In bed&#x201D; attained the highest overall <italic>F</italic><sub>1</sub>-score (85.19%) and &#x201C;Turn&#x201D; manifested the lowest <italic>F</italic><sub>1</sub>-score (70.33%).</p><p>The results split per Byteflies dot in <xref ref-type="table" rid="table3">Table 3</xref> revealed an overall better outcome for dot 1, compared with dot 2, especially for the classes &#x201C;Agitation&#x201D; and &#x201C;Turn.&#x201D; When combining the data from two dots, the results for the classes &#x201C;In bed&#x201D; and &#x201C;Out of bed&#x201D; improved. 
In contrast, the results for &#x201C;Agitation&#x201D; and &#x201C;Turn&#x201D; were higher for dot 1 than combined with the lower result of dot 2.</p><fig position="float" id="figure4"><label>Figure 4.</label><caption><p>The confusion matrix of the leave one subject out cross-validation of the XGBoost model, containing data from 2 Byteflies dots, visualizing the number of windows predicted per class.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="nursing_v7i1e58094_fig04.png"/></fig></sec><sec id="s3-2"><title>High-Impact Features</title><p><xref ref-type="fig" rid="figure5">Figure 5</xref> illustrates the mean absolute SHAP value per class for the top 10 features with the highest impact on the model predictions. SHAP values use a game-theoretic approach to quantify the contribution of each feature to the machine learning model&#x2019;s outcome [<xref ref-type="bibr" rid="ref45">45</xref>]. These values assign an importance value to each feature, reflecting how much it influences the final prediction. To specifically elaborate on the tree-based XGBoost model at hand, the TreeExplainer explanation method is used. The name of the feature provides information on the data used to compute the feature: the axis (Ax, Ay, or Az), the applied filter (band-pass), and the sensor dot (1 or 2).</p><p>Among the selected features, the &#x201C;ECDF Percentile Count&#x201D; computes the cumulative sum of samples falling below the percentile of the empirical cumulative distribution function (ECDF) [<xref ref-type="bibr" rid="ref37">37</xref>].</p><fig position="float" id="figure5"><label>Figure 5.</label><caption><p>The bar plot of the mean absolute Shapley additive explanations values per class for the top 10 features with the highest impact on the XGBoost model&#x2019;s output. 
The name of the feature provides information on the data used to compute the feature: the acceleration axis (Ax, Ay, or Az), the applied band-pass filter (bp), and the sensor dot (1 or 2). ECDF: Empirical Cumulative Distribution Function; FFT : Fast Fourier Transform.</p></caption><graphic alt-version="no" mimetype="image" position="float" xlink:type="simple" xlink:href="nursing_v7i1e58094_fig05.png"/></fig><p>The ECDF is a simple nonparametric estimator and is obtained by calculating the cumulative probability for each number of unique observations in the data sample less than or equal to a given unique observation x, divided by the total number of observations n (Equation 1) [<xref ref-type="bibr" rid="ref46">46</xref>,<xref ref-type="bibr" rid="ref47">47</xref>].</p><disp-formula id="E1"> <label>(1)</label><mml:math id="eqn1"><mml:mstyle displaystyle="true" scriptlevel="0"><mml:mrow><mml:mi>E</mml:mi><mml:mi>C</mml:mi><mml:mi>D</mml:mi><mml:mi>F</mml:mi><mml:mo stretchy="false">(</mml:mo><mml:mi>x</mml:mi><mml:mo stretchy="false">)</mml:mo><mml:mo>=</mml:mo><mml:mo stretchy="false">(</mml:mo><mml:mi>n</mml:mi><mml:mi>u</mml:mi><mml:mi>m</mml:mi><mml:mi>b</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi><mml:mspace width="thinmathspace"/><mml:mi>o</mml:mi><mml:mi>f</mml:mi><mml:mspace width="thinmathspace"/><mml:mi>u</mml:mi><mml:mi>n</mml:mi><mml:mi>i</mml:mi><mml:mi>q</mml:mi><mml:mi>u</mml:mi><mml:mi>e</mml:mi><mml:mspace width="thinmathspace"/><mml:mi>o</mml:mi><mml:mi>b</mml:mi><mml:mi>s</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi><mml:mi>v</mml:mi><mml:mi>a</mml:mi><mml:mi>t</mml:mi><mml:mi>i</mml:mi><mml:mi>o</mml:mi><mml:mi>n</mml:mi><mml:mi>s</mml:mi><mml:mo>&#x2264;</mml:mo><mml:mfrac><mml:mi>x</mml:mi><mml:mi>n</mml:mi></mml:mfrac><mml:mo stretchy="false">)</mml:mo></mml:mrow></mml:mstyle></mml:math></disp-formula><p>Notably, the &#x201C;ECDF Percentile Count_0&#x201D; for data along the x-axis of sensor dot 1 computes the cumulative sum of samples falling 
below the 20th ECDF percentile and significantly impacts the model&#x2019;s output across the three classes &#x201C;In bed,&#x201D; &#x201C;Out of bed,&#x201D; and &#x201C;Agitation.&#x201D; For the class &#x201C;Turn,&#x201D; the feature &#x201C;Absolute energy&#x201D; for data of the x-axis of sensor dot 1, which computes the absolute energy of the signal, has the highest average <inline-graphic xlink:href="nursing_v7i1e58094_fig06.png"/>impact [<xref ref-type="bibr" rid="ref37">37</xref>]. In addition, 3 features in the plot involve FFT mean coefficients, capturing the mean value of each spectrogram frequency [<xref ref-type="bibr" rid="ref37">37</xref>]. With a default setting of 256 bins and a sampling frequency of 100 Hz, the bin width is calculated with Equation 2 and is 0.39 Hz. This means that bin 55 corresponds to 21.48 Hz, bin 62 to 24.22 Hz, and bin 144 to 43.68 Hz. The frequency of bin 144 is the same as for bin 112, as only half of the bins are unique in the FFT spectrum of a signal [<xref ref-type="bibr" rid="ref48">48</xref>].</p><disp-formula id="E2"><label>(2)</label><mml:math id="eqn2"><mml:mstyle displaystyle="true" 
scriptlevel="0"><mml:mrow><mml:mi>B</mml:mi><mml:mi>i</mml:mi><mml:mi>n</mml:mi><mml:mtext>&#x00A0;</mml:mtext><mml:mi>w</mml:mi><mml:mi>i</mml:mi><mml:mi>d</mml:mi><mml:mi>t</mml:mi><mml:mi>h</mml:mi><mml:mo>=</mml:mo><mml:mfrac><mml:mrow><mml:mi>S</mml:mi><mml:mi>a</mml:mi><mml:mi>m</mml:mi><mml:mi>p</mml:mi><mml:mi>l</mml:mi><mml:mi>i</mml:mi><mml:mi>n</mml:mi><mml:mi>g</mml:mi><mml:mtext>&#x00A0;</mml:mtext><mml:mi>f</mml:mi><mml:mi>r</mml:mi><mml:mi>e</mml:mi><mml:mi>q</mml:mi><mml:mi>u</mml:mi><mml:mi>e</mml:mi><mml:mi>n</mml:mi><mml:mi>c</mml:mi><mml:mi>y</mml:mi></mml:mrow><mml:mrow><mml:mi>N</mml:mi><mml:mi>u</mml:mi><mml:mi>m</mml:mi><mml:mi>b</mml:mi><mml:mi>e</mml:mi><mml:mi>r</mml:mi><mml:mtext>&#x00A0;</mml:mtext><mml:mi>o</mml:mi><mml:mi>f</mml:mi><mml:mtext>&#x00A0;</mml:mtext><mml:mi>b</mml:mi><mml:mi>i</mml:mi><mml:mi>n</mml:mi><mml:mi>s</mml:mi></mml:mrow></mml:mfrac><mml:mo>=</mml:mo><mml:mtext>&#x00A0;</mml:mtext><mml:mfrac><mml:mrow><mml:mn>100</mml:mn><mml:mtext>&#x00A0;</mml:mtext><mml:mi>H</mml:mi><mml:mi>z</mml:mi></mml:mrow><mml:mrow><mml:mn>256</mml:mn><mml:mtext>&#x00A0;</mml:mtext><mml:mi>b</mml:mi><mml:mi>i</mml:mi><mml:mi>n</mml:mi><mml:mi>s</mml:mi></mml:mrow></mml:mfrac><mml:mo>=</mml:mo><mml:mn>0.39</mml:mn><mml:mrow><mml:mtext>&#x00A0;</mml:mtext></mml:mrow><mml:mi>H</mml:mi><mml:mi>z</mml:mi></mml:mrow></mml:mstyle></mml:math></disp-formula><p>Another set of 3 features in the plot relates to continuous wavelet transform (CWT): &#x201C;Wavelet absolute mean&#x201D; computes the CWT absolute mean value for each wavelet scale, while &#x201C;Wavelet energy&#x201D; quantifies the CWT energy for each wavelet scale [<xref ref-type="bibr" rid="ref37">37</xref>]. Importantly, none of the selected features are computed based on z-axis data. 
Based on the positioning of the sensor dots, the z-axis was directed upwards, from the bottom to the top of the mattress [<xref ref-type="bibr" rid="ref32">32</xref>].</p></sec><sec id="s3-3"><title>Leave One Subject Out Cross-Validation</title><p><xref ref-type="table" rid="table4">Table 4</xref> presents the weighted <italic>F</italic><sub>1</sub>-score and accuracy for the LOSOCV set per participant, along with the overall result. The accuracy, or the ratio of correct predictions to the total number of predictions [<xref ref-type="bibr" rid="ref26">26</xref>], is chosen to be able to compare the results with prior work. However, the weighted <italic>F</italic><sub>1</sub>-score is the more appropriate metric here for model validation due to the label imbalance of the dataset, as illustrated in <xref ref-type="table" rid="table2">Table 2</xref> and <xref ref-type="table" rid="table3">Table 3</xref>. The model achieved an overall score of 79.56% for <italic>F</italic><sub>1</sub>-score and 79.62% for accuracy. Participant-specific performance ranged from a minimum <italic>F</italic><sub>1</sub>-score of 69.40% (participant 3) to a maximum of 87.72% (participant 4), and accuracy ranged from 69.33% (participant 3) to 88% (participant 4).</p><table-wrap id="t4" position="float"><label>Table 4.</label><caption><p>Overview of the weighted <italic>F</italic><sub>1</sub>-score and accuracy (%) per participant (p) and in total for the leave one subject out cross-validation of the XGBoost model, containing data from two Byteflies dots.</p></caption><table id="table4" frame="hsides" rules="groups"><thead><tr><td align="left" valign="bottom"/><td align="left" valign="bottom">p1</td><td align="left" valign="bottom">p2</td><td align="left" valign="bottom">p3</td><td align="left" valign="bottom">p4</td><td align="left" valign="bottom">p5</td><td align="left" valign="bottom">p6</td><td align="left" valign="bottom">Total</td></tr></thead><tbody><tr><td align="left" valign="top"><italic>F</italic><sub>1</sub>-score (%)</td><td align="char" char="." valign="top">80.71</td><td align="char" char="." 
valign="top">79.27</td><td align="char" char="." valign="top">69.40</td><td align="char" char="." valign="top">87.72</td><td align="char" char="." valign="top">77.37</td><td align="char" char="." valign="top">81.67</td><td align="char" char="." valign="top">79.56</td></tr><tr><td align="left" valign="top">Accuracy (%)</td><td align="char" char="." valign="top">80.54</td><td align="char" char="." valign="top">79.33</td><td align="char" char="." valign="top">69.33</td><td align="char" char="." valign="top">88.00</td><td align="char" char="." valign="top">78.00</td><td align="char" char="." valign="top">82.55</td><td align="char" char="." valign="top">79.62</td></tr></tbody></table></table-wrap></sec></sec><sec id="s4" sec-type="discussion"><title>Discussion</title><sec id="s4-1"><title>Principal Results</title><p>The study gained insights into using accelerometer sensors, an XGBoost model, and LOSOCV as an unobtrusive approach for monitoring nighttime movements to support NH continence care, using a viscoelastic care mattress in our setup, which effectively distributes an individual&#x2019;s weight over a broader surface area.</p><p>The confusion matrix indicated that the model correctly classified most windows. With an overall <italic>F</italic><sub>1</sub>-score of 79.56%, and more specifically 79.67% for the class &#x201C;Agitation,&#x201D; the algorithm developed in this study has attained a high level of trustworthiness. The validation results for each participant revealed a variation in <italic>F</italic><sub>1</sub>-score of 18.32% among participants. Despite this variability, it is noteworthy that all participants&#x2019; test outcomes achieved strong model performance. 
This was particularly remarkable given the considerable differences in weight and height among participants.</p><p>A notable observation applies to the results for class &#x201C;Turn,&#x201D; where 41 windows are misclassified as &#x201C;In bed.&#x201D; This misclassification may be attributed to the selected window size of 20 seconds. This window size provided the best overall result during the exploration phase, but is considerably larger than the 10-second duration of a turn in the dataset. Given that the primary emphasis of the study was on the detection of agitation, this misclassification was not deemed concerning. Another issue arose with the smaller number of misclassifications between &#x201C;In bed&#x201D; and &#x201C;Out of bed.&#x201D; By removing the signal&#x2019;s direct current component with a band-pass filter, the difference between the 2 activities became less visible. Given the importance of being able to accurately determine whether the bed of an NH resident is occupied or not, the sensor system could be enhanced by incorporating additional components, such as a pressure sensor.</p><p>In our investigation of feature impact on the model&#x2019;s output, the cumulative sum of samples falling below the 20th percentile of the ECDF has a high impact on 3 of the 4 classes: &#x201C;In bed,&#x201D; &#x201C;Out of bed,&#x201D; and &#x201C;Agitation.&#x201D; Interestingly, none of the selected features are computed based on the z-axis data. This suggests that movement in the (inverse) direction from the bottom to the top of the mattress provides less informative input for our classification task.</p><p>Upon comparing the outcomes for the individual sensor dots with the combined result, it became evident that using more than one sensor only slightly improved the model&#x2019;s overall performance for 2 out of 4 classes. 
When learning more about the results per dot for &#x201C;Agitation,&#x201D; it was observed that only 4 additional windows were misclassified for dot 2, compared with the result for dot 1. Since there was only a limited number of windows for this class, this is immediately notable in a performance assessment. Unfortunately, there was no clear explanation for this discrepancy in the classification.</p><p>In practical application, a nighttime movement monitoring system could support (continence) care in NHs by notifying care personnel based on the detected events. In the case of the detected event &#x201C;In bed,&#x201D; no immediate action from care personnel is required. However, when the system detects &#x201C;Agitation,&#x201D; care personnel should receive a notification, especially when multiple successive agitation events are identified. For incontinent residents, care personnel could then assess and, if necessary, change the incontinence material. To provide personalized support, the NH would have the flexibility to adjust the threshold for the number of detected events per resident. For the event &#x201C;Turn,&#x201D; action from care personnel would only be necessary when it occurs infrequently, aimed at preventing decubitus. Here, the frequency could be tailored to each resident. 
Finally, when &#x201C;Out of bed&#x201D; is detected, care personnel should be notified that the resident has left the bed, allowing them to assist the resident back into bed without further complications.</p></sec><sec id="s4-2"><title>Comparison With Prior Work</title><p>Gong et al [<xref ref-type="bibr" rid="ref19">19</xref>] detected agitation using data from both bed sensors and wristbands for the algorithm, yet the paper lacks detailed precision insights or evaluation metrics for their model to compare with the results of this study.</p><p>T&#x2019;Jonck et al [<xref ref-type="bibr" rid="ref20">20</xref>,<xref ref-type="bibr" rid="ref21">21</xref>] achieved a high accuracy (92% and 85%) using FFT and CNN models across various bed sensor setups and participants. Their studies, however, entailed fewer participants and did not use a viscoelastic care mattress.</p><p>Notably, the smartphone accelerometer placed on top of the mattress yielded higher accuracy than the bottom placement of the accelerometer sensor, indicating the latter as a more challenging position for movement measurement. Nonetheless, the bottom placement aligns more closely with the goal of developing an unobtrusive system. T&#x2019;Jonck et al [<xref ref-type="bibr" rid="ref20">20</xref>,<xref ref-type="bibr" rid="ref21">21</xref>] also concluded that the accelerometer&#x2019;s position on the bed should not significantly impact the model&#x2019;s ability to classify the data. This is contradictory to our findings, where a difference in performance is recorded between the two Byteflies dots.</p></sec><sec id="s4-3"><title>Limitations</title><p>The dataset of our study is limited, incorporating only 6 iterations (1 per participant). A larger sample size by including more participants in the study could potentially yield improved results.</p><p>Another limitation was the absence of NH residents or older adults among the participants. 
The simulation of large body movements was based on participants&#x2019; own interpretation, which may not authentically mirror the nocturnal agitation experienced by older adults.</p><p>Finally, the study adhered to a protocol designed to simulate nighttime movement. It is essential to note that this simulation differs from real nighttime movement, where lying in a specific posture, turning, or experiencing agitation is not dictated by predefined time intervals and does not necessarily follow a sequential pattern.</p></sec><sec id="s4-4"><title>Conclusions</title><p>This study presented the exploration of accelerometer-based unobtrusive monitoring of nighttime movements to support NH continence care. The XGBoost model combined with an LOSOCV approach provided valuable insights into activity tracking. The model was able to successfully detect the specified activities with an overall <italic>F</italic><sub>1</sub>-score of 79.56%.</p><p>To gain deeper insights into the developed sensor system and to address its limitations, we recommend conducting a follow-up study in an NH setting. This will enhance the study&#x2019;s external validity by capturing real-world conditions. Incorporating NH residents and monitoring their nighttime behavior present new challenges, including limited bed mobility and the need for transfers.</p></sec></sec></body><back><ack><p>The research was carried out within the imec.icon project DISCRETE that ran from October 1, 2018, to March 31, 2021, and joined forces of commercial partners Televic Healthcare, Corilus, Distrac Group, and the know-how of Zorg Kortrijk and WZC Sint-Bernardus, with the scientific expertise of researchers from van imec-KU Leuven-DISTRINET, imec-KU Leuven STADIUS, and KU Leuven-HCI. The project was funded by Flanders Innovation &#x0026; Entrepreneurship. This research also acknowledges the project PROCON in the Erasmus+ 2021-2027 program of the European Commission funding (project ID 101185699). 
We used the generative AI tool ChatGPT by OpenAI [<xref ref-type="bibr" rid="ref49">49</xref>] to optimize the paper's academic writing.</p></ack><fn-group><fn fn-type="conflict"><p>None declared.</p></fn></fn-group><glossary><title>Abbreviations</title><def-list><def-item><term id="abb1">CNN</term><def><p>convolutional neural network</p></def></def-item><def-item><term id="abb2">CWT</term><def><p>continuous wavelet transform</p></def></def-item><def-item><term id="abb3">ECDF</term><def><p>empirical cumulative distribution function</p></def></def-item><def-item><term id="abb4">FFT</term><def><p>fast Fourier transform</p></def></def-item><def-item><term id="abb5">LOSOCV</term><def><p>leave one subject out cross-validation</p></def></def-item><def-item><term id="abb6">NH</term><def><p>nursing home</p></def></def-item><def-item><term id="abb7">SHAP</term><def><p>Shapley additive explanations</p></def></def-item><def-item><term id="abb8">UI</term><def><p>urinary incontinence</p></def></def-item></def-list></glossary><ref-list><title>References</title><ref id="ref1"><label>1</label><nlm-citation citation-type="web"><article-title>Ageing and health</article-title><source>World Health Organization</source><year>2022</year><access-date>2024-09-24</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://www.who.int/news-room/fact-sheets/detail/ageing-and-health">https://www.who.int/news-room/fact-sheets/detail/ageing-and-health</ext-link></comment></nlm-citation></ref><ref id="ref2"><label>2</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Offermans</surname><given-names>MPW</given-names> </name><name name-style="western"><surname>Du Moulin</surname><given-names>M</given-names> </name><name name-style="western"><surname>Hamers</surname><given-names>JPH</given-names> </name><name name-style="western"><surname>Dassen</surname><given-names>T</given-names> </name><name 
name-style="western"><surname>Halfens</surname><given-names>RJG</given-names> </name></person-group><article-title>Prevalence of urinary incontinence and associated risk factors in nursing home residents: a systematic review</article-title><source>Neurourol Urodyn</source><year>2009</year><volume>28</volume><issue>4</issue><fpage>288</fpage><lpage>294</lpage><pub-id pub-id-type="doi">10.1002/nau.20668</pub-id><pub-id pub-id-type="medline">19191259</pub-id></nlm-citation></ref><ref id="ref3"><label>3</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Jerez-Roig</surname><given-names>J</given-names> </name><name name-style="western"><surname>Santos</surname><given-names>MM</given-names> </name><name name-style="western"><surname>Souza</surname><given-names>DLB</given-names> </name><name name-style="western"><surname>Amaral</surname><given-names>F</given-names> </name><name name-style="western"><surname>Lima</surname><given-names>KC</given-names> </name></person-group><article-title>Prevalence of urinary incontinence and associated factors in nursing home residents</article-title><source>Neurourol Urodyn</source><year>2016</year><month>01</month><volume>35</volume><issue>1</issue><fpage>102</fpage><lpage>107</lpage><pub-id pub-id-type="doi">10.1002/nau.22675</pub-id><pub-id pub-id-type="medline">25307780</pub-id></nlm-citation></ref><ref id="ref4"><label>4</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Higami</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Yamakawa</surname><given-names>M</given-names> </name><name name-style="western"><surname>Kang</surname><given-names>Y</given-names> </name><etal/></person-group><article-title>Prevalence of incontinence among cognitively impaired older residents in long&#x2010;term care facilities in East Asia: a cross&#x2010;sectional 
study</article-title><source>Geriatr Gerontol Int</source><year>2019</year><month>05</month><volume>19</volume><issue>5</issue><fpage>444</fpage><lpage>450</lpage><pub-id pub-id-type="doi">10.1111/ggi.13639</pub-id><pub-id pub-id-type="medline">30811809</pub-id></nlm-citation></ref><ref id="ref5"><label>5</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Huion</surname><given-names>A</given-names> </name><name name-style="western"><surname>De Witte</surname><given-names>N</given-names> </name><name name-style="western"><surname>Everaert</surname><given-names>K</given-names> </name><name name-style="western"><surname>Halfens</surname><given-names>RJG</given-names> </name><name name-style="western"><surname>Schols</surname><given-names>J</given-names> </name></person-group><article-title>Care dependency and management of urinary incontinence in nursing homes: a descriptive study</article-title><source>J Adv Nurs</source><year>2021</year><month>04</month><volume>77</volume><issue>4</issue><fpage>1731</fpage><lpage>1740</lpage><pub-id pub-id-type="doi">10.1111/jan.14702</pub-id><pub-id pub-id-type="medline">33277758</pub-id></nlm-citation></ref><ref id="ref6"><label>6</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>O&#x2019;Dell</surname><given-names>KK</given-names> </name><name name-style="western"><surname>Jacelon</surname><given-names>C</given-names> </name><name name-style="western"><surname>Morse</surname><given-names>AN</given-names> </name></person-group><article-title>I&#x2019;d rather just go on as I am&#x2019;--pelvic floor care preferences of frail, elderly women in residential care</article-title><source>Urol Nurs</source><year>2008</year><month>02</month><volume>28</volume><issue>1</issue><fpage>36</fpage><lpage>47</lpage><pub-id pub-id-type="medline">18335696</pub-id></nlm-citation></ref><ref 
id="ref7"><label>7</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Hillary</surname><given-names>CJ</given-names> </name><name name-style="western"><surname>Slovak</surname><given-names>M</given-names> </name><name name-style="western"><surname>McCarthy</surname><given-names>A</given-names> </name><name name-style="western"><surname>Hashim</surname><given-names>H</given-names> </name><name name-style="western"><surname>Chapple</surname><given-names>CR</given-names> </name></person-group><article-title>Recent developments in technology for the assessment and management of incontinence</article-title><source>J Med Eng Technol</source><year>2015</year><month>10</month><day>3</day><volume>39</volume><issue>7</issue><fpage>434</fpage><lpage>440</lpage><pub-id pub-id-type="doi">10.3109/03091902.2015.1088088</pub-id></nlm-citation></ref><ref id="ref8"><label>8</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Omotunde</surname><given-names>M</given-names> </name><name name-style="western"><surname>Wagg</surname><given-names>A</given-names> </name></person-group><article-title>Technological solutions for urinary continence care delivery for older adults: a scoping review</article-title><source>J Wound Ostomy Continence Nurs</source><year>2023</year><volume>50</volume><issue>3</issue><fpage>227</fpage><lpage>234</lpage><pub-id pub-id-type="doi">10.1097/WON.0000000000000965</pub-id><pub-id pub-id-type="medline">36856187</pub-id></nlm-citation></ref><ref id="ref9"><label>9</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Lin</surname><given-names>SH</given-names> </name><name name-style="western"><surname>Kajiyama</surname><given-names>K</given-names> </name><name name-style="western"><surname>Wu</surname><given-names>T</given-names> 
</name></person-group><article-title>Smart diaper: how it works</article-title><conf-name>Proceedings of the 2017 ACM International Joint Conference on Pervasive and Ubiquitous Computing and Proceedings of the 2017 ACM International Symposium on Wearable Computers</conf-name><conf-date>Sep 11-15, 2017</conf-date><conf-loc>Maui, HI, USA</conf-loc><fpage>29</fpage><lpage>132</lpage><pub-id pub-id-type="doi">10.1145/3123024.3123167</pub-id></nlm-citation></ref><ref id="ref10"><label>10</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Rajabali</surname><given-names>SN</given-names> </name><name name-style="western"><surname>Hunter</surname><given-names>KF</given-names> </name><name name-style="western"><surname>Asaana</surname><given-names>P</given-names> </name><name name-style="western"><surname>McCreary</surname><given-names>ML</given-names> </name><name name-style="western"><surname>Nazari</surname><given-names>S</given-names> </name><name name-style="western"><surname>Wagg</surname><given-names>AS</given-names> </name></person-group><article-title>Effectiveness of a smart urinary continence care assessment system for nursing home residents: a quasi-experimental, sequential quantitative-qualitative methods trial</article-title><source>J Wound Ostomy Continence Nurs</source><year>2023</year><volume>50</volume><issue>1</issue><fpage>48</fpage><lpage>56</lpage><pub-id pub-id-type="doi">10.1097/WON.0000000000000937</pub-id><pub-id pub-id-type="medline">36640164</pub-id></nlm-citation></ref><ref id="ref11"><label>11</label><nlm-citation citation-type="thesis"><person-group person-group-type="author"><name name-style="western"><surname>De Wever</surname><given-names>N</given-names> </name><name name-style="western"><surname>Clarysse</surname><given-names>B</given-names> </name></person-group><article-title>De perceptie van Belgische woonzorgcentra op slimme luiers [Master&#x2019;s Dissertation in 
Dutch]</article-title><year>2021</year><access-date>2024-12-10</access-date><publisher-name>Universiteit Gent Faculteit Economie en Bedrijfskunde</publisher-name><comment><ext-link ext-link-type="uri" xlink:href="http://lib.ugent.be/catalog/rug01:003010197">http://lib.ugent.be/catalog/rug01:003010197</ext-link></comment></nlm-citation></ref><ref id="ref12"><label>12</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Cho</surname><given-names>JH</given-names> </name><name name-style="western"><surname>Choi</surname><given-names>JY</given-names> </name><name name-style="western"><surname>Kim</surname><given-names>NH</given-names> </name><etal/></person-group><article-title>A smart diaper system using bluetooth and smartphones to automatically detect urination and volume of voiding: prospective observational pilot study in an acute care hospital</article-title><source>J Med Internet Res</source><year>2021</year><month>07</month><day>30</day><volume>23</volume><issue>7</issue><fpage>e29979</fpage><pub-id pub-id-type="doi">10.2196/29979</pub-id><pub-id pub-id-type="medline">34328427</pub-id></nlm-citation></ref><ref id="ref13"><label>13</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Ostaszkiewicz</surname><given-names>J</given-names> </name><name name-style="western"><surname>Tomlinson</surname><given-names>E</given-names> </name><name name-style="western"><surname>Hutchinson</surname><given-names>AM</given-names> </name></person-group><article-title>&#x201C;Dignity&#x201D;: a central construct in nursing home staff understandings of quality continence care</article-title><source>J Clin Nurs</source><year>2018</year><month>06</month><volume>27</volume><issue>11-12</issue><fpage>2425</fpage><lpage>2437</lpage><pub-id pub-id-type="doi">10.1111/jocn.14293</pub-id><pub-id 
pub-id-type="medline">29396885</pub-id></nlm-citation></ref><ref id="ref14"><label>14</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Strauven</surname><given-names>H</given-names> </name><name name-style="western"><surname>Spiel</surname><given-names>K</given-names> </name><name name-style="western"><surname>D&#x2019;Haeseleer</surname><given-names>I</given-names> </name><name name-style="western"><surname>Hallez</surname><given-names>H</given-names> </name><name name-style="western"><surname>Vanrumste</surname><given-names>B</given-names> </name><name name-style="western"><surname>Vanden Abeele</surname><given-names>V</given-names> </name></person-group><article-title>From promoting dignity to installing distrust: understanding the role of continence care technology in nursing homes</article-title><conf-name>Proceedings of the 11th Nordic Conference on Human-Computer Interaction: Shaping Experiences, Shaping Society</conf-name><conf-date>Oct 25-29, 2020</conf-date><conf-loc>Tallinn, Estonia</conf-loc><fpage>1</fpage><lpage>11</lpage><pub-id pub-id-type="doi">10.1145/3419249.3420104</pub-id></nlm-citation></ref><ref id="ref15"><label>15</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Lokavee</surname><given-names>S</given-names> </name><name name-style="western"><surname>Tantrakul</surname><given-names>V</given-names> </name><name name-style="western"><surname>Pengjiam</surname><given-names>J</given-names> </name><name name-style="western"><surname>Kerdcharoen</surname><given-names>T</given-names> </name></person-group><article-title>A sleep monitoring system using force sensor and an accelerometer sensor for screening sleep apnea</article-title><conf-name>2021 13th International Conference on Knowledge and Smart Technology (KST)</conf-name><conf-date>Jan 21-24, 2021</conf-date><conf-loc>Bangsaen, Chonburi, 
Thailand</conf-loc><fpage>208</fpage><lpage>213</lpage><pub-id pub-id-type="doi">10.1109/KST51265.2021.9415835</pub-id></nlm-citation></ref><ref id="ref16"><label>16</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Nam</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Kim</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Lee</surname><given-names>J</given-names> </name></person-group><article-title>Sleep monitoring based on a tri-axial accelerometer and a pressure sensor</article-title><source>Sensors (Basel)</source><year>2016</year><month>05</month><day>23</day><volume>16</volume><issue>5</issue><fpage>750</fpage><pub-id pub-id-type="doi">10.3390/s16050750</pub-id><pub-id pub-id-type="medline">27223290</pub-id></nlm-citation></ref><ref id="ref17"><label>17</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Yi</surname><given-names>R</given-names> </name><name name-style="western"><surname>Enayati</surname><given-names>M</given-names> </name><name name-style="western"><surname>Keller</surname><given-names>JM</given-names> </name><name name-style="western"><surname>Popescu</surname><given-names>M</given-names> </name><name name-style="western"><surname>Skubic</surname><given-names>M</given-names> </name></person-group><article-title>Non-invasive in-home sleep stage classification using a ballistocardiography bed sensor</article-title><conf-name>2019 IEEE EMBS International Conference on Biomedical &#x0026; Health Informatics (BHI)</conf-name><conf-date>May 19-22, 2019</conf-date><conf-loc>Chicago, IL, USA</conf-loc><fpage>1</fpage><lpage>4</lpage><pub-id pub-id-type="doi">10.1109/BHI.2019.8834535</pub-id><pub-id pub-id-type="medline">31849551</pub-id></nlm-citation></ref><ref id="ref18"><label>18</label><nlm-citation citation-type="journal"><person-group 
person-group-type="author"><name name-style="western"><surname>Laurino</surname><given-names>M</given-names> </name><name name-style="western"><surname>Arcarisi</surname><given-names>L</given-names> </name><name name-style="western"><surname>Carbonaro</surname><given-names>N</given-names> </name><name name-style="western"><surname>Gemignani</surname><given-names>A</given-names> </name><name name-style="western"><surname>Menicucci</surname><given-names>D</given-names> </name><name name-style="western"><surname>Tognetti</surname><given-names>A</given-names> </name></person-group><article-title>A smart bed for non-obtrusive sleep analysis in real world context</article-title><source>IEEE Access</source><year>2020</year><volume>8</volume><issue>8</issue><fpage>45664</fpage><lpage>45673</lpage><pub-id pub-id-type="doi">10.1109/ACCESS.2020.2976194</pub-id></nlm-citation></ref><ref id="ref19"><label>19</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Gong</surname><given-names>J</given-names> </name><name name-style="western"><surname>Rose</surname><given-names>KM</given-names> </name><name name-style="western"><surname>Emi</surname><given-names>IA</given-names> </name><etal/></person-group><article-title>Home wireless sensing system for monitoring nighttime agitation and incontinence in patients with alzheimer&#x2019;s disease</article-title><conf-name>Proceedings of the Conference on Wireless Health</conf-name><conf-date>Oct 14-16, 2015</conf-date><conf-loc>Bethesda Maryland</conf-loc><pub-id pub-id-type="doi">10.1145/2811780.2822324</pub-id></nlm-citation></ref><ref id="ref20"><label>20</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>T&#x2019;Jonck</surname><given-names>K</given-names> </name><name name-style="western"><surname>Kancharla</surname><given-names>CR</given-names> </name><name 
name-style="western"><surname>Hallez</surname><given-names>H</given-names> </name><name name-style="western"><surname>Boydens</surname><given-names>J</given-names> </name></person-group><article-title>Accelerometer based activity tracking to support elderly care in nursing homes</article-title><conf-name>2020 XXIX International Scientific Conference Electronics (ET)</conf-name><conf-date>Sep 16-18, 2020</conf-date><conf-loc>Sozopol, Bulgaria</conf-loc><fpage>1</fpage><lpage>4</lpage><pub-id pub-id-type="doi">10.1109/ET50336.2020.9238180</pub-id></nlm-citation></ref><ref id="ref21"><label>21</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>T'jonck</surname><given-names>K</given-names> </name><name name-style="western"><surname>Kancharla</surname><given-names>CR</given-names> </name><name name-style="western"><surname>Vankeirsbilck</surname><given-names>J</given-names> </name><name name-style="western"><surname>Hallez</surname><given-names>H</given-names> </name><name name-style="western"><surname>Boydens</surname><given-names>J</given-names> </name><name name-style="western"><surname>Pang</surname><given-names>B</given-names> </name></person-group><article-title>Real-time activity tracking using tinyml to support elderly care</article-title><conf-name>2021 XXX International Scientific Conference Electronics (ET)</conf-name><conf-date>Sep 15-17, 2021</conf-date><conf-loc>Sozopol, Bulgaria</conf-loc><fpage>1</fpage><lpage>6</lpage><pub-id pub-id-type="doi">10.1109/ET52713.2021.9579991</pub-id></nlm-citation></ref><ref id="ref22"><label>22</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Cole</surname><given-names>RJ</given-names> </name><name name-style="western"><surname>Kripke</surname><given-names>DF</given-names> </name><name name-style="western"><surname>Gruen</surname><given-names>W</given-names> </name><name 
name-style="western"><surname>Mullaney</surname><given-names>DJ</given-names> </name><name name-style="western"><surname>Gillin</surname><given-names>JC</given-names> </name></person-group><article-title>Automatic sleep/wake identification from wrist activity</article-title><source>Sleep</source><year>1992</year><month>10</month><volume>15</volume><issue>5</issue><fpage>461</fpage><lpage>469</lpage><pub-id pub-id-type="doi">10.1093/sleep/15.5.461</pub-id><pub-id pub-id-type="medline">1455130</pub-id></nlm-citation></ref><ref id="ref23"><label>23</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Hanson</surname><given-names>MA</given-names> </name><name name-style="western"><surname>Powell</surname><given-names>HC</given-names> </name><name name-style="western"><surname>Frysinger</surname><given-names>RC</given-names> </name><etal/></person-group><article-title>Teager energy assessment of tremor severity in clinical application of wearable inertial sensors</article-title><conf-name>2007 IEEE/NIH Life Science Systems and Applications Workshop</conf-name><conf-date>Nov 8-9, 2007</conf-date><conf-loc>Bethesda, MD, USA</conf-loc><fpage>136</fpage><lpage>139</lpage><pub-id pub-id-type="doi">10.1109/LSSA.2007.4400903</pub-id></nlm-citation></ref><ref id="ref24"><label>24</label><nlm-citation citation-type="web"><source>Drybuddy</source><access-date>2024-09-24</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://drybuddy.com/">https://drybuddy.com/</ext-link></comment></nlm-citation></ref><ref id="ref25"><label>25</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Barth</surname><given-names>AT</given-names> </name><name name-style="western"><surname>Hanson</surname><given-names>MA</given-names> </name><name name-style="western"><surname>Powell Jr.</surname><given-names>HC</given-names> </name><name 
name-style="western"><surname>Lach</surname><given-names>J</given-names> </name></person-group><article-title>TEMPO 3.1: a body area sensor network platform for continuous movement assessment</article-title><conf-name>2009 Sixth International Workshop on Wearable and Implantable Body Sensor Networks</conf-name><conf-date>Jun 3-5, 2009</conf-date><conf-loc>Berkeley, CA, USA</conf-loc><fpage>71</fpage><lpage>76</lpage><pub-id pub-id-type="doi">10.1109/BSN.2009.39</pub-id></nlm-citation></ref><ref id="ref26"><label>26</label><nlm-citation citation-type="preprint"><person-group person-group-type="author"><name name-style="western"><surname>Grandini</surname><given-names>M</given-names> </name><name name-style="western"><surname>Bagli</surname><given-names>E</given-names> </name><name name-style="western"><surname>Visani</surname><given-names>G</given-names> </name></person-group><article-title>Metrics for multi-class classification: an overview</article-title><source>arXiv</source><comment>Preprint posted online on Aug 13, 2020</comment><pub-id pub-id-type="doi">10.48550/arXiv.2008.05756</pub-id></nlm-citation></ref><ref id="ref27"><label>27</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Marino</surname><given-names>J</given-names> </name></person-group><article-title>Tempur-Med: choosing the correct pressure area care system</article-title><source>Br J Nurs</source><year>2001</year><volume>10</volume><issue>20</issue><fpage>1364</fpage><lpage>1367</lpage><pub-id pub-id-type="doi">10.12968/bjon.2001.10.20.9356</pub-id><pub-id pub-id-type="medline">11873231</pub-id></nlm-citation></ref><ref id="ref28"><label>28</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Bai</surname><given-names>DL</given-names> </name><name name-style="western"><surname>Liu</surname><given-names>TW</given-names> </name><name 
name-style="western"><surname>Chou</surname><given-names>HL</given-names> </name><name name-style="western"><surname>Hsu</surname><given-names>YL</given-names> </name></person-group><article-title>Relationship between a pressure redistributing foam mattress and pressure injuries: an observational prospective cohort study</article-title><source>PLoS One</source><year>2020</year><volume>15</volume><issue>11</issue><fpage>e0241276</fpage><pub-id pub-id-type="doi">10.1371/journal.pone.0241276</pub-id><pub-id pub-id-type="medline">33166300</pub-id></nlm-citation></ref><ref id="ref29"><label>29</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Cullum</surname><given-names>N</given-names> </name><name name-style="western"><surname>McInnes</surname><given-names>E</given-names> </name><name name-style="western"><surname>Bell-Syer</surname><given-names>SEM</given-names> </name><name name-style="western"><surname>Legood</surname><given-names>R</given-names> </name></person-group><article-title>Support surfaces for pressure ulcer prevention</article-title><source>Cochrane Database Syst Rev</source><year>2004</year><issue>3</issue><fpage>CD001735</fpage><pub-id pub-id-type="doi">10.1002/14651858.CD001735.pub2</pub-id><pub-id pub-id-type="medline">15266452</pub-id></nlm-citation></ref><ref id="ref30"><label>30</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Defloor</surname><given-names>T</given-names> </name></person-group><article-title>The effect of position and mattress on interface pressure</article-title><source>Appl Nurs Res</source><year>2000</year><month>02</month><volume>13</volume><issue>1</issue><fpage>2</fpage><lpage>11</lpage><pub-id pub-id-type="doi">10.1016/s0897-1897(00)80013-0</pub-id><pub-id pub-id-type="medline">10701278</pub-id></nlm-citation></ref><ref id="ref31"><label>31</label><nlm-citation 
citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Lopez</surname><given-names>R</given-names> </name><name name-style="western"><surname>Micoulaud-Franchi</surname><given-names>JA</given-names> </name><name name-style="western"><surname>Peter-Derex</surname><given-names>L</given-names> </name><name name-style="western"><surname>Dauvilliers</surname><given-names>Y</given-names> </name></person-group><article-title>Nocturnal agitation: from sleep state dissociation to sleep-related dissociative state</article-title><source>Rev Neurol (Paris)</source><year>2023</year><month>10</month><volume>179</volume><issue>7</issue><fpage>675</fpage><lpage>686</lpage><pub-id pub-id-type="doi">10.1016/j.neurol.2023.07.003</pub-id><pub-id pub-id-type="medline">37625976</pub-id></nlm-citation></ref><ref id="ref32"><label>32</label><nlm-citation citation-type="web"><source>Byteflies</source><access-date>2024-09-24</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://byteflies.com/">https://byteflies.com/</ext-link></comment></nlm-citation></ref><ref id="ref33"><label>33</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Strauven</surname><given-names>H</given-names> </name><name name-style="western"><surname>D&#x2019;Haeseleer</surname><given-names>I</given-names> </name><name name-style="western"><surname>T&#x2019;Jonck</surname><given-names>K</given-names> </name><etal/></person-group><article-title>Towards an ambient support system for continence management in nursing homes: an exploratory study</article-title><conf-name>13th International Conference on Health Informatics</conf-name><conf-date>Feb 24-26, 2020</conf-date><conf-loc>Valletta, Malta</conf-loc><fpage>438</fpage><lpage>446</lpage><pub-id pub-id-type="doi">10.5220/0008963404380446</pub-id></nlm-citation></ref><ref id="ref34"><label>34</label><nlm-citation 
citation-type="web"><article-title>Garmin Venu&#x00AE; Sq</article-title><source>Garmin</source><access-date>2024-09-24</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://www.garmin.com/en-US/p/707174">https://www.garmin.com/en-US/p/707174</ext-link></comment></nlm-citation></ref><ref id="ref35"><label>35</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Razjouyan</surname><given-names>J</given-names> </name><name name-style="western"><surname>Lee</surname><given-names>H</given-names> </name><name name-style="western"><surname>Parthasarathy</surname><given-names>S</given-names> </name><name name-style="western"><surname>Mohler</surname><given-names>J</given-names> </name><name name-style="western"><surname>Sharafkhaneh</surname><given-names>A</given-names> </name><name name-style="western"><surname>Najafi</surname><given-names>B</given-names> </name></person-group><article-title>Improving sleep quality assessment using wearable sensors by including information from postural/sleep position changes and body acceleration: a comparison of chest-worn sensors, wrist actigraphy, and polysomnography</article-title><source>J Clin Sleep Med</source><year>2017</year><month>11</month><day>15</day><volume>13</volume><issue>11</issue><fpage>1301</fpage><lpage>1310</lpage><pub-id pub-id-type="doi">10.5664/jcsm.6802</pub-id><pub-id pub-id-type="medline">28992827</pub-id></nlm-citation></ref><ref id="ref36"><label>36</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Defloor</surname><given-names>T</given-names> </name><name name-style="western"><surname>De Bacquer</surname><given-names>D</given-names> </name><name name-style="western"><surname>Grypdonck</surname><given-names>MHF</given-names> </name></person-group><article-title>The effect of various combinations of turning and pressure reducing devices on the incidence 
of pressure ulcers</article-title><source>Int J Nurs Stud</source><year>2005</year><month>01</month><volume>42</volume><issue>1</issue><fpage>37</fpage><lpage>46</lpage><pub-id pub-id-type="doi">10.1016/j.ijnurstu.2004.05.013</pub-id><pub-id pub-id-type="medline">15582638</pub-id></nlm-citation></ref><ref id="ref37"><label>37</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Barandas</surname><given-names>M</given-names> </name><name name-style="western"><surname>Folgado</surname><given-names>D</given-names> </name><name name-style="western"><surname>Fernandes</surname><given-names>L</given-names> </name><etal/></person-group><article-title>TSFEL: Time Series Feature Extraction Library</article-title><source>SoftwareX</source><year>2020</year><month>01</month><volume>11</volume><issue>11</issue><fpage>100456</fpage><pub-id pub-id-type="doi">10.1016/j.softx.2020.100456</pub-id></nlm-citation></ref><ref id="ref38"><label>38</label><nlm-citation citation-type="confproc"><person-group person-group-type="author"><name name-style="western"><surname>Chen</surname><given-names>T</given-names> </name><name name-style="western"><surname>Guestrin</surname><given-names>C</given-names> </name></person-group><article-title>XGBoost: a scalable tree boosting system</article-title><conf-name>Proceedings of the 22nd ACM SIGKDD International Conference on Knowledge Discovery and Data Mining</conf-name><conf-date>Aug 13-17, 2016</conf-date><conf-loc>San Francisco, CA, USA</conf-loc><fpage>785</fpage><lpage>794</lpage><pub-id pub-id-type="doi">10.1145/2939672.2939785</pub-id></nlm-citation></ref><ref id="ref39"><label>39</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Ambati</surname><given-names>LS</given-names> </name><name name-style="western"><surname>El-Gayar</surname><given-names>O</given-names> 
</name></person-group><article-title>Human activity recognition: a comparison of machine learning approaches</article-title><source>J Midwest Assoc Inf Syst</source><year>2021</year><issue>1</issue><fpage>49</fpage><lpage>60</lpage><pub-id pub-id-type="doi">10.17705/3jmwa.000065</pub-id></nlm-citation></ref><ref id="ref40"><label>40</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Wu</surname><given-names>Y</given-names> </name><name name-style="western"><surname>Qi</surname><given-names>S</given-names> </name><name name-style="western"><surname>Hu</surname><given-names>F</given-names> </name><name name-style="western"><surname>Ma</surname><given-names>S</given-names> </name><name name-style="western"><surname>Mao</surname><given-names>W</given-names> </name><name name-style="western"><surname>Li</surname><given-names>W</given-names> </name></person-group><article-title>Recognizing activities of the elderly using wearable sensors: a comparison of ensemble algorithms based on boosting</article-title><source>Sensor Review</source><year>2019</year><month>11</month><day>18</day><volume>39</volume><issue>6</issue><fpage>743</fpage><lpage>751</lpage><pub-id pub-id-type="doi">10.1108/SR-11-2018-0309</pub-id></nlm-citation></ref><ref id="ref41"><label>41</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Cahoolessur</surname><given-names>DK</given-names> </name><name name-style="western"><surname>Rajkumarsingh</surname><given-names>B</given-names> </name></person-group><article-title>Fall detection system using XGBoost and IoT</article-title><source>R&#x0026;D Journal</source><year>2020</year><pub-id pub-id-type="doi">10.17159/2309-8988/2020/v36a2</pub-id></nlm-citation></ref><ref id="ref42"><label>42</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name 
name-style="western"><surname>Lundberg</surname><given-names>SM</given-names> </name><name name-style="western"><surname>Erion</surname><given-names>G</given-names> </name><name name-style="western"><surname>Chen</surname><given-names>H</given-names> </name><etal/></person-group><article-title>From local explanations to global understanding with explainable AI for trees</article-title><source>Nat Mach Intell</source><year>2020</year><month>01</month><volume>2</volume><issue>1</issue><fpage>56</fpage><lpage>67</lpage><pub-id pub-id-type="doi">10.1038/s42256-019-0138-9</pub-id><pub-id pub-id-type="medline">32607472</pub-id></nlm-citation></ref><ref id="ref43"><label>43</label><nlm-citation citation-type="book"><person-group person-group-type="author"><name name-style="western"><surname>Refaeilzadeh</surname><given-names>P</given-names> </name><name name-style="western"><surname>Tang</surname><given-names>L</given-names> </name><name name-style="western"><surname>Liu</surname><given-names>H</given-names> </name></person-group><person-group person-group-type="editor"><name name-style="western"><surname>Liu</surname><given-names>L</given-names> </name><name name-style="western"><surname>&#x00D6;zsu</surname><given-names>MT</given-names> </name></person-group><article-title>Cross-validation</article-title><source>Encyclopedia of Database Systems</source><year>2009</year><publisher-name>Springer</publisher-name><fpage>532</fpage><lpage>538</lpage><pub-id pub-id-type="doi">10.1007/978-0-387-39940-9_565</pub-id></nlm-citation></ref><ref id="ref44"><label>44</label><nlm-citation citation-type="journal"><person-group person-group-type="author"><name name-style="western"><surname>Gholamiangonabadi</surname><given-names>D</given-names> </name><name name-style="western"><surname>Kiselov</surname><given-names>N</given-names> </name><name name-style="western"><surname>Grolinger</surname><given-names>K</given-names> </name></person-group><article-title>Deep neural networks for 
human activity recognition with wearable sensors: leave-one-subject-out cross-validation for model selection</article-title><source>IEEE Access</source><year>2020</year><volume>8</volume><fpage>133982</fpage><lpage>133994</lpage><pub-id pub-id-type="doi">10.1109/ACCESS.2020.3010715</pub-id></nlm-citation></ref><ref id="ref45"><label>45</label><nlm-citation citation-type="book"><person-group person-group-type="author"><name name-style="western"><surname>Lundberg</surname><given-names>SM</given-names> </name><name name-style="western"><surname>Lee</surname><given-names>SI</given-names> </name></person-group><person-group person-group-type="editor"><name name-style="western"><surname>Guyon</surname><given-names>I</given-names> </name><name name-style="western"><surname>Luxburg</surname><given-names>UV</given-names> </name><name name-style="western"><surname>Bengio</surname><given-names>S</given-names> </name><etal/></person-group><article-title>A unified approach to interpreting model predictions</article-title><source>Advances in Neural Information Processing Systems</source><year>2017</year><publisher-name>Curran Associates, Inc</publisher-name></nlm-citation></ref><ref id="ref46"><label>46</label><nlm-citation citation-type="web"><person-group person-group-type="author"><name name-style="western"><surname>Castro</surname><given-names>R</given-names> </name></person-group><article-title>Lecture 1-introduction and the empirical CDF</article-title><source>Rui Castro</source><year>2013</year><access-date>2024-09-24</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://www.win.tue.nl/~rmcastro/AppStat2013/files/lecture1.pdf">https://www.win.tue.nl/~rmcastro/AppStat2013/files/lecture1.pdf</ext-link></comment></nlm-citation></ref><ref id="ref47"><label>47</label><nlm-citation citation-type="web"><person-group person-group-type="author"><name name-style="western"><surname>Brownlee</surname><given-names>J</given-names> 
</name></person-group><article-title>How to use an empirical distribution function in Python</article-title><source>Machine Learning Mastery</source><year>2020</year><access-date>2024-09-24</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://machinelearningmastery.com/empirical-distribution-function-in-python">https://machinelearningmastery.com/empirical-distribution-function-in-python</ext-link></comment></nlm-citation></ref><ref id="ref48"><label>48</label><nlm-citation citation-type="book"><person-group person-group-type="author"><name name-style="western"><surname>Schneider</surname><given-names>P</given-names> </name><name name-style="western"><surname>Xhafa</surname><given-names>F</given-names> </name></person-group><article-title>Data stream processing: models and methods</article-title><source>Anomaly Detection and Complex Event Processing over IoT Data Streams</source><year>2022</year><publisher-name>Elsevier</publisher-name><fpage>29</fpage><lpage>47</lpage><pub-id pub-id-type="doi">10.1016/B978-0-12-823818-9.00012-2</pub-id></nlm-citation></ref><ref id="ref49"><label>49</label><nlm-citation citation-type="web"><source>ChatGPT</source><access-date>2023-09-24</access-date><comment><ext-link ext-link-type="uri" xlink:href="https://openai.com/chatgpt">https://openai.com/chatgpt</ext-link></comment></nlm-citation></ref></ref-list></back></article>