<!DOCTYPE article
PUBLIC "-//NLM//DTD JATS (Z39.96) Journal Archiving and Interchange DTD with MathML3 v1.3 20210610//EN" "JATS-archivearticle1-3-mathml3.dtd">
<article xmlns:mml="http://www.w3.org/1998/Math/MathML" xmlns:xlink="http://www.w3.org/1999/xlink" dtd-version="1.3" xml:lang="en" article-type="research-article"><?properties manuscript?><processing-meta base-tagset="archiving" mathml-version="3.0" table-model="xhtml" tagset-family="jats"><restricted-by>pmc</restricted-by></processing-meta><front><journal-meta><journal-id journal-id-type="nlm-journal-id">100890609</journal-id><journal-id journal-id-type="pubmed-jr-id">31704</journal-id><journal-id journal-id-type="nlm-ta">Health Promot Pract</journal-id><journal-id journal-id-type="iso-abbrev">Health Promot Pract</journal-id><journal-title-group><journal-title>Health promotion practice</journal-title></journal-title-group><issn pub-type="ppub">1524-8399</issn></journal-meta><article-meta><article-id pub-id-type="pmid">38819174</article-id><article-id pub-id-type="pmc">11607142</article-id><article-id pub-id-type="doi">10.1177/15248399241255375</article-id><article-id pub-id-type="manuscript">HHSPA1999523</article-id><article-categories><subj-group subj-group-type="heading"><subject>Article</subject></subj-group></article-categories><title-group><article-title>Systematic Screening and Assessment of Hospital-Based Youth Violence Prevention Programs</article-title></title-group><contrib-group><contrib contrib-type="author"><contrib-id contrib-id-type="orcid" authenticated="false">http://orcid.org/0000-0002-2584-482X</contrib-id><name><surname>Piervil</surname><given-names>Esther</given-names></name><degrees>PhD</degrees><xref rid="A1" ref-type="aff">1</xref></contrib><contrib contrib-type="author"><name><surname>Wong</surname><given-names>Leslyn</given-names></name><degrees>MPH</degrees><xref rid="A1" ref-type="aff">1</xref></contrib><contrib contrib-type="author"><name><surname>Marshall</surname><given-names>Khiya J.</given-names></name><degrees>PhD</degrees><xref rid="A3" ref-type="aff">3</xref></contrib><contrib 
contrib-type="author"><name><surname>Earl</surname><given-names>Tara</given-names></name><degrees>PhD</degrees><xref rid="A2" ref-type="aff">2</xref></contrib><contrib contrib-type="author"><name><surname>Leonard</surname><given-names>Scotti</given-names></name><degrees>MPH</degrees><xref rid="A1" ref-type="aff">1</xref></contrib><contrib contrib-type="author"><name><surname>Waajid</surname><given-names>Malikah</given-names></name><degrees>PhD</degrees><xref rid="A1" ref-type="aff">1</xref></contrib><contrib contrib-type="author"><contrib-id contrib-id-type="orcid" authenticated="false">http://orcid.org/0000-0001-6575-2359</contrib-id><name><surname>Jones</surname><given-names>Tiffany</given-names></name><degrees>DrPH</degrees><xref rid="A1" ref-type="aff">1</xref></contrib><contrib contrib-type="author"><name><surname>Katapodis</surname><given-names>Nicole</given-names></name><degrees>MPP</degrees><xref rid="A2" ref-type="aff">2</xref></contrib><contrib contrib-type="author"><name><surname>Marbach</surname><given-names>Alexis</given-names></name><degrees>MPH</degrees><xref rid="A2" ref-type="aff">2</xref></contrib><contrib contrib-type="author"><name><surname>Schneiderman</surname><given-names>Stephanie</given-names></name><degrees>MPP</degrees><xref rid="A2" ref-type="aff">2</xref></contrib><contrib contrib-type="author"><name><surname>Bartholow</surname><given-names>Brad</given-names></name><degrees>PhD</degrees><xref rid="A3" ref-type="aff">3</xref></contrib></contrib-group><aff id="A1"><label>1</label>Karna LLC, Atlanta, GA, USA</aff><aff id="A2"><label>2</label>Abt Associates, Atlanta, GA, USA</aff><aff id="A3"><label>3</label>Centers for Disease Control and Prevention, Atlanta, GA, USA</aff><author-notes><corresp id="CR1">Address correspondence to Esther Piervil, Karna LLC, 2800 Century Parkway Suite 700, Atlanta, GA 30345, USA; <email>piervilesther@gmail.com</email>.</corresp></author-notes><pub-date 
pub-type="nihms-submitted"><day>13</day><month>6</month><year>2024</year></pub-date><pub-date pub-type="epub"><day>31</day><month>5</month><year>2024</year></pub-date><pub-date pub-type="pmc-release"><day>30</day><month>11</month><year>2025</year></pub-date><fpage>15248399241255375</fpage><lpage>15248399241255375</lpage><abstract id="ABS1"><p id="P1">Youth violence is a preventable public health issue. Few hospital-based programs intentionally focus on youth violence prevention. This project aimed to describe the Systematic Screening and Assessment (SSA) methodology used to identify existing hospital-based youth violence prevention (HBYVP) programs ready for future rigorous evaluation. To identify promising HBYVP programs currently in use and assess readiness for evaluation, data from the 2017 American Hospital Association (AHA) Annual Survey of Hospitals was used to identify hospitals with Level I-III trauma centers with reported HBYVP programs. Information for each program was gathered via environmental scan and key informant interviews. A total of 383 hospital-based violence prevention programs were identified. Two review panels were conducted with violence prevention experts to identify characteristics of programs suitable for an evaluability assessment (EA). Fifteen programs focused on youth (10&#x02013;24 years old) and were identified to be promising and evaluable. Three of the 15 programs were determined to have the infrastructure and readiness necessary for rigorous evaluation. Lessons learned and best practices for SSA project success included use of streamlined outreach efforts that provide program staff with informative and culturally tailored project materials outlining information about the problem, project goals, proposed SSA activities, and altruistic benefit to the community at the initial point of contact. 
In addition, success of review panels was attributed to use of software to streamline panelist review processes and use of evaluation and data analysis subject matter experts to serve as panel facilitators. Communities experiencing high youth violence burden and hospitals serving these communities can improve health outcomes among youth by implementing and evaluating tailored HBYVP programs.</p></abstract><kwd-group><kwd>systematic screening and assessment</kwd><kwd>evaluability assessment</kwd><kwd>hospital-based</kwd><kwd>youth</kwd><kwd>violence prevention programs</kwd></kwd-group></article-meta></front><body><p id="P2">Violence among youth is a significant and preventable public health issue:</p><disp-quote id="Q1"><p id="P3">Youth violence is the intentional use of physical force or power to threaten or harm others by young people ages 10-24. It typically involves young people hurting peers who are unrelated to them and who they may or may not know well. Youth violence can include fighting, bullying, threats with weapons, and gang-related violence. A young person can be involved with youth violence as a victim, offender, or witness. (<xref rid="R8" ref-type="bibr">David-Ferdon et al., 2016</xref>)</p></disp-quote><p id="P4">In 2019, homicide in the United States was the fourth leading cause of death among youth aged 10&#x02013;14 years and the third leading cause of death among youth aged 15&#x02013;24 years (<xref rid="R5" ref-type="bibr">Centers for Disease Control and Prevention [CDC], 2021</xref>). 
It has also been reported that youth present to the emergency department (ED) for violence-related injuries daily and will possibly experience assault-related re-injuries or perpetrate violence against others within a few years of the first time they present to the ED for treatment (<xref rid="R6" ref-type="bibr">Cunningham et al., 2015</xref>).</p><p id="P5">Since 2014, the American College of Surgeons has required that all Level I, II, and III trauma centers implement injury prevention strategies (<xref rid="R9" ref-type="bibr">Drake et al., 2020</xref>). Though not a traditional aim of conventional hospital models, violence prevention is aligned with the common mission to improve the health of individuals and communities. Thus, an increasing number of hospitals have implemented violence prevention programs for individuals presenting to the ED with violence-related injuries and have partnered with community organizations to provide critical services for individuals who are at higher risk for recidivism and re-injury after discharge (<xref rid="R1" ref-type="bibr">Aboutanos et al., 2011</xref>; <xref rid="R11" ref-type="bibr">Health Research and Educational Trust [HRET], 2015</xref>). 
In an example of the successful implementation of a hospital-based youth violence prevention (HBYVP) program, a retrospective case&#x02013;control study found that youth who participated in a hospital-based peer intervention program following a hospitalization due to violent injuries were 70% less likely to be arrested for any offense 6 months post-injury when compared with youth in the control group (<xref rid="R3" ref-type="bibr">Becker et al., 2004</xref>).</p><p id="P6">These HBYVP programs typically conduct needs assessments to identify gaps in available resources, use motivational interviewing techniques to help modify risky behaviors, conduct training to develop youth skills and risk awareness, and facilitate the connection of youth to community-based prevention services, resulting in reduced violence-related injuries and decreased future involvement in the criminal justice system (<xref rid="R3" ref-type="bibr">Becker et al., 2004</xref>; <xref rid="R6" ref-type="bibr">Cunningham et al., 2015</xref>). While several violence prevention programs have been implemented in various cities around the United States, most are not widely known and/or lack the resources that are necessary to conduct rigorous evaluation (<xref rid="R7" ref-type="bibr">Cunningham et al., 2012</xref>; <xref rid="R8" ref-type="bibr">David-Ferdon et al., 2016</xref>).</p><p id="P7">The Systematic Screening and Assessment (SSA) Method identifies innovations that real-world practitioners have developed and then systematically assesses which innovations will offer the greatest payoff from further evaluation, that is, helping to identify innovations most likely to be effective and increasing the potential for beneficial outcome evaluation (<xref rid="R12" ref-type="bibr">Leviton &#x00026; Gutman, 2010</xref>). 
Correspondingly, evaluability assessment (EA), also known as exploratory evaluation, is used to help determine the extent to which an activity or project can be evaluated in a reliable and credible fashion, resulting in useful information (<xref rid="R15" ref-type="bibr">Organisation for Economic Co-operation and Development-DAC [OECD-DAC], 2010</xref>). This project aimed to apply the SSA methodology to identify existing HBYVP programs and describe the results of a review that used EA to assess existing HBYVP programs&#x02019; readiness for future rigorous evaluation.</p><sec id="S1"><title>METHODS AND RESULTS</title><p id="P8">Guided by <xref rid="R12" ref-type="bibr">Leviton and Gutman (2010)</xref>, this project systematically sequences SSA methodology, EA activities, and a review panel consisting of violence prevention experts to help identify the programs that have the most potential to reduce youth violence, as well as the capacity to document program activities and evaluate program outcomes. While EAs provide useful information about the feasibility and scope of an evaluation, they are just one of six important steps in the SSA methodology. <xref rid="F1" ref-type="fig">Figure 1</xref> summarizes the six-step methodology that guided this project. Additional details related to each step of this project are outlined below.</p><sec id="S2"><title>Step 1: Establish Priorities and Program Inclusion and Exclusion Criteria</title><p id="P9">Programs selected for review were divided into two priority groups (i.e., &#x0201c;first priority&#x0201d; or &#x0201c;second priority&#x0201d;) based on the degree to which program goals and characteristics aligned with project focus and the potential to withstand rigorous evaluation. 
Further details related to priority groups and inclusion/exclusion criteria are outlined below.</p><p id="P10">Programs classified as first priority for inclusion in the review met the following criteria:</p><list list-type="bullet" id="L1"><list-item><p id="P11">Primarily focused on reducing/preventing:
<list list-type="bullet" id="L2"><list-item><p id="P12">Youth violence-related re-injury</p></list-item><list-item><p id="P13">Youth violence-related morbidity and mortality rates</p></list-item><list-item><p id="P14">Youth involvement in criminal justice system</p></list-item></list>
</p></list-item><list-item><p id="P15">Initial contact with youth, ages 10&#x02013;24, occurs in the hospital</p></list-item><list-item><p id="P16">Program fully implemented for at least 1 year</p></list-item><list-item><p id="P17">Monitored and measured youth violence prevention outcomes</p></list-item><list-item><p id="P18">Included community-based partnerships or wraparound services to prevent re-injury and negative outcomes of violence</p></list-item><list-item><p id="P19">Addressed disparities, the social determinants of health, and other circumstances that may contribute to violence</p></list-item><list-item><p id="P20">Documented that services originated in a hospital setting</p></list-item></list><p id="P21">Programs classified as second priority for inclusion in the review met the following criteria:</p><list list-type="bullet" id="L3"><list-item><p id="P22">Prevention program primarily focused on an area that is related to, but not directly aligned with youth violence-related injury (i.e., sexual health and teen dating violence)</p></list-item><list-item><p id="P23">Reported little to no violence prevention-related data collection activities</p></list-item><list-item><p id="P24">Focused on coordinated nurse response rather than prevention outcomes</p></list-item><list-item><p id="P25">Documented potentially limited capacity to partner on a rigorous evaluation (e.g., small staff and data extraction challenges)</p></list-item></list><p id="P26">Programs excluded from review met the following criteria:</p><list list-type="bullet" id="L4"><list-item><p id="P27">Identified as a community-based program (e.g., services originated in the community)</p></list-item><list-item><p id="P28">Services originated outside of the hospital but delivered in the hospital (e.g., referral programs from community partners)</p></list-item><list-item><p id="P29">Program implemented for less than 12 months</p></list-item><list-item><p id="P30">Program no longer 
exists</p></list-item><list-item><p id="P31">Program currently or previously involved in an outcome evaluation</p></list-item></list></sec><sec id="S3"><title>Step 2: Identify Programs for Further Consideration</title><p id="P32">To identify HBYVP programs for inclusion in the review, we conducted a search of the 2017 American Hospital Association (AHA) Annual Survey of Hospitals to identify potential HBYVP programs. The AHA Annual Survey of Hospitals database represents information that is directly provided by hospitals and health care systems and covers an array of data points, including demographics, operations, service lines, staffing, expenses, physician organization structures, beds, utilization, and population health (<xref rid="R2" ref-type="bibr">American Hospital Association [AHA], 2017</xref>). A research analyst from the project team extracted and organized program details for all programs. Additional information for each program was gathered via environmental scan and other EA activities (i.e., interviews with key program staff).</p><p id="P33">There were 6,282 hospital organizations (hospitals or hospital systems) in the 2017 AHA database. Of the 6,282 hospital organizations included in the AHA database, 1,200 hospitals included a Level I, II, or III trauma center. Nine hundred of the 1,200 hospital organizations with a trauma center reported having an affiliated violence prevention program operating in or around the hospital. Thus, the universe of programs under consideration was comprised of 900 violence prevention programs.</p><p id="P34">Of the 900 violence prevention programs reported to be affiliated with a Level I-III trauma center, there were 465 violence prevention programs that reported specifically operating within a Level I-III trauma center. Of the 465 programs operating within a Level I-III trauma center, 82 were duplicates and excluded from review. 
The remaining 383 programs were reviewed and classified into priority groups as outlined by the SSA methodology.</p><p id="P35">From this review of the 383 non-duplicated programs operating within a Level I-III trauma center, 105 programs were selected for inclusion in the review. Of the 105 programs included in the review, 57 programs were classified as first priority for inclusion in review as they focused on youth and child violence and matched the inclusion criteria (i.e., implementation for at least 1 year, monitoring and measurement of youth violence prevention outcomes, and inclusion of community-based partnerships or wraparound services). Forty-eight additional programs were classified as second priority for inclusion in the review as they did not have a primary focus on violence prevention, reported little to no violence prevention-related data collection, and focused on a coordinated nurse response rather than prevention. Additional information was not available for the remaining 278 programs that were excluded.</p><p id="P36">The team was able to reach 31 of the 105 first and second-priority programs identified via email or phone to verify and collect additional information. Programs that were not reachable tended to have outdated contact information listed and/or were no longer in operation. After initial outreach activities, 15 programs were confirmed to have met the inclusion criteria and were recommended to be included in the panel review. The other 16 programs were not recommended as they were either community-based programs with services that did not originate in the hospital, were implemented for less than 12 months, were currently involved in an outcome evaluation, and/or are no longer in existence. 
<xref rid="F2" ref-type="fig">Figure 2</xref> outlines the screening process used to identify HBYVP programs included in the review.</p></sec><sec id="S4"><title>Step 3: Convene Initial Review Panel and Ratings Meeting</title><p id="P37">In preparation for the review panel, the team populated a program description form using information collected during the environmental scan. The program description form included details about the program&#x02019;s location, delivery setting, number of youths served, priority areas, implementation duration, violence prevention data, target population, a detailed description of the program, type of data collected, and data collection method. Additional information included on the program description forms was gathered from discussions conducted with hospital staff at each of the 15 programs. Discussions with hospital staff were guided by formative questions used to help confirm that the programs were considered active and were providing services to the target population at the time of the review. Additional questions were used to help identify additional and knowledgeable point(s) of contact for each program, guiding theories/theory of change used by the program, and other important background information related to program development and implementation. The program description forms served as the primary reference materials for the reviewers during the initial review panel.</p><p id="P38">To reduce the likelihood of bias during the review process, team analysts ensured that all programs were de-identified, assigned a color pseudonym, and all potential identifiable information was removed from the forms shared with reviewers (e.g., program city, name of affiliated programs, and partnerships). 
This study received an exemption from the Office of Management and Budget, which is responsible for reviewing federal regulations and handling proposals and communications under the Paperwork Reduction Act of 1980.</p><p id="P39">Two review panels were conducted with 12 violence prevention experts experienced in various aspects of HBYVP (e.g., program administrators, designers, facilitators, and researchers) to identify characteristics of programs suitable for an EA. Reviewers assessed and provided feedback for each program described in the program description forms using an online platform known as SurveyMonkey Apply (SurveyMonkey Inc., San Mateo, California). Team analysts also recorded meeting minutes during the review panels. The scoring guide used to rank the programs included six categories (i.e., program design, potential impact, evaluation capacity, sustainability, replicability, and transferability). Reviewers were provided with the scoring criteria/description for each category and asked to rank the program using a point system. Cumulative scores were used to rank the programs. Thematic analysis of the panel discussion notes was used to identify common themes that were shared during the panel session. These themes were reviewed by the group of panelists as a part of the member-checking process. 
The panelists suggested the following summary of characteristics as those to consider when selecting programs for an evaluability assessment (EA).</p><sec id="S5"><title>Characteristics for Strong Consideration for an EA.</title><p id="P40">Programs that received the most favorable assessments of their characteristics were those that shared the following:</p><list list-type="bullet" id="L5"><list-item><p id="P41">Components that address social determinants of health and included community wraparound services</p></list-item><list-item><p id="P42">Adequate number of trained staff to implement the program, maintain the caseload, and provide an opportunity for scaling the program</p></list-item><list-item><p id="P43">Aims and goals that align with the established goals of the CDC initiative to reduce youth violence-related mortality, morbidity, and recidivism</p></list-item><list-item><p id="P44">Focus population within the desired age range (10&#x02013;24 years)</p></list-item><list-item><p id="P45">Strong capacity for or demonstrated a history of evaluation</p></list-item><list-item><p id="P46">Systematic tracking of violence-related outcomes (e.g., rates of re-injury, community referrals, and service utilization)</p></list-item></list></sec><sec id="S6"><title>Characteristics for Limited Consideration for an EA.</title><p id="P47">Programs that were not suggested for consideration for evaluation were those that shared the following:</p><list list-type="bullet" id="L6"><list-item><p id="P48">Poorly described details about the intervention components</p></list-item><list-item><p id="P49">Misaligned focus areas (i.e., not aimed at reducing youth violence-related morbidity and mortality rates)</p></list-item><list-item><p id="P50">Focus population outside the ages of 10&#x02013;24 years</p></list-item><list-item><p id="P51">Lacking outcome data related to re-injury and recidivism or capacity to monitor data related to re-injury and 
recidivism</p></list-item><list-item><p id="P52">Limited or no follow-up or tracking of participant outcomes</p></list-item></list><p id="P53">Of the fifteen programs considered by the review panel, two programs were not highly ranked by any of the panel members; three programs did not capture youth violence prevention outcomes (e.g., rates of re-injury, community referrals, and service utilization) and did not have the organizational capacity to sustain an evaluability assessment; and one program was not selected due to its inclusion in an existing evaluation project, resulting in six programs not selected for EA. The remaining nine programs were selected for inclusion in the EA.</p></sec></sec><sec id="S7"><title>Step 4: Conduct Evaluability Assessments (EAs)</title><p id="P54">EA is a pre-evaluation activity designed to maximize the chances that any subsequent evaluation of programs, practices, or policies will result in useful information (<xref rid="R13" ref-type="bibr">Leviton et al., 2010</xref>). Nine programs were contacted to conduct an EA site visit. One program did not respond to numerous outreach efforts, resulting in eight programs included in the EA process. While EAs are traditionally conducted in-person, travel restrictions and safety concerns related to the COVID-19 pandemic resulted in a minor limitation, causing the project team to adjust the original approach and develop a virtual alternative. As a result, the EA site visits for this project were conducted virtually using the Zoom web conferencing platform (<xref rid="R10" ref-type="bibr">Gray et al., 2020</xref>).</p><p id="P55">Prior to each virtual visit, program materials were reviewed by the site visit team, followed by virtual site visit interviews, and virtual site visit debrief. During each virtual visit, at least two members of the project team assessed program implementation, data collection, and outcomes. All virtual EA site visit sessions lasted approximately 1 hour. 
Semi-structured guides were developed by the project team during study development and the guides focused on program design, potential impact, evaluation potential, sustainability, replicability, and transferability. Questions for the key informant interview guides were grouped into the following categories and sub-categories: (a) History (Program Description, Implementation Feasibility, Replicability/Transferability); (b) HBYVP Program (Program Design, Potential Impact, Implementation Feasibility, Sustainability, Replicability/Transferability); (c) Community Support and Partnerships (Reach to Target Population, Implementation Feasibility, Sustainability, Replicability/Transferability, Staff/Organizational Capacity); (d) Evaluation (Potential Impact, Evaluation Capacity, Options for Further Evaluation, Staff/Organizational Capacity); (e) Funding (Implementation Feasibility, Sustainability, Replicability/Transferability). EA project team members interviewed key program staff, including lead program administrators, partners, program staff, evaluators, and other stakeholders.</p><p id="P56">During each virtual EA site visit, two EA project team members took detailed notes of the program&#x02019;s responses. Thematic analysis was used to group the notes into major categories and themes. Team members compared the content of their notes and themes identified. The notes were synthesized into a report identifying any areas of ambiguity. This summary report was shared with the programs after each virtual site visit as a part of the collaborative member-checking process. A preliminary logic model (based on the formative data from environmental scan activities) was drafted prior to each virtual EA site visit to describe the program&#x02019;s resources, activities, and outputs. This logic model was reviewed and further refined with the program during the virtual EA site visit and debrief. 
EA team members then drafted a summary report, describing the program&#x02019;s approach, major elements as planned and implemented, observations and findings from the team members, and the program&#x02019;s evaluation potential, impact, and recommendations. The report also included the detailed updated logic model of the program and an appendix of reviewed documents. Programs were instructed to review the notes and logic model and provide clarifying details to the EA team within 2 weeks following the virtual interview.</p><p id="P57">After the summary report was verified by the program staff, EA project team members were responsible for de-identifying all program documents including any available annual reports and classifying information for each program using a color-coding system to classify programs before they were shared with the panel reviewers. This de-identified information, along with the initial program information, was made available to the review panel for a second review during Step 5.</p><p id="P58">Characteristics of violence prevention programs ready for a rigorous evaluation included a program champion, a strong connection to the community, warm referrals/handoffs to community resources, robust or tailored case management, and some experience with violence prevention, either through training or past lived experience. If programs were built on a violence prevention theory, it was either the Cure Violence model (<xref rid="R4" ref-type="bibr">Butts et al., 2015</xref>), SNUG (guns spelled backward and an adaptation from Cure Violence), or some other strengths-based prevention model designed to highlight and build on existing resources and strengths of the community and individuals served. All programs had some qualitative or quantitative data that could be used or optimized for future evaluation efforts. 
Overall, each of the programs&#x02019; features was tailored to their respective city or county depending on the community or hospital needs.</p></sec><sec id="S8"><title>Step 5: Reconvene Panel to Review Programs and Rate EA Findings</title><p id="P59">Nine of the original panelists returned to review the EA findings for the eight programs that participated in the EA and provided feedback using SurveyMonkey Apply during a second virtual panel review meeting. In preparation for the virtual panel review, panelists reviewed program materials and resources (i.e., program brochures and overview documents, intake forms, data collection forms, available annual reports, and other program documents as available). After reviewing the information, panelists were asked to score the programs using the scoring guide and criteria used in the initial review (i.e., program design, potential impact, evaluation capacity, sustainability, replicability, and transferability). Like the initial review panel meeting outlined in Step 3, panelists highlighted shared characteristics of the programs that should be considered for further review and potential selection as a program with potential to be recommended for rigorous evaluation. 
Panelists&#x02019; scores and feedback from the initial and second panel reviews were extracted, compiled for thematic analysis, and shared with panelists as a part of the member-checking process during the panel session.</p><p id="P60">Across the eight programs, major themes and concepts discussed included the target population (the total number of clients served within the target population [youth 10&#x02013;24 years old]); program goals (the program&#x02019;s alignment with priority areas and outcomes); data infrastructure (the program&#x02019;s capacity to collect and measure youth prevention); sustainability (the funding mechanisms for longevity of the program); and transferability/replicability (the strategies that could or could not be duplicated in different and similar settings).</p><p id="P61">Major areas for discussion included the program&#x02019;s primary goals, data accessibility, service availability, and program duration. Some of the programs&#x02019; priorities were not within the scope of this project&#x02019;s specific aims. For example, two programs did not offer a holistic approach to violence prevention (e.g., referral and access to community services, complementary and alternative therapies, and outpatient clinical services). Instead, only one aspect of a promising violence prevention program, such as mental health, was highlighted. Strengths of the programs included access to multiple data sources, such as the hospital&#x02019;s trauma registry and/or electronic health records, and the ability to share data among these systems. Although three programs had access to these data, the ability to merge data across systems did not appear feasible without major modifications or resource investments. Seven programs implemented a client-centered approach and provided wraparound services to their clients. 
However, the client&#x02019;s duration from enrollment to completion of the program was either unclear or indefinite for all programs.</p></sec><sec id="S9"><title>Step 6: Disseminate Information and Identify Programs for Future Rigorous Evaluation</title><p id="P62">Findings from the SSA methodology, including the EA site visits and two virtual panel reviews, identified three hospital-based programs ready for a more rigorous outcome evaluation. For four of the remaining programs, the review revealed factors that indicated a strong readiness for evaluation, even though those programs were not yet fully prepared. The one remaining hospital-based program did not include the factors that would make it ready for an evaluation. These factors included:</p><list list-type="bullet" id="L7"><list-item><p id="P63">Commitment to evidence-based violence prevention</p></list-item><list-item><p id="P64">Strong champions of the program within the hospital and community</p></list-item><list-item><p id="P65">Strong data infrastructure that can be replicated and shared across data systems</p></list-item><list-item><p id="P66">Intensive case management and mentorship that addresses risk factors</p></list-item><list-item><p id="P67">Well-developed client-centered treatment plans</p></list-item><list-item><p id="P68">Various sources of local and state funding</p></list-item><list-item><p id="P69">Adequately staffed, including a commitment to staff&#x02019;s professional development</p></list-item><list-item><p id="P70">Access to and/or collecting community-level data</p></list-item><list-item><p id="P71">Tracking of violent re-injury and re-arrests</p></list-item><list-item><p id="P72">Inclusion of preliminary data outcomes in reducing recidivism</p></list-item><list-item><p id="P73">Efforts to address the target population that match current project priorities</p></list-item><list-item><p id="P74">Broad selection of wraparound services</p></list-item><list-item><p id="P75">Infrastructure and components are based on an 
existing model</p></list-item></list><p id="P76">Factors associated with limited readiness during the virtual panel review included:</p><list list-type="bullet" id="L8"><list-item><p id="P77">Lack of standards for selecting and training essential program staff</p></list-item><list-item><p id="P78">Unimplemented program components and aspirational outcomes</p></list-item><list-item><p id="P79">Lack of outcome data and the necessary evaluation staff</p></list-item><list-item><p id="P80">Underdeveloped data infrastructure leading to inaccessible data and/or limited data sharing</p></list-item><list-item><p id="P81">Program mission was not clearly in alignment with priority focus areas and aims of reducing youth violence-related injury, involvement in the criminal justice system, and morbidity and mortality rates</p></list-item><list-item><p id="P82">Unclear number of clients served within the target population (youth ages 10&#x02013;24 years old)</p></list-item><list-item><p id="P83">No access to baseline data or criminal justice data</p></list-item><list-item><p id="P84">Highly individualized plan for participants causing a decrease in program replicability and transferability</p></list-item></list></sec></sec><sec id="S10"><title>DISCUSSION</title><p id="P85">Key activities and desired outcomes varied across the identified violence prevention programs. Overall, panelists believed that programs with strong evaluation potential could be replicated and sustained financially and that they presented clear goals and outcomes for their programs. They placed a high priority on data collection and management, including long-term outcomes. They also displayed strong partnerships within the hospital and community. Those programs also focused on clients&#x02019; needs, providing access to a wide selection of services within the hospital and community. These programs included credible messengers and/or dedicated staff who had relevant training and/or lived experience with violence. 
These programs often built on existing HBYVP program models and made improvements, including the use of violence prevention theories and addressing social determinants of health (e.g., transportation, housing, and food insecurity).</p><p id="P86">The use of existing databases, environmental scans, review panels, and in-depth interviews with program staff resulted in a set of criteria that can be used to determine future investments in HBYVP programs and activities. Panelists recognized that some programs not yet ready for rigorous evaluation could benefit from partnering with researchers to bolster their data capacity. The SSA methodology combined with the use of subject matter experts to objectively review and provide insights about related programs is a valuable approach that can be used to help outline, dissect, and compare complex programs in practice and further facilitate the uptake of evidence-based practice and research into regular use by other practitioners, researchers, and policymakers. In addition, using implementation science as a guide, this methodology has the potential not only to inform the field of hospital-based violence prevention but also to improve the public health response to reducing violence-related injuries and decreasing future involvement of youth in the criminal justice system overall.</p></sec><sec id="S11"><title>LESSONS LEARNED</title><p id="P87">To ensure that outreach to HBYVP programs is successful, it is important to provide informative SSA project materials at the initial point of contact. 
Providing the program points of contact with a project factsheet that used accessible and culturally appropriate terminology to outline information about the problem, project goals, proposed activities, and altruistic benefit to the community was a successful strategy for recruiting promising HBYVP programs for this project.</p><p id="P88">Additionally, successful recruitment of blinded subject matter experts for the project review panels was attributed to (a) the use of SurveyMonkey Apply to streamline panelist review and rating of the materials, (b) the use of an experienced panel facilitator with knowledge of program evaluation, and (c) the accommodation of panelist schedules. Panel members agreed to participate in both the initial and final panel review to ensure consistency in review. This strategy helped ensure the reliability of both review panels. Due to the time between panel review sessions, there was some attrition due to organizational changes.</p><sec id="S12"><title>Implications for Practice</title><p id="P89">The capacity for future rigorous evaluation should be a significant consideration during program planning and design. Findings of this review indicate that there is a need to educate hospital-based program designers on the importance of including multi-level evaluation (i.e., formative, process, and outcome) at every stage of program design, from program planning to program implementation and dissemination of findings. The CDC&#x02019;s <italic toggle="yes">Framework for Program Evaluation in Public Health</italic> states that program evaluation should be &#x0201c;practical, ongoing. . .and involve all program stakeholders, not just evaluation experts&#x0201d; (<xref rid="R14" ref-type="bibr">Milstein &#x00026; Wetterhall, 1999</xref>). 
Programs that do not incorporate these elements, including the creation of evaluation plans at the beginning of program development, risk implementation of programs that do not have the infrastructure necessary to identify areas for improvement, or ability to demonstrate a return on investment.</p></sec></sec><sec id="S13"><title>CONCLUSION</title><p id="P90">The purpose of this project was to use the SSA methodology to identify innovative HBYVP programs focusing on youth ages 10&#x02013;24 that were evaluable, scalable, and had the potential to improve youth health outcomes. Given that HBYVP programs exist in health care settings and few programs have been rigorously evaluated (<xref rid="R7" ref-type="bibr">Cunningham et al., 2012</xref>; <xref rid="R8" ref-type="bibr">David-Ferdon et al., 2016</xref>), the results of this review can be used to identify characteristics of the most promising HBYVP programs, as well as the performance measures and indicators necessary for the completion of rigorous evaluation to support continual program improvement and long-term sustainability.</p></sec></body><back><ack id="S14"><p id="P91">Theresa Armstead and Arielle Arzu who provided project guidance and technical support. Program staff from the 15 hospitals that provided their time to help the project team understand their violence prevention program. American Hospital Association for their provision to access their hospital survey database. Panel members who reviewed the programs and provided critical feedback. 
Study was funded by the National Center for Injury Prevention and Control, US Centers for Disease Control and Prevention (# GS35F393AA and task order #75D30119F06945).</p><sec id="S15"><title>Disclaimer</title><p id="P92">The findings and conclusions in this report are those of the authors and do not necessarily represent the official position of the Centers for Disease Control and Prevention.</p></sec></ack><ref-list><title>REFERENCES</title><ref id="R1"><mixed-citation publication-type="journal"><name><surname>Aboutanos</surname><given-names>MB</given-names></name>, <name><surname>Jordan</surname><given-names>A</given-names></name>, <name><surname>Cohen</surname><given-names>R</given-names></name>, <name><surname>Foster</surname><given-names>RL</given-names></name>, <name><surname>Goodman</surname><given-names>K</given-names></name>, <name><surname>Halfond</surname><given-names>RW</given-names></name>, <name><surname>Poindexter</surname><given-names>R</given-names></name>, <name><surname>Charles</surname><given-names>R</given-names></name>, <name><surname>Smith</surname><given-names>SC</given-names></name>, <name><surname>Wolfe</surname><given-names>LG</given-names></name>, <name><surname>Hogue</surname><given-names>B</given-names></name>, &#x00026; <name><surname>Ivatury</surname><given-names>RR</given-names></name> (<year>2011</year>). <article-title>Brief violence interventions with community case management services are effective for high-risk trauma patients</article-title>. <source>Journal of Trauma and Acute Care Surgery</source>, <volume>71</volume>(<issue>1</issue>), <fpage>228</fpage>&#x02013;<lpage>237</lpage>.</mixed-citation></ref><ref id="R2"><mixed-citation publication-type="webpage"><collab>American Hospital Association</collab>. (<year>2017</year>). <source>AHA hospital statistics</source>, <edition>2017 edition</edition>. 
<comment><ext-link xlink:href="https://www.aha.org/2016-12-27-aha-hospital-statistics-2017-edition" ext-link-type="uri">https://www.aha.org/2016-12-27-aha-hospital-statistics-2017-edition</ext-link></comment></mixed-citation></ref><ref id="R3"><mixed-citation publication-type="journal"><name><surname>Becker</surname><given-names>MG</given-names></name>, <name><surname>Hall</surname><given-names>JS</given-names></name>, <name><surname>Ursic</surname><given-names>CM</given-names></name>, <name><surname>Jain</surname><given-names>S</given-names></name>, &#x00026; <name><surname>Calhoun</surname><given-names>D</given-names></name> (<year>2004</year>). <article-title>Caught in the crossfire: The effects of a peer-based intervention program for violently injured youth</article-title>. <source>Journal of Adolescent Health</source>, <volume>34</volume>(<issue>3</issue>), <fpage>177</fpage>&#x02013;<lpage>183</lpage>.</mixed-citation></ref><ref id="R4"><mixed-citation publication-type="journal"><name><surname>Butts</surname><given-names>JA</given-names></name>, <name><surname>Roman</surname><given-names>CG</given-names></name>, <name><surname>Bostwick</surname><given-names>L</given-names></name>, &#x00026; <name><surname>Porter</surname><given-names>JR</given-names></name> (<year>2015</year>). <article-title>Cure violence: A public health model to reduce gun violence</article-title>. <source>Annual Review of Public Health</source>, <volume>36</volume>, <fpage>39</fpage>&#x02013;<lpage>53</lpage>.</mixed-citation></ref><ref id="R5"><mixed-citation publication-type="webpage"><collab>Centers for Disease Control and Prevention, National Center for Injury Prevention and Control</collab>. (<year>2021</year>). <source>Web-based Injury Statistics Query and Reporting System (WISQARS)</source>
<comment>(online 20179)</comment>. <comment><ext-link xlink:href="https://www.cdc.gov/injury/wisqars/index.html/" ext-link-type="uri">https://www.cdc.gov/injury/wisqars/index.html/</ext-link></comment></mixed-citation></ref><ref id="R6"><mixed-citation publication-type="journal"><name><surname>Cunningham</surname><given-names>RM</given-names></name>, <name><surname>Carter</surname><given-names>PM</given-names></name>, <name><surname>Ranney</surname><given-names>M</given-names></name>, <name><surname>Zimmerman</surname><given-names>MA</given-names></name>, <name><surname>Blow</surname><given-names>FC</given-names></name>, <name><surname>Booth</surname><given-names>BM</given-names></name>, &#x00026; <name><surname>Walton</surname><given-names>MA</given-names></name> (<year>2015</year>). <article-title>Violent reinjury and mortality among youth seeking emergency department care for assault-related injury: A 2-year prospective cohort study</article-title>. <source>JAMA Pediatrics</source>, <volume>169</volume>(<issue>1</issue>), <fpage>63</fpage>&#x02013;<lpage>70</lpage>.<pub-id pub-id-type="pmid">25365147</pub-id>
</mixed-citation></ref><ref id="R7"><mixed-citation publication-type="journal"><name><surname>Cunningham</surname><given-names>RM</given-names></name>, <name><surname>Chermack</surname><given-names>ST</given-names></name>, <name><surname>Zimmerman</surname><given-names>MA</given-names></name>, <name><surname>Shope</surname><given-names>JT</given-names></name>, <name><surname>Bingham</surname><given-names>CR</given-names></name>, <name><surname>Blow</surname><given-names>FC</given-names></name>, &#x00026; <name><surname>Walton</surname><given-names>MA</given-names></name> (<year>2012</year>). <article-title>Brief motivational interviewing intervention for peer violence and alcohol use in teens: One-year follow-up</article-title>. <source>Pediatrics</source>, <volume>129</volume>(<issue>6</issue>), <fpage>1083</fpage>&#x02013;<lpage>1090</lpage>.<pub-id pub-id-type="pmid">22614776</pub-id>
</mixed-citation></ref><ref id="R8"><mixed-citation publication-type="book"><name><surname>David-Ferdon</surname><given-names>C</given-names></name>, <name><surname>Vivolo-Kantor</surname><given-names>AM</given-names></name>, <name><surname>Dahlberg</surname><given-names>LL</given-names></name>, <name><surname>Marshall</surname><given-names>KJ</given-names></name>, <name><surname>Rainford</surname><given-names>N</given-names></name>, &#x00026; <name><surname>Hall</surname><given-names>JE</given-names></name> (<year>2016</year>). <part-title>A comprehensive technical package for the prevention of youth violence and associated risk behaviors</part-title>. <source>National Center for Injury Prevention and Control, Centers for Disease Control and Prevention</source>.</mixed-citation></ref><ref id="R9"><mixed-citation publication-type="journal"><name><surname>Drake</surname><given-names>SA</given-names></name>, <name><surname>Holcomb</surname><given-names>JB</given-names></name>, <name><surname>Yang</surname><given-names>Y</given-names></name>, <name><surname>Thetford</surname><given-names>C</given-names></name>, <name><surname>Myers</surname><given-names>L</given-names></name>, <name><surname>Brock</surname><given-names>M</given-names></name>, &#x00026; <name><surname>Kao</surname><given-names>L</given-names></name> (<year>2020</year>). <article-title>Establishing a regional trauma preventable/potentially preventable death rate</article-title>. <source>Annals of Surgery</source>, <volume>271</volume>(<issue>2</issue>), <fpage>375</fpage>&#x02013;<lpage>382</lpage>.<pub-id pub-id-type="pmid">30067544</pub-id>
</mixed-citation></ref><ref id="R10"><mixed-citation publication-type="journal"><name><surname>Gray</surname><given-names>LM</given-names></name>, <name><surname>Wong-Wylie</surname><given-names>G</given-names></name>, <name><surname>Rempel</surname><given-names>GR</given-names></name>, &#x00026; <name><surname>Cook</surname><given-names>K</given-names></name> (<year>2020</year>). <article-title>Expanding qualitative research interviewing strategies: Zoom video communications</article-title>. <source>The Qualitative Report</source>, <volume>25</volume>(<issue>5</issue>), <fpage>1292</fpage>&#x02013;<lpage>1301</lpage>.</mixed-citation></ref><ref id="R11"><mixed-citation publication-type="webpage"><collab>Health Research and Educational Trust</collab>. (<year>2015</year>). <article-title>Hospital approaches to interrupt the cycle of violence</article-title>. <source>Health Research &#x00026; Educational Trust</source>. <comment><ext-link xlink:href="https://www.hpoe.org/" ext-link-type="uri">https://www.hpoe.org/</ext-link></comment></mixed-citation></ref><ref id="R12"><mixed-citation publication-type="book"><name><surname>Leviton</surname><given-names>LC</given-names></name>, &#x00026; <name><surname>Gutman</surname><given-names>M</given-names></name> (<year>2010</year>). <part-title>Overview and rationale for the Systematic Screening and Assessment Method</part-title>. In <name><surname>Leviton</surname><given-names>LC</given-names></name>, <name><surname>Khan</surname><given-names>L</given-names></name>, &#x00026; <name><surname>Dawkins</surname><given-names>N</given-names></name> (Eds.), <source>The systematic screening and assessment method: Finding innovations worth evaluating. New directions for evaluation</source> (vol. <volume>125</volume>, pp. <fpage>7</fpage>&#x02013;<lpage>31</lpage>). 
<publisher-name>American Evaluation Association</publisher-name>.</mixed-citation></ref><ref id="R13"><mixed-citation publication-type="journal"><name><surname>Leviton</surname><given-names>LC</given-names></name>, <name><surname>Khan</surname><given-names>LK</given-names></name>, <name><surname>Rog</surname><given-names>D</given-names></name>, <name><surname>Dawkins</surname><given-names>N</given-names></name>, &#x00026; <name><surname>Cotton</surname><given-names>D</given-names></name> (<year>2010</year>). <article-title>Evaluability assessment to improve public health policies, programs, and practices</article-title>. <source>Annual Review of Public Health</source>, <volume>31</volume>(<issue>1</issue>), <fpage>213</fpage>&#x02013;<lpage>233</lpage>.</mixed-citation></ref><ref id="R14"><mixed-citation publication-type="book"><name><surname>Milstein</surname><given-names>B</given-names></name>, &#x00026; <name><surname>Wetterhall</surname><given-names>SF</given-names></name> (<year>1999</year>). <source>Framework for program evaluation in public health</source>. <publisher-name>U.S. Department of Health &#x00026; Human Services, Centers for Disease Control and Prevention</publisher-name>.</mixed-citation></ref><ref id="R15"><mixed-citation publication-type="journal"><collab>Organisation for Economic Co-operation and Development-DAC</collab>. (<year>2010</year>). 
<source>Glossary of key terms in evaluation and results-based management</source>.</mixed-citation></ref></ref-list></back><floats-group><fig position="float" id="F1"><label>FIGURE 1</label><caption><p id="P93">Overview of the Systematic Screening and Assessment Approach (Adapted From <xref rid="R12" ref-type="bibr">Leviton &#x00026; Gutman, 2010</xref>)</p></caption><graphic xlink:href="nihms-1999523-f0001" position="float"/></fig><fig position="float" id="F2"><label>FIGURE 2</label><caption><p id="P94">Identifying Programs for Further Consideration: Hospital-Based Youth Violence Prevention Program SSA Environmental Scanning Process</p></caption><graphic xlink:href="nihms-1999523-f0002" position="float"/></fig></floats-group></article>