{"response":{"docs":[{"id":"dlg_ggpd_y-ga-bb400-b-pa15-b2018-belec-p-btext","title":"Quality management annual report, FY 2018","collection_id":"dlg_ggpd","collection_title":"Georgia Government Publications","dcterms_contributor":["Georgia. Department of Behavioral Health and Developmental Disabilities"],"dcterms_spatial":["United States, Georgia, 32.75042, -83.50018"],"dcterms_creator":["Georgia Collaborative ASO"],"dc_date":["2018"],"dcterms_description":["Began with: FY 2016.","FY 2016 (harvested on May 6, 2019 from dbhdd.georgia.gov); title from PDF cover (Georgia Government Publications database, viewed March 18, 2020).","FY 2018 (harvested on May 29, 2019 from dbhdd.georgia.gov) (Georgia Government Publications database, viewed March 18, 2020)."],"dc_format":["application/pdf"],"dcterms_identifier":null,"dcterms_language":["eng"],"dcterms_publisher":["Atlanta, Ga. : Georgia. Department of Behavioral Health and Developmental Disabilities"],"dc_relation":null,"dc_right":["http://rightsstatements.org/vocab/InC/1.0/"],"dcterms_is_part_of":null,"dcterms_subject":["Georgia Collaborative ASO--Evaluation--Periodicals","Mental health services--Georgia--Evaluation--Periodicals","Developmentally disabled--Services for--Georgia--Evaluation--Periodicals","Developmentally disabled--Services for--Evaluation","Evaluation","Mental health services--Evaluation","Georgia","Annual reports","Periodicals"],"dcterms_title":["Quality management annual report, FY 2018"],"dcterms_type":["Text"],"dcterms_provenance":["University of Georgia. 
Map and Government Information Library"],"edm_is_shown_by":["https://dlg.galileo.usg.edu/do:dlg_ggpd_y-ga-bb400-b-pa15-b2018-belec-p-btext"],"edm_is_shown_at":["https://dlg.galileo.usg.edu/id:dlg_ggpd_y-ga-bb400-b-pa15-b2018-belec-p-btext"],"dcterms_temporal":null,"dcterms_rights_holder":null,"dcterms_bibliographic_citation":null,"dlg_local_right":null,"dcterms_medium":["reports"],"dcterms_extent":null,"dlg_subject_personal":null,"iiif_manifest_url_ss":null,"dcterms_subject_fast":null,"fulltext":"FY 2018 Quality Management \nAnnual Report \n \n Table of Contents \n \nQuality Management Annual Report FY 2018 \n \nTABLE LISTING ........................................................................................................................................................4 \nFIGURE LISTING......................................................................................................................................................5 \nSECTION 1: EXECUTIVE SUMMARY ........................................................................................................................7 \nFY 2018 Provider Performance ..................................................................................................................... 8 FY 2018 Quality Management Activities ...................................................................................................... 9 FY 2018 Key Findings .................................................................................................................................. 11 FY 2019 Areas of Focus............................................................................................................................... 16 In Conclusion .............................................................................................................................................. 
18 \nSECTION 2: INTRODUCTION.................................................................................................................................19 \nSECTION 3: BEHAVIORAL HEALTH QUALITY REVIEWS..........................................................................................21 \nBACKGROUND ......................................................................................................................................................21 SAMPLING METHOD .............................................................................................................................................22 \nIndividual Records and Billing Review ........................................................................................................ 22 Individual and Staff Interviews ................................................................................................................... 24 BHQR OVERALL REVIEW SCORES............................................................................................................................25 BHQR Overall Scores................................................................................................................................... 26 BHQR Overall Scores by Tier and Provider Size .......................................................................................... 27 BHQR Billing Validation............................................................................................................................... 30 Figure 6. FY 2018 Percent of Discrepancy Reasons .................................................................................... 35 BHQR Assessment and Planning................................................................................................................. 38 BHQR Service Guidelines ............................................................................................................................ 39 Figure 7. 
FY 2018 Percent of Dollars Reviewed .......................................................................................... 40 by Non-Intensive Outpatient Service ......................................................................................................... 40 Figure 8. FY 2018 Percent of Dollars Reviewed by Specialty Service.......................................................... 41 BHQR Focused Outcome Areas (FOA) ........................................................................................................ 46 Figure 9. BHQR Focused Outcome Area(s) Scores by Year ......................................................................... 47 Figure 10. BHQR Focused Outcome Area by Year ...................................................................................... 48 Figure 11. BHQR Focused Outcome Area by Year ...................................................................................... 48 Figure 12. BHQR Focused Outcome Area by Year ...................................................................................... 49 Figure 13. BHQR Focused Outcome Area by Year ...................................................................................... 50 Figure 14. BHQR Focused Outcome Area by Year ...................................................................................... 50 Figure 15. BHQR Focused Outcome Area by Year ...................................................................................... 51 BHQR Staff and Individual Interviews......................................................................................................... 51 ASSERTIVE COMMUNITY TREATMENT (ACT) .............................................................................................................55 \n \n1|P a g e \n \n Quality Management Annual Report FY 2018 \nACT Quality Review Overall Scores............................................................................................................. 
56 ACT Billing Validation.................................................................................................................................. 57 ACT Assessment and Planning.................................................................................................................... 60 ACT Service Guidelines ............................................................................................................................... 61 ACT Focused Outcome Areas ..................................................................................................................... 63 CRISIS STABILIZATION UNIT QUALITY REVIEWS...........................................................................................................65 Crisis Stabilization Unit Sample Method .................................................................................................... 65 Crisis Stabilization Unit Quality Review ...................................................................................................... 66 CSUQR Individual Record Review (IRR) ...................................................................................................... 67 CSUQR Service Guidelines .......................................................................................................................... 71 CSU Focused Outcome Area ....................................................................................................................... 73 CSUQR Staff Interview and Individual Interview ........................................................................................ 77 REASSESSMENT FREQUENCY REVIEWS ......................................................................................................................80 BHQR Reassessment Review Findings ........................................................................................................ 
80 CSU Reassessment Review Findings ........................................................................................................... 84 TECHNICAL ASSISTANCE/EXIT CONFERENCE...............................................................................................................86 SUMMARY OF FINDINGS AND RECOMMENDATIONS FOR BEHAVIORAL HEALTH PROVIDERS.................................................87 Provider Performance ................................................................................................................................ 87 FY 2018 BH Accomplishments .................................................................................................................... 89 BH Systems Strengths and Recommendations for Improvement .............................................................. 91 \nSECTION 4: INTELLECTUAL AND DEVELOPMENTAL DISABILITIES ........................................................................95 \nBACKGROUND ......................................................................................................................................................95 SAMPLING METHOD .............................................................................................................................................97 \nPerson-Centered Review (PCR)................................................................................................................... 97 Quality Enhancement Provider Review (QEPR).......................................................................................... 97 REVIEW PROCESSES...............................................................................................................................................98 PCR and QEPR............................................................................................................................................. 
98 Quality Technical Assistance Consultation (QTAC) ................................................................................... 100 PERSON CENTERED REVIEW..................................................................................................................................100 PCR Scores by Tool.................................................................................................................................... 100 PCR Scores by Focused Outcome Area (FOA) ........................................................................................... 103 PCR Scores by Tool and Focused Outcome Area (FOA) ............................................................................ 104 FOAs by Demographics............................................................................................................................. 105 Opportunities for Growth by FOA ............................................................................................................ 108 Individual Service Plan Quality Assurance Checklist (ISP QA) .................................................................. 119 ISP Expectations ....................................................................................................................................... 121 Results Using Original ISP QA Checklist .................................................................................................... 121 Results Using New ISP QA Checklist ......................................................................................................... 123 \n2|P a g e \n \n Quality Management Annual Report FY 2018 \nPCR Results by Service.............................................................................................................................. 124 PCR Strengths and Recommendations ..................................................................................................... 
126 QUALITY ENHANCEMENT PROVIDER REVIEW (QEPR) ...............................................................................................127 QEPR Scores by Size.................................................................................................................................. 127 Overall QEPR Score by Tool and Year........................................................................................................ 128 Overall Crisis Provider Scores ................................................................................................................... 129 Qualifications and Training (Q \u0026 T) and Service Specific (DDSS) .............................................................. 130 Qualifications \u0026 Training Opportunities for Improvement ...................................................................... 131 QEPR Scores by Provider Size ................................................................................................................... 133 QEPR Scores by Tool and FOA................................................................................................................... 135 QEPR Scores by FOA and Provider Size..................................................................................................... 137 QUALITY AND TECHNICAL ASSISTANCE CONSULTATION (QTAC)...................................................................................138 INTELLECTUAL AND DEVELOPMENTAL DISABILITY SUMMARY OF FINDINGS AND RECOMMENDATIONS ................................139 FY 2018 IDD Accomplishments................................................................................................................. 140 IDD System Strengths and Recommendations ......................................................................................... 
141 SECTION 5: BEHAVIORAL HEALTH AND INTELLECTUAL DEVELOPMENTAL DISABILITIES: OVERALL RESULTS BY FOCUSED OUTCOME AREA ................................................................................................................................149 SECTION 6: FEEDBACK SURVEY RESULTS ...........................................................................................................151 SECTION 7: CONCLUSION...................................................................................................................................154 APPENDIX A: ABBREVIATIONS AND ACRONYMS ...............................................................................................157 APPENDIX B: SCORE DISTRIBUTIONS .................................................................................................................159 \n3|P a g e \n \n Table Listing \n(Links available by ctrl + clicking on table name) \n \nQuality Management Annual Report FY 2018 \n \nReviews Completed by Fiscal Year Number of Completed Reviews by Type FY 2018 Overview of BHQR and CSUQR Review Data BHQR and ACT Results by Category by Year BHQR and CSUQR Results by Category by Year FY 2018 Overview of IDD Review Data FY 2018 Summary by Tool and Review Type Table 1. BHQR Overall Scores by Tier Table 2. BHQR Category Scores by Provider Size Table 3. Amount Reviewed by Funding Source Table 4. FY 2018 Non-Intensive Outpatient Services Percent of Funds Justified Table 5. FY 2018 Specialty Services Percent of Funds Justified Table 6. BHQR Billing Discrepancy Reasons Table 7. BHQR Assessment \u0026 Planning Indicator Scores by Year Table 8. BHQR Service Guidelines Scores by Service Type Table 9. FY 2018 Improvement Results by Service Table 10. FY 2018 Service Type Declined Results Table 11. FY 2018 Interview Comparison Table 12. BHQR and ACT Results by Category by Year Table 13. ACT BHQR Billing Discrepancy Reasons by Year Table 14. ACT Service Guidelines Scores Table 15. 
Technical Assistance/Exit Conference Details for FY 2018 \nTable 16. BHQR, ACT, CSUQR Overall Averages by Year \nTable 17. BHQR and CSUQR FOA Scores \nTable 18. Reassessment Review Results \nTable 19. PCR Sample by Region \nTable 20. QEPR Provider Sample by Size \nTable 21. Number of Records by Review Tool and Review Type Table 22. Low Scoring Whole Health Indicators (PCR) Table 23. Low Scoring Safety Indicators (PCR) Table 24. Low Scoring Person Centered Practices Indicators (PCR) Table 25. Low Scoring Community Life Interview Indicators (PCR) Table 26. Low Scoring Community Life Record Review Indicators (PCR) Table 27. Low Scoring Choice Indicators (PCR) Table 28. Low Scoring Rights Indicators (PCR) \n \n4|P a g e \n \n Quality Management Annual Report FY 2018 \nTable 29. ISP QA Life Indicator by Region and Year Table 30. ISP QA Checklist Ratings by Expectation (FY 2018) Table 31. Average Rating by Expectation (0 - 4) Table 32. PCR PRR Results by Service and Year Table 33. FY 2018 PCR Record Review Results by FOA and Service Table 34. Top Strengths Identified During a PCR Table 35. Top Recommendations Identified During a PCR Table 36. QTACs by Referral Source and Type Table 37. QTAC Referral Reasons: FY 2018 Table 38. FY 2017 Summary by Tool and Review Type Table 39. The Collaborative Provider Feedback Surveys \nFigure Listing \n(Links available by ctrl + clicking on figure name) \nFigure 1. FY 2018 Diagnostic Categories Reviewed Figure 2. BHQR Fiscal Year Results by Category Figure 3. Fiscal Year 2018 Results by Provider Size Figure 4. BHQR Billing Validation Amount Reviewed by Fiscal Year Figure 5. BHQR Amount Reviewed by Funding Source and Fiscal Year Figure 6. FY 2018 Percent of Discrepancy Reasons Figure 7. FY 2018 Percent of Dollars Reviewed by Non-Intensive Outpatient Service Figure 8. FY 2018 Percent of Dollars Reviewed by Specialty Service Figure 9. BHQR Focused Outcome Areas Scores by Year Figure 10. 
BHQR Focused Outcome Area by Year  Whole Health Figure 11. BHQR Focused Outcome Area by Year  Safety Figure 12. BHQR Focused Outcome Area by Year  Person Centered Practices Figure 13. BHQR Focused Outcome Area by Year  Community Life Figure 14. BHQR Focused Outcome Area by Year - Choice Figure 15. BHQR Focused Outcome Area by Year - Rights Figure 16. BHQR Individual Interview Data by Fiscal Year Figure 17. BHQR Staff Interview Data by Fiscal Year Figure 18. ACT Fiscal Year Results by Category Figure 19. ACT Billing Validation by Fiscal Year Figure 20. ACT Assessment and Treatment Planning Scores by Year Figure 21. ACT FOA Scores by Category by Year Figure 22. CSU Overall Scores by Fiscal Year Figure 23. Individual Record Review Category Scores Figure 24. CSU Service Guidelines Staffing and Curriculum Results by Indicator Figure 25. CSUQR Service Guidelines Policy Adherence Indicators by Fiscal Year \n5|P a g e \n \n Quality Management Annual Report FY 2018 \nFigure 26. CSUQR Service Guidelines Medication Policy Adherence Results by Year Figure 27. CSUQR FOA Results by Year Figure 28. CSU Individual and Staff Interview Results by Fiscal Year Figure 29. CSU Individual Interview Subcategory Data by Year Figure 30. CSU Staff Interview Subcategory Data by Year Figure 31. BHQR FY 2018 Overall Score Distribution of Reassessed Providers Figure 32. BHQR FY 2018 Billing Score Distribution of Reassessed Providers Figure 33. CSU #1 Category Score by Reassessment Figure 34. CSU #2 Category Score by Reassessment Figure 35. PCR Scores by Tool and Year Figure 36. PCR Scores by FOA and Year Figure 37. FY 2018 PCR Scores by Tool and FOA Figure 38. FY 2018 PCR Scores by FOA and Region Figure 39. FY 2018 PCR Scores by FOA and Residential Setting Figure 40. FY 2018 PCR Scores by FOA and Funding Source Figure 41. FY 2018 PCR Scores by FOA  Whole Health Figure 42. FY 2018 PCR Scores by FOA - Safety Figure 43. FY 2018 PCR Scores by FOA - Person Centered Practices Figure 44. 
FY 2018 PCR Scores by FOA - Community Life Figure 45. FY 2018 PCR Scores by FOA - Choice Figure 46. FY 2018 PCR Scores by FOA - Rights Figure 47. PCR ISP QA Life Indicator by Year Figure 48. \"New\" ISP QA Checklist Ratings Figure 49. FY 2016 - FY 2018 QEPR Samples by Provider Size and Type Figure 50. Overall QEPR Scores by Tool and Year Figure 51. Overall Crisis Provider Scores by Tool and Year Figure 52. QEPR Qualifications and Training and DDSS Results by Year Figure 53. Overall and Qualifications and Training Score by Provider Size and Review Year Figure 54. Overall QEPR Scores by FOA and Year Figure 55. FY 2018 QEPR Scores by Tool and Focus Outcome Area Figure 56. FY 2018 QEPR Provider Scores by FOA and Size Figure 57. FY 2018 QTAC Technical Assistance Provided Figure 58. Focused Outcome Areas by Review Type \n6|P a g e \n \n Section 1: Executive Summary \n \nQuality Management Annual Report FY 2018 \n \nSince 2016, the Georgia Collaborative Administrative Services Organization (the Collaborative) has assisted the Georgia Department of Behavioral Health \u0026 Developmental Disabilities (DBHDD) in its management of services and supports for individuals receiving Community Behavioral Health and Rehabilitation Services (CBHRS), New Options Waiver (NOW), Comprehensive Supports Waiver (COMP), and state funded behavioral health (BH) and intellectual and developmental disabilities (IDD) services. DBHDD delegates BH, Crisis Stabilization Unit (CSU), and IDD quality reviews to the Collaborative. On June 30, 2018, the Collaborative's Quality Management Department completed the third year of the contract with DBHDD. See the table below for the number of BH and IDD quality reviews and interviews conducted. 
\n \nReviews Completed by Fiscal Year \n \nFiscal Year \n \nNumber of Reviews \n \n2016 2017 \n \nBH/CSU* 177 208 \n \nIDD** 584 581 \n \n2018 \n \n197 \n \n584 \n \nTotal \n \n582 \n \n1,749 \n \nIndividuals Interviewed Staff Interviewed \n \nBH/CSU 643 735 \n \nIDD 1,151 921 \n \nBH/CSU 657 774 \n \nIDD 1,186 702 \n \n616 \n \n920 \n \n652 \n \n759 \n \n1,994 \n \n2,992 2,083 2,647 \n \n*The additional subset of Assertive Community Treatment (ACT) Quality Reviews are included in the BH/CSU totals (FY16: 17; FY17: 18; FY18: 20). Calculations specific to the ACT Quality Reviews are in a separate section of the report. \n** Crisis providers are included in the total number of reviews but are not included in the figures/graphs below (FY16: 1; FY17: 4; FY18: 1). Separate calculations of Quality Enhancement Provider Reviews (QEPR) and Person Centered Reviews (PCR) are included below. A separate QEPR calculation for crisis providers can be found throughout the report. \n \n7|P a g e \n \n FY 2018 Provider Performance \nBehavioral Health Quality Reviews \n \nQuality Management Annual Report FY 2018 \nCrisis Stabilization Unit Quality Reviews \n \nIDD: Quality Enhancement Provider Reviews1 \n \n1 The QEPR sample is designed to ensure all providers are reviewed at least once every three to four years. Comparisons across years are not appropriate. 
\n8|P a g e \n \n Quality Management Annual Report FY 2018 \n \nIDD: Person Centered Reviews by Focused Outcome Area and Year \n \n100% 75% \n \n93.6% 87.4% \n84.7% \n \n96.5% \n \n91.6% 89.1% 84.8% \n \n90.3% \n \n82.6% \n \n80.6% 76.9% \n \n71.1% \n \n90.7% 82.3% \n \n95.4% 92.0% 90.2% \n \n78.7% \n \n50% \n \n25% \n \n0% \nWhole Health \n \nSafety \n \nPerson Centered Practices \n \nCommunity Life \n \nChoice \n \nRights \n \nFY 2016 (N = 484) \n \nFY 2017 (N = 481) \n \nFY 2018 (N = 484) \n \nFY 2018 Quality Management Activities \nBased on FY 2017 analysis and dialogue with DBHDD, the following were the areas of focus for FY 2018: Behavioral Health \n To improve provider billing practices and the associated supporting documentation, the Billing Validation threshold for the Behavioral Health Quality Reviews (BHQR) was increased by DBHDD to 80 percent. The billing scores' average increased to 85 percent; reflecting improved provider performance and a decrease of funds at risk for recoupment. \n Behavioral health quality assessors continued to identify billing practices that put providers and the larger system at risk. Quality Management made recommendations for increased oversight of providers with uncorrected or ongoing billing and documentation practices. \n9|P a g e \n \n Quality Management Annual Report FY 2018 \n A new quality of care process was piloted to collect, monitor, and report various issues to DBHDD specific to providers' physical environments, staffing, and quality of services which are outside of the scope of the quality reviews. \n To standardize and mirror the sampling processes with IDD reviews, Quality Management grouped BH providers into three categories (Small, Medium, Large) based on the number of unique individuals served in the six months preceding a review. Subsequent revisions to BHQR sample sizes reflect provider groupings. 
\n There was implementation of an oversampling process for both BHQRs and Crisis Stabilization Unit Quality Reviews (CSUQR) to ensure appropriate sample sizes. \n To increase engagement of individuals with complex care needs and reduce readmission rates, Quality Management educated Crisis Stabilization Unit providers about the benefits of the Collaborative's Care Coordination team. \n On July 1, 2018, CSUQR process and tools were incorporated into the electronic system (Disabilities Quality Management System) to enhance accuracy, standardization, and formality through electronic reporting. \n Quality Management revised and updated the quality section on the Collaborative's website to increase ease of use and access to resources available. \nIntellectual and Developmental Disabilities The Collaborative's continuous improvement efforts for the IDD system focused on three key areas: training for stakeholders, the new IDD Case Management system, and tool revisions for the review processes. Following are the initiatives for each of these areas. \n Throughout the year, the Collaborative's Quality Management team and DBHDD met to discuss and review findings from FY 2017, current fiscal year findings, and other identified areas of need across the provider network. Training sessions on Focused Outcome Areas were developed and presented throughout the year based on this analysis and discussion. 
The following is a list of trainings provided for stakeholders in the 2018 fiscal year: \n10 | P a g e \n \n Quality Management Annual Report FY 2018 \no Whole Health: Empowering Individuals to Maintain their Healthcare; Techniques on How to Help Educate People on Medications \no Person Centered: Person Centered Documentation Parts 1 and 2; What is Person Centered Language Part 1 and 2; Staff Matching, Why It Works \no Community: How to Support Community Inclusion o Rights and Choice: The importance of supporting concepts of Choice and Rights from \nthe perspectives of people receiving services and providers \n In FY 2018, a training plan was developed for the new IDD Case Management System project. The plan was developed for internal (the Collaborative and designated DBHDD staff) and external (individuals applying for services, providers of IDD services, and Support Coordination staff) users and other stakeholders to provide guidance on navigating and using the integrated case management system. \n Through workgroups with the DBHDD Quality Improvement Office, IDD program staff and other stakeholders, implementation of tool revisions occurred over the course of FY 2018 to align with the DBHDD IDD Provider Manual, policies and procedures. The following tools were revised in FY 2018: Individual Interview, Observation, Staff Interview, Provider Record Review, ISP QA Checklist, and Support Coordination Interview. \nFY 2018 Key Findings \nBased on data collected from the past three years, the following have been identified as strengths and areas for improvement: Behavioral Health Strengths \n The BHQR Service Guidelines was the highest scoring category of the four in FY 2016, with an average of 90 percent. Although it declined slightly in FY 2017 by two points, it has since returned to a 90 percent result. 
Moreover, nearly three quarters of the Service Guidelines scores within the BHQR met or exceeded 90 percent and nine reviews received a perfect score; three of which were providers reviewed for the first time. \n11 | P a g e \n \n Quality Management Annual Report FY 2018 \n The BHQR Assessment and Planning, a category comprised of 10 indicators, had significant increases from the previous year. \n Documentation reviewed during the BHQR clearly demonstrated evidence that the \"Individual had been provided supports and service options\" (98%) as well as having their \"Known preferences followed to the extent possible by the service delivery provider\" (98%). \n Through ongoing technical assistance and assessment of practices that support whole-person treatment, reviewed documentation indicated providers incorporated techniques to address the whole health and wellness of individuals more frequently in FY 2018. Significant improvement in the indicators within Assessment and Planning and the Focused Outcome Area related to Whole Health demonstrated increased awareness, assessment, and service planning by providers with individuals. \n All child and adolescent (C\u0026A) CSU records demonstrated that the individual was included in the modification of their plans, providers honored youths' preferences and differences, and providers offered individuals options of supports and services. \n When comparing scores to FY 2017, there were specific areas that show statistically significant improvement (p \u003c .01). These included the following: o FY 2018 demonstrated even higher levels of documentation specific to the BHQR Choice FOA subcategory than in FY 2017. While results of all indicators exceeded 90 percent in FY 2017 for the FOA Choice subcategory, increases in documentation of the providers supplying alternatives when barriers had been identified rose from 92 percent to 95 percent. 
o Specific to the CSUQR Individual Record Review (IRR) category, all areas demonstrated significant improvement in scoring from the previous year with the exception of Course of Stay and Treatment Planning. Furthermore, in analysis of C\u0026A populations versus Adult populations, several FOA indicators specifically reviewed for C\u0026A individuals met or exceeded 90 percent, some specifically reaching 100 percent. o In Crisis Stabilization Unit records, the score for \"Discharge summary/note included the necessary criteria\" improved from the previous fiscal year by 30 points (92%). \n12 | P a g e \n \n Quality Management Annual Report FY 2018 \nIntellectual and Developmental Disabilities Strengths \n Most providers and staff were aware of unique safety needs and achievements of individuals they serve (average Safety FOA of 95.6%). Most individuals were aware of how to selfpreserve (88.6%) and felt safe in their work and living environments (99.6%). \n Most direct service providers have a clear understanding of the unique communication styles and skills for each individual they serve (94.7%). \n Based on observations, staff and providers took advantage of opportunities to provide education on health and rights, and with one exception, all individuals expressed that they were free from abuse, neglect, and exploitation. \n When comparing scores to FY 2017, there were specific areas that showed statistically significant improvement regarding documentation practices by providers. These included the following: o Providers showed improvement in ensuring documentation for age and genderspecific preventative healthcare:  Prostate-Specific Antigen tests increased 25.3 points to 56.6 percent met.  Bone density tests increased 24.3 points to 60.7 percent met.  Mammogram screening increased 11.2 points to 51.0 percent met. 
o Providers showed improvement in ensuring documentation for specialty healthcare reports, as applicable:  Neurological evaluation/supports increased 15.6 points to 71.7 percent met.  Gastrointestinal evaluation/supports increased 13.2 points to 86.2 percent met.  Psychiatric evaluation/supports increased 10.3 points to 78.8 percent met. o Significant improvement was found in documentation on how staff provided education to individuals and families (if approved by the individual) on all prescribed medications (increase of 22.6 percentage points) and to the individual on risks and benefits of medication (increase of 18.2 percentage points). \n13 | P a g e \n \n Quality Management Annual Report FY 2018 \no Documentation showed providers were much more likely to ensure critical incident reports were completed and follow-up had occurred at an increase of 18.2 percentage points. \no Based on information in the medical record, providers were more likely to document evidence of identifying, addressing, and seeking prevention of abuse, neglect, and exploitation; addressing rights education; and that individual rights and responsibilities were presented in an understandable language/format than in FY 2017. \nBehavioral Health Assessed Areas of Need \n Although there was an increase in the inclusion of medication consent forms in medical records for FY 2018, this FOA Safety indicator remained one of the lowest scores for both BHQRs (78%) and CSUQRs (82%). \n Providers did not address all identified/assessed areas of need in the individualized recovery or resiliency plan (IRP) specific to BHQR (67%). \n Co-occurring health conditions were addressed within the IRPs reviewed in both the BHQR (60%) and CSUQR (54%). This is a slight increase from FY 2017. 
\n Although Crisis Stabilization Units (CSU) demonstrated improvement in their compliance with medication storage and safe handling practices, this continued to be the lowest-scored indicator within CSUQR Compliance with Service Guidelines with a score of 67 percent met. \n Discharge planning with individuals served continued to be a challenging area for providers. At 66 percent, it was the second-lowest scored indicator in the area of the BHQR Assessment \u0026 Planning; particularly, in the required area that staff assist individuals in identifying clinical benchmarks that indicate progress in their treatment and recovery. Because it is important for individuals entering services to have the expectation of improvement, discharge planning is a critical and useful clinical process and can aid in advancing the expectation of recovery. Quality Management will continue to provide technical assistance on discharge planning during quality reviews. \n14 | P a g e \n \n Quality Management Annual Report FY 2018 \n Performance on ACT Assessment and Planning indicators improved in FY 2018; however, some scores remained low to include the indicators: o \"IRP addressed co-occurring health conditions\" (70%) o \"Assessed needs were addressed on the IRP\" (68%) o \"Discharge planning criteria were specified\" (66%) \n While the Service Guidelines score improved from the previous fiscal year in the BHQR, documentation of coordination and collaboration with either family/significant others (59%) or additional healthcare providers (70%) remained an area of need. \nIntellectual and Developmental Disabilities Assessed Areas of Need \n Provider documentation indicated most providers of IDD services did not offer education on prescribed medications, including the risks and side effects, for individuals receiving services. 
Over the previous three years, individuals receiving services appeared to be less likely to know what medications they are taking, why they are taking them, what the side effects may be, or to have an awareness of the color and shape of medication. \n The following crisis and safety issues were identified: o Approximately 30 percent of individuals interviewed did not know how to respond to an emergency or crisis situation if supports/providers were incapacitated. o Forty percent of staff did not provide education to the person on the use of the Crisis Hotline. o Close to 58 percent of employee records reviewed this year (43.4% in FY 2017) indicated staff/provider training on the Georgia Crisis Response System had not been completed as required. o Based on record reviews, in FY 2018 providers were more likely than in FY 2017 to document evidence of \"Identifying, addressing, and seeking prevention of abuse, neglect, and exploitation; addressing rights education\" (up 16.6 points to 39.5%); and to have \"Individual rights and responsibilities written in an understandable language/format\" (up 11.5 points to 87.3%). \n15 | P a g e \n \n Quality Management Annual Report FY 2018 \no Support Coordinators were often unaware of needed crisis plans (47.7%), behavior support plans (57.2%), or the individualized techniques needed to follow a plan for the individual (53.3%). Support Coordinators' awareness of the triggers that may be the catalyst to a crisis scored below seventy percent: relapse (60.6%), decompensation (57.3%), or behavioral issues (69.7%). \n Community Life remains the lowest scoring area in the PCR and QEPR: o Twenty-five to 30 percent of individuals receiving services indicated they had not been given the opportunity to learn about and develop new social roles in the community or experience new community activities. 
o Provider and Support Coordinator documentation often did not show support for individuals to be engaged in the community, develop social roles, explore new activities, or participate in activities the same as other citizens. \nFY 2019 Areas of Focus \nBehavioral Health \n As the lowest scoring indicator of CSUQR Service Guidelines (67%), the safe handling and storage of medications remains an area of focus for improvement. Quality Management will conduct a statewide provider training on the development of and adherence to policies/procedures related to the safe storage and handling of medications in accordance with state requirements in FY 2019. \n Due to increased national and local suicide rates, Quality Management will continue a focus on safety planning and risk reduction at the provider level through the following efforts: o Continue ongoing partnership with the Collaborative's Care Coordination department to provide training and education to providers on safety and crisis planning. o In conjunction with DBHDD, maintain ongoing tracking, trending, analysis, and identification of areas for improvement, with continued suggestions of appropriate interventions that address the needs of and support for individuals identified with suicide risk when served at crisis stabilization units. \n16 | P a g e \n \n Quality Management Annual Report FY 2018 \n The nationally recognized opioid epidemic continues to claim thousands of lives; therefore, the Quality Management department will continue to support DBHDD in addressing this disease by participating in training and development of inclusion of Medication Assisted Treatment (MAT) standards, conducting analysis on findings, and reporting to DBHDD with recommendations. \n Quality Management will continue to conduct sample size analysis to ensure records sampled are representative of the provider and statewide utilization levels for BHQR. 
\n Additional inclusion of \"Immediate Action Items/Recommendations\" at exit conferences will occur to offer actions that providers should take for immediate quality enhancement, risk reduction and for those items that will be submitted to the Collaborative's Compliance Department for further investigation. \n Lastly, to further enhance the review of billing practices and assure all providers are billing from approved Medicaid sites, the Collaborative's Quality Management department, at the direction of DBHDD, will propose a pilot project to establish baseline data on billing locations for both BH and IDD providers. \nIntellectual and Developmental Disabilities \n Quality Management conducted training in June of 2018 to help providers and Support Coordinators learn how to provide education to individuals regarding medications and promote independence towards self-directing health. The Collaborative recommends DBHDD consider revising policies to provide more emphasis on delivering education in these areas to individuals and families. The Collaborative, in conjunction with DBHDD, will track indicators related to medication education and self-directing health to determine if additional training would be beneficial. \n Quality Management will collaborate with DBHDD to develop specific training for Support Coordination as it relates to behavior supports and how to evaluate this service. \n DBHDD may want to consider requiring providers to attend annual training on developing community connections for individuals receiving services. Training and related resources could include practical application of ideas and examples to help staff and Support \n17 | P a g e \n \n Quality Management Annual Report FY 2018 \nCoordinators understand different and realistic ways to connect an individual to the community.  DBHDD may want to consider conducting a targeted safety campaign that includes resources, and web-based and face-to-face training throughout the state. 
The DBHDD monthly newsletter could include a new tip or resource that providers, individuals, and families can use to promote independence in the area of safety; for example, including web resources such as Classroom, which provides education about stranger awareness. \nIn Conclusion \nDuring FY 2018, the continued collaboration between DBHDD and Quality Management resulted in increased quality related to justification of billing (BHQR), service provision (BHQR/CSUQR), and education on health and rights (QEPR); however, both IDD and BH quality reviews indicate areas of focus such as safety/crisis planning and medication education. It has been a goal for DBHDD and the Collaborative to integrate quality measurement processes for both behavioral health and intellectual and developmental disability services. Side-by-side comparisons are difficult to evaluate due to the use of different types of sampling methods for each discipline, as well as differences in the tools and standards used. During FY 2019, Quality Management, in conjunction with the DBHDD Office of Quality and divisional subject matter experts, will review the behavioral health and intellectual and developmental disability record review tools to identify modifications and enhance the ability to make these types of comparisons. The above analysis and recommendations are discussed in more detail throughout this report. A summary of review processes and sampling procedures is included. The report provides an in-depth analysis of data from the behavioral health, intellectual and developmental disability, and CSU quality reviews; as well as tabular and graphic displays of findings. 
\n18 | P a g e \n \n Section 2: Introduction \n \nQuality Management Annual Report FY 2018 \n \nThe Georgia Collaborative Administrative Services Organization (the Collaborative), contracted by Georgia's Department of Behavioral Health and Developmental Disabilities (DBHDD), employed 30 quality assessors to complete more than 2,300 quality reviews and over 9,600 individual and staff interviews in the past three fiscal years across Georgia. The mission of Quality Management within the Collaborative is to provide DBHDD with valuable and beneficial data for use in decision-making regarding quality of services, funding, and development of programs (among other quality improvement initiatives). \nThis annual report is the result of assessing, gathering, compiling, analyzing, and measuring the quality of the service-delivery system through assessment of and technical assistance provided to DBHDD's behavioral health and intellectual and developmental disability providers. Furthermore, this report contains suggestions for modifications in some processes, methods, approaches, and tools to measure the quality and impact of services. Quality Management recognizes that quality assessment or measurement processes must have a quality review periodically to examine and determine if we are measuring the things most important and meaningful to individuals receiving services within Georgia. \nThis third annual report includes behavioral health, intellectual and developmental disability, and crisis stabilization unit (CSU) findings as well as a detailed explanation of the review processes, analysis, and comparisons of network performance across providers and information from individuals about the services received. A difference in proportions test was used to determine statistical significance and, where applicable, determine change from year to year.2 Areas highlighted in blue within tables reflect significance at a p \u003c .01 or have the p-value defined. 
While there are similarities in the review processes, there are also distinct differences; therefore, behavioral health, intellectual and developmental disability, and CSU results are reported separately. The report is divided into the following sections: \n \n2 Bohrnstedt, George W. \u0026 Knoke, David. (1988). Statistics for Social Data Analysis, 2nd Edition. Itasca, Illinois. F.E. Peacock Publishers, Inc., pgs. 198-200. \n19 | P a g e \n \n Quality Management Annual Report FY 2018 \n Behavioral Health Quality Reviews o Assertive Community Treatment o Crisis Stabilization Unit Quality Reviews \n Intellectual and Developmental Disabilities o Person Centered Reviews o Quality Enhancement Provider Reviews o Quality Technical Assistance Consultations \n Behavioral Health, and Intellectual and Developmental Disability Discussion  Provider and Individual Feedback Surveys  Two appendices are attached \no Acronym and abbreviation list, Appendix A o Distribution graphs for all tools used in the BHQR, CSUQR, and intellectual and \ndevelopmental disability reviews, Appendix B  Summaries of findings and recommendations after each service delivery system section to \naddress areas needing improvement or training/education programs to help improve the quality of services provided to Georgians \n20 | P a g e \n \n Quality Management Annual Report FY 2018 \nSection 3: Behavioral Health Quality Reviews \nBackground \nThe purpose of the Behavioral Health Quality Review (BHQR) is to determine providers' adherence to DBHDD's standards and to assess the quality of the service delivery system through individual record and claims reviews.3 Review indicators are based on DBHDD and Medicaid requirements, and are organized into four categories: Billing Validation, Assessment and Planning, Service Guidelines, and Focused Outcome Areas (FOA). The score for each category represents the percent of relevant indicators met or present. 
The BHQR Overall score is calculated by averaging the scores for the four categories.4 Each category accounts for 25 percent of the overall rating. In collaboration with DBHDD, minor alterations were made before and throughout the FY 2018 review process to adhere to DBHDD and Medicaid requirement changes, as well as language changes specific to tools, providing more clarity for assessors and providers. \nConsistent with previous year reviews; when a BHQR is completed, the lead assessor convenes the team to analyze data gathered and plan the exit conference. The exit conference provides immediate, preliminary feedback of the BHQR findings to the provider. A report of these preliminary findings is left with the provider in the form of an exit conference report outlining the provider's identified strengths and any opportunities for improvement from the four primary categories of the review. Assessors also include any items falling outside the parameters of the review determined to be an area of concern or risk. Technical assistance is also provided during the review and exit conference. \nWithin 30 days of completion of a BHQR, a final assessment report is posted on the Collaborative's website and the provider is notified via email of the posting, along with the final scores identified as the final assessment. Like the exit conference report, the final assessment identifies strengths and \n3 Please refer to the following link to access a full description of the review process and review tools. The Georgia Collaborative - Quality Management BH 4 The FOA subcategories are individually scored and are not averaged for the final overall FOA result at the review level. The final overall FOA result is calculated by adding all \"yes\" or \"present\" responses of all FOA subcategories and dividing by the total \"yes\" or \"no\" responses of the combined subcategories for each review. 
\n21 | P a g e \n \n Quality Management Annual Report FY 2018 \nopportunities for growth in the four categories of FOA, Billing, Service Guidelines, and Assessment and Planning. The final assessment also highlights areas of concern falling outside the scope of scoring. The final assessment also includes recommendations for improvement and comparisons to a statewide average based on the previous year's results. \nProviders are offered an opportunity to appeal the BHQR findings. Appeal information, including timeframes for submission, is provided upon notification of the final report and is made available to providers by visiting the Collaborative's website or reviewing the Georgia Collaborative Provider Handbook. \nSampling Method \nIndividual Records and Billing Review \nDuring the 2018 fiscal year, 136 providers were eligible for review, as determined by DBHDD, and received a BHQR. This is a reduction from the implementation year (FY 2016) of 141 providers and an increase of 4 providers from FY 2017. Sixteen providers (12%) were reviewed for the first time in FY 2018. Provider eligibility is based on multiple factors including volume of individuals served, claim volume, and type of services provided. Additionally, while efforts to review all providers within the BH network are made, some providers may be classified as ineligible due to site closure, changes to location or site, or at the direction of DBHDD. A sample of individuals was selected for each of the record reviews, and a sample of those individuals' claims was used for the billing review. To be eligible for the sample, each individual selected must have had at least three claims/encounters in the three months (or longer, if necessary) preceding the BHQR. Providers deemed ineligible because of minimal claim availability or individuals served (\u003c 5) within the timeframe are reviewed periodically for reevaluation of eligibility. 
\nTo select the individuals as part of the record review, an unduplicated list of individuals receiving services from the provider was stratified by service and payer source: Medicaid, state-contract, and fee-for-service paid claims. The sample was selected proportionate to the providers' ratio of individuals served by payer source, ensuring each service provided and approved for review by DBHDD was represented. The sample for each provider consisted of up to 30 records for non- \n22 | P a g e \n \n Quality Management Annual Report FY 2018 \nintensive and specialty service providers, with an additional 15 individuals for providers who also offered assertive community treatment (ACT). The number of individuals selected per provider was based on the number of individuals served during the six months prior to review. \n Small Providers (serving 5-50 individuals) = 5-10 records selected  Medium Providers (serving 51-100 individuals) = 20 records  Large Providers (serving 101+ individuals) = 30 records  Crisis Stabilization Unit Quality Reviews = 15 records  Assertive Community Treatment Providers = 15 additional records \nProviders serving less than five individuals are monitored and added to the review process once at least 5 individuals have been served; however, these providers can be selected for an ad hoc review at the direction of DBHDD. Oversampling of individuals at each of the provider sites began in the second quarter of FY 2018 to provide for full sample completion respective to the number of records reviewed. The oversample includes additional, unique individuals following the above outlined sampling method. Providers receive the listing of individuals (including the oversample) at the start of the review process. Assessors ensure that records included in the sample adhere to the guidelines outlined above. 
Oversampled individuals are reviewed only at the time the original sample individuals are found to be ineligible for review based on criteria mentioned previously. Individuals listed on the oversample but not reviewed are deleted from the review to not skew BHQR outcomes. During FY 2018, 3,070 individuals were sampled for record reviews, averaging approximately 20 records per provider. This number of records is nearly 800 fewer than the previous year and averages approximately nine fewer records per provider. This is most likely due to the changes in sampling methods specific to provider size, as well as decline in overall reviews conducted. Less overall reviews may have been conducted because of provider eligibility requirements, improved provider scores compared to previous reviews, and/or at the direction of DBHDD. Furthermore, declines in reassessments also occurred from 35 in FY 2017 to 20 in FY 2018 due to increased scores on previous reviews; thereby reducing the number of records reviewed. See Reassessment Frequency Review section for more information. \nFY 2018 data also provided an additional level of detail specific to type of diagnosis reviewed for each individual, where 86 percent of records reviewed had a confirmed mental health diagnosis. \n23 | P a g e \n \n Quality Management Annual Report FY 2018 \nAlthough individuals may have more than one diagnosis, the following information shown in Figure 1, was made available demonstrating co-occurring reviews with IDD, physical diagnoses, as well as substance use diagnoses. \nFigure 1. FY 2018 Diagnostic Categories Reviewed \n \nIDD \n \nSubstance Use \n \n7% \n \n30% \n \nMental Health 86% \n \nFor each record, a random sample of up to 10 paid claims was selected for a billing review for both BHQR and ACT reviews. The number of claims reviewed per individual was based on services provided and claim submissions. The total number of claims reviewed for FY 2018 was 25,598 compared to 29,602 in FY 2017. 
The number of records reviewed directly affects the total number of claims reviewed. Thus, as there was a decrease in the number of records reviewed, there was also a reduction in number of claims reviewed. When providers did not have adequate claim submissions in the three months preceding their review claims selection timeframe was extended but did not precede the provider's previous BHQR timeframe (when applicable). \nBHQR Billing Validation and claim(s) reviews focused on specific services. The services included are listed in Table 4 and Table 5. All eligible providers were reviewed at least once during the fiscal year and had at least one claim per billed service included in the claims review, ensuring the complete array of services provided were included in the BHQR. \nIndividual and Staff Interviews \nSamples used for the Individual and Staff Interviews were selected by the provider and quality assessors conducting the BHQR; services received or provided were not considered in the selection of interviewees. Interview sampling methods remained the same for FY 2018 with no changes since inception. Quality assessors attempted to complete a minimum of five Individual and five Staff \n24 | P a g e \n \n Quality Management Annual Report FY 2018 \nInterviews per BHQR; however, the actual number interviewed fluctuated based on individual and staff availability, their agreement to participate in the interview process, the number of employees, and the number of individuals the provider served at the time of the review. If an individual or staff declined an interview, assessors selected an additional individual or staff to be interviewed, when possible. \nIn FY 2018, 616 Individual Interviews and 652 Staff Interviews were completed. Providers who served individuals for both non-intensive outpatient services as well as ACT had at least one individual receiving ACT services and one ACT team member interviewed when possible. 
This does not include interviews conducted during the crisis stabilization unit (CSU) reviews that are discussed later in the report. There was a decrease in both Individual and Staff Interview participation by approximately 120 interviews in each category from the previous year. Analysis has shown that fewer interviews were conducted at smaller provider locations (serving 5-50 individuals). Several Small providers had only three Individual/Staff Interviews conducted due to the availability of individuals served and staff available with some individuals/staff being previously interviewed. The Collaborative makes attempts to avoid duplication of individuals/staff interviewed. Additionally, in FY 2016 and FY 2017, oversampling of interviews occurred with some providers having six to seven Individual/Staff Interviews conducted at time of review, increasing the total interview counts for those years. Results from interviews conducted are not included in the BHQR Overall scores; however, results provide valuable, qualitative feedback to promote quality improvement activities. The results of interviews were shared with providers and DBHDD to provide direct communication and perception of individuals receiving services and staff providing services as an additional quality initiative. \nBHQR Overall Review Scores \nIn this report, data is aggregated and presented by overall provider scores as well as by category (Billing Validation, Assessment and Planning, Service Guidelines, Focused Outcome Areas [FOA], and Individual and Staff Interview). The four main categories, Billing Validation, Assessment and Planning, Service Guidelines and Focused Outcome Areas each account for 25 percent of the Overall \n25 | P a g e \n \n Quality Management Annual Report FY 2018 \nscore. Each FOA (Whole Health, Safety, Person Centered Practices, Community Life, Choice, and Rights) also has scores displayed and discussed. 
\nBHQR Overall Scores \nFigure 2 shows a graphical representation of the average Overall score for the 156 reviews and the scores for each category compared to the previous two fiscal years to demonstrate areas of improvement and decline. The mean of Overall scores for FY 2018 was 88 percent. This is an increase of four points over FY 2017, affected by the increases identified in all of the four categories. The FOA category demonstrated the most consistent increases across fiscal years with a FY 2018 result of 92 percent. Service Guidelines, the highest category in FY 2016 at 90 percent decreased in FY 2017 by two points to 88 percent yet returned to the baseline result for FY 2018 with significant improvement (p \u003c .01). The lowest scoring category remained Assessment and Planning for the third year, previously averaging 79 percent in FY 2016, 77 percent for FY 2017, and significantly increased (p \u003c.01) to 84 percent in FY 2018. The following are key findings of the overall category scores: \n Assessment and Planning had the greatest improvement from the previous fiscal year by ten points, compared to other BHQR categories. However, Assessment and Planning scores remain lower than other categories for the third consecutive year and continue to be driven by the lack of documentation to support incorporation of whole health and wellness goals into Individual Recovery/Resiliency Plans (IRPs), as well as lack of addressing co-occurring health conditions, discharge planning criteria, and addressing all assessed needs. \n Billing Validation steadily trended upward since contract inception with scores of 81 percent, 84 percent, and 85 percent respectively across the past three fiscal years as depicted in Figure 2 on the next page. \n The FOA category continued to demonstrate an upward trend in results through FY 2018. 
Providers have been receptive to education and technical assistance specific to this category and, therefore, have increased documentation and service treatment needs related to this area. \n26 | P a g e \n \n Quality Management Annual Report FY 2018 \nFigure 2. BHQR Fiscal Year Results by Category \nBilling Validation FY16: 81% FY17: 84% FY18: 85% \n \nService Guidelines \nFY16: 90% FY17: 88% FY18: 90% \n \nOverall \nFY16: 84% (N = 141) FY17: 84% (N = 167) FY18: 88% (N = 156) \n \nFocused Outcome \nAreas FY16: 85% FY17: 89% FY18: 92% \n \nAssessment \u0026 Planning \nFY16: 79% FY17: 74% FY18: 84% \n \nBHQR Overall Scores by Tier and Provider Size \nIn July 2014, DBHDD implemented a community behavioral health provider network structure in which providers were classified using a four-tiered structure.5 Tiers are defined as follows: \n Tier 1: Comprehensive Community Providers  Tier 2: Community Medicaid Providers  Tier 2+: Community State Funded Providers  Tier 3: Specialty Providers \nTable 1 provides a snapshot of the BHQR Overall scores by tier and a distribution of scores by each tier and presented in detail in Appendix B. Results increased from FY 2017 to FY 2018 across tier levels 1, 2, and 2+. Tier 1 and 2 improved by four points and exceeded the statewide average of 88 \n \n5 Policies regarding the implementation and definition of the DBHDD Community Behavioral Health Provider Network Structure can be found at DBHDD's PolicyStat website: https://gadbhdd.policystat.com \n27 | P a g e \n \n Quality Management Annual Report FY 2018 \npercent. Tier 2+ providers reviewed (N = 6) previously had the highest score (compared to other tiers) increased by one percentage point yet fell behind Tiers 1 and 2. Consistent with results of previous years, scores remained generally lower for Tier 3 providers (83%) compared to the other tiers. \n \nTable 1. 
BHQR Overall Scores by Tier* \n \nFiscal Year \n \nTier 1 \n \nTier 2 Tier 2+ Tier 3 \n \nOverall Average \n \nFY 2016 \n \n86% \n \n85% \n \n88% \n \n78% \n \n84% \n \n(N = 24) (N = 90) (N = 5) (N = 22) (N = 141) \n \nFY 2017 \n \n85% \n \n85% \n \n86% \n \n83% \n \n84% \n \n(N = 28) (N = 115) (N = 6) (N = 18) (N = 167) \n \nFY 2018 \n \n90% \n \n89% \n \n87% \n \n83% \n \n88% \n \n(N = 29) (N = 93) (N = 6) (N = 28) (N = 156) \n \n*N represents the number of reviews conducted per Tier level. Some providers may have had a reassessment review thereby increasing the number as compared to previous years. \n \nFY 2018 sampling methods were altered based on a defined provider size procedure, established in \n \ncollaboration with DBHDD. Provider sizes were determined based on number of unique individuals \n \nserved within a six-month period and stratified into three categories by size: \"Small,\" \"Medium,\" and \n \n\"Large.\" Providers serving less than five individuals are evaluated for review eligibility throughout the \n \nyear and may receive an ad hoc quality review at the direction of DBHDD. Provider sizes are outlined \n \nbelow: \n \n Small Providers (serving 5-50 individuals)  Medium Providers (serving 51-100 individuals)  Large Providers (serving 101+ individuals) \nResults of both the Overall and Billing score based on provider size are identified in Figure 3. \n \n28 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nFigure 3. Fiscal Year 2018 Results by Provider Size \n \n85% \n \n91% \n \n89% \n \n88% \n \n80% \n \n90% \n \n90% \n \n85% \n \nOverall Score Small (N = 71) Medium (N = 29) \n \nBilling Score Large (N = 56) Overall (N = 156) \n \nBased on FY 2018 findings, Medium providers had the highest overall result at 91 percent and exceeded the statewide average of 88 percent. Specific to Billing scores, Medium and Large providers exceeded the 85 percent billing average. 
\n \nSmall providers had an average Billing score of 80 percent while Medium/Large providers had an average 90 percent. Additionally, Small providers scored lower in all categories, including Overall score. Table 2 includes category scores by provider size. \n \nTable 2. BHQR Category Scores by Provider Size FY 2018 \n \nProvider Size \n \nOverall \n \nBilling \n \nService Guidelines \n \nAssessment \u0026 Planning \n \nFOA \n \nSmall \n \n85% \n \n80% \n \n89% \n \nMedium \n \n91% \n \n90% \n \n92% \n \nLarge \n \n89% \n \n90% \n \n91% \n \nStatewide \n \n88% \n \n85% \n \n90% \n \n82% \n \n91% \n \n88% \n \n95% \n \n85% \n \n92% \n \n84% \n \n92% \n \n29 | P a g e \n \n Quality Management Annual Report FY 2018 \nWhile Small providers make up 45 percent of the reviews conducted (N = 71, or 61 unique providers), they have fewer individuals reviewed (5-10); thereby, increasing the impact of missing elements or standards assessed negatively. \nBHQR Billing Validation \nThe Billing Validation score for each BHQR is the percent of justified paid dollars divided by the total paid dollars for the reviewed claims. Billing scores are averaged across the network of annual reviews to obtain an annual statewide average. This category continued to trend upward from FY 2016. Improvement at the provider level is evident as in FY 2018; 19 providers scored a 99 percent or above whereas 15 providers scored such in FY 2017. One provider, not previously reviewed, scored a zero in FY 2018. See the distribution of BHQR Billing scores in Appendix B. Since an increase in Billing scores continued in FY 2018, the billing threshold will rise to 90 percent for FY 2019 to align with DBHDD's KPI (Key Performance Indicators). See Reassessment Frequency Review section for more details. 
Figure 4 shows the total dollar amount reviewed through an analysis of paid claims, for all providers reviewed during the BHQRs for FY 2018 ($2,430,951.50) as compared to FY 2017 ($2,934,560.52) and FY 2016 ($3,417,902.28) as well as the dollar amount found to be unjustified, $294,743.82 (12%), $463,049.93 (16%), and $807,050.16 (24%), respectively. In FY 2018, fewer funds reviewed may be due to several factors, including but not limited to: provider sample size, limited claims availability, or the types of service reviewed. Although the total funds reviewed for FY 2018 were fewer than reviewed in previous years, the amounts unjustified and susceptible to recoupment continued to positively trend downwards from 24 to 16 to 12 percent. \n30 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nFigure 4. BHQR Billing Validation Amount Reviewed by Fiscal Year6 \n \nTotal Amount Paid Claims Reviewed in FY 2016: $3,417,902.28 Total Amount Paid Claims Reviewed in FY 2017: $2,934,560.52 Total Amount Paid Claims Reviewed in FY 2018: $2,430,951.50 \n \n$3,500,000.00 $3,000,000.00 $2,500,000.00 $2,000,000.00 $1,500,000.00 $1,000,000.00 \n$500,000.00 $0.00 \n \n$807,050.16 24% \n \n$463,049.93 16% \n \n$2,610,852.12 \n \n76% \n \n$2,471,510.59 \n \n84% \n \n$294,743.82 12% \n$2,136,207.68 88% \n \nTotal $ Justified Total $ Unjustified \n \nFY 2016 \n \nFY 2017 \n \nFY 2018 \n \nFigure 5 provides the percent of reviewed claim amounts by funding source. In FY 2018, $1,930,310.97 in Medicaid funds were reviewed compared to the FY 2018 state funded amount of $500,640.53. \n \n6 The percent of justified and unjustified dollar amounts, in Figure 4, are total dollar amounts justified or unjustified divided by the total amount of funds reviewed in total for the year. The annual statewide average Billing score is not equivalent to the percent justified/unjustified depicted in the figure; statewide Billing score is calculated based on averaging all provider scores at year-end. 
\n31 | P a g e \n \n Quality Management Annual Report FY 2018 \nFigure 5. FY 2018 BHQR Amount Reviewed by Funding Source \n \n21% State Funds* \nTotal Amount Reviewed \n$2,430,951.50 \n79% Medicaid \n \n*State funds are fee for service and state funded encounters combined. \nEighty-seven percent of Medicaid claims reviewed were found justified compared to 90 percent of state funded claims/encounters in FY 2018, see Table 3. \n \nTable 3. Amount Reviewed by Funding Source \n \nState Funds \n \nMedicaid \n \nFY 2018 \n \nFY 2018 \n \nAmount Justified \n \n$450,758.86 \n \n$1,685,448.82 \n \n(90%) \n \n(87%) \n \nAmount Unjustified \n \n$49,881.67 \n \n$244,862.15 \n \n(10%) \n \n(13%) \n \nTotal Amount Reviewed \n \n$500,640.53 \n \n$1,930,310.97 \n \nAlthough some services have substantially fewer state funds reviewed for FY 2018 compared to Medicaid funds, 100 percent of state funds for Community Support, Individual, and Psychological Testing were identified as justified. However, Family Skills Training and Development claims had only 35 percent of state-funded billing funds justified compared to 80 percent of Medicaid funds. State-funded justified dollars also exceeded Medicaid justified dollars in terms of ACT, Community Support Team (CST), Intensive Case Management, and Intensive Family Intervention. Only 49 percent of state funded dollars reviewed for Mental Health (MH) Peer Support Individual were justified compared to \n \n32 | P a g e \n \n Quality Management Annual Report FY 2018 \n76 percent of reviewed Medicaid dollars. In FY 2019, Quality Management will further analyze the billing discrepancies between the funding sources for any trends. \n \nTable 4 and Table 5 provide the amount and percentages of funds justified by service, non-intensive outpatient and specialty services of all funds combined, respectively. \n \nTable 4. 
FY 2018 Non-Intensive Outpatient Services Percent of Funds Justified \n \nService \n \nAmount Justified \n \nAmount Reviewed \n \nJustified \n \nDiagnostic Assessment \n \n$37,270.98 $39,762.22 \n \n94% \n \nCommunity Transition Planning \n \n$3,103.01 \n \n$3,312.21 \n \n94% \n \nIndividual Outpatient Services \n \n$380,030.45 $409,683.46 \n \n93% \n \nBehavioral Health Assessment \n \n$75,895.66 $83,962.92 \n \n90% \n \nMedication Administration \n \n$3,494.80 \n \n$3,876.51 \n \n90% \n \nPsychiatric Treatment \n \n$67,178.02 $75,386.25 \n \n89% \n \nGroup Outpatient Services \n \n$107,754.77 $121,168.70 \n \n89% \n \nCommunity Support Individual \n \n$128,832.09 $18,089.04 \n \n88% \n \nService Plan Development \n \n$51,843.00 $59,530.24 \n \n87% \n \nNursing Assessment \u0026 Health Services \n \n$44,389.22 $51,337.63 \n \n86% \n \nFamily Counseling/Therapy \n \n$140,012.07 $166,185.24 \n \n84% \n \nCase Management \n \n$109,814.09 $134,652.93 \n \n82% \n \nPsychological Testing \n \n$29,166.90 $35,869.25 \n \n81% \n \nCrisis Intervention \n \n$9,301.10 $12,123.05 \n \n77% \n \nTotal \n \n$1,188,086.16 $1,343,771.74 \n \n88% \n \n33 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nTable 5. 
FY 2018 Specialty Services Percent of Funds Justified \n \nService \n \nAmount Justified \n \nAmount Reviewed \n \nPeer Support Whole Health \u0026 Wellness - Individual \n \n$5,606.28 \n \n$5,606.28 \n \nPeer Support Whole Health \u0026 Wellness - Group \n \n$10,082.92 $10,225.74 \n \nMH Peer Support Program \n \n$83,508.23 $90,261.59 \n \nAssertive Community Treatment (ACT) \n \n$295,649.45 $325,851.60 \n \nPsychosocial Rehabilitation - Individual \n \n$151,633.45 $172,628.40 \n \nIntensive Case Management \n \n$16,751.46 $19,101.80 \n \nCommunity Support Team (CST) \n \n$13,042.33 $14,985.88 \n \nOpioid Maintenance \n \n$9,430.70 $11,063.50 \n \nPsychosocial Rehabilitation Program \n \n$66,408.84 $78,011.70 \n \nAddictive Disease Support Services (ADSS) \n \n$54,703.17 $64,477.32 \n \nIntensive Family Intervention \n \n$221,099.15 $267,624.22 \n \nMH Peer Support - Individual \n \n$19,806.24 $26,289.03 \n \nAD Peer Support - Individual \n \n$399.30 \n \n$1,052.70 \n \nGrand Total \n \n$948,121.52 $1,087,179.76 \n \nJustified 100% 99% 93% 91% 88% 88% 87% 85% 85% 85% 83% 75% 38% \n87% \n \nWhen a claim was found to be unjustified, assessors selected all applicable reasons a reviewed claim was identified as a discrepancy; therefore, one claim may have multiple reasons. Nearly 4,300 fewer discrepancies were found in FY 2018 than in the baseline year of FY 2016. Figure 6 provides the percent of discrepancies within the fiscal year. \n \n34 | P a g e \n \n Quality Management Annual Report FY 2018 \nFigure 6. 
FY 2018 Percent of Discrepancy Reasons \n \nContent not support units billed (11.3%) Content not match service definition (10.6%) Content not support code billed (10.3%) Progress note is missing (8.3%) Missing/incomplete order (7.3%) Does not meet admission criteria (6.5%) Code is missing/different than code billed (6.2%) Staff credential missing (6.0%) Credential not supported by documentation (5.4%) Signature missing (4.7%) Date of entry missing (4.1%) Other* (3.8%) Content is not unique to the individual (3.6%) Intervention outside staff's scope/practice (3.1%) Diversionary activities billed (2.8%) Location missing (out-of-clinic) (2.7%) Units billed exceed time/units documented (2.3%) \n \n0% \n \n2% \n \n4% \n \n6% \n \n8% \n \n10% \n \n12% \n \n14% \n \n16% \n \n18% \n \n*Other instances in Figure 6 include the following discrepancy reasons: \n Intervention outside staff's scope/practice (3.1%)  Diversionary activities billed (2.8%)  Location missing (out-of-clinic) (2.7%)  Units billed exceed time/units documented (2.3%)  Multiple services billed at the same time (1.2%)  Date of service incorrect/missing (\u003c 1%)  Non-billable activity (\u003c 1%)  Consistency requirements missing (\u003c 1%)  Time in/time out missing (\u003c 1%)  Mutually exclusive services billed (\u003c 1%)  Record not submitted within timeframe (\u003c 1%) \n \n35 | P a g e \n \n Quality Management Annual Report FY 2018 \n Printed name missing (\u003c 1%) The most prevalent discrepancy reasons FY 2018 differed from previous years: \n Content not supporting units billed  For example, billed eight units for a medication check with no further documentation of interventions provided. \n Content not supporting code billed  For example, out-of-clinic modifier billed but session took place in office. \n Content of note not matching service definition  For example, billed CSI but documentation indicated the practitioner provided counseling. 
\nIn both FY 2016 and FY 2017, the most common discrepancy reasons found were missing/incomplete service orders, individuals not meeting admission criteria, and missing progress notes. \n Individuals not meeting admission criteria declined by 61 percent; an improvement.  Instances of missing progress notes declined in FY 2018 by 44 percent or 326 instances; also \nan improvement.  Two reasons for unjustified claims, \"Progress note missing\" and \"Individual did not meet \nadmission criteria,\" became two of the most improved areas in FY 2018, in addition to \"Time in/out missing,\" which declined in instances by 76 percent. Table 6 provides the number of all discrepancy reasons, in ranking order for FY 2018, and the change in percent from FY 2016 to FY 2018. A negative difference demonstrates improvement from year to year with highlighted areas demonstrating a statistically significant change. Overall, 19 (83%) of the observed indicators (N = 23) improved from the previous fiscal year. \n36 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nTable 6. 
BHQR Billing Discrepancy Reasons* \n \nFY 2016 \n \nFY 2017 \n \nFY 2018 \n \nInstances Instances Instances \n \nClaims Reviewed Missing/incomplete order Does not meet admission criteria Quantitative \n \n31,213 1,612 (5.2%) 1,132 (3.6%) \nInstances \n \n29, 602 496 (1.7%) 748 (2.5%) Instances \n \n25,598 285 (1.1%) 254 (1.0%) Instances \n \nPercent Change \nFY16  FY17 \n-5.16% \n-67.5% \n-30.2% Change \n \nProgress note is missing Code is missing/different than code billed \n \n779 (2.5%) 381 (1.2%) \n \n676 (2.3%) 379 (1.3%) \n \n326 (1.3%) 244 (1.0%) \n \n-8.6% 4.9% \n \nStaff credential missing \n \n299 (1.0%) 295 (1.0%) 236 (0.9%) 4.4% \n \nSignature missing Date of entry missing \n \n275 (0.9%) 263 (0.8%) \n \n194 (0.7%) 185 (0.6%) \n \n182 (0.7%) -25.1% 160 (0.6%) -26.4% \n \nUnits billed exceed time/units documented \nConsistency requirements missing \n \n236 (0.8%) 202 (0.7%) \n \n112 (0.4%) 57 (0.2%) \n \n90 (0.4%) 31 (0.1%) \n \n-49.7% -70.6% \n \nCredential not supported by documentation \n \n199 (0.6%) 276 (0.9%) 211 (0.8%) 45.9% \n \nRecord not submitted within timeframe Location missing (out-of-clinic) Time in / time out missing \n \n197 (0.6%) 166 (0.5%) 92 (0.3%) \n \n19 (0.1%) 155 (0.5%) 68 (0.2%) \n \n10 (\u003c 0.1%) 106 (0.4%) 14 (0.1%) \n \n-90.5% -2.2% -22.0% \n \nDate of service incorrect / missing Printed name missing \nPerformance Standards Content does not match service definition Content does not support code billed \n \n63 (0.2%) 42 (0.1%) FY 2016 Instances 735 (2.4%) 428 (1.4%) \n \n42 (0.1%) 2 (\u003c0.1%) FY 2017 Instances 489 (1.7%) 397 (1.3%) \n \n34 (0.1%) 8 (\u003c0.1%) FY 2018 Instances 413 (1.6%) 403 (1.6%) \n \n-30.6% -92.6% Percent Change -29.9% -2.3% \n \nContent does not support units billed Intervention outside staff's scope/practice Content is not unique to the Individual \n \n375 (1.2%) 348 (1.1%) 137 (0.4%) \n \n518 (1.8%) 79 (0.3%) 243 (0.8%) \n \n441 (1.7%) 120 (0.5%) 140 (0.5%) \n \n45.7% -75.8% 86.8% \n \nMultiple 
services billed at the same time \n \n95 (0.3%) \n \n79 (0.3%) \n \n45 (0.2%) -11.3% \n \nNon-billable activity \n \n76 (0.2%) \n \n59 (0.2%) \n \n34 (0.1%) -17.9% \n \nDiversionary activities billed \n \n48 (0.2%) \n \n26 (0.1%) 111 (0.4%) -41.5% \n \n*Areas highlighted in blue within table reflect significance at a p \u003c .01 and indicators with n \u003c 90 were not tested due to low volume. \n \nPercent Change \nFY17  FY18 \n-13.5% -33.7% -60.8% Change -44.1% -25.5% -7.8% 7.7% 0.8% \n-7.5% -36.3% \n-11.4% -34.9% -20.4% -76.2% -5.1% \u003e100% Percent Change -2.2% 17.5% -1.6% 73.6% -33.3% -34.9% -33.6% \u003e100% \n \n37 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nBHQR Assessment and Planning \nAssessment and Planning consisted of ten indicators answered once per record reviewed. Table 7 contains the indicators and percent \"yes\" in ranking order. All indicators increased or remained equal to FY 2017 results, with areas of growth continuing to include the following: \n Similar to FY 2016 and FY 2017, the lowest-scoring indicator remains \"Co-occurring health conditions have been addressed in the IRP\" with FY 2018 results of 60 percent. \n When all other needs were identified (housing, employment, childcare, higher education, etc.), they were only addressed in 59 percent of the plans in FY 2016, 48 percent in FY 2017, and 67 percent in FY 2018. \n In FY 2016, 80 percent of records were scored as having a discharge plan present. In collaboration with DBHDD, the indicator was re-written for FY 2017 to include that discharge plans meet requirements of having a specific discharge date, specific step-down service, and criteria to measure progress. This seemingly small shift in this indicator caused an overall decline in this indicator score to 58 percent in FY 2017, however, scores increased in FY 2018 to 66 percent; an improvement by eight points. 
In order to gather additional data, FY 2019 review tool revisions will require Assessors to document specifically the missing elements through a drop-down menu. \n \nTable 7. BHQR Assessment \u0026 Planning Indicator Scores by Year \n \nIndicator \n \nFY 2016 FY 2017 \n \nCurrent medical screening is present \n \n98% \n \n97% \n \nIndividual meets admission criteria \n \n95% \n \n97% \n \nGoals/objectives honor hopes, choice, preferences, outcomes 91% \n \n89% \n \nCurrent behavioral health assessment \n \nN/A \n \n94% \n \nIRP is individualized in personalized language \n \n78% \n \n87% \n \nInterventions/objectives are goal-linked \u0026 service-consistent \n \n96% \n \n91% \n \nWhole health \u0026 wellness in IRP \n \n59% \n \n56% \n \nAll assessed needs are addressed \n \n59% \n \n48% \n \nDischarge plan defines criteria \n \n80% \n \n58% \n \nCo-occurring health conditions addressed in IRP \n \n36% \n \n34% \n \nAssessment \u0026 Planning Result \n \n79% \n \n77% \n \n*Areas highlighted in blue within table reflect significance at a p \u003c .01 as tested from the previous year. \n \nFY 2018 98% 97% 94% 94% 94% 92% 69% 67% 66% 60% 84% \n \n38 | P a g e \n \n Quality Management Annual Report FY 2018 \nComparisons in scoring continue to reflect a deficit in a comprehensive, whole-person, whole-health approach to recovery/resiliency planning, as it remains the lowest of all BHQR categories. However, the average score for Assessment and Planning increased significantly (p \u003c .01) from 77 to 84 percent for FY 2018. All outcomes increased significantly, except \"Individual meets admission criteria\" (97% for FY 2017 and FY 2018). The greatest improvement was among \"All assessed needs are addressed\" (48% in FY 2017, 67% in FY 2018). 
Data suggests that although records contained required assessment documentation, such as medical screenings and current behavioral health assessments, a vast percentage of recovery/resiliency plans lacked the documentation of discharge planning, co-occurring health conditions, or a plan for increased whole health and wellness. \nBHQR Service Guidelines \nCompliance with service guideline indicators were answered once per record reviewed. The number of indicators answered varied, specific to the service. The service reviewed for each record was dependent upon the services contained in the billing claims sample for the respective individual; therefore, multiple services could be reviewed within one individual's record. FY 2018 reviews contained 16 non-intensive outpatient services and 14 specialty services, as shown in Figure 7 and Figure 8. In FY 2018, Community Transition Planning was added as a scored service and included in the billing sample of 17 reviews (18 records). Furthermore, Peer Support Whole Health and Wellness differentiated between individual and group services in the latter part of FY 2018. \n39 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nFigure 7. 
FY 2018 Percent of Dollars Reviewed by Non-Intensive Outpatient Service \n \nIndividual Counseling (27%) \n \nPsychosocial Rehabilitation - Individual (11%) \n \nCommunity Support Individual (10%) \n \nCase Management (9%) Family Counseling/ Therapy (8%) Group Outpatient Services (8%) \n \nPsychiatric Treatment (5%) \n \nIndividual Counseling (27%) \n \nBehavioral Health Assessment (6%) Psychiatric Treatment (5%) \n \nBehavioral Health Assessment (6%) \n \nService Plan Development (4%) \n \nGroup Outpatient \n \nPsychosocial \n \nNursing Assessment \u0026 Health Services (3%) Services (8%) \n \nRehabilitation - Individual \n \nFamily Skills Training and Development (3%) Diagnostic Assessment (3%) Psychological Testing (2%) \n \nFamily \n \n(11%) \n \nCounseling/ \n \nCommunity \n \nTherapy (8%) Case \n \nSupport \n \nManagement Individual (10%) \n \nCrisis Intervention (1%) \n \n(9%) \n \nMedication Administration (\u003c1%) \n \n40 | P a g e \n \n Quality Management Annual Report FY 2018 \nFigure 8. 
FY 2018 Percent of Dollars Reviewed by Specialty Service \n \nAssertive Community Treatment (36%) \n \nIntensive Family Intervention (29%) \n \nMH Peer Support Program (10%) Psychosocial Rehabilitation Program (8%) Addictive Disease Support Services (7%) MH Peer Support - Individual (3%) Intensive Case Management (2%) \n \nAddictive Disease Support Services \n(7%) \nPsychosocial Rehabilitation Program (8%) \n \nAssertive Community Treatment (36%) \n \nCommunity Support Team (2%) Opioid Maintenance (1%) \n \nMH Peer Support Program (10%) \n \nPeer Support Whole Health \u0026 Wellness (1%) \n \nPeer Support Whole Health \u0026 Wellness - Individual (1%) Psychosocial Rehabilitation-Group (\u003c1%) \n \nIntensive Family Intervention (29%) \n \nAD Peer Support - Individual (\u003c1%) \n \nPeer Support Whole Health \u0026 Wellness - Group (\u003c1%) \n \nTable 8 provides the scores by service type and year of review and represents the number of reviews assessed for the service, based on the claims sample. Additionally, in Table 8 (r) references the number of reviews, and (n), the number of records. For FY 2016, each provider was reviewed only once; thus, the (r) also represents the number of providers reviewed for each service for that specific fiscal year. However, in FY 2017 and again in FY 2018 multiple providers were reviewed more than once; thus, the (r) representation has been changed to reflect the number of reviews, which allows for consistency in the year-to-year comparisons. \n \n41 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nTable 8. 
BHQR Service Guidelines Scores by Service Type \n(r) represents the number of reviews and (n) the number of records) \n \nService Type \nAD Peer Support - Individual Addictive Disease Support Services Assertive Community Treatment Case Management Community Support Community Support Team Community Transition Planning Family Counseling/ Training Group Counseling/ Training Individual Counseling Intensive Case Management Intensive Family Intervention MH Peer Support  Individual MH Peer Support Program Nursing Assessment \u0026 Health Opioid Maintenance Treatment \n \nFY 2016 \n79% (r = 1, n = 7) 81% (r = 48, n = 224) 85% (r = 19, n = 314) 85% (r = 69, n = 557) 84% (r = 84, n = 484) 94% (r = 7, n = 23) N/A 96% (r = 92, n = 667) 93% (r = 63, n = 418) 97% (r = 113, n = 1288) 95% (r = 10, n = 55) 86% (r = 30, n = 221) 87% (r = 7, n = 119) 95% (r = 33, n = 234) 85% (r = 85, n = 884) N/A \n \nFY 2017 \n48% (r = 1, n = 13) 85% (r = 69, n = 293) 88% (r = 20, n = 334) 84% (r = 90, n = 746) 83% (r = 114, n = 637) 92% (r = 7, n = 13) N/A 91% (r = 129, n = 904) 90% (r = 78, n = 561) 94% (r = 150, n = 1980) 94% (r = 13, n = 53) 85% (r = 37, n = 185) 95% (r = 6, n = 38) 86% (r = 25, n = 199) 80% (r = 116, n = 897) 98% (r = 3, n = 69) \n \nFY 2018 \n80% (r = 2, n = 9) \n \nPercent Percent Change Change FY16- FY17- \nFY17 FY18 \n-64.6% 66.7% \n \n80% (r = 61, n = 252) \n \n4.7% \n \n2.4% \n \n84% (r = 21, n = 336) \n \n3.4% -4.5% \n \n88% (r = 85, n = 644) \n \n-1.2% 4.8% \n \n86% (r = 102, n = 446) \n \n-1.1% 3.6% \n \n99% (r = 10, n = 22) \n \n-2.2% 7.6% \n \n100% (r = 17, n = 18) \n \nN/A \n \nN/A \n \n94% (r = 109, n = 625) \n \n-6.2% 3.3% \n \n95% (r = 75, n = 535) \n \n-3.2% 5.6% \n \n96% (r = 132, n = 1390) -3.3% 2.1% \n \n94% (r = 11, n = 46) \n \n-1.2% 0.0% \n \n90% (r = 33, n = 149) \n \n-1.2% 5.9% \n \n90% (r = 20, n = 92) \n \n8.4% -5.3% \n \n91% (r = 36, n = 210) \n \n-10.5% 5.8% \n \n87% (r = 104, n = 649) \n \n-5.5% 8.7% \n \n92% (r = 6, n = 52) \n \nN/A \n \n-6.1% \n 
\nPeer Support Whole Health \u0026 Wellness Individual Group \nPsychiatric Treatment \n \n92% (r = 7, n = 45) N/A N/A \n95% (r = 91, n = 843) \n \nPsychosocial Rehabilitation Program 90% (r = 75, n = 196) \n \nPsychosocial Rehabilitation - Individual 96% (r = 25, n = 649) \n \n68% (r = 12, n = 67) N/A N/A 92% (r = 128, n = 950) \n93% (r = 26, n = 179) \n85% (r = 98, n = 904) \n \n86% (r = 7, n = 28) 93% (r = 2, n = 28) 80% (r = 2, n = 3) 97% (r = 111, n = 717) \n96% (r = 26, n = 153) \n90% (r = 83, n = 679) \n \n-35.3% N/A N/A -3.3% \n3.2% \n-12.9% \n \n26.5% N/A N/A 5.4% \n3.2% \n5.9% \n \nNote: Areas highlighted in blue within table reflect significance at a p \u003c .01 from the previous year. AD Peer Support  Individual service was not statistically tested due to low response volume in FY 2017. \n \n42 | P a g e \n \n Quality Management Annual Report FY 2018 \nService Guidelines scored highest of the four categories in FY 2016 with an average of 90 percent. Although it declined slightly in FY 2017 by two points, it has since returned to 90 percent. Moreover, nearly three quarters of the reviews (71.8%, N = 112) met or exceeded 90 percent with nine reviews receiving a perfect score; three of whom were providers reviewed for the first time. \nPsychiatric Treatment, Psychosocial Rehabilitation Program, and Individual Counseling demonstrated the highest rate of compliance for FY 2018 compared to all services measured, exceeding 95 percent. Community Support Team was reviewed using 13 indicators and scored 100 percent on all but one indicator (91%) related to development of a crisis plan including both the provider and the individual. Furthermore, Community Transition Planning, a newly reviewed service for FY 2018, resulted in 100 percent compliance of records meeting the Compliance with Service Guidelines necessary standards. \nAll services reviewed in FY 2018 were scored in relation to documentation contained within progress notes. 
Three indicators were scored for all services and the results indicate significant improvement from the previous fiscal year (p \u003c .01) and included the following: \n Progress Notes document progress (or lack of) toward goals/objectives identified on the IRP: o FY 2018: 90 percent o FY 2017: 86 percent \n The staff interventions reflected in the progress notes are related to the staff interventions listed on the IRP: o FY 2018: 92 percent o FY 2017: 88 percent \n The progress notes document individual response to the staff intervention provided: o FY 2018: 99 percent o FY 2017: 98 percent \nThe most-improved services from FY 2017 to FY 2018 are outlined in Table 9 with (r) referencing the number of reviews, and (n), the number of records. Although these services were above the statewide average, specific indicators at the service level can lend to growth and opportunity. Table 9 provides for the lowest scoring indicator(s) by service. \n43 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nTable 9. 
FY 2018 Improvement Results by Service \n \n(r) represents the number of reviews and (n) the number of records \n \nService Name and FY 2018 lowest scoring indicator(s): \n \nFY 2016 \n \nFY 2017 \n \nFY 2018 \n \nAD Peer Support - Individual \n \n79% (r = 1) \n \n48% (r = 1) \n \n80% (r = 2) \n \nThe providing practitioner is a Georgia-Certified Peer Specialist-AD (CPS-AD) \nPeer Support Whole Health \u0026 Wellness \nThere is a minimum of one contact weekly either face-toface or by phone (Review authorization period) Service is provided by a Whole Health and Wellness Coach (CPS) Collaboration with other healthcare providers to assure individual has access to needed services is documented (1 x per authorization) \nNursing Assessment \u0026 Health Services \n \n0% (n = 7) 92% (r = 7) 89% (n = 45) 100% (n = 45) \n77% (n = 45) 85% (r = 85) \n \n100% (n = 10) 68% (r = 12) 66% (n = 67) 71% (n = 66) \n51% (n = 67) 80% (r = 116) \n \n22% (n = 9) 86% (r = 7) 50% (n = 28) 68% (n = 28) \n68% (n = 28) 87% (r = 104) \n \nNursing goals and objectives are individualized and address health issues to include (but not limited to) medical, physical, nutritional, and behavioral needs \nCommunity Support Team \n \n67% (n = 883) 60% (n = 800) 75% (n = 648) \n \n94% (r = 7) \n \n92% (r = 7) 99% (r = 10) \n \nThere is a joint development of a crisis plan to include the provider and individual. The Provider is listed as primarily \n \n87% (n = 23) \n \nresponsible \n \nNote: Areas highlighted in blue within table reflect significance at a p \u003c .01 from the previous year. \n \n92% (n = 12) 91% (n = 22) \n \nThe BHQR data also suggest additional areas of focus noted through declines in scores from FY 2017 to FY 2018 by service type. Table 10 contains detailed results, with further analysis and trending included at the indicator level and supplemented by the lowest scoring indicators provided by service. \n \n44 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nTable 10. 
FY 2018 Service Type Declined Results \n \n(r) represents the number of reviews and (n) the number of records \n \nService type and FY 2018 lowest scoring \n \nFY 2016 \n \nFY 2017 \n \nindicator(s): \n \nFY 2018 \n \nOpioid Maintenance Therapy \n \nN/A \n \n98% (r = 3) 92% (r = 6) \n \nThe staff interventions reflected in the progress notes are related to the staff interventions listed on the treatment plan \nMH Peer Support - Individual \n \nN/A 87% (r = 7) \n \nProgress notes contain documentation of the individual's progress (or lack of) toward specific goals/objectives on the treatment plan \nAssertive Community Treatment \n \n77% (n = 119) \n85% (r = 19) \n \nThe ACT Team completes a Treatment Plan Review with the staff, the individual, and his/her family/informal supports prior to the reauthorization of services \n \n58% (n = 314) \n \nThere is evidence the ACT Team is working with \n \ninformal support systems/collateral contacts at \n \nleast 2-4 times per month with or without the individual present (and it is documented) to provide support and skills training to assist the \n \n43% (n = 314) \n \nIndividual in his/her recovery (Review \n \nauthorization period) \n \nNote: Areas highlighted in blue within table reflect significance at a p \u003c .01 from the previous year. \n \n99% (n = 69) 95% (r = 6) \n93% (n = 27) 88% (r = 20) \n59% (n = 307) \n39% (n = 283) \n \n83% (n = 52) 90% (r = 20) \n77% (n = 92) 84% (r = 21) \n28% (n = 321) \n33% (n = 306) \n \nMinimal reductions in scores for Service Guidelines occurred in FY 2018 related to Opioid Maintenance Therapy, MH Peer Support  Individual, and ACT. Specific ACT results are found within this report as a subset of the BHQR process, as requested by DBHDD. 
However, even in higher scoring services, indicators specific to the following fell below 80 percent: \n \"Appropriately credentialed practitioners\" o AD Peer Support  Individual, 22 percent o Assertive Community Treatment, 72 percent \n \"Coordination with family, services and resources, or other health care providers\" \n \n45 | P a g e \n \n Quality Management Annual Report FY 2018 \no Addictive Disease Support Services, 59 percent o Community Support, 63 percent o Peer Support Whole Health \u0026 Wellness - Group, 68 percent o Peer Support Whole Health \u0026 Wellness  Individual, 71 percent This demonstrates that while overall Compliance with Service Guideline results are promising with improvement from the previous year, there are still areas of improvement available depending upon the service. Lastly, Substance Use Intensive Outpatient and Medication Assisted Treatment are to be included with data collection for FY 2019. \nBHQR Focused Outcome Areas (FOA) \nFocused outcome areas indicators are answered once per record reviewed. Each FOA has a different number of indicators for a total of 22 indicators. Figure 9 outlines results of overall FOA results of FY 2018 compared to previous years. Additionally, each respective section contains a figure specific to the subcategory FY 2018 results as compared to previous years. \n46 | P a g e \n \n Quality Management Annual Report FY 2018 \nFigure 9. BHQR Focused Outcome Area(s) Scores by Year \n \n84% 74% 63% \n \n92%91%95% \n \n96% 93% \n \n83% \n \n87% \n \n80% 78% \n \n96%97% 92% \n \n90%93% 93% \n \n89%92% \n \n85% \n \nWhole Health \n \nSafety \n \nPerson Centered Practices \n \nCommunity Life \n \nChoice \n \nRights \n \nFY 2016 (N = 141) FY 2017 (N = 167) FY 2018 (N = 156) \n \nOverall Score \n \nWhole Health indicators address whether the records reviewed demonstrated individuals were treated holistically, with their physical health needs being assessed, documented, and monitored. 
Of the six FOAs, Whole Health with results shown in Figure 10 (historically the lowest scoring) has surpassed the subcategory of Safety (lowest scoring category in FY 2018) by six points. Whole Health for FY 2017 scored 74 percent compared to 63 percent in FY 2016 and has since increased significantly (p \u003c .01) to 84 percent. All four Whole Health indicators improved by approximately ten points each. \n \n47 | P a g e \n \n Quality Management Annual Report FY 2018 \nFigure 10. BHQR Focused Outcome Area by Year \n \nWhole Health \n \n63% 74% 84% \n \nFY 2016 (N = 141) FY 2017 (N = 167) FY 2018 (N = 156) \n \nSafety indicators address whether providers were attending to certain risk factors for individuals, such as their tendency to experience a crisis, have suicidal or homicidal thoughts or actions, and whether individuals have received information and education about the risks and benefits of prescribed medications. Safety represents the lowest-scoring FY 2018 FOA and significantly declined (p \u003c .01) from 83 percent to 78 percent; see Figure 11. \n For FY 2018, the indicator most often scored \"no\" was \"Individuals (or their legal guardians) has been educated on the risk/benefits of all medication prescribed and there is a signed consent form\". However, results of this indicator have trended upward (p \u003c .01) since contract inception to a result of 68 percent (58% in FY 2017). \n Results of documentation as it relates to how providers \"Work with individuals to develop, document, and implement safety/crisis plans\" remained consistent from FY 2017 to FY 2018 with a score of 88 percent. \nFigure 11. BHQR Focused Outcome Area by Year \n \nSafety \n \n80% 83% \n78% \nFY 2016 (N = 141) FY 2017 (N = 167) FY 2018 (N = 156) \n \n48 | P a g e \n \n Quality Management Annual Report FY 2018 \nPerson Centered Practices measures whether documentation indicates individuals actively participated in creating/modifying the care plans, as needed and desired. 
While the score of Person Centered Practices declined slightly in FY 2017, there was a significant improvement in FY 2018 from 91 percent to 95 percent. Figure 12 provides results for fiscal years 2016 through 2018. Scores for all indicators improved (p \u003c .01) in FY 2018; \"Individuals were active participants in the planning and receiving of services\" scored 99 percent. The indicator, \"Plan reassessed based upon any changing needs, circumstances and/or response by the individual\" had the lowest score at 87 percent. \nFigure 12. BHQR Focused Outcome Area by Year \n \nPerson \n \n92% \n \nCentered \n \n91% \n \nPractices \n \n95% \n \nFY 2016 (N = 141) FY 2017 (N = 167) FY 2018 (N = 156) \n \nCommunity Life indicators address how individuals were engaged in their communities of choice and whether they held valued social roles. Figure 13 graphically presents the annual fiscal year result for the Community Life FOA. Only one of the five Community Life indicators declined from the previous year used to measure whether \"Individuals were assessed for their need to make changes in their living, learning, working, and/or social environments\" (99% in FY 2017, 98% in FY 2018). Results increased across the remaining four indicators for the second year since inception (p \u003c .01), with each indicator at or above 95 percent. \n \n49 | P a g e \n \n Quality Management Annual Report FY 2018 \nFigure 13. BHQR Focused Outcome Area by Year \n \nCommunity Life \n \n87% 93% 96% \n \nFY 2016 (N = 141) FY 2017 (N = 167) FY 2018 (N = 156) \n \nChoice indicators address how, and if, providers offered individuals options of services and encouraged individuals to make educated choices concerning supports and services provided. Figure 14 provides results of the Choice FOA by fiscal year. In 95 percent of records, \"If a barrier was identified, documentation demonstrated alternatives were explored\" were scored \"yes\". 
Ninety-eight percent of all records reviewed for FY 2018 demonstrated \"Individual's known preferences and differences were followed\". All improvements were significant per indicator at p \u003c .01. The indicator \"Documentation demonstrated how the individual was provided with options of supports and services\" scored 98 percent in FY 2018. \nFigure 14. BHQR Focused Outcome Area by Year \n \n92% \nChoice \n \n96% 97% \n \nFY 2016 (N = 141) FY 2017 (N = 167) FY 2018 (N = 156) \n \nRights indicators address whether individuals were apprised of their rights while receiving services, at the point of entry with the provider and on an annual basis thereafter, and whether they had been informed of their rights under Federal HIPAA laws. There was significant improvement (p \u003c .01) from FY 2016 (90%) to FY 2017 (93%), but minimal change occurred from FY 2017 to FY 2018 with an \n \n50 | P a g e \n \n Quality Management Annual Report FY 2018 \noverall subcategory score of 93 percent, see Figure 15. There was significant improvement (p \u003c.01) for FY 2018 among the indicators \"Individuals informed of rights at onset of service delivery\" and \"HIPAA Privacy and Security Rules were reviewed with individual\". Yet, attention is still needed specific to \"Individuals informed of rights at least annually\", as only 73 percent of records reflected annual updates. However, this was an increase (p \u003c .01) of nearly 13 percent for FY 2018. \nFigure 15. BHQR Focused Outcome Area by Year \n \nRights \n \n90% 93% 93% \nFY 2016 (N = 141) FY 2017 (N = 167) FY 2018 (N = 156) \n \nBHQR Staff and Individual Interviews \nSix FOA categories comprise the Staff and Individual Interviews. Individual Interviews served to assess the individual's perception of care with the provider, services rendered, and support in working toward personal goals. Staff Interviews helped determine whether a person-centered approach was used in providing services and empowering individuals. 
\nData in Figure 16 represent the average scores for the individual FY 2018 interviews. Figure 17 represents average scores of Staff Interviews. \n \n51 | P a g e \n \n Quality Management Annual Report FY 2018 \nFigure 16. BHQR Individual Interview Data by Fiscal Year \n \nWhole Health \n \n93% 94% 95% \n \nSafety \n \n96% 96% \n97% \n \nPerson Centered Practices \n \n96% 97% 99% \n \nCommunity Life \n \n95% 95% \n97% \n \nChoice \n \n97% 98% \n99% \n \nRights \n \n98% 98% \n99% \n \nOverall FY 2016 (N = 737) \n \nFY 2017 (N = 735) \n \n96% 96% \n98% \nFY 2018 (N = 616) \n \n52 | P a g e \n \n Quality Management Annual Report FY 2018 \nFigure 17. BHQR Staff Interview Data by Fiscal Year \n \nWhole Health Safety \nPerson Centered Practices \nCommunity Life Choice Rights Overall FY 2016 (N = 753) \n \nFY 2017 (N = 774) \n \n98% 97% 98% \n98% 98% 98% \n98% 98% \n99% \n99% 99% 99% \n99% 99% 99% \n99% 100% 100% \n99% 98% 99% \nFY 2018 (N = 652) \n \nIt is notable that all areas scored in the 90th percentile for the third year in a row, indicating a high level of satisfaction for individuals served by the providers. Furthermore, very little difference was identified between scores of similar indicators asked of both individuals and staff. In some cases, results of Staff Interviews were elevated above responses made by individuals on indicators similar in nature. Examples of this include indicators outlined in Table 11. \n \n53 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nTable 11. FY 2018 Interview Comparison \n \nIndicator \n \nIndividual \n \nStaff \n \nChoice \n \nIndividual: Individual's options for supports and services are offered. Staff: Staff is presenting options of services and supports. Please provide a specific example of how an individual in services has a choice of services, supports or staff. \nCommunity Life \n \n99% (N = 609) \n \n99.8% (N = 640) \n \nIndividual: Individual actively participates in the community. 
Staff: Individual actively participates in the community. \nWhole Health \u0026 Wellness \n \n97% (N = 2,813) 99% (N = 1,853) \n \nIndividual: Individual is offered needed education and resources/tools to help them manage his/her own health. Staff: Staff are communicating with other providers of care to ensure good communication in coordination of services. \nPerson Centered Planning \n \n98% (N = 540) \n \n97% (N = 615) \n \nIndividual is educated and/or offered support in developing a WRAP plan as requested. Staff: Staff is aware of and supports the individual as requested to develop a WRAP plan. \nRights \n \n93% (N = 371) \n \n91% (N = 396) \n \nIndividual: Individual feels they can say \"no\" to recommended treatment, services or supports without retaliation. Staff: Staff is aware of the right to refuse treatment, services or supports without retaliation. \nSafety \n \n99% (N = 603) 100% (N = 648) \n \nIndividual: Individual is aware of or recognizes what constitutes: \n \nAbuse, Neglect, Exploitation Staff: Staff is aware of what constitutes abuse, neglect and \n \n99% (N = 594) \n \nexploitation. \n \n*Areas highlighted in blue within table reflect significance at a p \u003c .01 as tested between Individual to staff results. \n \n100% (N = 648) \n \nNotably, the most apparent discrepancy between Individual Interviews and Staff Interviews for FY 2017 was specific to the indicator of health-related needs addressed. Of the individuals interviewed, 90 percent felt their health needs were addressed, while staff perceived this to be the case in 97 \n \n54 | P a g e \n \n Quality Management Annual Report FY 2018 \npercent of the interviews conducted. Moreover, 93 percent of individuals had seen a primary care physician, 72 percent had seen a dentist, and 93 percent received a routine preventative screening within the past 12 months. 
In 92 percent of FY 2018 interviews, staff indicated they had received training in providing whole-health informed services and 100 percent of staff felt they supported individuals in overcoming barriers. \nAssertive Community Treatment (ACT) \nQuality Management conducted 21 ACT provider reviews in FY 2018 specific to 18 unique providers. FY 2018 included one ACT only provider with thirty ACT-specific individuals sampled. Of providers who offered both non-intensive outpatient services, as well as ACT services, sample selection included up to 15 additional individuals. Three ACT providers required a reassessment within the fiscal year due to low initial scores in FY 2017 or upon initial review in FY 2018. The ACT providers' reviews contained 334 records and 3,334 billing claims. While 88 percent of ACT records reviewed documented a mental health diagnosis, there was a greater percentage of physical and substance use diagnoses than in the BHQRs. Forty-nine percent of ACT records reviewed documented a co-occurring physical diagnosis while BHQR records documented 30 percent and ACT records documented 47 percent co-occurring substance use diagnoses while BHQR records documented 41 percent. The data in the following section represents findings from reviews of ACT services only. \n55 | P a g e \n \n Quality Management Annual Report FY 2018 \nACT Quality Review Overall Scores \nFigure 18 provides ACT statewide averages by category for the past three years of quality reviews. The Overall score for FY 2018 was 89 percent for all ACT reviews. \nFigure 18. 
ACT Fiscal Year Results by Category \nBilling Validation FY16: 92% FY17: 90% FY18: 91% \n \nService Guidelines \nFY16: 85% FY17: 88% FY18: 84% \n \nOverall \nFY16: 88% (N = 19)* FY17: 87% (N = 20)* FY18: 89% (N = 21)* \nAssessment \u0026 Planning \nFY16: 85% FY17: 80% FY18: 87% \n \nFocused Outcome \nAreas \nFY16: 88% FY17: 90% FY18: 93% \n \n*ACT only providers: FY16: 2; FY17: 2; FY18: 1 \n \nTable 127 provides the result by category of ACT reviews for FY 2016 through FY 2018 as compared to the overall BHQRs. Of note is that all of the category scores for ACT reviews, except for Service Guidelines, are higher than the corresponding BHQR scores for FY 2018. While these three areas \n \n7 ACT services are reviewed as a part of the BHQR process. For the purpose of this annual report, ACT scores have been extracted for evaluation, monitoring, and analysis. \n56 | P a g e \n \n Quality Management Annual Report FY 2018 \nincreased from the previous year, Service Guidelines declined in FY 2018 for ACT services by four points to 84 percent. \n \nTable 12. BHQR and ACT Results by Category by Year \n \nBilling \n \nValidation \n \nFOA \n \nBHQR FY 2016 \n \n81% \n \n85% \n \nBHQR FY 2017 \n \n84% \n \n89% \n \nBHQR FY 2018 \n \n85% \n \n92% \n \nAssessment \u0026 Planning 79% \n77% \n84% \n \nService Guidelines \n90% \n88% \n90% \n \nOverall 84% 84% 88% \n \nACT FY 2016 ACT FY 2017 ACT FY 2018 \n \n92% \n \n88% \n \n90% \n \n90% \n \n91% \n \n93% \n \n85% \n \n85% \n \n88% \n \n80% \n \n88% \n \n87% \n \n87% \n \n84% \n \n89% \n \nACT Billing Validation \nFigure 19 shows the total dollar amount reviewed through ACT claims analysis since inception, as well as the dollar amount found to be unjustified and justified across the years. While reviews contained a greater dollar amount in FY 2017, 90 percent were validated as justified which is less compared to the other years. The rate of justified amounts has since increased in FY 2018 by one percentage point ($296,038.97). 
\n \n57 | P a g e \n \n Quality Management Annual Report FY 2018 \nFigure 19. ACT Billing Validation by Fiscal Year \nTotal Amount Reviewed in FY 2016: $306,628.32 Total Amount Reviewed in FY 2017: $340,428.04 Total Amount Reviewed in FY 2018: $325,845.60 \n \n$300,000.00 $250,000.00 $200,000.00 $150,000.00 $100,000.00 \n$50,000.00 $0.00 \n \n$23,422.57 8% \n$283,205.75 92% \n \n$32,707.80 10% \n$307,720.24 90% \n \n$29,806.63 9% \n$296,038.97 91% \n \nFY 2016 Total Amount Justified \n \nFY 2017 \n \nFY 2018 \n \nTotal Amount Unjustified \n \nInformation in Table 13 indicates the specific billing discrepancy reasons found during the ACT reviews with 3,334 claims reviewed for FY 2018, more than any other year. Assessors select all applicable discrepancy reasons for all reviewed claims; therefore, one claim may have multiple discrepancy reasons. While the prevalence of identified common discrepancies continued to decline from the baseline year to FY 2018, such as staff credential missing (2.3%, 1.2%, to 0.4%) and code missing/different than code billed (0.8%, 0.6%, to 0.4%) other reasons for discrepancies became more prevalent. These include intervention outside staff's scope of practice (42 instances in FY 2018 [1.2%]) and content not unique to the individual (56 instances in FY 2018, [1.6%]). \nWhile there was continued improvement in the number of instances from the past fiscal year's discrepancies, the following discrepancy reasons increased from the previous year: \n Content does not support code billed  Intervention outside staff's scope of practice  Multiple services billed at the same time  Content is not unique to the individual \n \n58 | P a g e \n \n Quality Management Annual Report FY 2018 \n Diversionary, non-billable and non-therapeutic in nature, activities billed (for example, watching a movie, playing bingo, etc.) 
\n \nThe following discrepancy reasons were not identified in FY 2018: \n \n Not meeting admission criteria for service  Missing/incomplete order  Time in/time out missing \n \nTable 13. ACT BHQR Billing Discrepancy Reasons by Year \n \nClaims Reviewed Not meeting admission criteria for service Missing/incomplete order Quantitative Staff credential missing Location missing (out-of-clinic) Code is missing / different than code billed Units billed exceed time / units documented \n \nFY 2016 Instances \n3029 3 (0.1%) 0 (0.0%) \n69 (2.3%) 39 (1.3%) 25 (0.8%) 17 (0.6%) \n \nFY 2017 Instances \n3221 20 (0.6%) 12 (0.4%) Instances 40 (1.2%) 32 (1.0%) 19 (0.6%) 24 (0.7%) \n \nPercent \n \nFY 2018 Change Instances FY16- \n \nFY17 \n \n3427 \n \n192 \n \n0 (0.0%) \u003e100% \n \n0 (0.0%) N/A \n \nChange \n \n15 (0.4%) -45.5% \n \n32 (1.0%) -22.8% \n \n13 (0.4%) -28.5% \n \n10 (0.3%) 32.8% \n \nProgress note is missing \n \n16 (0.5%) \n \n17 (0.5%) 35 (1.0%) -0.1% \n \nDate of entry missing \n \n1 (0.03%) \n \n22 (0.7%) 4 (0.1%) \u003e100% \n \nSignature missing \n \n1 (0.03%) \n \n25 (0.8%) 4 (0.1%) \u003e100% \n \nTime in / time out missing Date of service incorrect/missing Performance Standards \n \n0 (0.0%) 0 (0.0%) \n \n10 (0.3%) 3 (0.1%) Instances \n \n0 (0.0%) N/A 1 (\u003c0.1%) N/A \nChange \n \nContent does not support units billed Content does not support code billed Intervention outside staff's scope of practice \n \n27 (0.9%) 17 (0.6%) 12 (0.4%) \n \n51 (1.6%) 24 (0.8%) 0 (0.0%) \n \n32 (0.9%) 34 (1.0%) 42 (1.2%) \n \n-77.6% -32.8% 100% \n \nNon-billable activity Multiple services billed at the same time \n \n9 (0.3%) 7 (0.2%) \n \n8 (0.2%) 2 (0.1%) \n \n8 (0.2%) 16.4% 6 (0.2%) 73.1% \n \nContent does not match service definition \n \n5 (0.2%) \n \n5 (0.2%) 3 (0.1%) \n \nContent is not unique to the individual \n \n4 (0.1%) \n \n5 (0.2%) 56 (1.6%) \n \n* Due to the low response volume, Quality did not conduct statistical difference in proportions testing on the 
above table. \n \n6.0% -17.5% \n \nPercent Change FY17 \nFY18 206 -100% -100% Change -66.7% 0.0% -33.3% -57.1% \n-80.0% \n-85.7% \n-87.5% \n-100% -100% Change -43.8% 25.0% 100% 0.0% 100% -50.0% \u003e 100% \n \n59 | P a g e \n \n Quality Management Annual Report FY 2018 \nThe Billing Validation score is the percent of justified billed dollars divided by the total paid/billed dollars for the reviewed claims. The statewide average ACT Billing score of 91 percent was higher than the BHQR statewide average of 85 percent in FY 2018. This remained consistent with the FY 2017 and baseline year result demonstrating ACT providers had fewer funds identified as unjustified compared to BHQR reviews overall. More than 75 percent of reviews conducted in FY 2018 resulted in a Billing score of 90 percent or greater. The remaining five reviews fell below 85 percent. Targeted technical assistance is conducted in cases where scores continually decline or fall below the threshold established. Alignment with the BHQR process, ongoing education and frequency of reviews for those providers who fall below the 90 percent threshold will continue at two intervals in the upcoming year. \nACT Assessment and Planning \nFigure 20 provides the ACT Assessment and Planning score results by specific indicator annually since FY 2016. Similar to BHQR results, indicators assessing whether co-occurring health conditions, whole health and wellness, assessed needs are addressed on the IRP, and discharge planning criteria remain the lower scoring indicators, however, with noted statistical improvement (p \u003c .01) from FY 2017. Whole health and wellness improved by ten points, co-occurring condition documentation increased by 21 points, while both assessed needs and discharge criteria improved by 17 and 20 points respectively. \n60 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nFigure 20. 
ACT Assessment and Treatment Planning Scores by Year \n \nCurrent medical screening is present Individual meets admission criteria \nInterventions/objectives are goal-linked \u0026 service-consistent Goals/objectives honor hopes, choice, preferences, outcomes \nIRP is individualized in personalized language N/A \nCurrent behavioral health assessment Whole health \u0026 wellness in IRP \nCo-occurring health conditions addressed in IRP All assessed needs are addressed Discharge plan defines criteria \n \n100% 99% 100% 100% 99% 100% 99% 97% 97% 94% 94% 97% 92% 97% 96% \n91% 89% 75% 76% 86% 47% 49% 70% 59% 51% 68% 90% 46% 66% \n \nFY 2016 (N = 19) FY 2017 (N = 20) FY 2018 (N = 21) \n \nACT Service Guidelines \nThe ACT service guidelines score contained 14 indicators. Table 14 shows the item-level detail to illustrate both the percentage point and percent change across indicators and comparing fiscal year results within the ACT Service Guidelines, ranked in descending order by FY 2018 results. The ACT Service Guidelines category score decreased significantly (p \u003c .01) from FY 2017 (88%) to FY 2018 (84%). Three indicators scored at or near 100 percent for all three fiscal years: \n \"Individual meets admission or continuing stay criteria\" \n \n61 | P a g e \n \n Quality Management Annual Report FY 2018 \n \"One of the contacts per month addresses the symptom assessment and management of medications (once a month)\" \n \"For discharged individuals, there are multiple documented attempts to locate and make contact with the individual prior to discharge (over a 45-day period)\" \n \nWhile success is evident with 11 of the 14 indicators exceeding 90 percent, the ACT team having all required staff substantially declined (p \u003c .01) in FY 2018 by 21 percent. Additionally, and consistent with previous years, continued improvement is needed specific to the reauthorization of ACT services. 
\"ACT team completes a treatment plan review with the staff, the individual, and his/her family/informal supports prior to the reauthorization of services\" significantly declined (p \u003c .01) by approximately 53 percent in FY 2018 to a result of 28 percent. Furthermore, evidence the \"ACT team is working with informal support/contacts at least two to four times per month prior to reauthorization\" decreased (p \u003c .01) by five points to a result of 34 percent. The indicator \"ACT team completed a treatment plan review with the staff, the individual, and his/her family/informal supports\" significantly declined in FY 2018 to 28 percent. \n \nTable 14. ACT Service Guidelines Scores \n \nFY 2016 FY 2017 \n \nDocumentation shows the individual meets admission or continuing stay criteria. \nFor discharged individuals, there are multiple documented attempts to locate and make contact with the individual prior to discharge (over a 45-day period). \nOne of the contacts per month addresses the symptom assessment and management of medications (once a month). \nThe progress notes document individual response to the staff intervention provided. \nThe staff interventions reflected in the progress notes are related to the staff interventions listed on the treatment plan. \n \n100% 100% 100% 99% 98% \n \n99% 100% 99.7% 99% 97% \n \nFY 2018 \n \nPercent Change \nFY16FY17 \n \nPercent Change \nFY17FY18 \n \n100% -1.0% 1.0% \n \n100% 0.0% 0.0% \n \n100% -0.3% 0.3% \n \n98% \n \n0.0% -1.0% \n \n96% -1.0% -1.0% \n \n62 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nTable 14. ACT Service Guidelines Scores \n \nFY 2016 FY 2017 \n \nThere is documentation of individual's responses and a discussion to the agreement of services identified in the treatment planning/individual recovery planning. \nThere is documentation of individual's involvement in transition planning. 
\nProgress notes contain documentation of the individual's progress (or lack of) toward specific goals/objectives on the treatment plan. \nThe ACT team is working with the individual toward educational or vocational needs, interests, per IRP (once per authorization). \nThere is documentation to support when substance use services are needed and are integrated into the treatment plan. \nFollowing admission to a psychiatric facility, the ACT team is involved in each individual's discharge planning. \nThe ACT team has all required staff. \nThere is evidence the ACT team is working with informal support systems/collateral contacts at least 2-4 times per month with or without the individual present (and it is documented) to provide support and skills training to assist the individual in his/her recovery. (Review specific to authorization period.) \nThe ACT team completes a treatment plan review with the staff, the individual, and his/her family/informal supports prior to the reauthorization of services. \nACT Service Guidelines Result \n \nN/A 82% 91% 95% 94% 87% 76% \n43% \n58% 85% (N = 19) \n \n92% 90% 99% 97% 93% 95% 91% \n39% \n59% 88% (N = 20) \n \nFY 2018 \n \nPercent Change \nFY16FY17 \n \nPercent Change \nFY17FY18 \n \n96% \n \nN/A 4.3% \n \n95% \n \n8.9% 5.6% \n \n94% \n \n8.1% -5.1% \n \n94% \n \n2.1% -3.1% \n \n92% -1.1% -1.1% \n \n91% \n \n8.4% -4.2% \n \n72% 16.5% -20.9% \n \n34% -10.3% -12.8% \n \n28% \n \n1.7% -52.5% \n \n84% (N = 21) \n \n3.4% \n \n-4.5% \n \nACT Focused Outcome Areas \nFocused Outcome Area (FOA) indicators are answered once per record reviewed. Each FOA has a different number of indicators for a combined total of 22 indicators. The Overall score for ACT FOA increased (p \u003c .01) from FY 2017 by three points (93%). Four of the six subcategories met or \n \n63 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nexceeded 95 percent and Choice remained consistent across all three years at 97 percent, see Figure 21. 
Identified declines for FY 2018 were specific to the Safety and Rights subcategory with only 70 percent of records having \"Annual updates to rights\" and \"Documentation of medication education and consent\". However, both individual indicators increased from 58 and 62 percent respectively. Figure 21 provides the ACT FOA results for each category by fiscal year. The greatest improvement in the FOA subcategory scores specific to ACT reviews, for the second consecutive year, was Whole Health, which increased from 76 percent to 90 percent in 2017 and significantly (p \u003c .01) to 95 percent in FY 2018. Furthermore, the Whole Health indicator \"Documented safeguards utilized for medication known to have substantial risk\" improved in FY 2018. \"Ongoing assessment to determine external referrals for health services, supports and treatment\" significantly increased (p \u003c .01) in FY 2018. \n \n95% 90% \n76% \n \nFigure 21. ACT FOA Scores by Category by Year \n \n82% 80% \n78% \n \n99% 96% \n91% \n \n97% 97% 94% \n \n97% 97% 97% 89% 87% 86% \n \n93% 90% 90% \n \nWhole Health \n \nSafety \n \nPerson Centered Practices \n \nFY 2016 (N = 19) \n \nCommunity Life \n \nChoice \n \nRights \n \nFY 2017 (N = 20) FY 2018 (N = 21) \n \nOverall \n \n64 | P a g e \n \n Crisis Stabilization Unit Quality Reviews \n \nQuality Management Annual Report FY 2018 \n \nAt DBHDD's request, implementation of the Crisis Stabilization Unit Quality Reviews (CSUQR) began in January of FY 2016. The purpose of the CSUQR is to assess the provider's overall practices, quality of service delivery, and to determine adherence to DBHDD standards through individual record reviews.8 When possible, Quality Management conducts the CSUQR and BHQR simultaneously; CSU providers received separate CSUQR scores and final assessment reports. 
\nThe CSUQR indicators are based on the DBHDD policies and Provider Manual for Community Behavioral Health Providers and are organized into three review categories: Individual Record Review (IRR), Service Guidelines, and FOAs. The score for each category represents the percent of applicable indicators met or present. The CSUQR Overall score is calculated by averaging the three categories, with each category accounting for 33.3 percent of the Overall score. \nDuring FY 2018, Quality Management conducted reviews at 19 eligible CSU providers. Like FY 2016, two were freestanding CSUs, and 17 occurred in conjunction with BHQRs. In FY 2017 four CSUs had a second review due to scores falling below the threshold (80 percent Overall score), this reduced to only two CSUs that required a reassessment in FY 2018. Results for the reassessment reviews were included within the overall results incorporating results of 21 reviews. Five of the 19 providers served child and adolescent (C\u0026A) as well as adults, the remaining CSU providers served adults only. \nCrisis Stabilization Unit Sample Method \nA random sample of 15 individuals who had received services within the three months preceding the review was selected for record review. When providers did not have an adequate number of individuals served in the three months (a minimum of three days/nights admission) preceding the review, samples were pulled from individuals served up to six months preceding the review. CSUs serving both populations of adults and C\u0026A have a sample of up to ten adults and five C\u0026A selected. \n \n8 Please refer to the following link to access a full description of the review process and review tools. http://georgiacollaborative.com/providers/prv-BH.html \n \n65 | P a g e \n \n Quality Management Annual Report FY 2018 \nFY 2018 CSUQRs consisted of 315 individual records, 290 adult records, and 25 C\u0026A records. All CSUQRs had the maximum 15 records reviewed. 
\nThe CSUQR mirrors the BHQR scoring and samples for the Individual Interviews and Staff Interviews (selected by the provider and quality assessors). Results from the interviews were not included in calculating the provider's overall CSUQR score. Quality assessors attempted to complete a minimum of five Individual and five Staff Interviews per CSUQR; however, the actual number fluctuated based on individual and staff availability, their agreement to participate in the interview process, the number of staff, and the number of individuals the provider served at the time of the review. Individuals selected for interviews were currently at the CSU, and the staff selected were providing services on the CSU. If an individual or staff declined an interview, assessors selected a different individual or staff. \nCrisis Stabilization Unit Quality Review \nFigure 22 shows the Overall and category scores for reviews completed for the past three years (FY 2016, FY 2017, and FY 2018). Areas to highlight based on FY 2018 findings specific to CSUQRs include: \n The Overall average score for CSUs has steadily trended upward to a FY 2018 result of 88 percent. This is a two-point increase from the previous fiscal year. Nineteen reviews (90%) scored within the 81 to 100 percent range compared to the FY 2016 total of thirteen (68%) and FY 2017 total of eighteen (78%). See distribution graphs in Appendix B. \n Both the Individual Record Review (IRR) and Service Guideline category increased for FY 2018, with the IRR demonstrating significant improvement (p \u003c .01). The category of Focused Outcome Areas (FOA) remained at 91 percent in FY 2018. \n Individual Record Review scores of CSUs serving both adults and C\u0026A scored two points higher (85%) than CSUs serving an adult only population; while Service Guideline scores for these CSU providers were three points lower (88%). 
No difference in FOA scores occurred \n66 | P a g e \n \n Quality Management Annual Report FY 2018 \nbetween the populations (91%). Category sections below contain further details pertaining to the C\u0026A CSU record reviews. \nFigure 22. CSU Overall Scores by Fiscal Year \n \nService Guidelines \nFY16: 82% FY17: 87% FY18: 91% \n \nIndividual Record Review \nFY16: 79% FY17: 80% FY18: 83% \nOverall Score FY16: 83% (N = 19) FY17: 86% (N = 23) FY18: 88% (N = 21) \n \nFocused Outcome \nAreas \nFY16: 88% FY17: 91% FY18: 91% \n \nTwo CSU providers had a second review during FY 2018 due to Overall scores that were below the threshold. The results of these two specific CSUQR reassessment reviews are contained within the reassessment review section of this report in Figures 33 and 34. \nCSUQR Individual Record Review (IRR) \nIndividual Record Review (IRR) indicators were answered once per record reviewed. The IRR is comprised of six subcategories: Assessment and Planning, Admission, Initial Evaluation, CSU Course of Stay, Documentation, Transition/Discharge Summary, and CSU Planning. Each of the six subcategories had a unique number of indicators, with 30 total scored within the IRR category. Significant improvement (p \u003c .01) or change was identified for all subcategories except Course of Stay and Treatment Planning. Figure 23 identifies the subcategories and results for each by fiscal year. \n \n67 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nFigure 23. 
Individual Record Review Category Scores \n \nAssessment \u0026 Planning \nAdmission/ Initial Evaluation/ Screening for Risk Crisis Stabilization Specific Planning \nCrisis Stabilization Course of Stay \nDocumentation \nTransition/ Discharge Planning \nFY 2016 (N = 19) \n \n86% 85% \n88% \n \n69% 69% \n78% \n \n70% 72% 74% \n \n83% 86% 85% \n \n87% 89% \n85% \n \n63% 62% \n92% \n \nFY 2017 (N = 23) \n \nFY 2018 (N = 21) \n \n* The IRR subcategories were individually scored and are not averaged for the final overall IRR score. The final overall IRR score is calculated by adding all \"yes\" or \"present\" responses of all IRR indicators and dividing by the total \"yes\" or \"no\" responses combined for each record review. \n \nThe IRR score in FY 2016 was 79 percent and has since increased over the past two years to 83 percent in FY 2018. There is room for improvement when reviewing specific indicators or indicators of the CSUQR IRR. The following indicators received the lowest score for FY 2018 and were also low scoring indicators for FY 2017. These indicators are also evaluated by population, adult or C\u0026A. 
\n Documentation: Community transition plan present if individual is readmitted 19 percent o C\u0026A N/A due to no readmissions, Adult 19 percent \n Assessment \u0026 Planning: Co-occurring health conditions included in IRP/NCP 54 percent o C\u0026A 25 percent, Adult 55 percent \n \n68 | P a g e \n \n Quality Management Annual Report FY 2018 \n CSU Planning: IRP/NCP addresses safety issues 55 percent o C\u0026A 88 percent, Adult 51 percent \n Non-Scored: Vital signs every eight hours 62 percent o C\u0026A 18 percent, Adult 66 percent \n Non-Scored: Evidence of follow-up and continuing care connection 27 percent o C\u0026A 46 percent, Adult 25 percent \nAssessment and Planning section had a three-point increase from the previous year with noted improvement as 100 percent of records contained a current medical screening and documentation supporting the individual met admission criteria. \"Orders for admission to CSU\" were present (97%), as were \"Orders for withdrawal management regimens\" (94%). Moreover, despite significant increases (p \u003c .01) for both \"Discharge plan(s) defining criteria\" (69%) and \"Co-occurring conditions included in the IRP/NCP\" (54%), further growth remains obtainable. \nSpecific to CSU Course of Stay, indicators \"Individual is offered groups as needed\" and \"Individual participated in training/therapy\" showed a decline overall by four or five points; however, 100 percent of C\u0026A records documented evidence that the \"Individual is offered groups as needed\" and that the \"Individual participated in training/therapy\". Adult record reviews resulted in only 87 percent documenting the offering of group services and 82 percent participation in training/therapy. The third question in this IRR subcategory, \"Co-occurring conditions assessed and addressed simultaneously\", demonstrated an increase (p \u003c .01) from 69 percent in FY 2017 to 79 percent in FY 2018. 
When comparing the adult and C\u0026A population, a difference was identified specific to co-occurring disorders; 40 percent of C\u0026A records and 80 percent of adult records \"Co-occurring conditions assessed and addressed simultaneously\". \nOnly 55 percent of IRP/NCPs addressed all assessed safety issues, which is an increase from the previous fiscal year's results of 36 percent. A difference between C\u0026A and adult was evident in that 88 percent of C\u0026A records addressed safety issues compared to only 51 percent of adult records. Quality Management will conduct a safety planning training specific to CSUs in FY 2019. \n69 | P a g e \n \n Quality Management Annual Report FY 2018 \nDocumentation, previously the highest scoring subcategory of the IRR, declined from 89 percent in FY 2017 to 85 percent in FY 2018. The indicator assessing \"Community transition plan when the individual was readmitted within 30 days\" decreased to 19 percent, and \"Medication Administration Record (MAR) having a documented legend\" significantly decreased (p \u003c .01) to 65 percent. Medical progress notes (90%) and non-medical progress notes (83%) supplying evidence of progress toward goals and/or objectives remained consistent from FY 2017 to FY 2018 regardless of adult or C\u0026A population. One hundred percent of all C\u0026A records provided documentation specific to both medical and non-medical progress notes displaying evidence of progress compared to 89 percent (medical progress notes) and 82 percent (non-medical progress notes) of adult records reviewed. Although transition/discharge planning historically resulted in a relatively low score for FY 2017 and FY 2016, the indicator of transition/discharge plans containing the needed documentation has significantly increased (p \u003c .01) from the previous year's result from 62 percent to 92 percent. 
\n70 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nCSUQR Service Guidelines \nThe 16 CSU Service Guidelines indicators (14 scored and 2 non-scored) were answered once per \nreview to assess the CSU program. See Figure 24 and Figure 25. \nFigure 24. CSUQR Service Guidelines Staffing and Curriculum Results by Indicator by Year \n \nCSU Staffing Requirements Met (FY16 n = 19, FY17 n = 23, FY18 = 21) \nC\u0026A Minimum Staff Present (FY16 n = 3, FY17 n = 5, FY18 = 5) \nC\u0026A Staff Ratio Met (FY16 n = 3, FY17 n = 5, FY18 = 5) \nC\u0026A Nursing Staff Ratio (FY16 n = 3, FY17 n = 5, FY18 = 6) \nAccess to Addictionologist (FY16 n = 18, FY17 n = 23, FY18 = 21) \n \n100% 96% 95% \n100% 100% 100% \n100% 100% 100% \n100% 100% 100% \n67% 96% \n86% \n \nPsychiatrist Available for Consultation (FY16 n = 0, FY17 n = 18, FY18 = 21) \n \nPhysician Availability (FY16 n = 19, FY17 n = 22, FY18 = 21) \n \nC\u0026A Psychiatrist (Non-scored) (FY16 n = 3, FY17 n = 6, FY18 = 5) \n \nModel/Curriculum for SU treatment (Non-scored) (FY16 n = 16, FY17 n = 22, FY18 = 21) \n \nFY 2016 (N = 19) \n \nFY 2017 (N = 23) \n \n100% 100% \n100% 100% 100% \n67% 83% 100% \n69% 91% 90% \nFY 2018 (N = 21) \n \n71 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nCSU staffing requirements were met in 95 percent of the reviews conducted. Of the five CSUs serving children and adolescents, all met the staffing requirements and ratios for FY 2017 and FY 2018. An additional indicator was added to the FY 2017 review process specific to the \"Availability of a psychiatrist for consultation in cases in which the CSU physician(s) is/are not specialized in psychiatry\", which scored 100 percent both years since indicator inception. \n \nFigure 25. 
CSUQR Service Guidelines Policy Adherence Indicators by Fiscal Year \n \nDeaf, Deaf-Blind, Hard of Hearing Policies (FY16 n = 19, FY17 n = 23, FY18 = 17) \nInfection Control Plan Adherence (FY16 n = 19, FY17 n = 23, FY18 = 21) \nTherapeutic Blood Level Monitoring (FY16 n = 19, FY17 n = 23, FY18 = 21) \nSeclusion \u0026 Restraint Policy Adherence (FY16 n = 19, FY17 n = 23, FY18 = 20) \nMedication Storage Policy Adherence (FY16 n = 19, FY17 n = 23, FY18 = 21) \nAdherence to Medication Notification Policy (FY16 n = 19, FY17 n = 23, FY18 = 21) \nProtocols for Handling Drugs (FY16 n = 19, FY17 n = 23, FY18 = 21) \n \n53% 65% \n \n100% \n \n84% 87% 86% \n \n89% 83% \n90% \n \n95% 91% 85% \n \n53% 57% 67% \n \n79% 83% 86% \n \n95% 87% \n95% \n \nFY 2016 (N = 19) \n \nFY 2017 (N = 23) \n \nFY 2018 (N = 21) \n \n72 | P a g e \n \n Quality Management Annual Report FY 2018 \nWhile the majority of policies reviewed improved from the previous year, see Figure 25, \"Provider's adherence to their seclusion and restraint policy\" declined in FY 2018 to a rate of 85 percent. However, improvement was evident in both \"Medication notification policy adherence\" and \"Medication storage policy adherence\". See Figure 26 for fiscal year results of three indicators related to medication policy adherence reviewed by assessors. \n \nFigure 26. CSUQR Service Guidelines Medication Policy Adherence Indicators by Fiscal Year \n \nMedication Notification Policy Adherence \nFY 2016 79% \nFY 2017 83% \nFY 2018 86% \n \nMedication Storage Policy Adherence \nFY 2016 53% \nFY 2017 57% \nFY 2018 67% \n \nProtocols for Handling Drugs \nFY 2016 95% \nFY 2017 87% \nFY 2018 95% \n \nFrom FY 2016 to FY 2017, scores increased across the CSU providers specific to Service Guidelines, with more than half of reviews scoring above 90 percent. Now with the Collaborative in its third year of the contract, two-thirds of providers are scoring above 90 percent for this CSUQR category. 
The median score in FY 2017 was 91 percent compared to the median score in FY 2016 at 80 percent. FY 2018 median provider score is 100 percent. Overall, the statewide average for Service Guidelines increased by four points (91%) in FY 2018. \nCSU Focused Outcome Area \nFocused Outcome Area (FOA) indicators were answered once per record reviewed. Each FOA had a unique number of indicators for a total of 23 indicators assessed overall. Please refer to the BHQR FOA section for a definition of the six FOAs. \n \n73 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nThe Overall FOA score for CSU providers increased from 88 percent to 91 percent in FY 2017 and remained at 91 percent for FY 2018. Consistent with both previous years' results for CSUQR FOAs, the highest subcategories were Choice (97%), Rights (97%), and Community (92%). Although both Choice and Community decreased somewhat from the previous year's review by one to two percentage points, respectively, Rights significantly increased (p \u003c .01) from 92 percent to 97 percent. Whole Health (88%) and Safety (82%) decreased from FY 2017, while Person Centered Practices (88%) remained unchanged. See Figure 27 for FOA results by year. \n \nFigure 27. CSUQR FOA Results by Year \n \nWhole Health \n \nRights FY16: 97% FY17: 92% FY18: 97% \nChoice FY16: 99% FY17: 98% FY18: 97% \n \nFY16: 83% FY17: 89% FY18: 88% \nFY16: 88% FY17: 91% FY18: 91% \nCommunity \nFY16: 92% FY17: 94% FY18: 92% \n \nSafety \nFY16: 76% FY17: 85% FY18: 82% \nPerson Centered Practices FY16: 77% FY17: 88% FY18: 88% \n \n* The FOAs were individually scored and are not averaged for the final overall FOA result at the review level. The final overall FOA result is calculated by adding all \"yes\" or \"present\" responses of all FOAs and dividing by the total \"yes\" or \"no\" responses combined for each review. 
\n \nThe following were findings based on each FOA for FY 2018: \n Whole Health demonstrated consistent findings in line with what is noted in the CSUQR IRR category. Documentation has improved compared to baseline results regarding evidence of whole health and wellness incorporated into the individual's length of stay. \"Documentation of medication safeguards\" declined from 85 percent in FY 2017 to 83 percent in FY 2018 as \n \n74 | P a g e \n \n Quality Management Annual Report FY 2018 \ndid \"Documentation regarding ongoing assessment for determination of need for external referrals\" (95 percent in FY 2017 to 91 percent in FY 2018). \no One hundred percent of all C\u0026A records reviewed met the criteria of \"Demonstrating ongoing assessment to determine external referrals\" compared to 91 percent of adult records reviewed. \n Safety documentation demonstrated significant declines (p \u003c .01) in both \"Services offered in a safe environment\" (96 to 87%) and \"Documentation of providers' assistance with development of safety/crisis planning when needed\" (95 to 86%). Improvement was noted in the \"Documentation of medication consent and education\" compared to FY 2017, with a nearly 10-point increase to 72 percent. o Important to note is the difference in the C\u0026A population versus the adult population specific to \"Services being offered in an environment that ensures the individual's safety\", as this indicator resulted in a score of only 44 percent for C\u0026A records reviewed compared to 91 percent of adult records documenting such information. \no Person Centered Practices documentation in FY 2016 did not support \"Individual was an active participant in CSU treatment planning\"; however, the result for FY 2017 (90%) and FY 2018 (91%) depicts higher scores. o Specific to the C\u0026A population, 88 percent of records documented evidence of participation in planning of services, which may include guardian participation as well. 
Additionally, findings remained consistent from the previous year with 83 percent of records documenting \"Individuals actively participated in the modification of treatment plan\". For FY 2018, 100 percent of C\u0026A records indicate inclusion in the modification of plans. o However, \"Treatment plan is reassessed based on changing needs or circumstances\" remained a prevalent area for improvement with only 76 percent of total records providing such (91% of C\u0026A records compared to 74% adult records). \n Community documentation supported a two-point improvement in \"Individuals assisted in identifying aftercare placement\" (97%). There was a decline in \"Transition planning \n75 | P a g e \n \n Quality Management Annual Report FY 2018 \nthroughout service delivery that involved the individual and/or their natural supports\" (89%). No change was found in individual's informed choice driving selection of housing options (95%). \no No significant differences were found between the C\u0026A population when compared to the indicators of the adult population on this FOA. \no One hundred percent of C\u0026A records received a score of \"yes\" on all Community FOA indicators with the exception of \"Individual and provider have discussed aftercare placement\" which was 95 percent. \n Choice reflected consistent findings with FY 2017 with the \"Individual's preferences for treatment were followed\" while at the CSU (99%) and when \"Barriers to treatment were identified\" documentation demonstrated that alternatives were explored to address the barriers (96%). o No significant difference was found between the indicator scores from FY 2017 to FY 2018 within the Choice FOA nor was a significant difference found between the C\u0026A and adult record scores. 
\no However, it is important to note that 100 percent of C\u0026A records reviewed demonstrated following the individual's preferences and providing the individual with options of supports and services compared to 99 percent records documented following individual preferences and 97 percent documenting providing options within the adult records. \n Rights supported \"HIPAA Privacy and Security Rules were reviewed with the individual\" (98%) and \"Individuals were informed of rights at onset of service delivery\" (92%) both increased in FY 2018 with the latter indicator demonstrating significant improvement in total sample of records reviewed (p \u003c .01). o The C\u0026A records reviewed (n = 25) demonstrated 100 percent compliance in all areas of Rights, demonstrating higher results than the adult record reviews, especially regarding being informed of rights at onset of services (92%, n = 265). \n76 | P a g e \n \n CSUQR Staff Interview and Individual Interview \n \nQuality Management Annual Report FY 2018 \n \nThe CSUQR interview indicators were similar to the BHQR interview indicators and were divided into the six FOAs. Individual Interviews were used to assess the individual's perception of care with the provider, services rendered, and support in working toward personal goals. \nStaff Interviews helped determine if a person centered approach was used in providing services and empowering individuals. The data and anecdotal information gathered from the Individual and Staff Interviews supplemented the record review and enhanced the review process by providing a tailored perspective to illustrate the quality of care the agency provided using both a quantitative and qualitative approach. \nData in Figure 28 illustrates the results of Staff and Individual Interviews by fiscal year. Individual Interview scores declined slightly from the baseline year to FY 2017 yet increased to 95 percent for FY 2018. Staff Interview scores increased to 99.2 percent for FY 2018. 
\nFigure 28. CSUQR Individual and Staff Interview Results by Fiscal Year \n \nINDIVIDUAL INTERVIEW \n \nFY 2016 94.9% (N = 94) \nFY 2017 93.0% (N = 114) \nFY 2018 95.1% (N = 102) \n \nSTAFF INTERVIEW \n \nFY 2016 98.6% (N = 96) \nFY 2017 98.7% (N = 115) \nFY 2018 99.2% (N = 105) \n \nFigure 29 represents the average FOA scores for Individual Interviews while Figure 30 provides results of Staff Interviews for FY 2016, FY 2017, and FY 2018. Individual Interview results increased in FY 2018 to 95 percent. Again, all but one FOA scored in the 90th percentile. The lowest-scoring FOA \n \n77 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nfor Individual Interviews based on indicator level data remained Whole Health (88%), which may indicate an increased need for CSUs to attend to and plan for individuals' co-occurring health conditions or health needs such as preventive services, dental services, or primary care. This issue was also highlighted in the IRR and FOA categories both in FY 2018 and FY 2017. \n \nFigure 29. CSUQR Individual Interview Subcategory Data by Year \n \nWhole Health \n \n85% 83% \n88% \n \nSafety \n \n95% 96% 97% \n \nPerson-Centered Planning \n \n96% 94% 95% \n \nCommunity Life \n \n99% 96% 96% \n \nChoice \n \n97% 98% 98% \n \nRights \n \n100% 99% 98% \n \nOverall \n \n95% 93% \n95% \n \nFY 2016 (N = 94) \n \nFY 2017 (N = 114) \n \nFY 2018 (N = 102) \n \nAll Staff Interview indicators answered for FY 2018 exceeded 90 percent. However, the indicator specific to staff being aware of special diets for individuals with access to a nutritionist scored lower than all the others at 92 percent. Additionally, CSUQR and BHQR Staff Interview scores indicated \n \n78 | P a g e \n \n Quality Management Annual Report FY 2018 \nknowledge regarding providing services aligns with standards assessed by the FOA indicators, as well as knowledge of the individuals they serve. \nFigure 30. 
CSU Staff Interview Subcategory Data by Year \n \nWhole Health Safety \nPerson-Centered Planning Community Life Choice Rights Overall \n \n97% 98% 99% \n99% 98% 98% \n98% 100% 99% \n99% 99% 100% \n100% 100% 100% \n97% 100% 100% \n99% 99% 99% \n \nFY 2016 (N = 96) \n \nFY 2017 (N = 115) \n \nFY 2018 (N = 105) \n \n79 | P a g e \n \n Quality Management Annual Report FY 2018 \nReassessment Frequency Reviews \nBHQR Reassessment Review Findings \nBeginning in FY 2017, the frequency of a BHQR and CSUQR was based on minimum scoring thresholds: less than 80 percent Overall score or less than 70 percent Billing Validation. Providers scoring above the minimum threshold received one BHQR/CSUQR per fiscal year; providers falling below the threshold received two reviews. Providers remain on a frequency schedule of approximately every six months until two subsequent review scores meet threshold requirements. The threshold process continued throughout FY 2018. However, based on FY 2017 data, the threshold was increased for FY 2018 to 80 percent for both Overall score and Billing score. Twenty-seven percent of providers required a reassessment in FY 2017 (N = 35), 15 percent (N = 20) required a reassessment in FY 2018, a 12 point decrease. Two of these providers had not been reviewed in FY 2017 due to either being a new provider for FY 2018 or had insufficient claims. Providers whose scores fell below the threshold of 80 percent for either Overall or Billing were scheduled for a repeat review approximately six months following their initial FY 2017 review, allowing ample time for claims submission and documentation to reflect any changes made by the provider based on previous review findings. There was an average of 184.6 days between reviews. Most reassessed providers in FY 2018 were Tier 2 (N = 14). Figure 31 displays the distribution of provider scores for the reassessed providers for scores obtained during the first and second review in FY 2018. 
Improvement in scores is illustrated when comparing providers' first and second reviews. For the first review of FY 2018, 55 percent of providers scored in the 81 to 90 percent range. Compared to previous years, this same percentage of providers scored in the 71 to 80 percent range for their first review. Additionally, some providers scored between 51 and 79 percent at the first review, and later progressed to the 71 to 80 percent category. \n80 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nFigure 31. BHQR FY 2018 Overall Score Distribution of Reassessed Providers \n \n70% \n \n60% \n \n60% \n \n55% \n \n50% \n \n40% \n \n35% \n \n30% \n \n20% \n \n20% \n \n15% \n \n10% \n \n5% 5% \n \n5% \n \n0% \n \nFirst Review FY 2018 (N = 20; Mean = 83%; Median = 84%) Second Review FY 2018 (N = 20; Mean = 91%; Median = 91%) \nOverall Score: The average result for the reassessed providers at first review of FY 2018 was 83 percent compared to 91 percent later in the year. Although 83 percent exceeds the threshold requirement for Overall score, 15 providers were reassessed due to the Billing score falling below the threshold. See section related to Billing score in the reassessment section on the next page. Further results show the following: \n Nine providers exceeded 85 percent at initial review for Overall score compared to 18 at time of second review. \n Eighty-five percent (N = 17) of providers who had a second review increased an Overall score by end of the fiscal year. \n The greatest increase in score for a provider was a 32-point change from initial to subsequent review. \n Two providers had a decline in Overall score from time of initial review to subsequent FY 2018 review, with an average decrease of two percent in their Overall score. \nDistributions of reassessed providers based on each scored category can be found in Appendix B. 
Those providers who fell below the established threshold at either of the two reviews during FY 2018 \n81 | P a g e \n \n Quality Management Annual Report FY 2018 \nwill be required to have an initial and subsequent review scheduled for the 2019 fiscal year regardless of improvement in one or both categories (Overall and Billing score). Additionally, due to the increased levels of scores across the network as well as the increased statewide average, the threshold for FY 2019 is anticipated to increase to 90 percent for both Overall and Billing. \nAssessment and Planning was the lowest category for the reassessed providers in FY 2017 and continued as such in FY 2018 with a result at initial review of 79 percent and an increase of ten points (89%) at subsequent review when averaged across providers. Similar to providers reviewed only once annually, the lowest scored indicators included \"Co-occurring condition documentation and assessment, whole health and wellness within the IRP\", \"Discharge plan defines criteria\", as well as \"All assessed needs addressed on the IRP\". Of the 85 percent of providers who demonstrated improvement in Assessment and Planning, half increased by 10 points or more. One provider had an increase of 47 points increasing from 37 percent at initial review to 84 percent by year-end. Improvement was also evident against the statewide average of 88 percent as 10 (50%) providers scored at or above this result at the time of their second FY 2018 review, compared to only three (15%) providers at the time of their first FY 2018 review. \nBilling Score: More than two-thirds (80%, N = 16) of providers reviewed for a second time in FY 2018 had an increase in the Billing score (average increase of 14 points). One provider had a substantial increase of 61 points, going from 28 percent to 89 percent. The average across reassessed providers at the time of their initial review was 75 percent compared to 89 percent following all providers' second reviews. 
Only two of the twenty reassessed providers scored 100 percent for this category. See Figure 32 for the distribution scores of reassessed providers. Nearly 24 percent of claims were found unjustified at initial review and decreased to only 10 percent unjustified claims at the time of the providers' second review. The most prevalent instances of billing discrepancies identified for reassessed providers remained consistent with those found in FY 2017: meeting admission criteria, missing and/or incomplete service orders, as well as missing progress notes. \n82 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nFigure 32. BHQR FY 2018 Billing Score Distribution of Reassessed Providers \n \n60% \n \n55% \n \n50% \n \n40% \n \n40% \n \n35% \n \n30% \n \n25% \n \n20% \n \n10% \n \n10% \n \n10% \n \n10% \n \n5% \n \n5% \n \n5% \n \n0% \n \nFirst Review FY 2018 (N = 20; Mean = 75%; Median = 78%) Second Review FY 2018 (N = 20; Mean = 89%; Median = 92%) \nService Guidelines: The BHQR Service Guidelines score increased (88 percent in FY 2017 to 90 percent in FY 2018) as well as the reassessed providers and scores. The average score for the reassessed providers at the time of initial review was 87 percent, just slightly below the statewide average, yet improved to 92 percent at second review thereby exceeding the statewide average of 90 percent. Seventeen services were reviewed throughout the reassessments. Reassessments did not include review of Opioid Maintenance, AD Peer Support-Individual, or Community Transition Planning as providers requiring reassessments either did not provide these services or claims were not available in the time period sampled. \nMost services improved by an average of two to four points with substantial improvement noted in the area of Intensive Family Intervention, which showed a 14-point increase from initial review (74%) to subsequent review (88%). Improvement was noted in progress note documentation. 
This included \"Staff interventions related to the IRP\", \"Progress towards goals and objectives documented\", as well as the \"Individual's response to interventions contained in the progress notes\". The two exceptions of the above-mentioned improvement were found in Nursing Assessment and Health Services and Psychiatric Treatment; both had a decline from first to second review. \n \n83 | P a g e \n \n Quality Management Annual Report FY 2018 \nFocused Outcome Areas: Providers who were reviewed for a second time in FY 2018 demonstrated improvement in FOA scores, from 88 percent to 93 percent from first to second FY 2018 review. Fifteen providers, or 75 percent, demonstrated improvement in this area, while four had a decline in score, at an average of five points. Of all FOAs reviewed during a reassessment in FY 2018, Safety was the lowest scoring category both at first review (69%) and at second review (78%). Two providers scored a zero in Safety at onset or initial FY 2018 review. One of these providers remained at zero percent for the subsequent review while the other provider increased to a perfect 100 percent score. The Safety FOA remained an area for growth and improvement among not only reassessed providers but also the entire network. Furthermore, Whole Health remained one of the lower scoring FOAs for reassessed providers, both at initial as well as subsequent reviews, 76 percent compared to 83 percent, respectively. Documentation consistently lacked evidence of addressing medical conditions and safeguards of medications. \nCSU Reassessment Review Findings \nTwo of the 19 CSU providers had a reassessment during FY 2018 compared to four providers in FY 2017. Figure 33 and Figure 34 provides the Overall score result and category scores by each of the two CSU providers at the time of initial FY 2017 review and subsequent FY 2018 reviews. Both CSUs were slightly below the statewide Overall score average by one and four points in FY 2017, respectively. 
CSU #1 declined by 15 points in their first FY 2018 review with a decrease in all individual categories. This CSU did demonstrate progress from the first FY 2018 review to the second FY 2018 review yet will remain on the frequency schedule until two consecutive overall review scores meet the threshold. \n84 | P a g e \n \n 100% 80% 60% 40% 20% 0% \n \nQuality Management Annual Report FY 2018 \n \nFigure 33. CSU #1 Category Score by Reassessment \n \n79% \n72% 64% \n \n72% 57% 62% \n \n70% \n64% 55% \n \n95% \n91% 81% \n \nOVERALL \n \nIndividual Record Service Guidelines Review \n \nCSU 1 First FY 2018 Score \n \nCSU 1 Second FY 2018 Score \n \nFocused Outcome Areas \nCSU 1 FY 2017 \n \nFigure 34 outlines CSU #2 progress and improvement in each of the three categories as well as Overall score results. An initial result of 76 percent Overall is evident with an extremely low FY 2017 score specific to Service Guidelines (57%). While FOAs had a minor decline from FY 2017 to FY 2018. CSU #2 has been removed from the reassessment schedule due to completing two consecutive Overall score goals of 80 percent. \nFigure 34. CSU #2 Category Score by Reassessment \n \n100% 80% 60% 40% \n \n76% 83% 85% \n \n78% 88% 86% \n \n79% 57% 71% \n \n93% 89% 90% \n \n20% \n \n0% \n \nOVERALL Individual Record Service Guidelines Focused Outcome \n \nReview \n \nAreas \n \nCSU 2 First FY 2018 Score CSU 2 Second FY 2018 Score CSU 2 FY 2017 \n \n85 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nTechnical Assistance/Exit Conference \nUpon completion of all BHQRs and CSUQRs, the lead assessor completed a formal exit conference. The exit conference supplied providers with tentative scores, provider strengths, and opportunities for growth. Providers received immediate, preliminary feedback of the BHQR and CSUQR findings at the time of the exit conference. 
Technical assistance was also provided during the exit conference and throughout the review process pertaining to opportunities for growth and areas of risk. The table below (Table 15) provides details on the technical assistance/exit conferences completed during FY 2018. \n \nTable 15. Technical Assistance/Exit Conference Details for FY 2018 \n \nFiscal Year 2018 \n \nQuarter 1 Quarter 2 Quarter 3 Quarter 4 \n \nTotal Year End \n \nTotal BHQR Exits \n \n41 \n \n40 \n \n44 \n \n31 \n \n156 \n \nTotal Minutes \n \n2362 \n \n2360 \n \n2660 \n \n1922 \n \n9304 \n \nTotal Attendees \n \n397 \n \n302 \n \n357 \n \n326 \n \n1382 \n \nTotal CSUQR Exit Total Minutes Total Attendees \n \n2 \n \n6 \n \n3 \n \n10 \n \n21 \n \n140 \n \n370 \n \n210 \n \n700 \n \n1420 \n \n37 \n \n32 \n \n59 \n \n183 \n \n311 \n \nTotal Exits \n \n43 \n \n46 \n \n47 \n \n41 \n \n177 \n \nIn FY 2018, 177 exit conferences were completed. The total number of attendees was 200 more than previous years at over 1,600 (an average of ten individuals per exit conference). The CSUQR average attendance was 15 and BHQR average attendance was nine. Total time of exit conferences was over 10,700 minutes for the year and the average exit length was approximately 60 minutes. The length of exit conferences varies by type of exit conference and may include ACT reviews, which included an additional 15 records per review and details specific to ACT services discussed and reported. \nExit conferences and technical assistance will continue throughout FY 2019 as through the provider feedback survey, providers identified them as helpful assistance from the Collaborative. \n \n86 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nSummary of Findings and Recommendations for Behavioral Health Providers \nQuality Management continually reassesses processes to ensure we are capturing results and \nanalyzing outcomes that give the best information and truest picture of service quality in Georgia. 
\nThis includes reviewing feedback received from both providers and individuals, as well as revision of \nreview tools as needed to ensure accurate and measurable reporting of results. \n \nJune 2018 marked the completion of the third year of the Quality Management contract. Quality assessors completed 156 BHQRs and 21 CSUQRs in FY 2018 with 177 exit conferences conducted across all review types. Exit conferences may have occurred jointly across the BHQR, CSUQR, and ACT reviews depending on whether the provider rendered more than one of those services. \n \nProvider Performance \nTable 16 summarizes the Overall Averages by fiscal year:  The Overall score for all three review types met or exceeded 88 percent for the third year: BHQR  88 percent, ACT  89 percent, and CSU  88 percent.  Twenty providers were reviewed for a second time in FY 2018 due to initial FY 2017 low scores in Overall, Billing, or both with nearly two-thirds demonstrating improvement across all categories.  Although improvements were evident across all review types and categories, Service Guidelines within ACT reviews significantly declined (p\u003c.01) from 88 percent to 84 percent in FY 2018. \n \nTable 16. BHQR, ACT, CSUQR Overall Averages by Year \n \nBilling \n \nValidation \n \nFOA \n \nBHQR FY 2016 \n \n81% \n \n85% \n \nAssessment/ Planning \n79% \n \nService Guidelines \n90% \n \nBHQR FY 2017 \n \n84% \n \n89% \n \n77% \n \n88% \n \nBHQR FY 2018 \n \n85% \n \n92% \n \n84% \n \n90% \n \nOverall 84% 84% 88% \n \nACT FY 2016 \n \n92% \n \n88% \n \n85% \n \n85% \n \n88% \n \n87 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nTable 16. 
BHQR, ACT, CSUQR Overall Averages by Year \n \nBilling \n \nValidation \n \nFOA \n \nACT FY 2017 \n \n90% \n \n90% \n \nAssessment/ Planning \n80% \n \nService Guidelines \n88% \n \nACT FY 2018 \n \n91% \n \n93% \n \n87% \n \n84% \n \nOverall 87% 89% \n \nCSUQR FY 2016 \n \nNA \n \n88% \n \nIRR  79% \n \n82% \n \n83% \n \nCSUQR FY 2017 \n \nNA \n \n91% \n \nIRR  80% \n \n87% \n \n86% \n \nCSUQR FY 2018 \n \nNA \n \n91% \n \nIRR  83% \n \n91% \n \n88% \n \nTable 17 summarizes the FOAs by review type and year. The Overall score for the FOAs met or exceeded 91 percent in FY 2018 for both BHQR and CSUQRs. Whole Health, continually the lowest score for BHQRs, has continued to improve from FY 2016. However, it remains an area for improvement across all providers. Safety increased for both BHQRs and CSUQRs from FY 2016 to FY 2017, declined in FY 2018 for both review types. In summary, the BHQR and CSUQR Overall FOA scores increased or remained static since inception; however, the subcategory of Safety suffered a decrease from FY 2017 to FY 2018 for both BHQRs (83 to 78%) and CSUQRs (85 to 82%). \n \nTable 17. 
BHQR and CSUQR FOA Scores \n \nFocused Outcome BHQR BHQR BHQR CSUQR \n \nAreas \n \nFY 2016 FY 2017 FY 2018 FY 2016 \n \nWhole Health \n \n63% \n \n74% \n \n84% \n \n83% \n \nSafety \n \n81% \n \n83% \n \n78% \n \n76% \n \nPerson Centered Practices 92% \n \n91% \n \n95% \n \n76% \n \nCommunity \n \n87% \n \n93% \n \n96% \n \n92% \n \nChoice \n \n92% \n \n96% \n \n97% \n \n98% \n \nRights \n \n90% \n \n93% \n \n93% \n \n97% \n \nOverall Score \n \n85% \n \n89% \n \n92% \n \n88% \n \nCSUQR FY 2017 \n89% 85% 88% 94% 98% 92% \n91% \n \nCSUQR FY 2018 \n88% 82% 88% 92% 97% 97% \n91% \n \nSpecific to reassessed providers who fell below the minimum threshold for Billing and Overall scores, Table 18 reflects the number of providers who improved from the first FY 2018 review to the second \n \n88 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nreview within the year. While all categories for the statewide BHQR average increased from FY 2017 to FY 2018, reassessed providers all fell below the statewide average at first review, but exceeded the statewide average after second review. Providers who obtained low FY 2017 Overall or Billing scores were scheduled for review and reassessment in FY 2018. While many of the reassessed providers demonstrated improvements on an individual level from first FY 2018 review to second FY 2018 review, those that continued to fall below the established thresholds are to be scheduled for an additional two reviews in FY 2019. Additionally, providers were supplied with additional technical assistance during reviews to assist with performance improvement. \n \nTable 18. 
Reassessment Review Results \n \nNumber of Providers Who Increased in Score from \n \nFY 2018 Review #1 to FY 2018 Review #2 \n \nBHQR CSU \n \nCategory \n \n(N = 20) (N = 2) \n \nBilling \n \n16 (80%) \n \nNA \n \nIRR \n \nNA \n \n1 (50%) \n \nService Guidelines \n \n12 (60%) 2 (100%) \n \nFocused Outcome Areas \n \n15 (75%) 2 (100%) \n \nAssessment \u0026 Planning \n \n17 (85%) \n \nNA \n \nOverall Scores \n \n17 (85%) 2 (100%) \n \nFY 2018 BH Accomplishments \nThroughout the year, the Quality Management Department partners with DBHDD Quality Improvement, DBHDD subject matter experts, and others to discuss and review findings, concerns, and areas of need across the provider network based on monthly, quarterly, and annual review results. It is through this partnership the Collaborative derives recommendations of revisions to the review process. \nBelow is a listing of accomplishments achieved by the Collaborative that have occurred throughout the 2018 fiscal year. \n BHQR and CSUQR tool revisions for clarity, alignment, and formalized criteria including; \n \n89 | P a g e \n \n Quality Management Annual Report FY 2018 \no Assessment and Planning discharge criteria and diagnosis verification o Defining requirements for what constitutes a medical assessment and a behavioral \nhealth assessment. o Addition of Opioid Maintenance Therapy as a measurable service o Revision Staff Interview questions  Quality Training Series o Safety Planning 101 o Intensive Family Intervention (IFI) Training o Residential Services Documentation o Documentation 101 o Progress Notes 101 o DBHDD Provider Manual: Your Guide to Success  On July 1, 2018, CSUQR process and tools were incorporated into the electronic system (Delmarva Quality Management System) to ensure accuracy, standardization, and formality through electronic reporting.  
A new quality-of-care process was piloted to collect, monitor, and report various issues to DBHDD specific to providers' physical environments, staffing, and quality of services which are outside of the scope of the quality reviews.  To standardize and mirror the sampling processes with IDD reviews, Quality Management grouped BH providers into three categories (Small, Medium, Large) based on the number of unique individuals served in the six months preceding a review. Subsequent revisions to BHQR sample sizes continue to reflect these provider groupings.  To increase engagement of individuals with complex care needs and reduce readmission rates, Quality Management educated Crisis Stabilization Unit staff about the benefits of the Collaborative's Care Coordination team.  Increased billing threshold for reassessed providers from 70 to 80 percent based on annual results and network success. \n90 | P a g e \n \n Quality Management Annual Report FY 2018 \n Annual review of all BHQR and CSUQR tools in preparation for new fiscal year.  Quality Management revised and updated the quality section on the Collaborative's website \nto increase ease of use and access to resources available. \nBH Systems Strengths and Recommendations for Improvement \nIn FY 2018, both BHQR and CSUQR scores increased, with both review types statewide scores resulting in Overall score of 88 percent. Findings from both BHQR and CSUQR show a result equal to or exceeding 90 percent for both Service Guidelines (BHQR: 90%, CSUQR 91%), and FOAs (BHQR: 92%; CSUQR 91%). \nWhen comparing FY 2018 scores to FY 2017, there were specific areas that show statistically significant improvement (p = \u003c.01). These included the following: \n The BHQR Assessment and Planning, a category comprised of 10 indicators, had significant increases from the previous year with the exception of the individual meeting admission criteria. However, this specific indicator had the same result of 97 percent for both fiscal years. 
\n While sixteen of the services reviewed with the BHQR Service Guidelines demonstrated improvement from FY 2017 to FY 2018, both Individual Counseling and Psychosocial Rehabilitation Program have consistently documented significant improvement (p \u003c .01) year to year since FY 2016. \n For FY 2018, of the three BHQR FOA Safety indicators, the one most often scored \"no\" was \"Individuals (or their legal guardians) signed medication consent forms along with the prescriber\". However, results of this indicator have trended upward since contract inception and increased by ten points from FY 2017 to a result of 68 percent for FY 2018. \n All BHQR Person Centered Practice FOA indicators improved significantly demonstrating the individual is receiving individualized services while being an active participant in the planning, modification, or receiving of services. Additionally, records more consistently documented that a plan reassessment had been completed when needed in FY 2018 (87%) than in FY 2017 (82%). \n91 | P a g e \n \n Quality Management Annual Report FY 2018 \n FY 2018 demonstrated even higher levels of documentation specific to the BHQR Choice FOA than in FY 2017. While results of all indicators exceeded 90 percent in FY 2017 for this category, increases in \"Documentation of the providers supplying alternatives when barriers had been identified\" rose from 92 percent to 95 percent. \n Documentation reviewed during the BHQR clearly demonstrated evidence that the \"Individual had been provided supports and service options\" (98%) as well as \"Individuals had their known preferences and differences followed to the extent possible\" by the service delivery provider (98%). \n Specific to the CSUQR IRR, all areas demonstrated significant improvement in scoring from the previous year with the exception of Course of Stay and Treatment Planning. 
Furthermore, when analyzing C\u0026A records within the CSUQR, several FOA indicators met or exceeded 90 percent with some reaching 100 percent. \n There was significant improvement (p \u003c .01) in documentation reviewed from FY 2017 to FY 2018 that Individuals at the CSU had rights information readily available written in a language accessible to the individual (FY 2018 98%). \n In CSUQR records, the discharge summary/note included the necessary criteria improved from the previous fiscal year by 30 points (92%). \nQuality Management found the following additional strengths in the system: \n Through ongoing technical assistance and assessment of practices that support whole-person treatment, reviewed documentation indicated both behavioral health, as well as crisis stabilization unit providers, incorporated techniques to address the whole health and wellness of individuals more frequently in FY 2018. While this occurred in both BHQR and CSUQR scores, significant improvement in the BHQR indicators within Assessment and Planning and the FOA of Whole Health, demonstrated increased awareness, assessment, and service planning by providers with individuals. \n The BHQR Service Guidelines was the highest scoring category of the four in FY 2016, with an average of 90 percent. Although it declined slightly in 2017 by two points, it has since returned to a 90 percent result. Moreover, nearly three quarters of the Service Guidelines \n92 | P a g e \n \n Quality Management Annual Report FY 2018 \nscores within the BHQR met or exceeded 90 percent and nine reviews received a perfect score; three of which were providers reviewed for the first time.  All child and adolescent CSU records demonstrated that the individual was included in the modification of their plans, providers honored youths' preferences and differences, and providers offered individuals options of supports and services.  
Although improved each fiscal year, the billing category in the BHQR remains second lowest at 85 percent. Thus, keeping in line with standard requirements, DBHDD provider manual guidelines, and KPI metrics, it is recommended to increase the threshold for reassessment to 90 percent from 85 percent for both Overall and Billing score for FY 2019. \nWhile several providers remained consistent or demonstrated improvements in their results from FY 2017 to FY 2018, opportunities for improvement remain. Several targeted training efforts have occurred over FY 2018 to address low scoring indicators and or service specific trainings resulting from BHQR or CSUQRs scoring and reviews. Additionally, Quality Management has devised several recommendations identified below based on results supplied within this annual report. \nRecommendation I: Continue annual review and evaluation of existing BHQR and CSUQR tools and requirements, as well as inclusion of specific reasons not met, to align with current state requirements and DBHDD recommendations in accordance with the DBHDD Provider Manual. \nRecommendation II: BHQR services additions or changes to include: \n Addition of Substance Abuse Intensive Outpatient Program (SAIOP) to be inline with Provider Manual \n Peer Support Whole Health \u0026 Wellness divided to reflect Individual versus Group  Medication Assisted Treatment as a measurable service \nRecommendation III: Continue sample size analysis to ensure records sampled at the individual provider level are representative of the provider and statewide utilization levels. 
\nRecommendation IV: Continue the Quality Training Series in FY 2019 based on analysis of FY 2018 data, for all BH and CSU agencies to include, but not limited to, the following: \n93 | P a g e \n \n Quality Management Annual Report FY 2018 \n Utilization of BHQR \u0026 CSUQR results at the agency level  Crisis and Safety planning for Crisis Stabilization Units  Documentation specific to paraprofessional staff  Safe Handling and storage of Medications (CSUQR specific) Recommendation V: Continue inclusion of Immediate Actions/Recommendations at BHQR and CSUQR exit conferences to provide immediate feedback to providers. Recommendation VI: Increase Overall score and Billing score thresholds from 80 to 90 percent to align with DBHDD Key Performance Indicators for FY 2019. Recommendation VII: Complete analysis of Intensive Family Intervention (IFI) services to determine specific areas for improvement. Recommendation VIII: Initiate baseline data collection conducted at each BHQR regarding Overall Programmatic indicators, such as services provided at Medicaid approved sites, agency-wide staffing requirements and policies/procedures. Recommendation IX: Include a scored indicator regarding whether or not CSU discharges are submitted within 48 hours to align with DBHDD requirements. Recommendation X: As IDD QEPR tools are revised for FY 2020, continued alignment and collaboration between indicators asked for both BHQR and QEPR. 
\n94 | P a g e \n \n Quality Management Annual Report FY 2018 \nSection 4: Intellectual and Developmental Disabilities \nBackground \nPerson Centered Reviews (PCR) and Quality Enhancement Provider Reviews (QEPR) assess the extent to which individuals with intellectual and developmental disabilities are supported with the services they receive and achieve outcomes important to them, and to evaluate provider systems.9 \nThe purpose of the PCR is to assess the quality of life as well as the effectiveness of and the satisfaction individuals receiving services have with the service delivery system. The NCI Adult In-Person Survey (formerly the Adult Consumer Survey) is conducted as part of the PCR process. The data collected for the survey is entered in the ODESA system [web-based application developed and maintained by Human Services Research Institute (HSRI)]. HSRI analyzes and generates annual reports comparing Georgia's results to other participating states and the national average. \nThe purpose of the QEPR is to review providers' systems and practices to ensure they meet requirements set forth by the Medicaid waiver and DBHDD, and to evaluate the effectiveness of their service delivery system. Follow-up review activities provide technical assistance to help providers improve service delivery systems through Quality Technical Assistance Consultation (QTAC). \nQuality assessors use various tools to collect data from interviews, observations, and record reviews to compile a well-rounded picture of individuals receiving services: their circle of supports, how involved they are in the decisions and plans developed for them, as well as the quality of services provided. Individuals sampled for the PCR or QEPR participate in the Individual Interview (II) and Individual Service Plan Quality Assurance (ISP QA) Checklist. 
Both review processes include a Provider Record Review (PRR) and the Developmental Disability Service Specific (DDSS) requirements for each service received, a Staff Interview (SI) with a sample of direct support providers, and on-site observation of day or residential programs. \n \n9 Please refer to the following link to access a full description of the review process and review tools. The Georgia Collaborative: Quality Management IDD \n \n95 | P a g e \n \n Quality Management Annual Report FY 2018 \nDuring the PCR, the Support Coordinator Record Review (SCRR) and Support Coordinator Interview (SCI) tools are completed for the Support Coordinator working with the individual receiving services. During the QEPR, each provider organization receives one administrative review to monitor compliance with requirements through the Qualifications and Training (Q\u0026T) component of the review. The Q\u0026T includes review of a sample of personnel/staff records to determine if staff has the necessary qualifications specific to services rendered, and whether required training was completed within specified timeframes. \nIn this section of the report, each review tool presents results for both the PCR and QEPR. Aggregate scores of IDD quality reviews are a weighted average, based on the total number of standards scored [total met / (total met + total not met)]. 
Except for the Q\u0026T and DDSS tools, indicators within each tool are grouped into six FOAs, which are areas of the individual's life important to achieve and maintain: \n Whole Health--individuals receiving services are healthy, aware of their health-related needs, and direct their own health care regimen \n Safety-- individuals receiving services are safe in their home and work environments and in their communities; they understand or are learning how to self-preserve in all environments \n Person Centered Practices--supports and services are provided based on personal preferences and direction \n Community Life--individuals receiving services are actively participating and developing social roles in their communities as desired \n Choice--information needed to make informed choices on life decisions, such as where to live, where to work, and which supports, services and providers to use, is available \n Rights--rights are upheld and information and education is provided to ensure understanding of rights \n96 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nSampling Method \nPerson-Centered Review (PCR) \nThe PCR sample used a random sample of 484 eligible adults, age 18 and over, who had not received a PCR during the previous year and were receiving services reviewed through this contract. The PCR sample was stratified and sampled proportionate to each of the six DBHDD regions; therefore, the number of PCRs per region are proportionate to the number of individuals receiving services within the region. Table 19 shows the number and percent for the population of individuals receiving services across the state, the sampling frame (eligible for a PCR this year), as well as the number of PCRs completed within each region. \n \nRegion 1 2 3 4 5 6 \nTotal \n \nTable 19. 
FY 2018 PCR Sample by Region \n \nPopulation \n \nPopulation eligible for a PCR10 \n \nPCR Sample \n \nN \n \n% \n \nN \n \n% \n \nN \n \n% \n \n2,406 \n \n19.9% \n \n1,489 \n \n19.6% \n \n89 \n \n18.4% \n \n2,090 \n \n17.3% \n \n1,154 \n \n15.2% \n \n78 \n \n16.1% \n \n3,007 \n \n24.9% \n \n2,241 \n \n29.5% \n \n140 \n \n28.9% \n \n1,353 \n \n11.2% \n \n987 \n \n13.0% \n \n63 \n \n13.0% \n \n1,560 \n \n12.9% \n \n790 \n \n10.4% \n \n56 \n \n11.6% \n \n1,674 \n \n13.8% \n \n934 \n \n12.3% \n \n58 \n \n12.0% \n \n12,090 \n \n100.0% \n \n7,595 \n \n100.0% \n \n484 \n \n100% \n \nQuality Enhancement Provider Review (QEPR) \nThose providers not reviewed in FY 2016 or FY 2017 were eligible for a QEPR in FY 2018.11 QEPR-eligible providers rendering services to anyone selected for the PCR sample were automatically included in the QEPR sample. Additional providers were randomly selected from the remaining QEPR-eligible providers until a sample size of 93 was met. Finally, two randomly selected Support Coordinator (SC) Agencies and four providers selected by DBHDD completed the QEPR sample of 99 \n \n10 Providers who participated in a QEPR in FY17 and individuals who only received services from those providers were excluded. 11 Except for Community Service Boards (CSBs), which are eligible for review every other year. \n97 | P a g e \n \n Quality Management Annual Report FY 2018 \nproviders. Quality Management conducted an additional QEPR of one crisis services provider, to bring the total to 100 QEPRs for the year. \nBased on the number of eligible individuals served, providers were stratified into three categories by size: \"Small,\" \"Medium,\" and \"Large.\" The number within each size category is shown by year in Table 20. \n \nTable 20. 
QEPR Provider Sample by Size \n \nNumber \n \nProvider Size \n \nFY 2016 FY 2017 \n \nSmall (caseload  30) \n \n46 \n \n52 \n \nMedium (30 \u003c caseload \u003c 100) \n \n36 \n \n19 \n \nLarge (caseload  100) \n \n16 \n \n24 \n \nSupport Coordination Agency Crisis Stabilization Unit \n \n1 \n \n1 \n \n1 \n \n4 \n \nTotal \n \n100 \n \n100 \n \nFY 2018 72 12 13 2 1 \n100 \n \nReview Processes \nPCR and QEPR \nThe focus of the PCR is on quality of life and quality of services received. The focus of the QEPR is on the provider's overall practices, quality of services offered to all individuals served by the provider, and level of compliance with Medicaid waiver and state requirements. Both the PCR and QEPR use the Individual Observation Staff Assessment (IOSA), which includes an interview with individuals receiving services, their staff, and on-site observation(s) (OBS) at residential and day programs, as applicable. In addition to the IOSA, the PCR and QEPR include an evaluation of the Individual Service Plan using a quality assurance checklist (ISP QA), a review of the provider's records, including documentation of service delivery requirements using the Developmental Disabilities Service Specific (DDSS) review tool. The number of Provider Record Reviews (PRR) and DDSS reviews completed for each process depends upon the number of services received by the individuals in the sample. \nWhile the PCR and QEPR share most of the same tools, there are a few exceptions. The PCR includes an interview with the Support Coordinator and a review of the record in the Consumer Information System (CIS) maintained by the Support Coordinator for the individual. 
The QEPR has an additional \n \n98 | P a g e \n \n Quality Management Annual Report FY 2018 \nreview tool, Qualifications and Training, which is used to review a sample of records from all staff ensuring required training and other state requirements are current and documented (e.g., background screenings, level of education). The total number of records reviewed or interviews completed in FY 2018 for the PCR and QEPR is listed in Table 21, for each review tool. \n \nTable 21. FY 2018 Number of Reviews by Tool and Review Type \n \nReview Tool \n \nPCR \n \nQEPR \n \nTotal \n \nIOSA - Individual Interview (II) \n \n484 \n \n436 \n \n920 \n \nIOSA  Observation (OBS) \n \n353 \n \n285 \n \n638 \n \nIOSA - Staff Interview (SI) \n \n443 \n \n316 \n \n759 \n \nISPQA Checklist \n \n484 \n \n440 \n \n924 \n \nSupport Coordinator Interview (SCI) \n \n484 \n \n8 \n \n492 \n \nSupport Coordinator Record Review (SCRR) \n \n484 \n \n40 \n \n524 \n \nProvider Record Review (PRR) \n \n856 \n \n561 \n \n1,417 \n \nStaff Qualifications and Training (Q\u0026T) \n \nN/A \n \n780 \n \n780 \n \nDDSS - Behavioral Supports Consultation \n \n7 \n \n7 \n \n14 \n \nDDSS - Community Access (Group) \n \n227 \n \n327 \n \n554 \n \nDDSS - Community Access (Individual) \n \n5 \n \n126 \n \n131 \n \nDDSS - Community Living Support \n \n49 \n \n141 \n \n190 \n \nDDSS - Community Residential Alternative \n \n107 \n \n298 \n \n405 \n \nDDSS - Crisis \n \n0 \n \n1 \n \n1 \n \nDDSS - Occupational Therapy \n \n0 \n \n26 \n \n26 \n \nDDSS - Physical Therapy \n \n0 \n \n6 \n \n6 \n \nDDSS - Prevocational \n \n32 \n \n54 \n \n86 \n \nDDSS - Respite \n \n4 \n \n14 \n \n18 \n \nDDSS - Support Coordination \n \n484 \n \n43 \n \n527 \n \nDDSS - Supported Employment DDSS - Speech Therapy DDSS - Transportation Total Number of PCRs/QEPRs \n \n13 \n \n49 \n \n62 \n \n0 \n \n6 \n \n6 \n \n1 \n \n3 \n \n4 \n \n484 \n \n100 \n \n584 \n \nWhen a PCR is completed, a report is given to the provider that includes the 
strengths of the support team. It includes an evaluation of the supports and services provided and recommendations for the individual interviewed and the support team (including the Support Coordinator, provider, and \n \n99 | P a g e \n \n Quality Management Annual Report FY 2018 \nfamily). A provider who participates in a QEPR receives a comprehensive report that identifies strengths of the service delivery systems, recommendations for improvement, and several performance scores. These scores include the Overall score, Q\u0026T score, and DDSS score. \nQuality Technical Assistance Consultation (QTAC) \nThe QTAC is an additional review that is conducted 90 days after completion of the QEPR. This review is based on any service concerns identified during the PCR or QEPR, or if the provider requests technical assistance. Using findings from the QEPR, technical assistance is provided to support providers and to offer suggestions and guidance to help improve their service delivery systems. The process uses a consultative approach to address specific issues and concerns related to someone receiving services or systems and practices that need improvement. The QTAC supplements the PCR and QEPR processes by affording contracted providers the opportunity to solicit technical assistance for specific needs within the service delivery milieu. \nPerson Centered Review \nPCR Scores by Tool \nFigure 35 shows the average score for each tool used during the PCR, comparing FY 2016 through FY 2018. Findings each year show a similar pattern, with scores for the Individual Interview, Staff Interview and Support Coordinator Interview higher than scores for provider or Support Coordinator documentation (record reviews). Scores across all tools decreased from FY 2016 to FY 2017, and in FY 2018, the scores increased on all of the tools. 
The FY 2018 scores for Observations, PRRs, and SCRRs were significantly higher than FY 2017 (p \u003c .01); however, it should be noted the percentages are calculated using results from all indicators scored within each FOA and the denominators were quite large, increasing the likelihood of statistical significance. \n100 | P a g e \n \n Quality Management Annual Report FY 2018 \nFigure 35. PCR Scores by Tool and Year \n \n100% 90% 80% 70% \n \n95.1% \n \n98.4%96.8%98.2% \n \n91.9%92.2% \n \n96.4% 94.3% \n \n95.6% \n \n79.0% 74.8% \n70.2% \n \n79.9% 77.4% \n73.7% \n \n90.8% 83.5% \n83.3% \n \n60% \n \n50% \n \nIndividual Observation Staff Interview Provider Record Support \n \nSupport \n \nInterview \n \nReview \n \nCoordinator Coordinator \n \nRecord Review Interview \n \nFY 2016 (N = 484) \n \nFY 2017 (N = 481) \n \nFY 2018 (N = 484) \n \nSeveral indicators may have driven the increases in the Observation, PRR, and SCRR tools. Indicators listed below were included if the sample size each year was at least 50 and the increase from FY 2017 to FY 2018 was at least 10 percentage points and significant at p \u003c .01: \n \nObservations \n Opportunities were used to provide health and rights education to the individual  Staff took advantage of opportunities to provide experiences to support informed choice  Staff took advantage of opportunities to provide education to the individual on \nresponsibilities regarding rights \n \n101 | P a g e \n \n Quality Management Annual Report FY 2018 \nProvider Record Review \n Documentation included the following copies of the individual's current preventative healthcare reports based upon gender, age and need (if the individual refused the treatment, this is documented): PSA test, bone density, mammogram \n Documentation included the following copies of the individual's specialty healthcare reports based upon diagnoses, assessments and referrals (if the individual refused the treatment, this is documented): Neurological 
evaluation/supports, wheelchair evaluation/supports, GI evaluation/supports, and psychiatric evaluation/supports \n Documentation demonstrated the results of monitoring (DDP and/or Behavior Specialist) and analysis of the Safety Plan or Positive Behavior Support Plan, and staff had been trained on the individual's Positive Behavior Support Plan or Safety Plan for challenging behaviors \n Documentation demonstrated education is provided to the individual and family (if approved by the individual) on all prescribed medications, and to the individual on risks and benefits of medication \n Critical incident reports were completed, and follow-up/remediation occurred  When a Positive Behavior Support Plan was used to reduce challenging behaviors, there must \nbe evidence the following had been addressed: The provider monitored plans for reviews, analyzed trends, and summarized the effectiveness of the plan and termination criteria  Ongoing evidence of identifying, addressing, and seeking prevention of abuse, neglect, and exploitation was documented  Documentation showed evidence of exploring ways for the individual to participate in community activities and types of employment the same as other citizens  Documentation showed evidence that meaningful choices were offered to individuals for providers and services, and the manner in which services were provided  Documentation demonstrated evidence of ongoing rights education and that individual rights and responsibilities were written in an understandable language/format  Documentation contained records of day to day living expense agreements \n102 | P a g e \n \n Quality Management Annual Report FY 2018 \nSupport Coordinator Record Review \n Emergency preparedness plans were in place  HIPAA Privacy and Security Rules, as outlined at 45 CFR Parts 160 and 164, were specifically \nreviewed with individuals  Exercising meaningful choices regarding the manner by which services were provided was \nreflected in the documentation 
\nPCR Scores by Focused Outcome Area (FOA) \nAll the PCR tools (II, SI, OBS, PRR, SCI, and SCRR) are designed to measure the six FOAs, with many indicators within each FOA. Results for each FOA, based on all six tools by fiscal year are shown in Figure 36. While scores for the FOAs had shown a decrease from the first to the second year of the contract, in FY 2018 (third contract year), scores have increased on each FOA. The increases range from 1.3 percentage points for Safety to 5.8 points in Community Life. These all represent a statistically significant increase; however, the denominators are quite large and small differences may reflect significance at p \u003c .01. \n103 | P a g e \n \n Quality Management Annual Report FY 2018 \n \n100% 90% 80% \n \nFigure 36. PCR Scores by FOA and Year \n \n93.6% \n87.4% 84.7% \n \n96.5% \n \n91.6% \n \n90.3% \n \n89.1% \n \n84.8% 82.6% \n \n80.6% 76.9% \n \n90.7% \n \n95.4% \n92.0% 90.2% \n \n82.3% 78.7% \n \n71.1% \n70% \n \n60% \n \n50% Whole Health Safety \n \nPerson Centered Practices \n \nCommunity Life \n \nChoice \n \nRights \n \nFY 2016 (N = 484) FY 2017 (N = 481) FY 2018 (N = 484) \n \nPCR Scores by Tool and Focused Outcome Area (FOA) \nIn this section, PCR results for FY 2018 are presented by FOA and tool (Figure 37). Findings for the review components varied across each FOA and indicated the following: \n Similar to FY 2017, documentation reviewed within the provider record review was the lowest scoring component across all FOAs, except for Rights (84.4%). \n Support Coordinators' documentation was the lowest scoring component in Rights (57.3%), for which the interview with the Support Coordinator was also relatively low when compared to other tools (78.5%). \n FY 2018 Staff and Individual Interview findings were at least 86 percent or higher, across all FOAs. \n \n104 | P a g e \n \n Quality Management Annual Report FY 2018 \nFigure 37. 
FY 2018 PCR Scores by Tool and FOA \n \nWhole Health \n \n88.2% 94.4% \n86.2% 97.7% 95.5% \n75.7% \n \nCommunity Life \n \n86.6% 89.0% 79.6% \n95.0% 67.6% 67.0% \n \nSafety \n \n92.3% 97.1% \n81.7% 99.9% 97.2% \n80.5% \n \nChoice \n \n96.3% 93.8% 85.3% \n97.9% 70.6% 58.5% \n \nPerson Centered Practices \n0% 25% \nIndividual Interview (N = 484) Observation (N = 353) \n \n87.9% 95.3% \n84.8% 95.5% \n80.3% 77.6% \n \nRights \n \n57.3% \n \n97.6% 98.4% 78.5% 99.0% \n84.4% \n \n50% 75% 100% \n \n0% 25% 50% 75% 100% \n \nStaff Interview (N = 443) \nSupport Coordinator Record Review (N = 484) \n \nSupport Coordinator Interview (N = 484) \nProvider Record Review (N = 856) \n \nFOAs by Demographics \nFor this section, analysis was completed comparing FOA results across regions, residential settings, and funding source. Results for FOA by region, residence and funding source are presented (Figures 38 through 40). To enhance the readability of the side-by side demographics, the start point of the graphs' axis is 50 percent. Please note that some of the categories have a relatively small sample. The sample for host homes is 37 and for state funded services (SFS) is 52; however, some findings may be worth further analysis. Findings indicate the following: \n With the exception of Community Life, Region 1 showed the lowest scores of the FOAs, particularly in Whole Health and Person Centered Practices, for which the score was 10 points lower than the highest scoring region. \n \n105 | P a g e \n \n Quality Management Annual Report FY 2018 \n Individuals receiving services and living with a parent scored lower in Whole Health than individuals in any other residential setting (p \u003c .01). \n Individuals receiving services through the COMP waiver were more likely to have Whole Health indicators met than for services rendered through NOW or SFS (p \u003c .01). 
\n Individuals receiving services through SFS were less likely to have Safety indicators met than services provider through the COMP waiver or NOW waiver (p \u003c .01). \nFigure 38. FY 2018 PCR Scores by FOA and Region \n \nWhole Health \n \n82.5% 92.0% \n86.4% 89.7% \n88.2% 86.8% \n \nCommunity Life \n \n75.9% 79.2% \n77.6% 78.0% 75.6% 73.9% \n \nSafety \n \n88.5% 94.8% \n89.9% 93.9% 93.4% \n91.4% \n \nChoice \n \n78.7% 85.5% \n81.4% 85.7% \n83.4% 80.6% \n \nPerson Centered Practices \n \n79.2% 89.7% \n82.4% 89.4% \n87.9% 84.1% \n \n50% 60% 70% 80% 90% 100% \n \nRights \n \n88.8% 94.2% \n91.0% 94.0% 95.1% \n91.3% \n \n50% 60% 70% 80% 90% 100% \n \nRegion 1 (n = 89) Region 4 (n = 63) \n \nRegion 2 (n = 78) Region 5 (n = 56) \n \nRegion 3 (n = 140) Region 6 (n = 58) \n \n106 | P a g e \n \n Quality Management Annual Report FY 2018 \nFigure 39. FY 2018 PCR Scores by FOA and Residential Setting \n \nWhole Health \n \n89.6% 89.8% 88.1% 84.5% \n \nCommunity Life \n \n76.2% 80.3% 79.5% \n76.6% \n \nSafety \n \n92.6% 93.2% 92.2% 90.2% \n \nPerson Centered Practices \n \n84.9% 87.3% 87.8% \n83.7% \n \n50% 60% 70% 80% 90% 100% Group Home (n = 161) Own Place (n = 48) \n \nChoice \n \n82.2% 83.6% 85.0% 81.9% \n \nRights \n \n91.4% 92.4% 92.2% 92.7% \n \n50% 60% 70% 80% 90% 100% Host Home (n = 37) \nWith Parents (n = 236) \n \nFigure 40. 
FY 2018 PCR Scores by FOA and Funding Source \n \nWhole Health \n \n84.7% 88.4% \n85.0% \n \nCommunity Life \n \n78.6% 76.5% 76.5% \n \nSafety \n \n91.2% 92.2% 87.8% \n \nPerson Centered Practices \n \n85.4% 84.8% 83.4% \n \n50% 60% 70% 80% 90% 100% \n \nChoice \n \n83.3% 82.3% 80.4% \n \nRights \n \n93.0% 91.9% 91.4% \n \n50% 60% 70% 80% 90% 100% \n \nNOW (n = 113) COMP (n = 319) SFS (n = 52) \n \n107 | P a g e \n \n Quality Management Annual Report FY 2018 \nOpportunities for Growth by FOA \nEach PCR tool is organized around the FOAs, Figures 36 through 40, and each FOA uses several standards, or indicators, to measure how well the area is being addressed. As indicated in this report, most findings showed relatively high scores across all perspectives of the PCR, i.e., interviews, on-site observations and documentation areas all averaging over 75 percent; and overall improvement shown for each FOA. However, there are specific areas within each FOA where indicator level scoring was relatively low, has remained low over the past two or three years, or has decreased since FY 2017. These are presented in this section by FOA. The p values for difference of proportions between the FY 2017 and FY 2018 results are included. Statistical significance for this report has been determined to be p \u003c .01 (highlighted in blue), indicating the difference in the scores is statistically significant. \nWhole Health \nThe average Whole Health score in FY 2018 was 87.4 percent, an increase from 84.7 percent in FY 2017. Close to 97 percent of Support Coordinators monitor the health and welfare of individuals receiving services, which is an increase from 93.5 percent in FY 2017. Furthermore, almost all individuals interviewed had supports and service to address whole health needs (99.4%) and access to medications (99.8%). \nThe following table (Table 22) indicates a continued decline in several areas, based on results from the Individual Interview. 
Each year since 2016, individuals receiving services have been less likely to be aware of what medications they are taking and why, the side effects of the medications, and what the medication should look like. They were also less likely each year to be aware of their diagnosis. \nStaff support for the individual to learn about medications and their side effects remained relatively low in FY 2018, as did provider documentation of a Pap test/pelvic exam and hearing evaluations. Documentation indicated an increase in the degree to which providers offer education on medication risks and side effects, and on all prescribed medications. These are, however, still among the lower scoring PRR Whole Health standards in FY 2018. In addition to low scoring indicators listed in the table, one area with a higher score in FY 2018 has shown a significant decline since FY 2017 (p \n108 | P a g e \n \n Quality Management Annual Report FY 2018 \n\u003c .01). Although still scoring relatively high (90.1%), individuals receiving services were less likely to self-manage health according to preferences (down 7.5 points). \n \nTable 22. 
Low Scoring Whole Health Indicators (PCR) \n \nIndividual Interview Individual receiving services was aware of: \nThe side effects of the medications \nWhy medications are prescribed \nWhat medications he/she is taking \nThe medication's color, shape, when it is taken \nHis or her diagnoses \nStaff Interview Staff could describe: How to support the individual to learn about medications \nThe side effects of medications taken \nProvider Record Review Documentation demonstrated how/include: Copies of the individual's current preventative healthcare reports for a Pap test or Pelvic exam Copies of the individual's current preventative healthcare reports for a hearing evaluation or supports Individuals and families are provided education on all prescribed medications \nEducation is provided to the individual on the risks and side effects of the medication \n \nFY 2016 FY 2017 \n \n74.3% (n = 350) \n86.0% (n = 351) \n78.3% (n = 428) \n84.9% (n = 351) \n81.9% (n = 481) \n \n52.4% (n = 410) \n68.5% (n = 410) \n68.3% (n = 435) \n80.4% (n = 408) \n71.9% (n = 473) \n \n81.0% (n = 357) \n86.3% (n = 388) \n \n67.7% (n = 195) \n78.7% (n = 211) \n \n48.0% \n \n31.7% \n \n(n = 200) (n = 316) \n \n48.8% \n \n38.3% \n \n(n = 447) (n = 862) \n \n56.6% (n = 327) \n60.5% (n = 332) \n \n25.7% (n = 723) \n33.9% (n = 610) \n \nFY 2018 \n43.6% (n = 433) \n62.5% (n = 435) \n63.5% (n = 458) 72.6% (n = 431) 64.1% (n = 482) \n68.4% (n = 307) \n75.4% (n = 353) \n33.1% (n = 356) \n37.5% (n = 814) \n48.3% (n = 621) \n52.1% (n = 511) \n \np value FY 17/18 \np = .01 p = .07 p = .01 p = .01 p = .01 \np = .87 p = .24 \np = .68 p = .73 p = 0 p = 0 \n \n109 | P a g e \n \n Quality Management Annual Report FY 2018 \nSafety \nSafety scores had an average of 91.6 percent in FY 2018, compared to 90.3 percent in FY 2017. 
Almost every provider and Support Coordinator reviewed (\u003e 90%) had emergency preparedness plans in place, had documented specific contact information, and documented risks or safety issues for the individual. Support Coordinators advocated to ensure follow-up occurred for safety (97.0%) or critical incidents (96.6%). Almost every individual interviewed felt safe in all environments and free from all types of abuse (\u003e 99%). \n \nTable 23 includes the low scoring Safety indicators for FY 2018, indicating some had significantly decreased (p \u003c .01) since FY 2017. Findings suggest a decrease each year in an understanding of what to do if support staff or other supports become incapacitated. At the same time, many staff are not providing education to individuals receiving services on how to use the crisis hotline and most providers were not documenting how they address abuse, neglect and exploitation or offer education on how individuals can self-preserve. Support Coordinators were not always aware of restrictive interventions, needed behavior or crisis plans, and some triggers related to behavior health. \n \nTable 23. Low Scoring Safety Indicators (PCR) \n \nIndividual Interview \nIndividual is aware of or recognizes what constitutes exploitation \nIndividual is aware of how to respond in an emergency/safety situation if supports are incapacitated Staff Interview Staff is providing education on how to use the crisis hotline Provider Record Review Documentation demonstrated: \n \nFY 2016 FY 2017 FY 2018 \n \n90.4% \n \n76.6% \n \n70.5% \n \n(n = 480) (n = 480) (n = 482) \n \n87.3% \n \n78.7% \n \n71.5% \n \n(n = 473) (n = 468) (n = 481) \n \n88.4% \n \n52.8% \n \n59.9% \n \n(n = 392) (n = 91) (n = 352) \n \np value FY 17/18 p = .03 p = .01 \np =.30 \n \n110 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nTable 23. 
Low Scoring Safety Indicators (PCR) \n \nOngoing evidence of identifying, addressing, and seeking prevention of abuse, neglect, and exploitation How education is offered to self-preserve or develop effective resiliency skills according to the individual's learning style Evidence of a written order by the physician including the rationale and instructions for the use of adaptive supportive devices or medical protective equipment Support Coordinator Interview Support Coordinator was aware: \nOf interventions/plans in place \nOf the individualized techniques for following the behavior plans: \nBehavior \nEmergency \nSafety \nOf specific triggers related to behavior health issues: \nDecompensation \nRelapse \nBehavioral Issues \n \nFY 2016 53.7% (n = 499) \n52.5% (n = 549) \n97.8% (n = 93) \n87.8% (n = 181) \n79.5% (n = 132) 92.2% (n = 258) 91.6% (n = 237) \n87.4% (n = 174) \n88.9% (n = 180) 88.8% (n = 180) \n \nFY 2017 22.9% (n = 893) \n41.1% (n = 893) \n72.3% (n = 101) \n47.8% (n = 92) \n55.8% (n = 104) 70.6% (n = 163) 71.5% (n = 172) \n58.2% (n = 117) 60.5% (n = 114) 74.6% (n = 189) \n \nFY 2018 39.5% (n = 838) \n41.5% (n = 843) \n66.1% (n = 165) \n47.7% (n = 107) \n53.3% (n = 122) \n66.5% (n = 179) 69.3% (n = 202) \n57.3% (n = 131) 60.6% (n = 142) 69.7% (n = 188) \n \np value FY 17/18 \np = 0 p = .86 \np = .29 \np = .39 \np = .71 p = .42 p = .48 \np = .89 p = .91 p = .01 \n \n111 | P a g e \n \n Quality Management Annual Report FY 2018 \nPerson Centered Practices \nPerson Centered Practices showed an average score of 84.8 percent, an increase from 82.6 percent in FY 2017. Almost all Support Coordinators had the ISP in the record (98.6%). Support Coordinators (96.9%) and service providers (98.5%) documented how supports/services changed when needed. 
\nFindings each year (see Table 24) suggest a steady decline in the percent of individuals receiving services who understood how to make progress on their goals or how goals could be changed, and many staff, providers, and Support Coordinators were not reviewing progress on goals with the person. Many individuals receiving services were unable to describe something new experienced or learned in the past six months. \n \nTable 24. Low Scoring Person Centered Practices Indicators (PCR) \n \nIndividual Interview \n \nFY 2016 FY 2017 \n \nFY 2018 \n \np value FY 17/18 \n \nIndividual determines when progress on goals is achieved or goals are met \n \n87.7% \n \n81.8% \n \n75.3% \n \n(n = 480) (n = 479) (n = 482) \n \np = .01 \n \nIndividual describes progress on goals \n \n90.4% \n \n84.8% \n \n78.8% \n \n(n = 478) (n = 474) (n = 482) \n \np = .02 \n \nThe individual understands changes can be made to goals, supports, and services \n \n97.3% \n \n89.6% \n \n81.1% \n \n(n = 479) (n = 479) (n = 482) \n \np = 0 \n \nIn the past 6 months, the Individual was able to identify something: \n \nNew or experienced \n \n70.4% \n \n60.3% \n \n62.6% \n \n(n = 479) (n = 471) (n = 479) \n \np = .46 \n \nHad been learned \n \n78.8% \n \n65.8% \n \n67.2% \n \n(n = 480) (n = 474) (n = 478) \n \np = .46 \n \nStaff Interview \n \nStaff formally reviews progress on goals/objectives with the individual \n \n75.8% \n \n72.2% \n \n68.4% \n \n(n = 826) (n = 302) (n = 434) \n \np = .28 \n \nProvider Record Review Documentation reflected: \n \nThe individual's talents \n \n48.9% \n \n38.9% \n \n(n = 552) (n = 894) \n \nNA \n \nNA \n \nThe individual's strengths \n \n57.0% \n \n48.9% \n \n(n = 553) (n = 894) \n \nNA \n \nNA \n \n112 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nTable 24. 
Low Scoring Person Centered Practices Indicators (PCR) \n \nFY 2016 FY 2017 \n \nFY 2018 \n \np value FY 17/18 \n \nThe individual's talents/strengths12 \n \nNA \n \nNA \n \n44.2% (n = 847) \n \nNA \n \nThe individual's hopes and dreams \n \n53.3% \n \n42.0% \n \n31.5% \n \n(n = 552) (n = 893) (n = 847) \n \np = 0 \n \nDocumentation demonstrates review of progress and benefit of goals occurs regularly with the individual. Progress notes or learning logs describe progress toward goals including the individual's response to an intervention or activity, based on data Support Coordinator Record Review Documentation demonstrated: \nThe individual's talents \nThe individual's strengths \nProgress notes describe progress toward goals including response to the intervention or activity, based on data \n \n51.4% (n = 552) \n67.7% (n = 551) \n49.2% (n = 480) \n56.4% (n = 479) \n63.9% (n = 483) \n \n40.4% (n = 373) \n72.9% (n = 893) \n28.8% (n = 479) 41.0% (n = 480) 57.8% (n = 481) \n \n44.0% (n = 848) \n68.1% (n = 850) \n35.3% (n = 484) 44.2% (n = 484) 62.7% (n = 483) \n \np = .31 p = 0 \np = .03 p = .32 p = .05 \n \nCommunity Life \nCommunity Life, interaction with and integration in the surrounding community, showed the lowest average score among the FOAs for each year, 80.6 percent (FY 2016), 71.1 percent (FY 2017) and 76.9 percent (FY 2018) respectively. Most providers (96.5%) showed evidence that community services were provided in the community, instead of being provided at the day service location. Almost all individuals interviewed indicated they go out in the community (99.2%) and are able to choose the type of activity in which to participate (96.2%). \nInformation from the face-to-face interviews are shown in Table 25. Findings indicated from the three different interview perspectives (individual, support coordinator, staff) that, individuals \n \n12 The two separate indicators were combined into one for FY 2018. 
\n \n113 | P a g e \n \n Quality Management Annual Report FY 2018 \nreceiving services were often not developing, or supported to develop, valued social roles or exploring community employment options. However, while still among the lowest scoring interview indicators, improvement has been shown in several of these areas from all three perspectives. \n \nTable 25. Low Scoring Community Life Interview Indicators (PCR) \n \nFY 2016 \n \nFY 2017 \n \nFY 2018 \n \np value FY 17/18 \n \nIndividual Interview The individual was: \n \nExposed to new community activities ( in the past 6 months) \n \n71.4% \n \n63.2% \n \n70.1% \n \n(n = 472) (n = 473) (n = 478) \n \np = .02 \n \nActively pursuing preferences related to goals of employment (not a readiness model) \n \n82.4% \n \n70.4% \n \n71.5% \n \n(n = 335) (n = 338) (n = 312) \n \np = .76 \n \nProvided opportunities to develop new social roles \n \n75.4% \n \n69.4% \n \n74.8% \n \n(n = 476) (n = 477) (n = 477) \n \np = .06 \n \nProvided opportunities to learn about social 81.1% \n \n71.5% \n \n75.8% \n \nroles in the community \n \n(n = 476) (n = 478) (n = 476) \n \np = .13 \n \nStaff Interview Staff was able to describe how the individual is provided opportunities to: \n \nDevelop community employment \n \n71.4% \n \n58.3% \n \n60.7% \n \n(n = 795) (n = 235) (n = 338) \n \np = .57 \n \nDevelop new social roles \n \n59.3% \n \n60.4% \n \n70.6% \n \n(n = 791) (n = 298) (n = 429) \n \np = 0 \n \nSupport Coordinator Interview The Support Coordinator was: \n \nAware of the individual's new community experiences \n \n73.0% \n \n62.0% \n \n68.9% \n \n(n = 467) (n = 471) (n = 476) \n \np = .03 \n \nAware of how the individual is provided opportunities to develop new valued social roles in the community \n \n71.2% (n = 463) \n \n66.9% (n = 475) \n \n70.7% (n = 481) \n \np = .01 \n \nAble to define or explain how the individual 79.5% \n \n72.6% \n \n79.3% \n \nis supported to uphold valued social roles (n = 472) (n = 475) (n = 
484) \n \np = .02 \n \nDocumentation from Support Coordinator and Provider Record Reviews indicated that several areas showed improvement since FY 2017; however, these areas remain among the lowest scoring \n \n114 | P a g e \n \n Quality Management Annual Report FY 2018 \nindicators (Table 26). Many service providers and Support Coordinators were not documenting how they supported individuals to develop social roles, seek employment, or participate in the community. \n \nTable 26. Low Scoring Community Life Record Review Indicators (PCR) \n \nFY 2016 \n \nFY 2017 \n \nFY 2018 \n \np value FY 17/18 \n \nDocumentation demonstrated: \n \nOpportunities to seek employment in competitive integrated settings \n \nProvider Record Review \n \n55.2% \n \n24.9% \n \n24.3% \n \n(n = 364) (n = 794) (n = 729) \n \np = .77 \n \nSupport Coordinator Record Review \n \n48.2% \n \n41.9% \n \n43.7% \n \n(n = 363) (n = 394) (n = 373) \n \np = .61 \n \nDevelopment of social roles and natural supports that reflect the individual's interests \n \nProvider Record Review \n \n42.9% \n \n25.4% \n \n28.9% \n \n(n = 532) (n = 881) (n = 823) \n \np = .10 \n \nSupport Coordinator Record Review \n \n48.5% \n \n46.4% \n \n52.9% \n \n(n = 480) (n = 481) (n = 482) \n \np = .04 \n \nHow the individual is supported to learn about, explore and experience the community \n \nProvider Record Review \n \n44.3% \n \n39.5% \n \n44.6% \n \n(n = 532) (n = 881) (n = 827) \n \np = .03 \n \nSupport Coordinator Record Review \n \n48.7% \n \n39.0% \n \n45.7% \n \n(n = 476) (n = 480) (n = 481) \n \np = .03 \n \nHow the individual is supported to have or has responsibilities in the community as desired \n \nProvider Record Review \n \n62.8% \n \n42.8% \n \n50.7% \n \n(n = 530) (n = 883) (n = 825) \n \np = 0 \n \nSupport Coordinator Record Review \n \n58.5% \n \n59.5% \n \n63.5% \n \n(n = 480) (n = 477) (n = 479) \n \np = .21 \n \nHow the individual is supported to/able to participate in community activities and 
employment the same as Individuals without disabilities \n \nProvider Record Review \n \n79.1% \n \n40.4% \n \n55.6% \n \n(n = 460) (n = 854) (n = 806) \n \np = 0 \n \nSupport Coordinator Record Review \n \n76.9% \n \n62.2% \n \n62.6% \n \n(n = 455) (n = 458) (n = 462) \n \np = .92 \n \nEngagement in community life \n \nProvider Record Review \n \n75.6% \n \n39.5% \n \n44.6% \n \n(n = 533) (n = 881) (n = 827) \n \np = .49 \n \n115 | P a g e \n \n Quality Management Annual Report FY 2018 \nChoice \nChoice is the second-lowest scoring FOA showing an average score of 82.3 percent for FY 2018, an increase from FY 2017. Most Support Coordinators (95.6%) demonstrated the individual was living in the most integrated community setting appropriate to the individual's needs, preferences and level of independence. Additionally, most service providers (90.7%) showed evidence the individual was offered choice. \nAs indicated in Table 27, staff were often not able to describe how they provided the individual options for competitive employment or integrated living settings. Many Support Coordinator and provider record review indicators showed significant decreases from FY 2016 to FY 2017. Of these, some showed improvement but are still relatively low scoring. Findings indicated a lack of documentation for how individuals receiving services were making meaningful and informed choices in different aspects of their lives, including living environments, living situations, competitive employment and community participation. \n \nTable 27. 
Low Scoring Choice Indicators (PCR) \n \nStaff Interview Staff is presenting options of: \nCompetitive/supported employment \nLiving situations are integrated into local community (i.e., full continuum of housing options, roommate) Support Coordinator Interview \n \nFY 2016 FY 2017 \n \n74.7% (n = 517) \n93.0% (n = 341) \n \n50.5% (n = 220) \n65.5% (n = 905) \n \nSupport Coordinator offers choices related to supported employment and/or competitive employment \n \n72.4% (n = 381) \n \n62.0% (n = 413) \n \nProvider Record Review \n \nProviders demonstrated through documentation how individuals: \n \nAre provided a choice of living situations \n \n74.9% \n \n26.4% \n \n(n = 267) (n = 708) \n \nExercise meaningful choices about living environments \n \n70.7% \n \n21.1% \n \n(n = 259) (n = 715) \n \nFY 2018 \n49.2% (n = 313) \n65.5% (n = 284) \n64.7% (n = 434) \n26.0% (n = 739) \n26.3% (n = 759) \n \np value FY 17/18 p = .78 p = 1.0 \np = .41 \np = .86 p = .02 \n \n116 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nTable 27. 
Low Scoring Choice Indicators (PCR) \n \nMake informed choices about competitive 54.6% \n \n30.8% \n \n26.5% \n \nor supported employment options \n \n(n = 379) (n = 827) (n = 759) \n \nAre provided with information to make informed choices (education, exploration and experiences) \n \n40.7% \n \n27.4% \n \n27.8% \n \n(n = 548) (n = 891) (n = 845) \n \nMake informed choices about community participation and social interaction \n \n54.8% \n \n42.8% \n \n47.5% \n \n(n = 540) (n = 888) (n = 832) \n \nSupport Coordinator Record Review Support Coordinators demonstrated through documentation how individuals: \n \nAre provided with education, exploration and experiences \n \n41.9% \n \n31.2% \n \n35.8% \n \n(n = 482) (n = 481) (n = 483) \n \nAre offered employment or educational options \n \n55.4% \n \n49.3% \n \n52.7% \n \n(n = 401) (n = 423) (n = 438) \n \nMake meaningful choices about the living environment \n \n77.2% \n \n50.3% \n \n53.6% \n \n(n = 351) (n = 441) (n = 476) \n \nAre provided choices of living situations \n \n75.0% \n \n47.1% \n \n55.6% \n \n(n = 324) (n = 433) (n = 466) \n \nMake meaningful choices about community 56.5% \n \n50.3% \n \n66.3% \n \nparticipation \n \n(n = 480) (n = 441) (n = 483) \n \np = .06 p = .85 p = .05 \np = .13 p = .31 p = .33 p = .01 p = .01 \n \nRights \nScores surrounding Rights have remained relatively high each year since FY 2016, showing an average score of 92.0 percent in FY 2018, an increase of approximately two points since FY 2017 (90.2%). Providers showed evidence for all rights restrictions that a fading plan was in place, and only one record reviewed (PRR) gave an indication of co-mingled funds between the individual and the provider — the provider was given technical assistance on how to rectify. 
Almost every individual interviewed felt they had privacy (99.2%), did not have rights restrictions (99.9%), were treated with respect (99.9%), and almost all staff were aware of individual's rights and preferences for exercising those rights (99.3%). \nFindings in Table 28 show the lowest scoring indicators for the Rights FOA. Many Support Coordinators were not aware of restrictive interventions that were in place and many were not addressing rights restrictions. Support Coordinator documentation did not always show evidence of a signed informed consent form for psychotropic medications or of a signature on the notification of \n \n117 | P a g e \n \n Quality Management Annual Report FY 2018 \nrights and responsibilities. Documentation often did not show evidence the complaints/grievance policy was shared in a manner accommodating the individual's learning style. \n \nTable 28. Low Scoring Rights Indicators (PCR) \n \nFY 2016 FY 2017 \n \nSupport Coordinator Interview Support Coordinators were: \n \nAware of restrictive interventions/plans in place \n \n87.8% \n \n47.8% \n \n(n = 181) (n = 92) \n \nAddressing identified rights restrictions \n \n90.6% \n \n62.0% \n \n(n = 223) (n = 129) \n \nSupport Coordinator Record Review Support Coordinator documentation demonstrated evidence of: \n \nThe individual or legal guardian's signature on notification the individual has been informed about rights and responsibilities, at least annually \n \n58.4% \n \n22.7% \n \n(n = 473) (n = 476) \n \nInformed consent for taking psychotropic medications prescribed by a psychiatrist or psychiatric nurse practitioner \n \n49.1% \n \n35.5% \n \n(n = 281) (n = 251) \n \nThe complaints/grievance policy is shared in a manner accommodating the individual's learning style \n \n61.5% (n = 478) \n \n34.7% (n = 479) \n \nHIPAA Privacy and Security Rules are specifically reviewed with individuals \n \n72.1% \n \n55.2% \n \n(n = 480) (n = 478) \n \nConsent provided by the individual or legal 
guardian for the implementation of the Positive Behavior Support Plan \n \n79.1% (n = 86) \n \n55.2% (n = 49) \n \nProvider Record Review Provider records demonstrated evidence of: \n \nOngoing education about rights \n \n53.8% \n \n32.6% \n \n(n = 548) (n = 891) \n \nFor Provider-Owned or Controlled Residential Settings, there was documentation demonstrating the individual has a lease or other legally enforceable agreement to protect from eviction \n \n64.6% \n \n44.3% \n \n(n = 113) (n = 201) \n \nFY 2018 \n56.9% (n = 130) 68.9% (n = 180) \n23.6% (n = 461) \n41.1% (n = 248) \n42.0% (n = 483) 66.9% (n = 484) \n66.9% (n = 61) \n43.0% (n = 849) \n49.8% (n = 849) \n \np value FY 17/18 p = .18 p = .21 \np = .72 p = .19 p = .02 p = 0 p = .56 \np = 0 \np = .25 \n \n118 | P a g e \n \n Quality Management Annual Report FY 2018 \nIndividual Service Plan Quality Assurance Checklist (ISP QA) \nDuring the PCR, assessors reviewed the content of the Individual Service Plan (ISP) to assess adherence to requirements and the extent to which the plan specifically addressed specific needs, goals, and desires. \nISP Written to Support a Meaningful Life \nThe ISP QA checklist provided an overall rating for each service plan, based upon the degree to which the ISP was written to provide a meaningful life for the individual receiving services. Each ISP was identified as providing one of the following life-styles: \nService Life: The ISP supports a life with basic paid services and paid supports. Needs \"important for\" the individual were addressed, such as health and safety. However, there is not an organized effort to provide support in obtaining other expressed desires \"important to\" the individual, such as getting a driver's license, having a home, or acting in a play. The individual receiving services was not connected to the community and has not developed social roles but expresses a desire to do so. 
\nGood but Paid Life: The ISP supports a life with connections to various supports and services (paid and non-paid). Expressed goals \"important to\" the person are present, indicating goals and desires were obtained beyond basic health and safety needs. Individuals may go out into the community but with only limited integration into community activities. For example, someone may go to church but not have the opportunity to participate in Sunday school or sing in the choir. Community connections are lacking, and there is an indication of a desire to achieve more. \nCommunity Life: The ISP supports a life with the desired level of integration in the community and in various preferred settings. Friends and support beyond providers and family members is demonstrated, as is the development of meaningful social roles, such as belonging to a Red Hat club or a book club or having employment in a competitive rather than segregated environment. Rather than just going to church, the Individual receiving services may be an usher at the church or sing in the choir. Relationships developed in the community are reciprocal. The ISP is written with goals that help support moving toward a Community Life, as she or he chooses. \n119 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nOverall findings are presented in Figure 47. A majority of ISPs reviewed each year was written to support a \"Good but Paid Life.\" Findings in FY 2018 may suggest a shift from Good but Paid Life category (decreasing) to both Community Life and Service Life (increasing). However, these differences were quite small and the most prominent result is the continued majority of ISPs rated as a Good but Paid Life. \n \n100% 75% \n \nFigure 47. 
PCR ISP QA Life Indicator by Year \n73.8% 70.8% 66.7% \n \n50% \n \n25% \n \n9.0% \n \n14.9% 11.9% \n \n16.6%17.3%18.4% \n \n0% Service Life \n \nA Good but Paid Life \n \nCommunity Life \n \nFY 2016 (N = 484) FY 2017 (N = 481) FY 2018 (N = 484) \nFindings by region and year are presented in Table 29. While N sizes are relatively small, each region had over 50 ISPs reviewed. Findings for Region 2 appear to indicate a downward trend in the percent of ISPs written to support a Service Life or Good but Paid Life, and a concurrent upward trend in Community Life. In Region 6, the percent of ISPs supporting a Good but Paid Life has decreased over the three-year timeframe and the percent supporting a Community Life has increased. Each year, Region 1 has shown the highest, or close to the highest, percent of ISPs supporting a Community Life, compared to all other regions. \n \n120 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nRegion 1 2 3 4 5 6 \n \nTable 29. ISP QA Life Indicator by Region and Year \n \nService Life \n \nGood but Paid Life \n \nCommunity Life \n \nFY 2016 FY 2017 FY 2018 FY 2016 FY 2017 FY 2018 FY 2016 FY 2017 FY 2018 \n \n5.3% 6.3% 15.7% 58.5% 55.8% 58.4% 36.2% 37.9% 25.8% \n \n12.6% 10.0% 5.1% 82.8% 75.6% 67.9% 4.6% 14.4% 26.9% \n \n9.1% 8.2% 17.1% 73.6% 74.6% 67.9% 17.4% 17.2% 15.0% \n \n5.6% 9.8% 15.9% 83.3% 86.3% 77.8% 11.1% 3.9% 6.3% \n \n5.3% 20.6% 8.9% 78.9% 68.3% 75.0% 15.8% 11.1% 16.1% \n \n19.1% 23.7% 25.9% 72.1% 69.5% 55.2% 8.8% 6.8% 19.0% \n \nISP Expectations \nIn December 2017, DBHDD completed modifications to the checklist template and expectations, and the new/revised form went into effect in January 2018. Therefore, 397 ISP QA Checklists were completed using the original form and 78 were completed using the new ISP form. All the expectations and indicators have been modified, with the exception of the Overall Rating of how the ISP is written, shown for all 484 checklists in Figure 47 and Table 29 above. 
For the remaining results, data are presented for the original and new ISPs separately. \nResults Using Original ISP QA Checklist \nQuality assessors reviewed 12 different indicators to measure the quality of the ISP. Each indicator is listed in Table 30 and each has four requirements that must be met by the content in the ISP. The indicator is rated 0 - 4; \"0\" meaning the content of the ISP did not meet the requirements and \"4\" meaning all four of the requirements were addressed in the ISP. \nData in Table 30 shows the percent of ISPs at each rating on the 12 different indicators. For example, 46.0 percent of ISPs reviewed in FY 2018 had all four requirements present for the indicator regarding the communication chart. Data indicates: \n Fifty-seven point one percent of all ISPs reviewed addressed all elements in each section.  Expectations regarding the rights, psychotropic medications, or behavior supports and the \nhealth and safety sections were most likely to have all four expectations met. \n \n121 | P a g e \n \n Quality Management Annual Report FY 2018 \n The ISP indicators measuring how well the individual's hopes and dreams were addressed and whether goals were person-centered were most likely to have a rating of \"0\" or \"1\", 21.1 percent and 18.0 percent, respectively. \n \nTable 30. 
ISP QA Checklist Ratings by Expectation (FY 2018) N = 397 \n \nRatings \n \nISP QA Checklist Description \n \n0 \n \n1 \n \n2 \n \n3 \n \n4 Average \n \nRights, Psychotropic Medications, Behavior Supports Section \n \n0.6% 0.0% 0.0% 3.1% 96.3% 3.9 \n \nHealth and Safety Review Section completed accurately and thoroughly \n \n0.6% \n \n0.0% \n \n0.6% \n \n8.7% 90.1% \n \n3.9 \n \nPerson-centered Important to/For \n \n0.6% 0.0% 1.9% 28.6% 68.9% 3.7 \n \nSIS completed and support needs are addressed in the ISP \n \n0.6% 0.0% 3.1% 28.6% 67.7% \n \n3.6 \n \nCommunication Chart \n \n0.6% 0.0% 6.2% 47.2% 46.0% 3.4 \n \nService Summary \n \n2.5% 2.5% 14.3% 28.0% 52.8% 3.4 \n \nRelationship Map \u0026 discussion on ways to develop relationships \n \n0.6% 4.3% 14.3% 33.5% 47.2% 3.3 \n \nMeeting Minutes \n \n1.9% 7.5% 16.8% 21.1% 52.8% 3.3 \n \nTraining Goal Action Plan \n \n0.6% 3.7% 11.2% 54.7% 29.8% 3.2 \n \nHopes and Dreams \n \n13.0% 8.1% 9.3% 16.1% 53.4% 3.1 \n \nAction Plans/Objectives \n \n0.6% 2.5% 20.5% 39.1% 37.3% 3.1 \n \nGoals are Person-centered \n \n4.3% 13.7% 12.4% 28.6% 41.0% 3.0 \n \nTotal \n \n2.2% 3.6% 9.3% 28.3% 57.1% 3.4 \n \nTable 31 shows the average rating (0 - 4) by expectation for FY 2016 through FY 2018. The average rating for all three years was 3.4 and showed very little change by year for each expectation. Expectations measuring how well the individual's hopes and dreams were addressed and whether goals were person-centered were the lowest rated expectations all three years. \n \n122 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nTable 31. 
Average Rating by Expectation (0 - 4) \n \nISP QA Checklist \n \nFY 2016 FY 2017 (N = 484) (N = 481) \n \nRights, Psychotropic Medications, Behavior Supports Section \n \n3.9 \n \n3.9 \n \nHealth and Safety Review Section completed accurately \n \nand thoroughly (HRST information should be noted \n \n3.9 \n \n3.9 \n \nthroughout ISP) \n \nPerson-centered Important To/For \n \n3.7 \n \n3.6 \n \nSupports Intensity Scale (SIS) completed and support needs are addressed in the ISP \n \n3.6 \n \n3.6 \n \nCommunication Chart Service Summary Relationship Map \u0026 discussion on ways to develop relationships \n \n3.5 \n \n3.4 \n \n3.2 \n \n3.3 \n \n3.4 \n \n3.3 \n \nMeeting Minutes \n \n3.2 \n \n3.2 \n \nTraining Goal Action Plan \n \n3.2 \n \n3.2 \n \nHopes and Dreams \n \n3.0 \n \n3.0 \n \nAction Plans/Objectives Goals are Person-centered \n \n3.2 \n \n3.1 \n \n3.0 \n \n2.9 \n \nOverall Average \n \n3.4 \n \n3.4 \n \nFY 2018 (N = 397) \n3.9 \n3.9 \n3.7 3.6 3.4 3.4 3.3 \n3.3 3.2 3.1 3.1 3.0 3.4 \n \nResults Using New ISP QA Checklist \nThe \"New\" ISP QA Checklist (effective date 1/1/2018) contains seven Expectations that use a Likert scale to score the result. Scoring options are Needs Improvement, Emerging, Achieving and Exceeding. Likert scale ratings are presented for the state in Figure 48. The Expectations \"Summary reflects the individual's community life\" and \"[Relationship] Map demonstrates the individual is connected to the community\" showed the greatest need for improvement, 20.5 percent and 10.3 percent, respectively. 
Consistent with ratings from the original ISP QA, over 75 percent of all new ISP QA's were rated either \"Achieving\" or \"Exceeding\" on the following expectations: \n Goals and objectives were person centered (75.6%)  Health and Safety section included how all current issues, needs and/or risks were addressed \n(97.4%) \n \n123 | P a g e \n \n Quality Management Annual Report FY 2018 \n Profile was detailed and included person centered information (79.5%)  Communication Chart provided meaningful information staff can use to best support the \nindividual (76.6%) \n \nFigure 48. \"New\" ISP QA Checklist Ratings \n \nMap demonstrates the person is connected to the 10.3% community. \n \n34.6% \n \n16.7% \n \nCommunication Chart provides meaningful information staff can use to best support the person. \n \n2.6% 20.8% \n \n41.6% \n \nProfile is detailed and includes person centered information. \n \n2.6% \n \n17.9% \n \n28.2% \n \nSummary describes changes in the person's life in the last 6.5% \n \nyear and the supports/plans needed to address the \n \n22.1% \n \nchange with the person. \n \n36.4% \n \n38.5% 35.1% 51.3% 35.1% \n \nSummary reflects the person's community life. 20.5% \n \n39.7% \n \n29.5% 10.3% \n \nHealth and Safety section includes how all current issues, 1.3%1.3% needs and/or risks are addressed. \n \n47.4% \n \n6.4% \n \nGoals and objectives are person centered. \n \n17.9% 21.8% \n \n50.0% 53.8% \n \nNeeds Improvement \n \n0% Emerging \n \n25% \n \n50% \n \n75% \n \nAchieving \n \nExceeding \n \n100% \n \nPCR Results by Service \nDuring the PCR, Quality Management conducted a record review with every provider who provided services to the individual and whose services are eligible for review by the Collaborative. Information in Table 32 shows results for record reviews completed during the PCRs, by service and year. The N is the number of records reviewed for each service and the percent met is based on the total number of indicators reviewed. 
When reviewing the findings, it is important to note several services had \n \n124 | P a g e \n \n Quality Management Annual Report FY 2018 \neight or fewer records reviewed. Among services with 30 or more records, Supported Employment reflected the highest score in FY 2018, an increase from last year and close to the FY 2016 rate. \n \nTable 32. PCR PRR Results by Service and Year \n \nFY 2016 \n \nFY 2017 \n \nService \n \nN \n \n% Met \n \nN % Met \n \nProvider Record Review \n \n558 79.0% 894 70.2% \n \nBehavioral Supports \n \n- \n \n- \n \n2 91.7% \n \nCommunity Access (Group) \n \n233 77.1% 378 67.9% \n \nCommunity Access (Individual) \n \n60 79.7% 95 72.9% \n \nCommunity Living \n \n47 83.8% 72 66.8% \n \nCommunity Residential \n \n91 80.4% 195 73.4% \n \nNursing Services \n \n- \n \n- \n \n1 87.8% \n \nOccupational Therapy \n \n- \n \n- \n \n- \n \n- \n \nPhysical Therapy Prevocational \n \n- \n \n- \n \n- \n \n- \n \n70 75.9% 74 64.9% \n \nRespite \n \n1 87.5% 6 61.8% \n \nSpeech/Language Therapy Supported Employment \n \n- \n \n- \n \n- \n \n- \n \n54 82.3% 71 77.8% \n \nTransportation \n \n2 \n \n84.7% \n \n- \n \n- \n \nSupport Coordination Record Review 484 79.9% 481 73.6% \n \nFY 2018 \n \nN % Met \n \n856 74.8% \n \n8 83.9% \n \n361 74.6% \n \n92 74.4% \n \n104 70.1% \n \n195 77.1% \n \n2 68.8% \n \n1 83.3% \n \n1 82.8% \n \n51 71.6% \n \n8 67.6% \n \n1 82.8% \n \n32 81.0% \n \n- \n \n- \n \n484 77.5% \n \nTable 33 displays scores by FOA for the services reviewed during the PCR for FY 2018. Among providers with more than 30 records, results indicated the following: \n Providers of Community Access, Prevocational, and Supported Employment showed the lowest scores in Whole Health. \n Support Coordinators reflected the lowest Rights score and providers of Community Living Supports reflected the lowest Community Life score. \n The average statewide scores for FOAs ranged from a high of 84.4 percent for Rights to a low of 67.0 percent for Community Life. 
Within the reviewed services, the greatest variance was for Support Coordination, from a high of 97.2 percent for Safety to a low of 57.3 percent for Rights. \n \n125 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nTable 33. FY 2018 PCR Record Review Results by FOA and Service \n \nService \n \nWhole \n \nComm \n \nHealth Safety PCP \n \nLife Choice Rights \n \nBehavioral Supports (N = 8) \n \n81.2% 93.3% 87.7% 41.7% 79.4% 86.8% \n \nCommunity Access (Group) (N = 361) \n \n69.3% 81.0% 79.3% 70.1% 60.7% 86.2% \n \nCommunity Access (Individual) (N = 92) \n \n70.5% 80.9% 78.6% 69.9% 59.3% 85.8% \n \nCommunity Living Support (N = 104) \n \n71.4% 75.8% 73.5% 56.0% 54.0% 84.4% \n \nCommunity Residential Alternative (N = 195) 87.6% 80.9% 75.0% 62.9% 55.2% 82.3% \n \nPrevocational (N = 51) \n \n69.1% 81.4% 76.5% 62.3% 50.0% 86.2% \n \nRespite (N = 8) \n \n81.3% 70.8% 64.2% 49.4% 53.4% 79.3% \n \nSupported Employment (N = 32) \n \n61.9% 80.8% 88.0% 93.9% 78.7% 84.2% \n \nRegistered Nurse (N = 2) \n \n77.8% 71.4% 50.0% N/A 41.7% 83.3% \n \nSupport Coordination (N = 484) \n \n95.5% 97.2% 80.3% 67.6% 70.6% 57.3% \n \nState PRR Average (N = 856) \n \n75.7% 80.5% 77.6% 67.0% 58.5% 84.4% \n \nAverage \n83.9% 74.6% 74.4% 70.1% 77.1% 71.6% 67.6% 81.0% 68.8% 77.4% 74.8% \n \nPCR Strengths and Recommendations \nDuring each PCR, assessors identified strengths about services offered and provided recommendations to help improve services and overall quality of life for individuals receiving services. Table 34 and Table 35 display strengths and recommendations identified in PCRs completed in FY 2018. Staff strengths most often cited included an awareness of unique safety needs, achievements, and communication of everyday decisions. Staff identified ways to help the individual experience new things in the community, explored other potential dreams and promoted independence. \n \nTable 34. 
Top Strengths Identified During a PCR \n \nStrength \n \nInstances \n \nStaff is aware of the unique safety needs of the individual. \n \n374 \n \nStaff has a clear understanding how the individual communicates \n \nchoice making in everyday decisions. \n \n335 \n \nIndividual feels valued. \n \n316 \n \nStaff acknowledges the individual's achievements. \n \n311 \n \nPercent of PCRs 77.8% \n69.6% 65.7% 64.7% \n \n126 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nTable 34. Top Strengths Identified During a PCR \n \nStrength \n \nInstances \n \nStaff is knowledgeable of and provided examples of how \n \npreferences for exercising rights are actively being supported. \n \n261 \n \nStaff consistently promotes independence. \n \n243 \n \nStaff demonstrates the use of person centered values and \n \napproaches in everyday interactions with the individual. \n \n225 \n \nPercent of PCRs \n54.3% 50.5% \n46.8% \n \nTable 35. Top Recommendations Identified During a PCR \n \nRecommendation \n \nInstances \n \nConduct \"what if\" scenarios to determine the individual's skills \n \nin various safety situations. \n \n254 \n \nIdentify ways to expose the individual receiving services to new \n \nexperiences in his/her community. \n \n201 \n \nEnsure daily schedules and activities promote exposure to new \n \nthings ('new places and new faces') and are not stagnant in \n \nnature. \n \n191 \n \nSupport exploration of other potential dreams. This can be \n \ndone by using the 3 Es; Education, Exposure, and Experience. \n \n185 \n \nPercent of PCRs 52.8% 41.8% \n39.7% 38.5% \n \nQuality Enhancement Provider Review (QEPR) \nQEPR Scores by Size \nThe QEPR Overall score is based on findings from the Individual Interviews, Staff Interviews, Observations and Provider Record Reviews (PRR). 
The Administrative Qualifications and Training (Q\u0026T), based on a sample of staff rendering services, and the Developmental Disability Service Specific (DDSS) reviews that monitor compliance specific to each service, are reviewed and scores are supplied to the provider, but are not included in the provider's Overall score. During FY 2018, one crisis provider received a QEPR. Because crisis services are very different than typical services, the results for crisis providers are presented separately. \n \n127 | P a g e \n \n Quality Management Annual Report FY 2018 \nIn FY 2018, 99 providers who were not reviewed in FY 2016 or FY 2017 were selected for a QEPR: 72 Small, 12 Medium, and 15 Large. One provider offered crisis services and the provider of this service was reviewed separately for the service, bringing the total to 100 QEPRs completed. Comparisons by year are not an appropriate measure as the FY 2017 and FY 2018 samples were not selected to be representative of the state  the proportion of Small providers increased each year. See Figure 49 for distribution of QEPRs by provider size for FY 2016, FY 2017 and FY 2018. \nFigure 49. FY 2016 - FY 2018 QEPR Samples by Provider Size and Type \n \nFY 2016 \n \n47% \n \n36% \n \n16% 1% \n \nFY 2017 \n \n52% \n \n19% \n \n25% \n \n4% \n \nFY 2018 \n \n72% \n \n12% \n \n15% 1% \n \n0% \n \n20% \n \n40% \n \n60% \n \n80% \n \nSmall (caseload  30) Large (caseload  100) \n \nMedium (30 \u003c caseload \u003e 100) Crisis Provider \n \n100% \n \nOverall QEPR Score by Tool and Year \nFigure 50 shows the average Overall scores for providers reviewed in FY 2016, FY 2017 and FY 2018 and scores for each tool used in the review process. The Overall score for FY 2018 was 84.8 percent, with the Observation and Staff Interviews showing the highest scores of just over 95 percent. As in FY 2016 and FY 2017, the PRR reflected the lowest-scoring area (72.4%). 
\n \n128 | P a g e \n \n Quality Management Annual Report FY 2018 \nFigure 50. Overall QEPR Scores by Tool and Year \nIndividual Interview FY16: 95.1% FY17: 90.7% FY18: 89.6% \n \nProvider Record Review \nFY16: 73.6% FY17: 69.6% FY18: 72.4% \n \nOverall Score FY16: 88.4% \n(N = 99) FY17: 83.7% \n(N = 96) FY18: 84.8% \n(N = 99) \n \nStaff Interview \nFY16: 96.9% FY17: 95.2% FY18: 95.3% \n \nObservation \nFY16: 98.2% FY17: 95.7% FY18: 96.0% \n \nOverall Crisis Provider Scores \nSix crisis providers have received a QEPR, one in FY 2016, four in FY 2017, and one in FY 2018. Figure 51 shows the Overall scores for these providers and scores by tool and year. Compared to other providers reviewed with the QEPR, crisis providers show relatively higher scores across all tools, but especially higher scores for the PRR (91.1% versus 72.4%). \n129 | P a g e \n \n FY16: N = 1 FY17: N = 4 FY18: N = 1 \n \nQuality Management Annual Report FY 2018 \nFigure 51. Overall Crisis Provider Scores by Tool and Year \nIndividual Interview FY16: 83.4% FY17: 86.0% FY18: 90.2% \n \nProvider Record Review \nFY16: 88.0% FY17: 84.1% FY18: 91.1% \n \nOverall Score \nFY16: 85.1% FY17: 86.8% FY18: 93.1% \n \nStaff Interview \nFY16: 81.3% FY17: 89.1% FY18: 97.8% \n \nObservation \nFY16: 87.7% FY17: 92.5% FY18: 97.9% \n \nQualifications and Training (Q \u0026 T) and Service Specific (DDSS) \nEvery QEPR included a review of staff qualifications and training. In FY 2018, a sample of 780 staff records was reviewed. The sample was stratified by service to ensure all services offered by each provider were included in the review process. The primary purpose of the Q\u0026T record review was to confirm relevant staff information was accurate and up to date (e.g., driver's license, performance evaluations, background screening) and staff had received all required trainings specific to services provided. DDSS reviews are completed to ensure services are provided as specified by DBHDD (Figure 52). 
\n130 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nFigure 52. QEPR Qualifications and Training and DDSS Results by Year \n \nQEPR Providers \n \nCrisis Providers \n \nQ \u0026 T \n FY 2016: 80.6% (N = 1,003)  FY 2017: 82.6% (N = 947)  FY 2018: 84.6% (N = 780) \nDDSS \n FY 2016: 99.9% (N = 1,389)  FY 2017: 99.8% (N = 1,667)  FY 2018: 99.8% (N = 1,101) \n \nQ \u0026 T \n FY 2016: 92.8% (N = 11)  FY 2016: 91.8% (N = 51)  FY 2018: 94.5% (N = 5) \nDDSS \n FY 2016: 100% (N = 8)  FY 2017: 100% (N = 48)  FY 2018: 100% (N = 6) \n \nQualifications \u0026 Training Opportunities for Improvement \nProvider staff, particularly for crisis providers, appeared to do relatively well maintaining compliance with required training and qualifications with overall Q\u0026T scores of 84.6 percent and 94.5 percent, respectively. Most staff (97.4%) followed DBHDD's policy 04-104 for Criminal Records Checks and were properly licensed (96.5%). One hundred percent of crisis providers ensured crisis response system staff (mobile team members and intensive support staff) had participated in training and passed an examination demonstrating competence in all crisis protocols and requirements. 
For providers reviewed this year, several areas present opportunities for improvement in staff training, including the following: \n Within the first 60 days of hire, many staff did not have training on: o Use of the Georgia Crisis Response System (42.1% met) o How to work with individuals with co-occurring diagnoses (47.4% met) o Suicide prevention skills (65.3% met) \n Required training for Developmental Disability Professionals did not always include: \n \n131 | P a g e \n \n Quality Management Annual Report FY 2018 \no Supports Intensity Scale overview (52.0% met) o Individual service planning training (58.4% met)  The following required topics were not always included in the annual training: o Specific individual medications and their side effects (64.8% met) o Emergency and disaster plan procedures (68.3% met) The Q\u0026T review component uses a sample of employees from each provider to determine compliance with standards. The number of employee records sampled per provider varies based upon the total number of staff and services rendered. For this report, the results are presented by indicator and by provider for all employee records reviewed. Therefore, while an indicator for most employee records may support a \"met\", fewer providers may have all employees in compliance with the standard. For example:  The aggregate score for staff records shows 77.5 percent had training on person-centered values, principles and approaches. o Only 66.3 percent of QEPR providers had this training documented for the employee \nrecords selected.  Approximately 97 percent of staff records had documented evidence of all background \nscreening requirements. o However, only about 87 percent of QEPR providers demonstrated all staff records reviewed have all required background screening requirements in place. \n Accurate and up-to-date annual work performance evaluations were present for 72.9 percent of employee records reviewed. 
o Only 49.5 percent of providers showed all staff records reviewed had met this requirement. \n Most staff (86.2%) were current on annual tuberculosis testing. \n132 | P a g e \n \n Quality Management Annual Report FY 2018 \no Only 61.2 percent of providers had documentation that all staff records reviewed contained current tuberculosis testing. \nQEPR Scores by Provider Size13 \nFigure 53 displays the distribution of the QEPR Overall and Q\u0026T scores by size of the organization and year.14 In FY 2018, Overall scores ranged from 82.0 percent for Medium providers to 86.3 percent for Small providers. Qualifications and Training scores ranged from a low of 81.1 percent for Medium providers to 85.8 percent for large providers. On average, Medium providers scored lower than Small or Large providers on both the Overall score and Q\u0026T. All providers, regardless of size, scored above 98 percent on the DDSS tool (not shown in the figure). \n13 Crisis provider scores were excluded. There was one Small crisis provider reviewed in FY 2018, one large, one medium and two Small crisis providers reviewed in FY 2017 and one Small crisis provider reviewed in FY 2016. 14 See Figure 49 for details regarding provider size categories. \n133 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nFigure 53. Overall and Qualifications and Training Score by Provider Size and Review Year \n \nOverall Score \n \nQualifications and Training \n \n100% \n \n100% \n \n90% \n \n90.4% \n \n89.0% \n \n86.3% \n \n86.1% 84.4% \n \n82.0% \n \n86.1% \n \n80% \n \n83.2% \n \n82.7% \n \n70% \n \n90% 85.3% \n \n85.8% \n \n80.0% 80% \n \n82.2% 81.1% 86.4% 79.8% \n \n81.5% \n \n70% \n \n76.0% \n \n60% \n \n60% \n \n50% Small \n \nMedium \n \nLarge \n \n50% Small \n \nMedium \n \nFY 2016 (N = 99) FY 2017 (N = 96) FY 2018 (N = 99) \n \nLarge \n \nQEPR Scores by Focused Outcome Area (FOA) \nThe Overall score for each review was divided into six FOAs. 
Each FOA score was calculated with the combined results from the Individual Interview, Staff Interview, Observation (IOSA), and Provider Record Review.15 Results for QEPRs (except for crisis providers) are shown by FOA and year in Figure 54. With the exception of Person Centered Practices, scores within each FOA increased slightly from FY 2018. The lowest scoring areas each year were Community Life, Choice and Person Centered Practices, while measures surrounding Safety and Rights remained relatively high  showing scores over 90 percent each year. \n \n15 FOAs calculated for the PCR also included the Support Coordinator Interview and SCRR, which are not part of the QEPR process. Comparisons between the PCR and QPER are not appropriate. \n134 | P a g e \n \n Quality Management Annual Report FY 2018 \nFigure 54. Overall QEPR Scores by FOA and Year \n \nRights FY16: 92.4% FY17: 91.2% FY18: 92.4% \nChoice FY16: 86.2% FY17: 75.9% FY18: 78.2% \n \nWhole Health FY16: 89.1% FY17: 83.2% FY18: 84.9% \nOverall Score FY16: 88.4% FY17: 83.7% FY18: 84.8% \nCommunity FY16: 78.0% FY17: 70.4% FY18: 72.0% \n \nSafety FY16: 93.9% FY17: 90.9% FY18: 91.0% \nPerson Centered Practices FY16: 82.6% FY17: 81.6% FY18: 80.4% \n \nQEPR Scores by Tool and FOA \nIn this section, results by FOA for the QEPR are presented for each tool used in calculating the Overall score for the QEPR process  Individual Interview, Observation, Staff Interview and PRR (see Figure 55).16 Findings for the providers reviewed this year indicate the following: \n Provider documentation (PRR) was the lowest scoring tool across all the FOAs, ranging from a low of 52.5 percent for measures surrounding Choice to 83.8 percent for Rights. \n \n16 See Table 21 for the number of interviews and records completed for each QEPR component. 
\n \n135 | P a g e \n \n Quality Management Annual Report FY 2018 \n \n Observations and Staff Interviews reflected relatively high Community Life scores, 93.1 percent and 85.6 percent respectively; however, the Individual Interview results in this FOA was somewhat lower (79.0%) and providers did not document this as well (64.3%). \n Community Life and Person Centered Practices were the lowest scoring FOAs (79.0% and 91.5% respectively) from the Individual Interviews. \n Observation scores were approximately 93 percent or higher in all the FOAs with the exception of Person Centered Practices (90.4%). \n \nFigure 55. FY 2018 QEPR Scores by Tool and Focus Outcome Area \n \nWhole Health Safety \nPerson Centered Practices Community Life Choice Rights \n \n86.1% 95.4% 94.9% \n75.2% \n \n78.7% \n \n91.7% 96.9% 98.8% \n \n81.5% 93.9% \n90.4% 74.3% \n \n64.3% \n \n79.0% 85.6% 93.1% \n \n52.5% \n \n94.8% 93.6% \n96.6% \n \n96.4% 97.5% 96.3% 83.8% \n \n0% \n \n25% \n \nIndividual Interview (N = 436) \n \nObservations (N = 285) \n \n50% \n \n75% \n \nStaff Interview (N = 316) \n \nRecord Review (N = 955) \n \n100% \n \n136 | P a g e \n \n Quality Management Annual Report FY 2018 \nQEPR Scores by FOA and Provider Size \nProvider scores by size and FOA are presented in Figure 56. Compared to Small and Large providers, Medium providers scored relatively lower across all FOAs. With the exception of Whole Health and Person Centered Practices, there was little variation across FOAs for Small and Large providers in FY 2018. Small providers performed relatively better on Whole Health and Large providers performed relatively better on Person Centered Practices. \nFigure 56. 
FY 2018 QEPR Provider Scores by FOA and Size \n \nWhole Health Safety \nPerson Centered Practices Community Life Choice Rights 50% \n \n88.7% 81.4% 82.0% \n \n91.5% 90.2% 90.8% \n \n78.8% 78.2% \n82.9% \n \n72.5% 69.7% \n72.8% \n \n80.9% 72.7% \n78.1% \n \n92.5% 90.5% \n93.3% \n \n60% \n \n70% \n \n80% \n \n90% \n \n100% \n \nSmall (N = 72) \n \nMedium (N = 12) \n \nLarge (N = 15) \n \n137 | P a g e \n \n Quality Management Annual Report FY 2018 \nQuality and Technical Assistance Consultation (QTAC) \nA total of 161 QTACs were completed, of which 136 were at the provider level and 25 at the individual level. A provider level QTAC focuses on the overall provider practices and processes versus an individual level QTAC that only focuses on services and supports for that one person. Due to the ability to check more than one referral type, totals do not sum to 161. Table 36 shows the number of QTACs by referral source and type for FY 2018. Most provider referrals were internally initiated but the provider may have also requested additional technical assistance once on site. Most QTACs were completed at the provider level (84.5%) and most had an internal referral source (85.3%). The majority of QTACs (64.6%) were generated from the QEPR. \n \nTable 36. QTACs by Referral Source and Type \n \nProvider (N = 136) Individual (N = 25) \n \nReferral Source \n \nNumber Percent Number Percent \n \nInternal \n \n116 \n \n85.3% \n \n25 \n \n100.0% \n \nProvider \n \n20 \n \n14.7% \n \n0 \n \n0.0% \n \nReferral Type: \n \nQuality of Care (PCR/QEPR) CAP Review Provider Request QEPR Follow Up \n \n9 \n \n6.5% \n \n24 \n \n96% \n \n- \n \n- \n \n1 \n \n4% \n \n26 \n \n18.8% \n \n- \n \n- \n \n103 74.6% \n \n- \n \n- \n \nTable 37 provides a list of detailed reasons for the QTACs completed in FY 2018. The most frequently cited reasons were related to the QEPR and alert follow-ups. More than one reason can be identified for a QTAC. \n \nTable 37. 
QTAC Referral Reasons: FY 2018 Reason \nQEPR follow up Quality of care follow up Review of person-centered documentation Request for 2 day Person-Centered Thinking Training Person-centered training needed Lack of person-centered documentation Other \n \nN % \n103 62.7% 33 20.5% 13 8.1% 5 3.1% 3 1.9% 2 1.2% 2 1.2% \n \n138 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nTable 37. QTAC Referral Reasons: FY 2018 Reason \nAssistance with criminal background checks Social inclusion training needed \nTotal \n \nN % \n1 0.6% 1 0.6% \n161 100.0% \n \nTechnical assistance is provided at every QTAC. Approximately 45 to 48 percent of provider level QTACs offered brainstorming, group or individual discussion, and resources (Figure 57) to help address the areas needing improvement. The most common type of technical assistance offered at the individual level was individual discussion (80.0%) with staff. \n \nFigure 57. FY 2018 QTAC Technical Assistance Provided \n \n1:1Training \n \n6.6% 8.0% \n \nBrainstorming \n \n24.0% \n \nGroup Discussion Group Training \n \n16.0% 18.4% \n0.0% \n \nIndividual Discussion \n \nResources 8.0% \n \nRole Play \n \n10.3% 0.0% \n \n0% \n \n20% \n \nProvider (N = 136) \n \n47.1% 47.8% \n44.9% 47.8% \n \n80.0% \n \n40% \n \n60% \n \n80% \n \nIndividual (N = 25) \n \n100% \n \nIntellectual and Developmental Disability Summary of Findings and Recommendations \n \n139 | P a g e \n \n Quality Management Annual Report FY 2018 \nJune 2018 marked the completion of the third year of the Collaborative contract. Quality assessors completed 100 QEPRs, 484 PCRs, and 161 QTACs. Beginning in FY 2017, the Collaborative implemented tool revisions to evaluate the quality of intellectual and developmental disability services and supports in both the QEPR and PCR. The six FOAs identified throughout this report and indicators were used to address a wide range of requirements and promising practices within each FOA. 
\n \nFY 2018 IDD Accomplishments \nSeveral opportunities were provided throughout the year for the IDD Quality Management team to work in partnership with DBHDD. This collaboration focused on three key initiatives: training for stakeholders, the new IDD Case Management system and tool revisions for the review processes. \nThroughout the year, the Collaborative's Quality Management team and DBHDD met to discuss and review findings from FY 2017, current fiscal year findings, and other identified areas of need across the provider network. Training sessions on four Focused Outcome Areas were developed and presented throughout the year based on this analysis and discussion. The following is a list of trainings provided for stakeholders in the 2018 fiscal year: \n \nWhole Health Person Centered \n \n Empowering Individuals to Maintain their Healthcare  Techniques on How to Help Educate People on Medications \n Person Centered Documentation: Part 1 \u0026 Part 2  What is Person Centered Language? Part 1 \u0026 Part 2  Staff Matching, Why it Works \n \nCommunity \n \n How to Support Community Inclusion \n \nRights \u0026 Choice \n \n The importance of supporting the concepts of Choice and Rights from the perspective of people receiving services and providers \n \n140 | P a g e \n \n Quality Management Annual Report FY 2018 \nAdditionally, in collaboration with DBHDD, a training plan was developed for the new IDD Case Management System project. The plan was developed for internal (the Collaborative and designated DBHDD staff) and external (individuals applying for services, providers of IDD services, and Support Coordination staff) users and other stakeholders to provide guidance on navigating and using the web-based application. \nThrough workgroups with the DBHDD Quality Improvement Office, IDD program staff, and other stakeholders, implementation of tool revisions occurred over the course of FY 2018 to align with the DBHDD IDD Provider Manual, policies and procedures. 
The tool revisions included: \n \nTool Updates \nTool Redesign \n \n Individual Interview  Observation  Provider Record Review  Staff Interview \n ISP QA Checklist  Support Coordination Interview \n(to be implemented July 2018) \n \nIDD System Strengths and Recommendations \nMost providers and staff were aware of unique safety needs and achievements of individuals they serve. Most individuals were aware of how to self-preserve (88.6%) and felt safe in their work and living environments (99.6%). Based on observations, staff and providers took advantage of opportunities to provide education on health and rights, and with one exception, all individuals expressed that they were free from abuse, neglect, and exploitation. \nFindings on many of the tools for both the PCR and QEPR showed scores of over 90 percent (see Table 38). Similar to FY 2017, documentation is the lowest scoring area from record reviews during both the PCR and QEPR. \n \n141 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nTable 38. FY 2018 Summary by Tool and Review Type \n \nTool \n \nPCR \n \nQEPR \n \n(N = 484) (N = 99) \n \nCrisis (N = 1) \n \nIOSA - Individual Interview \n \n92.2% \n \n89.6% \n \n90.2% \n \nIOSA - Observation \n \n98.2% \n \n96.0% \n \n97.9% \n \nIOSA - Staff Interview \n \n95.6% \n \n95.3% \n \n97.8% \n \nProvider Record Review \n \n74.8% \n \n72.4% \n \n91.1% \n \nSC Record Review \n \n77.4% \n \nNA \n \nNA \n \nSC Interview \n \n83.5% \n \nNA \n \nNA \n \nAdmin Q\u0026T \n \nNA \n \n84.6% \n \n91.8% \n \nDDSS \n \n99.9% \n \n99.8% \n \n100% \n \nFOA scores from both review types (PCR and QEPR) suggest the service delivery system appears to do well across the six critical areas, showing scores of at least 70 percent each year. Scores from both review types were over 90 percent for Safety and Rights. 
Providers have effective systems and practices in place to help individuals be safe in their environments and exercise their rights, and individuals have indicated they do feel safe and their rights are upheld. \nWhole Health scores for both the PCR (87.4%) and for providers reviewed this year with a QEPR (84.9%) indicated most health needs are likely identified and addressed. Support Coordinators did very well on the health-related sections of the ISP in accurately completing all components of rights, the psychotropic medications and behavior supports section, and the health and safety review section. \nOpportunities for Improvement \nWhile overall FOA scores remained relatively high, drilling down to the standards/indicators measuring each FOA provides insight into opportunities for developing quality improvement initiatives or training programs. \n \n142 | P a g e \n \n Quality Management Annual Report FY 2018 \nWhole Health \nFindings for Whole Health show, from the perspective of individuals who were interviewed, a consistent decline in several health-related areas. Each year they appear to be less likely to know what medications they are taking, why, and what the side effects are. Because individuals with intellectual and developmental disabilities may depend on color and shape of medication to ensure the correct pill/tablet is taken, having decreased awareness of this each year could be problematic and lead to an increase in medication errors. These trends appear to be supported through the PRR, for which findings indicate most providers did not offer education to individuals receiving services on their prescribed medications (48.3%) or including the risks and side effects (52.1%). 
\nFurthermore, for the PCR, because about half (N = 236) of the individuals sampled live in a family home and Whole Health was the lowest scoring area, this may be an area that warrants additional analysis to determine how living in a family home impacts an individual's health indicators. \nRecommendation XI: Currently the DBHDD Provider Manual for Community Developmental Disability Providers requires providers to educate individuals on the risks and benefits of antipsychotic medications and all other types of prescribed medication and this must be documented in the clinical record. Training in this area was conducted by the Collaborative in June of 2018. It is recommended this indicator continue to be tracked and evaluated in FY 2019 to determine if additional training would be beneficial. \nRecommendation XII: The Collaborative could complete an ad hoc report to compare the FOA scores at the indicator level across different resident type, specifically for individuals living in their family home. \nSafety \nApproximately 30 percent of individuals interviewed did not know how to respond to an emergency or crisis situation if supports were incapacitated. Over 52 percent of recommendations for the PCR \n143 | P a g e \n \n Quality Management Annual Report FY 2018 \nwere related to helping individuals receiving services learn how to manage safety situations by conducting \"what if\" scenarios. At the same time, 40 percent of staff were not providing education on the use of the Crisis Hotline. In addition, only 42.1 percent of staff records reviewed this year, and 43.4 percent reviewed in FY 2017, documented staff/provider training on the Georgia Crisis Response System. \nInformation from Support Coordinators appears to point to a need for a better understanding of issues surrounding behavioral health, including a better awareness of intervention plans and specific triggers an individual may have. 
Support Coordinators were not likely to be aware of needed crisis plans (47.4% met) or behavior support plans (57.2% met). Half of the Support Coordinators were not aware of the individualized techniques needed to follow a plan for the individual (53.3%). \nRecommendation XIII: The Collaborative and DBHDD could consider providing specific training to Support Coordination as it relates to behavior supports and how to evaluate this service. \nRecommendation XIV: Almost every provider and Support Coordinator reviewed during safety situations is a fundamental support of the service delivery system. DBHDD could consider conducting a targeted safety campaign that includes resources, as well as web-based and face-to-face training throughout the state. The DBHDD monthly newsletter could include a new tip or resource that providers, individuals, and families can use to promote independence in the area of safety. \nPerson Centered Practices \nA key area of focus for the Centers for Medicare and Medicaid Services (CMS) and DBHDD is to ensure services and supports are provided using person centered practices and planning. To the extent possible, the individual receiving services should be at the center of all decisions, plans, and goals. It is important for providers to understand who each individual is and what he or she may want, hope for, and dream. Unfortunately, many ISPs did not ensure all the goals were person centered (74.5%) or their hopes and dreams were addressed throughout the plan (80.8%). Most individuals receiving services seemed to understand their own talents, strengths and goals (82.6%) \n144 | P a g e \n \n Quality Management Annual Report FY 2018 \nbut most providers did not document those talents/strengths (42.2%) or how they review progress toward achieving goals with each individual (44.0%). \nRecommendation XV: DBHDD and the Collaborative will be providing training on the changes to the ISP being implemented in FY 2019. 
It is recommended this training include a section on writing person centered goals. \nRecommendation XVI: One of the key components to implementing a person centered approach to planning is to ensure individuals receiving services have the opportunity to review their goals and make changes as deemed necessary. With the implementation of the IDD Case Management System (new electronic record and ISP) it will be important to monitor how well the new system supports person centered planning. In the meantime, DBHDD could consider including a standard in the provider manual that goals should be reviewed with the individual more frequently and documented within the record. \nCommunity Life \nAccording to CMS expectations, individuals with intellectual and developmental disabilities should be able to participate in their communities in the same manner as individuals who do not have a disability. Community Life remains the lowest scoring area in the PCR and for providers reviewed through the QEPR. Information from individual interviews suggests about 25 to 30 percent of individuals receiving services had not been given the opportunity to learn about and develop new social roles in the community, or experience new community activities. In addition, provider and Support Coordinator documentation often does not show support for individuals to be engaged in the community, develop social roles, explore new activities or participate in activities the same as other citizens. The ISP should be written to help ensure integration into the community in various settings, as desired. However, only 18 percent of ISPs reflected full participation in the community. \nRecommendation XVII: An annual training could be offered on developing community connections for individuals receiving services. DBHDD could consider making this annual training mandatory for all providers. 
Training, and related resources, could include practical application of ideas and \n145 | P a g e \n \n Quality Management Annual Report FY 2018 \nexamples to help staff and Support Coordinators understand different and realistic ways to connect an individual to the community. Recommendation XVIII: As part of the training on the revised ISP, DBHDD could consider providing training to include specific approaches to assist Support Coordinators in documentation in the Supports Intensity Scale (SIS) related to the support needs of the individual while in the community and developing ISPs and goals consistent with an integrated life in the community. \nChoice \nInformed choice is the cornerstone of helping anyone understand and achieve meaningful goals and direct supports and services. Information from documentation indicates most provider records lacked evidence informed choice was provided for competitive or supported employment (26.5%), living situations (26.0%), environments (26.3%), community participation/social interactions (47.5%), or educational opportunities (27.8%). Over half of staff interviewed had not provided options for competitive employment; however, provider and Support Coordinator documentation appears to be what is primarily affecting the low Choice FOA score. Recommendation XIX: Quality Management can develop a training to address how to document choices offered and how providers can support individuals to make informed decisions. This training would target support staff and Support Coordinators. \nRights \nThe proper use of restrictive interventions is strictly monitored and enforced by DBHDD. Individuals interviewed during the review processes did not indicate any violations regarding the improper use of any type of restrictive intervention or any unauthorized restrictions in their home, community, work, or day program. They generally feel their rights are upheld. 
However, records maintained by providers and Support Coordinators, as well as interviews with the Support Coordinators, indicate there may be some issues regarding proper documentation of addressing rights. \n146 | P a g e \n \n Quality Management Annual Report FY 2018 \nSigned consent forms are often missing for individuals prescribed psychotropic medications (26.5%), thus indicating the individual had been informed of rights and responsibilities. Additionally, less than half of the providers documented how they provided rights education (43.0%). Recommendation XX: DBHDD could consider requiring Support Coordinators to provide education to individuals and families on the importance of giving and receiving copies of signed consent forms for psychotropic medications. Through this, Support Coordinators could ensure consent forms are being completed and subsequently document this in their own support notes. \nProvider Documentation \nThroughout the findings in this report, documentation by providers and Support Coordinators has shown consistently lower scores when compared across review tools or within the FOAs. Support Coordinators documented areas of health (95.5%) and safety (97.2%), but did not document rights (57.3%), the only area in which service providers scored higher. Provider documentation reviewed this year was lowest in the area of choice, the same as in FY 2017. \nRecommendation XXI: The Collaborative and DBHDD could consider developing a stakeholder workgroup to review the Support Coordination Outcome Review template and determine if there are needed improvements or changes that can better support providers. Recommendation XXII: Quality Management could develop a training to specifically address how Support Coordinators can improve support notes and documentation specific to areas identified throughout the intellectual and developmental disability section of this report. 
\nAdditional Recommendations Quality Enhancement Provider Reviews (QEPR) \nOne of the Collaborative initiatives for the 2019 fiscal year is to increase the frequency of QEPR data to analyze reviews from a three-year to a two-year cycle. This would provide information that is \n147 | P a g e \n \n Quality Management Annual Report FY 2018 \nmore current for all providers on the quality of services across the state, and increase the ability to examine more timely comparison of provider scores across the IDD Provider Network. \nRecommendation XXIII: The Georgia Collaborative Quality Department will continue to collaborate with DBHDD on revisions to the IDD review tools to more efficiently assess the quality of specific services according to the state guidelines. Collectively, DBHDD and the Collaborative could evaluate and develop the tools based on new policies and the DBHDD IDD Provider Manual. \nThe QEPR report was originally designed to provide detailed data information to the provider regarding services rendered to all individuals receiving services. The report displays findings from the administrative review component for staff qualifications and training and quality of care concerns, including protected health information (PHI). The QEPR reports are not publicly posted at this time. Having the ability to post the QEPR providers reports on the Georgia Collaborative website would allow individuals and family members easy access to information on provider's quality performance. Additionally, providers would be able to see how their peers are conducting business in relation to the review process. In order to post the QEPR reports, they need to be restructured with all PHI removed. \nRecommendation XXIV: The Collaborative proposes to redesign the QEPR final report for public posting of a summary of findings on the Collaborative website, similar to BH. 
The report will remove all PHI; additionally, at the exit conference the provider will receive a detailed preliminary report to contain overall findings, recommendations and specific Individual Record Review findings. \nQuality Technical Assistance Consultation (QTAC) \nEach provider selected for a QEPR receives a QEPR follow up QTAC, regardless of score or opportunities for improvement. However, some providers need the technical assistance and additional on-site time from assessors more than others. By reducing the number of QEPR follow up QTACs completed each year, the Collaborative could focus technical assistance efforts on providers who most need support. \n148 | P a g e \n \n Quality Management Annual Report FY 2018 \nSection 5: Behavioral Health and Intellectual Developmental Disabilities: Overall Results by Focused Outcome Area \nQuality Management analyzed the Focused Outcome Areas (FOA), as described throughout this report, in all four major review processes: BHQR, CSUQR, QEPR and PCR. The following graphic (Figure 58) displays results for each FOA by these review processes. While the same concepts are measured, it is important to note most direct comparisons across the processes are not appropriate and should be made with caution. The following caveats should be taken into consideration when reviewing the data: \n Sampling processes vary for different review processes.  The total scores for each FOA for BHQR and CSUQR results are based only on record reviews.  For QEPR and PCR, total scores for each FOA are based on indicators from four different tools \nincluding individual/staff interviews, observations and record reviews.  A total of 22 indicators are used for all FOAs measured during the BH and CSU reviews, three \nto four per area. During the QEPR and PCR, a total of 115 indicators are used, 16 to 39 per area.  
Behavioral health and intellectual and developmental disability providers are required to record information using different provider manuals, requirements by waiver program, and national standards. \n149 | P a g e \n \n Quality Management Annual Report FY 2018 \nFigure 58. FY 2018 Focused Outcome Areas by Review Type \n \nWhole Health Safety \nPerson Centered Community Choice Rights \n \nBHQR FOA Overall: \n92% \n84% 78% 95% 97% 97% 93% \n \nCSUQR FOA Overall: \n91% \n88% 82% 88% 92% 97% 97% \n \nQEPR FOA Overall: \n72.4% \n75.2% 78.7% 74.3% 64.3% 52.5% 83.8% \n \nPCR FOA Overall: \n74.8% \n75.7% 80.5% 77.6% 67.0% 58.5% 84.4% \n \nIn review of specific indicators for both the behavioral health and intellectual and developmental disability reviews, direct comparisons could not be identified. Although both reviews measure the same areas, behavioral health and intellectual and developmental disability indicators were not designed to be compared. As tools are revised and updated, effort will be made to develop more comparative indicators between BH and IDD in upcoming fiscal years. \n \n150 | P a g e \n \n Section 6: Feedback Survey Results \n \nQuality Management Annual Report FY 2018 \n \nFollowing completion of quality reviews for both behavioral health and intellectual and developmental disabilities, providers are offered the opportunity to complete a feedback survey. For intellectual and developmental disability services, individuals who participate in the interview are also offered the opportunity to provide feedback about the process. Surveys are optional and may be completed by the same agency more than once in a fiscal year. In addition, it is not known who completes the survey, as individuals and staff are not required to submit their name or provider agency. A five-point Likert scale is used: strongly agree, somewhat agree, neither agree nor disagree, somewhat disagree, or strongly disagree. 
\n \nTable 39 provides information from providers for data entered into the system during FY 2018. Response rates were generally low, but findings were overwhelmingly positive across all the review types and from both providers and individuals. The percent was calculated as follows: (Strongly Agree + Somewhat Agree) / Total Responses. Additionally, two surveys were linked to the joint QEPR and BHQR process through the provider submission of name with the survey responses. The results for both systems, BH and IDD, using the calculation mentioned previously, was 99.6 percent. Results of indicators for each of the two systems are identified below for annual comparison. \n \nTable 39. FY 2018 Collaborative Provider Feedback Surveys \n \nPercent: Strongly Agree + Somewhat Agree/Total Responses \n \nIDD \n \nSurvey Indicators \n \n(N = 66) \n \nOverall, you are satisfied with the review/consultation process. \n \n98.5% \n \nThe Collaborative staff interacted with you and your staff in a professional manner. \n \n100% \n \nThe Collaborative staff interacted with the individuals you support in a professional manner. \n \n100% \n \nThe Collaborative staff answered your indicators and concerns clearly and \n \nconsistent with DBHDD manual. (If you disagree, please explain at the end 98.5% \n \nof the survey.) \n \nThe Collaborative staff facilitated an environment which was collaborative and positive. \n \n100% \n \nYou would contact the Collaborative staff for technical assistance, training, and resource support, if needed. \n \n100% \n \nBH (N = 23) 100% 100% 100% \n95.1% \n100% 100% \n \n151 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nTable 39. FY 2018 Collaborative Provider Feedback Surveys \n \nPercent: Strongly Agree + Somewhat Agree/Total Responses \n \nIDD \n \nSurvey Indicators \n \n(N = 66) \n \nThe process provided constructive feedback. \n \n100% \n \nThe process helped identify the strengths of your supports and services. 
\n \n100% \n \nThe feedback you received will help provide supports and services that meet the desired outcomes of the individuals you support. \n \n100% \n \nThe recommendations generated from this process can be used to make a positive contribution to the individuals served. \n \n98.4% \n \nThe recommendations generated from this process can be used to make a positive contribution to your organization overall. \n \n100% \n \nThe feedback provided will assist your organization with making quality improvements to systems and practices. \n \n100% \n \nBH (N = 23) 100% 100% 100% \n100% \n100% \n100% \n \nOverall Survey Result \n \n99.6% 99.6% \n \nOnly five responses were received specific to the individual survey completed following an IDD quality review, all responses resulted in \"strongly agree,\" a 100 percent score. Indicators asked to individuals include: \n Overall, I am pleased with how the interview went.  The purpose of the interview was explained to me.  I was treated with respect.  The person who interviewed me seemed interested in what I said.  The person who interviewed me was pleasant.  My indicators were answered.  The length of the interview was good. \nQuality Management will continue to review its processes, including information obtained from these surveys, to ensure providers are equipped with the necessary tools and opportunities to best serve individuals across the state. Quality Management will also tailor its quality training both to its \n \n152 | P a g e \n \n Quality Management Annual Report FY 2018 \ninternal staff as well as for Georgia providers to promote a cooperative and constructive partnership in the quality of services provided in the state of Georgia specific to the individual's needs. \n153 | P a g e \n \n Quality Management Annual Report FY 2018 \nSection 7: Conclusion \nFY 2018 marked the third complete year of the Collaborative Quality Management review process. 
During FY 2018, Quality Management completed 781 reviews for behavioral health and intellectual and developmental disability providers. The BHQR Overall score for FY 2018 was 88 percent, a four-point increase from both FY 2017 and the baseline year of FY 2016. Additionally, all four BHQR categories increased from the previous year specific to BHQRs with Assessment and Planning improving by ten percentage points. \nWhile there is demonstrated success noted in all of the different BHQRs categories, there are opportunities for growth similar to findings of previous years related to addressing all assessed needs, co-occurring issues being assessed and addressed, as well as appropriate discharge planning criteria documentation. Furthermore, ACT Service Guidelines score declined from FY 2017 to FY 2018 by four points. Several declines in ACT indicators brought the Service Guidelines category from 88 percent to 84 percent while the remaining categories of billing, FOAs, and Assessment and Planning increased. \nAdditionally, FY 2018 marked the second year reassessments were completed for providers who fell below the threshold criteria established in FY 2017. Twenty providers (15%) were reviewed for a second time during the fiscal year compared to 35 (27%) in the year prior. In review of scores for these specific providers at the time of their first FY 2018 review compared to their second FY 2018 review, providers increased their scores in all areas. For example, the average Overall score went from 83 percent to 91 percent. While it is anticipated the reassessment of providers will lead to elevated results from year-to-year, some providers continue to decline in scoring from first review to second review. Two reassessed providers in FY 2018 declined rather than improved in Overall score. 
However, FY 2018 annual findings (with increases statewide) promote that ongoing review processes, provider education, and technical assistance have improved documentation specific to care of individuals and promoted improved quality of service to the individuals served. Consequently, any provider reviewed after July 1, 2018 having an Overall score or Billing score of less than 90 percent will be required to have a second review conducted. \n154 | P a g e \n \n Quality Management Annual Report FY 2018 \nSpecific to IDD reviews, Quality assessors completed 100 QEPRs, 484 PCRs, and 161 QTACs. Scores for all of the components of the PCR, i.e., interviews, observations and documentation, averaged over 75 percent, an overall improvement for each FOA. \nThe average QEPR score for FY2018 has increased to 84.8 percent with the provider record review as the lowest scoring area. With the exception of the individual interview, all tools for the QEPR increased in score from the previous year. The QEPR FOA Overall scores all increased from FY 2017 except for Person Centered Practices that declined somewhat each fiscal year (FY 2016 82.6% to FY 2018 80.4%). Overall data results showed strengths in the area of Safety and Person Centered Practices. \nWhere possible, Quality Management conducted joint BHQR, CSUQR and QEPRs to reduce administrative burden to providers. Ten joint reviews were conducted during FY 2018, all at Community Service Board locations. Using such a method promotes additional education to the network and among assessors in the field, further strengthening expertise and collaboration. Each joint review lends valuable information and feedback through joint discussions and exit conferences for providers and assessors alike. Additionally, providers are supplied an additional opportunity to complete a satisfaction survey following the process. 
\nFeedback obtained from the satisfaction surveys provides a qualitative approach to individual and staff perceptions related to review processes and yielded positive results in FY 2018. Quality Management will continue to use feedback from the surveys to review processes and provide professional, effective, and constructive approaches to ensure providers are equipped with the necessary tools and opportunities they need to best support individuals. \nProvider trainings in FY 2019 are to occur both in person as well as via webinar through the formalized Quality Training Program based on collaboration and partnership with DBHDD. Quality Management shall use findings from the behavioral health, CSU, and intellectual and developmental disability reviews to recommend topics for trainings. Such trainings will be prioritized based on identified system needs. Additionally, as the collaboration and partnership continues, changes to \n155 | P a g e \n \n Quality Management Annual Report FY 2018 \nmeasurement tools to further align indicators is both recommended and required for ongoing smooth review processes. 
\n156 | P a g e \n \n Appendix A: Abbreviations and Acronyms \n \nQuality Management Annual Report FY 2018 \n \nAcronym \nACT AD BH BHQR C\u0026A CIS CMS CST CSU CSUQR DBHDD DDSS FOA FY II IDD IFI IOP IOSA IRR IRP ISP QA ISP KPI MAR MH N NA NCI NCP OBS PCP PCR PHI PRR QEPR \n \nDefinition \nAssertive Community Treatment Addictive Diseases Behavioral Health Behavioral Health Quality Review Child and Adolescent Consumer Information System Centers for Medicaid and Medicare Services Community Support Team Crisis Stabilization Unit Crisis Stabilization Unit Quality Review Department of Behavioral Health and Developmental Disabilities Developmental Disability Service Specific Focused Outcome Area(s) Fiscal Year Individual Interview Intellectual and Developmental Disability Intensive Family Intervention Intensive Outpatient Individual Observation Staff Assessment Individual Record Review Individual Recovery / Resiliency Plan Individual Service Plan Quality Assurance Checklist Individual Service Plan Key Performance Indicator Medication Administration Record Mental Health Number in sample Not Applicable National Core Indicators Nursing Care Plan On-site Observations Person Centered Practices Person Centered Review Protected Health Information Provider Record Review Quality Enhancement Provider Review \n \n157 | P a g e \n \n Q\u0026T QTAC SC SCI SCRR SFS SI SIS SU TA \n \nQualifications and Training Quality Technical Assistance Consultation Support Coordinator Support Coordinator Interview Support Coordinator Record Review State Funded Services Staff Interview Supports Intensity Scale Substance Use Technical Assistance \n \nQuality Management Annual Report FY 2018 \n \n158 | P a g e \n \n Appendix B: Score Distributions \n \nQuality Management Annual Report FY 2018 \n \nGraphical distributions for each category show the percent of providers who scored within the specified range of scores, as indicated on the horizontal/x-axis. 
With each distribution, the median, mean, and mode may be provided. The median represents the score that falls in the middle of distribution (50th percentile) and the mean represents the average score. Standard deviation refers to the amount of variation or dispersion there is in a distribution of scores, or how much scores tend to spread-out from the mean. Standard deviations are sometimes used to determine significant differences between scores within a distribution; however, this application requires a distribution to be normally distributed (similar to a bell curve). Most of the distributions presented in this report do not meet the requirements of a normal distribution, meaning they do not look like a normal bell curve; therefore, standard deviations have been excluded from the report at this time. \n \nTable of Contents (Links available by clicking on Figure Name) \nBehavioral Health Quality Review Distributions \nBHQR Overall Score Distribution BHQR Tier 1 Overall Score Distribution BHQR Tier 2 Overall Score Distribution BHQR Tier 2+ Overall Score Distribution BHQR Tier 3 Overall Score Distribution BHQR Billing Score Distribution BHQR Assessment \u0026 Treatment Score Planning Distribution BHQR Service Guidelines Score Distribution BHQR Focused Outcome Areas Score Distribution \n \nAssertive Community Treatment (ACT) Quality Review Distributions \nACT Overall Score Distribution ACT Billing Score Distribution ACT Assessment \u0026 Planning Distribution ACT Service Guidelines Score Distribution ACT Focused Outcome Areas Score Distribution \n \n159 | P a g e \n \n CSU Quality Review Distributions \nCSU Overall Score Distribution CSU Service Guidelines Score Distribution CSU Focused Outcome Areas Score Distribution CSU Individual Record Review Score Distribution \n \nQuality Management Annual Report FY 2018 \n \nReassessment BHQR Distributions \nBHQR Overall Score Distribution of Reassessed Providers BHQR Billing Score Distribution of Reassessed Providers BHQR 
Assessment \u0026 Treatment Planning Distribution of Reassessed Providers BHQR Service Guidelines Score Distribution of Reassessed Providers BHQR Focused Outcome Areas Score Distribution of Reassessed Providers \n \nPCR Distributions: Scores by Tool and Year \nPCR Individual Interview Scores by Year PCR Observation Scores by Year PCR Staff Interview Scores by Year PCR Provider Record Review Scores by Year PCR Support Coordinator Review Scores by Year PCR Support Coordinator Interview Scores by Year \n \nQEPR Distributions: Scores by Tool and Year \nFY 2018 Overall QEPR Score FY 2018 QEPR Individual Interview Scores FY 2018 QEPR Observation Scores FY 2018 QEPR Staff Interview Scores FY 2018 QEPR Provider Record Review Scores FY 2018 QEPR Qualifications and Training Scores FY 2018 QEPR Service Discrepancy Scores FY 2018 QEPR Overall Scores by Provider Size FY 2018 QEPR Qualifications and Training Scores by Provider Size \n \n160 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nBHQR Overall Score Distribution by Year \n \n60% \n \n50% \n \n51%49% \n \n47% \n \n42% \n \n40% \n \n30% \n \n26% 25% \n \n20% \n \n16%19% \n \n10% \n \n5% 1% 0% 1%1%1%2%2%2% 3% 3% \n \n6% \n \n0% \n \nFY 2016 (N = 141; Mean = 84%; Median = 87%) FY 2017 (N = 167; Mean = 84%; Median = 86%) FY 2018 (N = 156; Mean = 88%; Median = 90%) \n \nBHQR Tier 1 Overall Score Distribution by Year \n \n80% \n \n70% \n \n68% \n \n60% \n \n58% \n \n50% \n \n48% 48% \n \n40% \n \n30% \n \n21% \n \n25% \n \n20% \n \n17% 11% \n \n10% \n \n3% \n \n0% \n \nFY 2016 (N = 24; Mean = 86%; Median = 88% ) FY 2017 (N = 28; Mean = 85%; Median = 85%) FY 2018 (N = 29; Mean = 88%; Median = 90%) \n \n161 | P a g e \n \n 60% 50% 40% 30% 20% 10% \n0% \n100% 80% 60% 40% 20% 0% \n \nQuality Management Annual Report FY 2018 \nBHQR Tier 2 Overall Score Distribution by Year \n51%50% 49% 41% \n \n19%19% \n \n24%24% \n \n1% \n \n2%1% 4%5%2% \n \n6% \n \nFY 2016 (N = 90; Mean = 85%; Median = 87%) FY 2017 (N = 115; Mean = 
85%; Median = 86%) FY 2018 (N = 93; Mean = 89%; Median = 90%) \nBHQR Tier 2+ Overall Score Distribution by Year \n83%83% 80% \n20% 17%17% \n \nFY 2016 (N = 5; Mean = 88%; Median = 87%) FY 2017 (N = 6; Mean = 86%; Median = 85%) FY 2018 (N = 6; Mean = 87%; Median = 86%) \n \n162 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nBHQR Tier 3 Overall Score Distribution by Year \n \n60% \n \n56% \n \n50% \n \n41% \n \n43% \n \n40% \n \n30% \n \n29%27% \n \n20% 10% \n \n5% \n \n14% 5%6%7% 6%7% \n \n11% 7% \n \n9%11%7% \n \n11% \n \n0% \n \nFY 2016 (N = 22; Mean = 78%; Median = 83%) FY 2017 (N = 18; Mean = 83%; Median = 91%) FY 2018 (N = 28; Mean = 83%; Median = 89%) \n \nBHQR Billing Score Distribution by Year \n \n70% \n \n60% \n \n53% \n \n50% \n \n48% 42% \n \n40% \n \n33% \n \n30% \n \n28%26% \n \n20% 10% \n \n4%1%1% 1%2%2% 1%1%1% 1%1%1% 2%2%1% 1%4%1% 5%5%3%10%8%11% \n \n0% \n \nFY 2016 (N = 141; Mean = 81%; Median = 88%) FY 2017 (N = 167; Mean = 84%; Median = 90%) FY 2018 (N = 156; Mean = 85%; Median = 92%) \n \n163 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nBHQR Assessment \u0026 Treatment Planning Score Distribution by Year \n \n50% \n \n40% \n \n40% \n \n30% \n \n31% 31%31% 30% \n \n29% \n \n23% \n \n21% \n \n20% \n \n16% \n \n17% \n \n10% \n \n4%5% \n \n7% \n \n9% \n \n1%1%1% 1%1%1% \n \n2% \n \n0% \n \nFY 2016 (N = 141; Mean = 79%; Median = 79%) FY 2017 (N = 167; Mean = 77%; Median = 78%) FY 2018 (N = 156; Mean = 84%; Median = 86% ) \n \nBHQR Compliance with Service Guidelines Score Distribution by Year \n \n70% 60% \n \n58% 62% 53% \n \n50% \n \n40% 30% \n \n34%31%29% \n \n20% \n \n10% \n \n1% \n \n1% 1%1% 1%1% 1%7%4% 6%7%3% \n \n0% \n \nFY 2016 (N = 140; Mean = 90%; Median = 92%) FY 2017 (N = 167; Mean = 88%; Median = 91%) FY 2018 (N = 156 ;Mean = 90%; Median = 93%) \n \n164 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nBHQR Focused Outcome Areas Score Distribution by Year \n80% 67% \n \n60% \n \n47% \n \n50% \n 
\n41% \n \n40% \n \n30% \n \n28% \n \n20% \n \n17% \n \n2% \n \n4%2%2% 8%3% \n \n0% \n \nFY 2016 (N = 141; Mean = 85%; Median = 87%) FY 2017 (N = 167; Mean = 89%; Median = 90%) FY 2018 (N = 156 ; Mean = 92%; Median = 93%) \n \nACT Overall Score Distribution by Year \n \n60% \n \n50% \n \n47% 48% 50%48% \n \n42% \n \n40% \n \n30% \n \n26% 25% \n \n20% \n \n11% \n \n10% \n \n5% \n \n0% \n \nFY 2016 (N = 19; Mean = 88%; Median = 91%) FY 2017 (N = 20; Mean = 87%; Median = 91%) FY 2018 (N = 21; Mean = 89%; Median = 90%) \n \n165 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nACT Billing Score Distribution by Year \n \n70% \n \n67% 60% \n \n60% \n \n58% \n \n50% \n \n40% \n \n37% \n \n30% \n \n20% \n \n20% 10% \n \n15% \n \n19% \n \n5%5%5% 10% \n \n0% \n \nFY 2016 (N= 19; Mean = 92%; Median = 96%) FY 2017 (N = 20; Mean = 90%; Median = 96%) FY 2018 (N = 21; Mean = 91%; Median = 95%) \n \nACT Assessment \u0026 Planning \n \nScore Distribution by Year \n \n70% \n \n60% \n \n58% \n \n50% 40% \n \n35% \n \n404%3% 38% \n \n30% \n \n26% \n \n20% \n \n20% \n \n19% \n \n16% \n \n10% \n \n5% \n \n0% \n \nFY 2016 (N = 19; Mean = 85%; Median = 80%) FY 2017 (N = 20; Mean = 80%; Median = 80%) FY 2018 (N = 21; Mean = 87%; Median = 87%) \n \n166 | P a g e \n \n 60% 50% 40% 30% 20% 10% \n0% \n90% 80% 70% 60% 50% 40% 30% 20% 10% \n0% \n \nQuality Management Annual Report FY 2018 \n \nACT Service Guidelines Score Distribution by Year \n \n50% \n \n33%32% \n \n383%7%35% 29% \n \n21% \n \n11% \n \n10% \n \n5% \n \nFY 2016 (N = 19; Mean = 85%; Median = 89%) FY 2017 (N = 20; Mean = 88%; Median = 89%) FY 2018 (N = 21; Mean = 84%; Median = 86%) \nACT FOAs Score Distribution by Year \n81% \n \n55% 47% \n37%35% \n \n16% \n \n19% \n \n10% \n \nFY 2016 (N = 19; Mean = 88%; Median = 91%) FY 2017 (N = 20; Mean = 90%; Median = 91%) FY 2018 (N = 21; Mean = 93%; Median = 95%) \n \n167 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nCSU Overall Score Distribution by Year \n 
\n50% 45% 40% 35% 30% 25% 20% 15% 10% \n5% 0% \n \n47% 43%48% \n \n43% \n \n35% \n \n26% 17% \n \n21% \n \n5%4%5% \n \n5% \n \nFY 2016 (N = 19; Mean = 83%; Median = 83%) FY 2017 (N = 23; Mean = 86%; Median = 88%) FY 2018 (N = 21; Mean = 88%; Median = 90%) \nCSU Compliance with Service Guidelines Score Distribution by Year \n \n80% \n \n76% \n \n70% \n \n61% \n \n60% \n \n50% \n \n40% \n \n32% 32% \n \n30% \n \n20% 10% \n \n4% \n \n11% 9% 11% 5% 4%5% \n \n14% 4% \n \n17% 16% \n \n0% \n \nFY 2016 (N = 19; Mean = 82%; Median = 80%) FY 2017 (N = 23; Mean = 87%; Median = 91%) FY 2018 (N = 21; Mean = 91%; Median = 100%) \n \n168 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nCSU Focused Outcome Areas Overall Distribution by Year \n \n60% \n \n57%52% \n \n50% \n \n47% 48% \n \n40% \n \n35% 37% \n \n30% \n \n20% \n \n16% \n \n10% \n \n9% \n \n0% \n \nFY 2016 (N = 19; Mean = 88%; Median = 88%) FY 2017 (N = 23; Mean = 91%; Median = 92%) FY 2018 (N = 21; Mean = 91%; Median = 91%) \n \nCSU Individual Record Review Score Distribution by Year \n \n70% \n \n65% \n \n60% \n \n50% \n \n42% \n \n40% 30% \n \n37% 29% 26% \n \n33% \n \n29% \n \n20% \n \n11% \n \n10% \n \n5% 4%5% 5%4%5% \n \n0% \n \nFY 2016 (N = 19; Mean = 79%; Median = 77%) FY 2017 (N = 23; Mean = 82%; Median = 82%) FY 2018 (N = 21; Mean = 83%; Median = 83%) \n \n169 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nFY 2018 BHQR Overall Score Distribution of Reassessed Providers \n \n70% \n \n60% \n \n60% \n \n55% \n \n50% \n \n40% \n \n35% \n \n30% \n \n20% \n \n20% \n \n15% \n \n10% \n \n5% 5% \n \n5% \n \n0% \n \nFirst Review FY 2018 (N = 20; Mean = 83%; Median = 84%) Second Review FY 2018 (N = 20; Mean = 91%; Median = 91%) \n \nFY 2018 BHQR Billing Score Distribution of Reassessed Providers \n \n60% \n \n55% \n \n50% \n \n40% \n \n40% \n \n35% \n \n30% \n \n25% \n \n20% \n \n10% \n \n10% \n \n10% \n \n10% \n \n5% \n \n5% \n \n5% \n \n0% \n \nFirst Review FY 2018 (N = 20; Mean = 75%; 
Median = 78%) Second Review FY 2018 (N = 20; Mean = 89%; Median = 92%) \n \n170 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nFY 2018 BHQR Assessment \u0026 Planning Score Distribution of Reassessed Providers \n70% 60% \n60% \n \n50% \n \n45% 45% \n \n40% \n \n30% \n \n20% \n \n10% \n \n5% \n \n15% \n \n10%10% \n \n10% \n \n0% \n \nFirst Review (N = 20; Mean = 79%; Median = 83%) Second Review (N = 20; Mean = 89%; Median = 89%) \n \nFY 2018 BHQR Service Guidelines Score Distribution of Reassessed Providers \n \n70% \n \n65% \n \n60% \n \n50% \n \n45% \n \n40% \n \n40% \n \n35% \n \n30% \n \n20% \n \n10% \n \n5% 5% 5% \n \n0% \n \nFirst Review (N = 20; Mean = 87%; Median = 89%) Second Review (N = 20; Mean = 89%; Median = 89%) \n \n171 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nFY 2018 BHQR FOAs Score Distribution of Reassessed Providers \n \n70% \n \n65% \n \n60% 50% \n \n50% 40% \n \n40% \n \n35% \n \n30% \n \n20% 10% \n10% \n \n0% \n \nFirst Review (N = 20; Mean = 88%; Median = 91%) Second Review (N = 20; Mean = 93%; Median = 95%) \n \nPCR Individual Interview Scores by Year \n \n80% \n \n67.8% \n \n70% \n \n66.1% \n \n62.0% \n \n60% \n \n50% \n \n40% \n \n30% \n \n21.6% \n \n20% \n \n5.4% 19.6% 11.0% \n \n18.2% 10.7% \n \n10% 0% \n \n0.2%0.4% 0.8%1.9%3.15%.8% \n \n5.4% \n \nFY 2016 (N = 484; Mean =95.1% ; Median = 97.1%) FY 2017 (N = 481; Mean = 91.9%; Median = 93.7%) FY 2018 (N = 484; Mean = 92.2%; Median = 95.1%) \n \n172 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nPCR Observation Scores by Year \n \n60% \n \n56.5% \n \n51.9% \n \n50% \n \n48.5% 48.8% \n \n41.7% \n \n40% \n \n38.1% \n \n30% \n \n20% \n \n10% \n \n3.90%.42%.76%.11%.4% \n \n0% \n \nFY 2016 (N = 412; Mean = 98.4%; Median = 99.5%) FY 2017 (N = 181; Mean =96.8%; Median = 98.7%) FY 2018 (N = 276; Mean =98.2%; Median = 100%) \n \nPCR Staff Interview Scores by Year \n \n70% \n \n69.3% 67.8% 67.3% \n \n60% \n \n50% \n \n40% \n \n30% \n \n24.5% 24.4% \n 
\n20% 10% \n0% \n \n0.4% \n \n2.2% \n \n11.1% 6.8% 5.8% \n \n0.9% 0.8% 2.6% \n \n16.0% \n \nFY 2016 (N = 482; Mean = 96.4%; Median = 97.7%) FY 2017 (N = 224; Mean = 94.3%; Median = 96.0%) FY 2018 (N = 312; Mean = 95.6%; Median = 97.1%) \n \n173 | P a g e \n \n 35% 30% 25% 20% 15% 10% \n5% 0% \n \nQuality Management Annual Report FY 2018 \nPCR Provider Record Review Scores by Year \n \n0.2% \n \n26.6% \n \n29.3% 28.4% \n \n29.2% \n \n23.9% 22.5% \n \n18.3% 16.4% \n14.1% \n9.3% 5.0% 6.2% 1.2% 3.4% 3.9% 0.3% 0.6% \n \n17.7% 18.3% \n10.6% 7.3% \n5.6% 1.7% \n0.2% \n \nFY 2016 (N = 356; Mean = 79.0%; Median = 79.1%) FY 2017 (N = 478; Mean = 70.2%; Median = 70.5%) FY 2018 (N = 483; Mean = 74.8%; Median = 75.9%) \n \nPCR Support Coordinator Record Review Scores by Year \n \n30% 25% 20% 15% 10% \n5% 0% \n \n25.0% \n \n24.3% \n \n24.4% \n \n22.9% \n \n20.2% 21.1% 19.4% \n \n15.8% 15.3% 13.0% \n9.6% 11.4% \n \n17.0% 14.5% \n \n9.1% 5.4% 6.8% \n \n5.0% \n \n0.8%0.20%.8%0.40%.8%0.4%1.7%1.5%0.8%0.2%2.7%2.1% \n \n2.3% \n \n2.7% 2.5% \n \nFY 2016 (N = 484; Mean = 79.9%; Median = 84.1%) FY 2017 (N = 481; Mean = 73.7%; Median = 76.4%) FY 2018 (N = 484; Mean = 77.4%; Median = 75.9%) \n \n174 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nPCR Support Coordinator Interview Scores by Year \n \n60% \n \n50% 40% 30% \n \n45.5% 45.1% 39.6% 41.5% 39.0% \n33.1% \n \n20% \n \n12.0% \n \n10.6% \n10% 5.0% \n \n0.4% \n \n0.4% \n \n0% \n \n0.2% \n \n7.1% \n2.5% 7.3% 5.2% 1.0%0.2%1.5% 1.7% \n \nFY 2016 (N = 477; Mean = 90.8%; Median = 98.4%) FY 2017 (N = 479; Mean = 83.3%; Median = 96.7%) FY 2018 (N = 484; Mean = 83.5%; Median = 98.4%) \n \nFY 2018 Overall QEPR Score \n \n60% \n \n54.5% \n \n50% \n \n40% 32.3% \n30% \n \n20% 13.1% \n10% \n \n0% \n \n(N = 99; Mean = 84.8%; Median = 86.4%) \n \n175 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nFY 2018 QEPR Individual Interview Scores \n \n80% \n \n70% \n \n68.7% \n \n60% \n \n50% \n \n40% \n \n30% \n \n24.2% \n \n20% 
\n \n10% \n \n5.1% \n \n1.0% \n \n1.0% \n \n0% \n \n(N = 99; Mean = 89.6%; Median = 91.9%) \n \nFY 2018 QEPR Observation Scores \n \n80% \n \n75.9% \n \n70% \n \n60% \n \n50% \n \n40% \n \n30% \n \n20% \n \n13.8% \n \n10% \n \n8.0% \n \n2.3% \n \n0% \n \n(N = 87; Mean = 96.0%; Median = 97.2%) \n \n176 | P a g e \n \n Quality Management Annual Report FY 2018 \n \nFY 2018 QEPR Staff Interview Scores \n \n90% \n \n81.6% \n \n80% \n \n70% \n \n60% \n \n50% \n \n40% \n \n30% \n \n20% \n \n10% \n \n10.2% \n \n7.1% \n \n1.0% \n \n0% \n \n(N = 98; Mean = 95.3%; Median = 96.3%) \n \nFY 2018 QEPR Record Review Scores \n40% \n \n30.3% \n \n30% \n \n27.3% \n \n20.2% 20% \n14.1% \n \n10% \n \n5.1% \n \n3.0% \n \n0% \n \n(N = 99; Mean = 72.4%; Median = 73.4%) \n \n177 | P a g e \n \n 50% 40% 30% 20% 10% \n0% \n100% 80% 60% 40% 20% 0% \n \nQuality Management Annual Report FY 2018 \nFY 2018 QEPR Qualifications and Training Scores \n45.5% \n \n1.0% \n \n18.2% 15.2% \n5.1% 6.1% 1.0% \n \n8.1% \n \n(N = 99; Mean = 84.6%; Median = 90.7%) \nFY 2018 QEPR Service Discrepancy Scores \n \n91.9% \n \n8.1% \n \n(N = 99; Mean = 99.8%; Median = 100.0%) \n \n178 | P a g e \n \n Quality Management Annual Report FY 2018 \nFY 2018 QEPR Overall Scores by Provider Size \n80% 73.3% \n70% \n \n60% \n51.4% 50% \n50.0% \n \n40% \n \n37.5% \n \n33.3% \n \n30% \n \n20% \n \n16.7% \n \n13.3% 11.1% \n \n13.3% \n \n10% \n \n0% \nSmall (N = 72; Mean = 86.3%; Median = 87.2%) Medium (N = 12; Mean = 82.0%; Median = 84.6%) Large (N = 15; Mean = 84.4%; Median = 83.5%) \n \n179 | P a g e \n \n Quality Management Annual Report FY 2018 \nFY 2018 QEPR Qualifications and Training Scores by Provider Size \n70% 66.7% \n60% \n \n50% 44.4% \n \n40% \n \n33.3% \n \n30% \n \n25.0% \n \n25.0% \n \n20.0% 20% \n16.7% 16.7% \n \n10% \n \n8.3% \n \n6.7% \n \n6.7% \n \n5.6% 4.2% \n \n1.4% \n \n1.4% \n \n0% \n \n9.7% 8.3% \n \nSmall (N = 72; Mean = 85.3%; Median = 90.9%) Medium (N = 12; Mean = 81.1%; Median = 83.9%) Large (N = 15; 
Mean = 85.8%; Median = 83.5%) \n \n180 | P a g e \n \n "},{"id":"dlg_ggpd_y-ga-bb400-b-pa15-b2017-belec-p-btext","title":"Quality management annual report, FY 2017","collection_id":"dlg_ggpd","collection_title":"Georgia Government Publications","dcterms_contributor":["Georgia. Department of Behavioral Health and Developmental Disabilities"],"dcterms_spatial":["United States, Georgia, 32.75042, -83.50018"],"dcterms_creator":["Georgia Collaborative ASO"],"dc_date":["2017"],"dcterms_description":["Began with: FY 2016.","FY 2016 (harvested on May 6, 2019 from dbhdd.georgia.gov); title from PDF cover (Georgia Government Publications database, viewed March 18, 2020).","FY 2018 (harvested on May 29, 2019 from dbhdd.georgia.gov) (Georgia Government Publications database, viewed March 18, 2020)."],"dc_format":["application/pdf"],"dcterms_identifier":null,"dcterms_language":["eng"],"dcterms_publisher":["Atlanta, Ga. : Georgia. Department of Behavioral Health and Developmental Disabilities"],"dc_relation":null,"dc_right":["http://rightsstatements.org/vocab/InC/1.0/"],"dcterms_is_part_of":null,"dcterms_subject":["Georgia Collaborative ASO--Evaluation--Periodicals","Mental health services--Georgia--Evaluation--Periodicals","Developmentally disabled--Services for--Georgia--Evaluation--Periodicals","Developmentally disabled--Services for--Evaluation","Evaluation","Mental health services--Evaluation","Georgia","Annual reports","Periodicals"],"dcterms_title":["Quality management annual report, FY 2017"],"dcterms_type":["Text"],"dcterms_provenance":["University of Georgia. 
Map and Government Information Library"],"edm_is_shown_by":["https://dlg.galileo.usg.edu/do:dlg_ggpd_y-ga-bb400-b-pa15-b2017-belec-p-btext"],"edm_is_shown_at":["https://dlg.galileo.usg.edu/id:dlg_ggpd_y-ga-bb400-b-pa15-b2017-belec-p-btext"],"dcterms_temporal":null,"dcterms_rights_holder":null,"dcterms_bibliographic_citation":null,"dlg_local_right":null,"dcterms_medium":["reports"],"dcterms_extent":null,"dlg_subject_personal":null,"iiif_manifest_url_ss":null,"dcterms_subject_fast":null,"fulltext":"FY 2017 Quality Management \nAnnual Report \n \n Quality Management Annual Report FY 2017 \nSECTION 1: EXECUTIVE SUMMARY ............................................................................................................................... 4 \nBehavioral Health ..........................................................................................................................................................4 Intellectual and Developmental Disabilities ..................................................................................................................8 \nSECTION 2: INTRODUCTION ........................................................................................................................................12 \nSECTION 3: BEHAVIORAL HEALTH QUALITY REVIEWS...................................................................................................14 \nBACKGROUND......................................................................................................................................................................14 SAMPLING METHOD .............................................................................................................................................................15 \nIndividual Records and Billing Review .........................................................................................................................15 Individual and Staff Interviews 
....................................................................................................................................16 BHQR OVERALL REVIEW SCORES............................................................................................................................................17 BHQR Overall Scores....................................................................................................................................................17 BHQR Overall Scores by Tier ........................................................................................................................................19 BHQR Billing Validation ...............................................................................................................................................20 BHQR Assessment and Treatment Planning ................................................................................................................24 BHQR Compliance with Service Guidelines..................................................................................................................26 BHQR Focused Outcome Areas (FOA) ..........................................................................................................................30 BHQR Staff and Individual Interviews..........................................................................................................................33 ASSERTIVE COMMUNITY TREATMENT .......................................................................................................................................35 ACT Quality Review Overall Scores ..............................................................................................................................35 ACT Billing Validation ..................................................................................................................................................36 ACT Assessment and Treatment Planning 
...................................................................................................................39 ACT Compliance with Service Guideline Scores ...........................................................................................................40 ACT Focused Outcome Areas .......................................................................................................................................42 CRISIS STABILIZATION UNIT QUALITY REVIEWS ...........................................................................................................................44 Crisis Stabilization Unit Sample Method......................................................................................................................44 CSU Quality Review .....................................................................................................................................................45 CSU Individual Record Review (IRR) .............................................................................................................................46 CSU Compliance with Service Guidelines.....................................................................................................................48 CSU Focused Outcome Area ........................................................................................................................................52 CSU Staff Interview (SI) and Individual Interview (II) ...................................................................................................54 REASSESSMENT FREQUENCY REVIEW........................................................................................................................................57 BHQR Reassessment Review Findings .........................................................................................................................57 CSU Reassessment Review Findings 
............................................................................................................................61 TECHNICAL ASSISTANCE/EXIT CONFERENCE ...............................................................................................................................62 SUMMARY OF FINDINGS AND RECOMMENDATIONS FOR BEHAVIORAL HEALTH PROVIDERS ..................................................................63 System Strengths .........................................................................................................................................................63 Opportunities for Improvement...................................................................................................................................66 \nSECTION 4: INTELLECTUAL AND DEVELOPMENTAL DISABILITIES ..................................................................................70 \nBACKGROUND......................................................................................................................................................................70 1 \n \n Quality Management Annual Report FY 2017 \nSAMPLING METHOD .............................................................................................................................................................71 Person-Centered Review (PCR) ....................................................................................................................................71 Quality Enhancement Provider Review (QEPR) ...........................................................................................................72 \nREVIEW PROCESSES ..............................................................................................................................................................73 Onsite Reviews (PCR and QEPR) ..................................................................................................................................73 Quality Technical 
Assistance Consultation (QTAC) .......................................................................................................75 \nPERSON-CENTERED REVIEW ...................................................................................................................................................75 PCR Scores by Tool .......................................................................................................................................................75 PCR Scores by Focused Outcome Area (FOA)...............................................................................................................77 PCR Scores by Tool and Focused Outcome Area (FOA) ................................................................................................78 Opportunities for Growth by FOA ................................................................................................................................79 Individual Service Plan Quality Assurance Checklist (ISP QA) ......................................................................................87 ISP Expectations ..........................................................................................................................................................89 PCR Results by Service .................................................................................................................................................91 PCR Strengths and Recommendations ........................................................................................................................93 \nQUALITY ENHANCEMENT PROVIDER REVIEW (QEPR)..................................................................................................................95 QEPR Scores by Tool ....................................................................................................................................................95 Overall QEPR Score 
......................................................................................................................................................96 Overall Crisis Provider Scores ......................................................................................................................................97 Qualifications and Training (Q \u0026 T) and Service Specific (DDSS) .................................................................................98 Qualifications \u0026 Training Opportunities for Improvement ..........................................................................................99 QEPR Scores by Provider Size.................................................................................................................................... 101 QEPR Scores by Focused Outcome Area (FOA) ......................................................................................................... 102 QEPR Scores by Tool and FOA ................................................................................................................................... 103 QEPR Scores by FOA and Provider Size ..................................................................................................................... 105 \nQUALITY AND TECHNICAL ASSISTANCE CONSULTATION (QTAC)................................................................................................... 106 INTELLECTUAL AND DEVELOPMENTAL DISABILITY SUMMARY OF FINDINGS AND RECOMMENDATIONS ................................................. 108 \nSystem Strengths ...................................................................................................................................................... 108 Opportunities for Improvement................................................................................................................................ 
109 \nSECTION 5: BEHAVIORAL HEALTH AND INTELLECTUAL DEVELOPMENTAL DISABILITIES: OVERALL RESULTS BY FOCUSED OUTCOME AREA ........................................................................................................................................................115 \nSECTION 6: FEEDBACK SURVEY RESULTS ....................................................................................................................117 \nSECTION 7: CONCLUSION...........................................................................................................................................120 \nAPPENDIX A: ABBREVIATIONS AND ACRONYMS........................................................................................................122 \nAPPENDIX B: SCORE DISTRIBUTIONS .........................................................................................................................124 \nBHQR Overall Score Distribution by Year ................................................................................................................ 126 BHQR Tier 1 Overall Score Distribution by Year ...................................................................................................... 126 BHQR Tier 2 Overall Score Distribution by Year ...................................................................................................... 127 BHQR Tier 2+ Overall Score Distribution by Year .................................................................................................... 127 \n2 \n \n Quality Management Annual Report FY 2017 \nBHQR Tier 3 Overall Score Distribution by Year ...................................................................................................... 128 BHQR Billing Score Distribution by Year.................................................................................................................. 
128 BHQR Assessment \u0026 Treatment Planning Distribution by Year ............................................................................. 129 BHQR Compliance with Service Guidelines Score Distribution by Year .................................................................. 129 BHQR Focused Outcome Areas Score Distribution by Year..................................................................................... 130 ACT Overall Score Distribution by Year ................................................................................................................... 130 ACT Billing Score Distribution by Year ..................................................................................................................... 131 ACT Assessment \u0026 Treatment Planning .................................................................................................................. 131 ACT Compliance with Service Guidelines ................................................................................................................ 132 ACT FOAs Score Distribution by Year ....................................................................................................................... 132 CSU Overall Score Distribution by Year ................................................................................................................... 133 CSU Compliance with Service Guidelines Score Distribution by Year ..................................................................... 133 CSU Focused Outcome Areas Overall Distribution by Year ..................................................................................... 134 CSU Individual Record Review Score Distribution by Year ...................................................................................... 134 BHQR Overall Score Distribution ............................................................................................................................. 
135 BHQR Billing Score Distribution............................................................................................................................... 135 BHQR Assessment \u0026 Treatment Planning Score Distribution ................................................................................ 136 BHQR Compliance with Service Guidelines Score Distribution ............................................................................... 136 BHQR FOAs Score Distribution................................................................................................................................. 137 PCR Individual Interview Scores by Year ................................................................................................................. 137 PCR Staff Interview Scores by Year.......................................................................................................................... 138 PCR Support Coordinator Review Scores by Year.................................................................................................... 138 PCR Observation Scores by Year .............................................................................................................................. 139 PCR Provider Record Review Scores by Year ........................................................................................................... 139 PCR Support Coordinator Review Scores by Year.................................................................................................... 140 FY 2017 Overall QEPR Score .................................................................................................................................... 140 FY 2017 QEPR Individual Interview Scores .............................................................................................................. 
141 FY 2017 QEPR Staff Interview Scores ...................................................................................................................... 141 FY 2017 QEPR Observation Scores .......................................................................................................................... 142 FY 2017 QEPR Observation Scores .......................................................................................................................... 142 FY 2017 QEPR Qualifications and Training Scores .................................................................................................. 143 FY 2017 QEPR Service Discrepancy Scores .............................................................................................................. 143 FY 2017 QEPR Overall Scores by Provider Size ........................................................................................................ 144 FY 2017 QEPR Qualifications and Training Scores by Provider Size ....................................................................... 145 \n3 \n \n Section 1: Executive Summary \n \nQuality Management Annual Report FY 2017 \n \nThe Georgia Department of Behavioral Health and Developmental Disabilities (DBHDD) contracts with the Georgia Collaborative Administrative Services Organization (the Collaborative) to integrate disparate systems for data, service delivery, finance, and quality oversight for individuals receiving behavioral health or intellectual and developmental disability services. Quality Management within the Collaborative is tasked with incorporating these two distinct systems into one collaborative and integrated program. On June 30, 2017, Quality Management completed the second year of the contract with DBHDD. \nThere are many similarities between the two systems. 
Similar tools and processes are used to monitor quality and compliance within each area, including interviews with individuals and providers/staff and review of records maintained by providers for individuals receiving services. Both divisions review an individual plan for people receiving services. Examples include the Individualized Recovery/Resiliency Plan (behavioral health), and the Individual Service Plan (intellectual and developmental disabilities). In both systems, Focused Outcome Areas (FOA) critical to an individual's quality of life are assessed: Person Centered Practices, Choice, Community, Rights, Whole Health, and Safety. However, there are some distinct differences as well. Intellectual and Developmental Disability reviews include onsite observations of licensed residential and day programs, as well as an administrative review of staff qualifications and training requirements. Behavioral Health reviews include extensive claims review to identify billing discrepancies and report the results of the Billing Validation on each review. \nBehavioral Health \nDuring the first year of the contract, Quality Management completed one Behavioral Health Quality Review (BHQR) for each of the 141 providers with an additional subset review of 18 Assertive Community Treatment (ACT) programs for a combined 159 reviews. The second year resulted in a total of 167 BHQRs with an additional 18 ACT reviews totaling 185 reviews across a network of 132 behavioral health providers. See the table below. Of the 185 BHQRs completed, 35 were for \n4 \n \n Quality Management Annual Report FY 2017 \nproviders who had a second review in fiscal year (FY) 2017, based on scoring criteria (\u003c80 percent overall score or \u003c70 percent billing validation). This practice will continue through the upcoming 2018 fiscal year. The table below provides details by review type of the categories surrounding BHQRs as well as Crisis Stabilization Unit Quality Reviews (CSUQR). 
\n \nFY 2017 Overview of BHQR and CSUQR Review Data \n \nReview Type \n \nNumber of Reviews \n \nRecords Reviewed \n \nIndividuals Interviewed \n \nStaff Interviewed \n \nBHQR \n \n185* \n \n3,816 \n \n735 \n \n774 \n \nCSUQR \n \n23 \n \n343 \n \n114 \n \n115 \n \nTotal \n \n208 \n \n4,159 \n \n849 \n \n889 \n \n*FY17 resulted in 167 BHQR reviews with an additional eighteen ACT specific reviews for a total of 185 reviews. \n \nBHQRs measure provider performance through four different categories: billing validation, FOAs, assessment and treatment planning, and service guidelines. Assertive Community Treatment services are also reviewed when such services are rendered by the provider. Findings are shown in the following table for BHQR and ACT reviews. The BHQR results are similar each year, although two category scores increased in FY 2017 (billing validation and FOA), while the remaining two categories decreased (assessment and treatment planning, and compliance with service guidelines). In FY 2017, both BHQR and ACT results continued to show lower compliance with assessment and treatment planning standards than in other categories. Additionally, FOAs increased for both BHQR and ACT reviews compared to the FY 2016 baseline year. \n \nBHQR FY 2016 BHQR FY 2017 ACT FY 2016 ACT FY 2017 \n \nBHQR and ACT Results by Category by Year \n \nAssessment \n \nBilling \n \nand Treatment Service \n \nValidation \n \nFOA \n \nPlanning Guidelines \n \n81% \n \n85% \n \n79% \n \n90% \n \n84% \n \n89% \n \n77% \n \n88% \n \n92% \n \n88% \n \n85% \n \n85% \n \n90% \n \n90% \n \n80% \n \n88% \n \nOverall 84% 84% 88% 87% \n \n5 \n \n Quality Management Annual Report FY 2017 \nDetailed findings of the lowest-scoring areas within the broader categories may influence quality improvement initiatives. 
These findings include the following: \n Assessment and treatment planning standards for both BHQR and ACT reviews indicated a need for the IRP to address co-occurring health conditions, whole health and wellness needs, as well as addressing assessed needs. Additionally, discharge plans which clearly define criteria for discharge (i.e., clear, clinical benchmarks) were lacking in both review types. \n Although whole health remained the lowest-scoring FOA for behavioral health, it increased for both the behavioral health and ACT reviews conducted in the second year with safety scoring as the lowest FOA for ACT reviews. \n The behavioral health whole health indicators demonstrating lower results included the following: o Ongoing assessment to determine the need for external referrals for health services, supports, and treatment were often not documented; o Communication with external referral sources to identify results of testing and treatment were not documented within the record; o Medical conditions were often not assessed, monitored, and recorded; o Safeguards for medications used were often not documented. \n Co-occurring health conditions were addressed in 36 percent of the IRPs reviewed in FY 2016 and declined to 34 percent through the reviews of FY 2017. \n FY 2017 specific billing issues most often cited remained similar to the FY 2016 results and included the following: o Missing/incomplete service orders o Individuals not meeting admission criteria o Missing progress notes \nIn January 2016, Quality Management initiated a review of Georgia's 19 crisis stabilization units (CSU). The review continued through FY 2017 and incorporated the same 19 CSUs with four CSUs \n6 \n \n Quality Management Annual Report FY 2017 \nrequiring a second review within the year due to low initial scores. 
The focus of CSUQR is to assess compliance with DBHDD and Medicaid requirements, using the same categories as shown for BHQRs except for billing validation and assessment and treatment planning. Billing validation was not reviewed for CSU providers in FY 2017, at the request of DBHDD. In place of assessment and treatment planning, an individual record review (IRR) occurred with similar questions raised such as individual meeting admission criteria, discharge plans documenting necessary information, as well as co-occurring health conditions being addressed. The CSU scores for FY 2016 and FY 2017 are shown in the following table and indicate the following: \n Scores increased across all categories  The average overall score showed an increase from 83 to 86 percent  FOA was the highest scoring category each year \n \nBHQR and CSUQR Results by Category by Year \n \nAssessment \n \nand \n \nBilling \n \nTreatment \n \nService \n \nValidation FOA \n \nPlanning \n \nGuidelines \n \nBHQR FY 2016 \n \n81% \n \n85% \n \n79% \n \n90% \n \nBHQR FY 2017 \n \n84% \n \n89% \n \n77% \n \n88% \n \nOverall 84% 84% \n \nCSUQR FY 2016 \n \nN/A \n \n88% (IRR) 78% \n \n82% \n \n83% \n \nCSUQR FY 2017 \n \nN/A \n \n91% (IRR) 80% \n \n87% \n \n86% \n \nResults at the indicator level in FY 2017 were consistent with the initial FY 2016 period, suggesting the following: \n Relatively high compliance specific to documentation of admission criteria, timely assessments, daily status updates, and required staffing and physician access \n Documentation lacked evidence unique to co-occurring health conditions, as well as proof of meeting the American Society for Addiction Medicine (ASAM) patient placement criteria, when appropriate \n7 \n \n Quality Management Annual Report FY 2017 \n \n Individuals did not consistently participate in treatment planning and their discharge plans were not tailored to their individual needs \n \nIntellectual and Developmental Disabilities \nQuality Management 
completed 481 person-centered reviews (PCR), 100 quality enhancement provider reviews (QEPR) and 208 quality technical assistance consultations (QTAC) as part of the quality reviews for services offered to individuals with intellectual and developmental disabilities. See the table below for FY 2017 details by intellectual and developmental disability review type. \n \nReview Type \nPCR QEPR QTAC Total \n \nFY 2017 Overview of IDD Review Data \n \nNumber of \n \nRecords \n \nIndividuals \n \nReviews \n \nReviewed \n \nInterviewed \n \n481 \n \n894 \n \n481 \n \n100 \n \n1,356 \n \n473 \n \n208 \n \nN/A \n \nN/A \n \n789 \n \n2,250 \n \n954 \n \nStaff \nInterviewed \n304 398 N/A 702 \n \nThe focus of the PCR is to assess the individual's quality of life, as well as the effectiveness of and satisfaction with the service delivery system from the individual's perspective. This holistic process included interviews with the individual and direct support staff including the support coordinator (SC) providing services, observations (if applicable to the service setting), record reviews for all services received including support coordination, and a review of the ISP. \nThe focus of the QEPR is to monitor providers to ensure they meet requirements set forth by the Medicaid waivers and DBHDD to evaluate the effectiveness of their service delivery system. This process includes interviews with individuals receiving services from the provider, interviews with staff, observations for day and residential programs, record reviews including ISP and an administrative review of staff qualifications and training. \nThe QTACs focus on follow-up review activities and provide technical assistance to help providers improve service delivery systems. 
Tools of intellectual and developmental disability reviews include the individual interview (II) and staff interview (SI), individual observation staff assessment (IOSA), \n8 \n \n Quality Management Annual Report FY 2017 \nprovider record review (PRR), support coordinator record review, support coordinator interview, the administrative qualifications and training tool, as well as a tool for developmental disability service specific (DDSS). The table below indicates the results of each tool by review type. Because crisis services are different, the four providers offering crisis services are reviewed separately. \n \nFY 2017 Summary by Tool and Review Type \n \nPCR \n \nQEPR Crisis \n \nTool \n \n(N = 481) (N = 96) (N = 4) \n \nIndividual Interview \n \n91.9% \n \n90.7% 86.0% \n \nIOSA  Observation \n \n96.8% \n \n95.7% 92.5% \n \nStaff Interview \n \n94.3% \n \n95.2% 89.1% \n \nProvider Record Review \n \n70.2% \n \n69.6% 84.1% \n \nSC Record Review \n \n73.7% \n \nN/A N/A \n \nSC Interview \n \n83.3% \n \nN/A N/A \n \nAdmin Q\u0026T \n \nNA \n \n82.7% 91.8% \n \nDDSS \n \n99.8% \n \n99.8% 100% \n \nThe Quality Management team identified a number of strengths this year in the intellectual and developmental disability service delivery system: \n Many staff and providers were aware of the unique safety needs and achievements of the individuals they serve. \n Individuals were aware of how to self-preserve in unsafe situations.  Individuals feel valued in part because most direct service providers have a clear \nunderstanding of each individual's unique communication styles and skills.  In many PCRs, it was noted staff promoted independence, as well as the use of person- \ncentered values and practices.  Record review findings indicated more than 95 percent of staff reviewed were in compliance \nwith background screening requirements.  Data indicate relatively high scores for most components of both review processes. 
\n \n9 \n \n Quality Management Annual Report FY 2017 \nHowever, several specific findings point to results being used to guide training sessions or quality improvement initiatives: \n As in FY 2016, FY 2017 results showed provider record reviews (PRR) and support coordinator record reviews were the lowest scoring components, particularly for providers offering prevocational, community access (group) and community living supports. \n Documentation was less evident in some areas of rights, responsibilities, and restrictive interventions, as well as informed choice and aspects of abuse, neglect, and exploitation. \n Staff record reviews indicated some key trainings were often missing, including how to work with individuals with co-occurring diagnoses, suicide prevention, and use of the Georgia Crisis System. \nData from the FY 2017 PCRs compared to FY 2016, showed a decrease across the tools and all the FOAs, particularly in the areas of choice, whole health, and community life. Some key findings indicate the following: \n Individuals are often not receiving education on their medications, including what they are taking, why, and what the side effects are. \n Informed consent forms for psychotropic medications were often not in the record or not signed by the individual or guardian. \n For most ISPs, the goals were not person-centered, and most records reviewed for staff indicated a lack of training on person-centered values, principles, and approaches. \n Support coordinator and provider record review evidence indicated individuals are often not offered opportunities to seek and find competitive employment, have new community experiences, or participate in community activities. \nIt has been a goal for DBHDD and the Collaborative to integrate the processes used to measure quality for behavioral health and intellectual and developmental disability services. 
However, evaluating side-by-side comparisons is difficult due to different types of sampling methods, as well \n10 \n \n Quality Management Annual Report FY 2017 \nas differences in the tools and standards used. Quality Management will review the behavioral health and intellectual and developmental disability record review tools to identify modifications which may enhance the ability to make these types of comparisons for the upcoming fiscal year. These and other results are discussed in more detail throughout the report, including a summary of review processes and sampling procedures. The report also includes a summary of findings for each FOA; in-depth analysis of data from the behavioral health, intellectual and developmental disability, and CSU quality reviews; as well as tabular and graphic displays of findings. Throughout the report, abbreviations and acronyms are employed for brevity and efficiency, which can be found in Appendix A. \n11 \n \n Section 2: Introduction \n \nQuality Management Annual Report FY 2017 \n \nThe Georgia Collaborative Administrative Services Organization (the Collaborative), contracted by Georgia's Department of Behavioral Health and Developmental Disabilities (DBHDD), employed 30 quality assessors to complete more than 1,400 quality reviews in the past two fiscal years across Georgia. The mission of Quality Management within the Collaborative is to provide DBHDD with valuable and useful data for use in making decisions about the quality of services, funding, development of programs, etc. \nThis annual report is the result of assessing, gathering, compiling, analyzing, and measuring the quality of the service delivery system. This is accomplished through assessment of and technical assistance to DBHDD's behavioral health, and intellectual and developmental disability providers. Further, this report contains suggestions for modifications in some processes, methods, approaches, and tools to measure the quality and impact of services. 
Quality Management recognizes that even quality assessment or measurement processes must have a quality review periodically to examine and determine if we are measuring the things most important and meaningful to individuals receiving services within Georgia. \nThis second annual report includes behavioral health, intellectual and developmental disability, and crisis stabilization unit (CSU) findings as well as a detailed explanation of the review processes, analysis and comparisons of network performance across providers, and information from individuals about the services received. The difference in proportions test was used to test statistical significance, where applicable.1 Indicators with an N of 90 or greater and a pass/fail rate of more than 10 percent were analyzed to determine change over time, from Year 1 to Year 2. Areas highlighted in blue within tables reflect significance at a p \u003c .05 level unless stated otherwise. While there are similarities in the review processes, there are also distinct differences; \n \n1 Bohrnstedt, George W. \u0026 Knoke, David. (1988). Statistics for Social Data Analysis, 2nd Edition. Itasca, Illinois. F.E. Peacock Publishers, Inc., pgs. 198-200. \n12 \n \n Quality Management Annual Report FY 2017 \ntherefore, behavioral health, intellectual and developmental disability, and CSU results are reported separately. 
The report is divided into the following sections: \n Behavioral Health Quality Reviews o Assertive Community Treatment \n Crisis Stabilization Unit Quality Reviews  Intellectual and Developmental Disabilities \no Person-Centered Reviews o Quality Enhancement Provider Reviews o Quality Technical Assistance Consultations  Behavioral Health, and Intellectual and Developmental Disability Discussion  Provider and Individual Satisfaction Surveys  Two appendices are attached o Acronym and abbreviation list o Distribution for all tools used in the BHQR, CSUQR, and intellectual and \ndevelopmental disability reviews  Summaries of findings and recommendations after each section to address areas needing \nimprovement or training/education programs to help improve the quality of services provided to Georgians \n13 \n \n Quality Management Annual Report FY 2017 \nSection 3: Behavioral Health Quality Reviews \nBackground \nThe purpose of the Behavioral Health Quality Review (BHQR) is to determine adherence to DBHDD's standards and to assess the quality of the service delivery system through individual record and claims reviews.2 Review questions are based on DBHDD and Medicaid requirements, and are organized into four categories: billing validation, assessment and treatment planning, compliance with service guidelines, and focused outcome areas (FOA). The score for each category represents the percent of relevant questions met or present. The BHQR overall score is calculated by averaging the scores for the four categories.3 Each category accounts for 25 percent of the overall rating. Minor alterations were made before the FY 2017 review process began to adhere to DBHDD and Medicaid requirement changes, as well as language changes specific to tools, providing more clarity for assessors and providers. \nWhen a BHQR is completed, the lead assessor convenes the team to analyze data gathered and plan the exit conference. 
The exit conference is designed to give immediate, preliminary feedback of the BHQR findings to the provider. A report of these preliminary findings is left with the provider in the form of an exit conference report which outlines the provider's identified strengths and any areas of concern from the four primary categories of the review. Assessors also include any items of concern falling outside the parameters of the review determined to be an area of concern or risk. Technical assistance is also provided during the review and exit conference. \nWithin 30 days of completion of a BHQR, a final assessment report is posted on the Collaborative's website, and the provider is notified via electronic mail of the posting along with the final scores identified as the Final Assessment. Like the exit conference report, the final assessment identifies \n2 Please refer to the following link to access a full description of the review process and review tools. http://georgiacollaborative.com/providers/prv-BH.html 3 The FOA subcategories are individually scored and are not averaged for the final overall FOA result at the review level. The final overall FOA result is calculated by adding all \"yes\" or \"present\" responses of all FOA subcategories and dividing by the total \"yes\" or \"no\" responses of the combined subcategories for each review. \n14 \n \n Quality Management Annual Report FY 2017 \nstrengths and opportunities for growth in the four categories of FOA, billing score, compliance with service guidelines, and assessment and treatment planning. The final assessment includes recommendations for improvement and comparisons to a statewide average based on the previous year's results. The final assessment also highlights any areas of concern or risk which fall outside the scope or scoring of the BHQR in the form of additional comments on practices. \nProviders are offered an opportunity to appeal the BHQR findings. 
Appeal information, including timeframes for submission, is provided upon notification of the completion of the final report. \nSampling Method \nIndividual Records and Billing Review \nDuring the 2017 fiscal year, 132 providers were eligible for review, as determined by DBHDD, and received a BHQR. This is a reduction from the implementation year (FY 2016) of 141 providers. Reduction in providers may have been a direct result of site closure, inability to provide previously reviewed services, limited claims or services to individuals resulting in a lack of adequate volume for review, or at the request of DBHDD to forgo review during the year. A sample of individuals was selected for each of the record reviews, and a sample of those individuals' claims was used for the billing review. To be eligible for the sample, each individual selected must have had at least three claims in the three months (or longer, if necessary) preceding the BHQR. \nTo select the individuals as part of the record review, an unduplicated list of individuals receiving services from the provider was stratified by service and payer source: Medicaid, state-contract, and fee-for-service claims submissions. The sample was selected proportionate to the providers' ratio of individuals served by payer source, ensuring each service was represented. The sample for each provider consisted of up to 30 individuals for non-intensive and specialty service providers, with an additional 15 individuals for providers who also offered assertive community treatment (ACT). The number of individuals selected per provider was based on the number of individuals served. For FY 2017, 3,816 individuals were sampled for record reviews, averaging approximately 29 records per \n15 \n \n Quality Management Annual Report FY 2017 \nprovider. However, it is important to note 35 providers had a second review during the year. (See Reassessment \"Frequency\" Reviews section for more information.) 
\nFor each chosen individual record, a random sample of up to 10 paid claims was selected for a billing review. The number of claims reviewed per individual was based on services provided and claim submissions. The total number of claims reviewed for FY 2017 was 29,602, compared to 31,213 in FY 2016. When providers did not have adequate claims submissions in the three months preceding their review, claims selection timeframe was extended, but did not precede the provider's previous BHQR (when applicable). \nBHQR billing validation and claim(s) review focused on specific services. The services included are listed in Table 4 beginning on page 26 of this report. All eligible providers were reviewed at least once during the fiscal year and had at least one claim per billed service included in the claims review, ensuring the complete array of services provided and charged were included in the BHQR. \nIndividual and Staff Interviews \nSamples used for the individual and staff interviews were selected by the provider and quality assessors conducting the BHQR; services received or provided were not considered in the selection of interviewees. Interview sampling methods remained the same for FY 2017 as in FY 2016. Quality assessors attempted to complete a minimum of five Individual and five staff interviews per BHQR; however, the actual number interviewed fluctuated based on individual and staff availability, their agreement to participate in the interview process, the number of employees, and the number of individuals the provider served at the time of the review. If an individual or staff declined an interview, assessors selected an additional individual or staff to be interviewed, when possible. \nIn FY 2017, a total of 735 individual interviews (II) and 774 staff interviews (SI) were completed. This does not include interviews conducted during the crisis stabilization unit (CSU) reviews, which are discussed later in the report. 
There was an increase in staff interview participation by an additional 21 interviews from the previous year. Results from interviews conducted are not included in the BHQR overall scores yet were used to obtain valuable, qualitative feedback to \n16 \n \n Quality Management Annual Report FY 2017 \npromote quality improvement activities. The results of interviews were shared with providers and DBHDD to provide direct communication and perception of individuals receiving services and staff providing services as an additional quality initiative. \nBHQR Overall Review Scores \nIn this report, data are aggregated and presented by overall provider scores as well as by category (billing validation, assessment and treatment planning, compliance with service guidelines, focused outcome areas [FOA], and individual and staff interview). The four main categories, billing validation, assessment and treatment planning, compliance with service guidelines and focused outcome areas each account for 25 percent of the overall score. Each FOA (choice, person-centered practices, whole health, safety, rights, and community life) also has scores displayed and discussed.4 \nBHQR Overall Scores \nFigure 1 shows the average overall score for the 167 reviews and the scores for each category compared to the 141 reviews conducted in FY 2016 graphically presented to demonstrate areas of improvement and decline. The mean of overall scores for FY 2017 was 84 percent. This is the same result obtained from the providers reviewed in FY 2016. Both categories of billing validation (84 percent) and FOAs (89 percent) increased from the FY 2016 rates of 81 percent and 85 percent, respectively. Compliance with service guidelines, the highest category in FY 2016 at 90 percent decreased in FY 2017 by two points to 88 percent. The lowest scoring category remained assessment and treatment planning, previously averaging 79 percent in FY 2016, now 77 percent for FY 2017. 
Although change was identified across the differing tools and scores from year-to- \n4 Unless otherwise stated, category scores were determined by dividing all the \"Yes\" answers by the sum of the \"Yes\" and \"No\" answers in the category. Questions scored as \"Not Applicable\" (N/A) were not factored into the overall category score. Each subcategory's score was determined similarly based on the questions contained within each subcategory. \n17 \n \n Quality Management Annual Report FY 2017 \nyear, no statistical significance was found at p \u003c .05. The following are highlights of the overall category scores: \n The lower assessment and treatment planning scores driven by the lack of documentation supporting the incorporation of whole health and wellness goals and objectives into individual resiliency plans (IRPs) in FY 2016, appear to be mainly driven by lack of addressing co-occurring health conditions, discharge-planning criteria, and addressing all assessed needs in FY 2017. \n Billing validation indicates approximately 16 percent of claims reviewed were unjustified and subject to recoupment; this result declined positively from the previous year result of 24 percent of unjustified claims (See Figure 2). \n The FOAs in FY 2016 represented a new area of review for which providers had not previously been assessed; however, it is possible through education in this area or inclusion of such measurement that providers' scores increased from 85 percent in the initial year to 89 percent in FY 2017. \n18 \n \n 84% 81% \n \nQuality Management Annual Report FY 2017 \n \nFigure 1. 
Fiscal Year Results by Category \n \n79% 77% \n \n90% 88% \n \n89% 85% \n \n84% 84% \n \nBilling \n \nAssessment \u0026 Service Guidelines Focused Outcome \n \nTreatment \n \nAreas \n \nPlanning \n \nFY 2016 (N = 141) FY 2017 (N = 167) \n \nOverall \n \nBHQR Overall Scores by Tier \nIn July 2014, DBHDD implemented a community behavioral health provider network structure in which providers were classified using a four-tiered structure.5 Tiers are defined as follows: \n Tier 1: Comprehensive Community Providers  Tier 2: Community Medicaid Providers  Tier 2+: Community State Funded Providers  Tier 3: Specialty Providers \nTable 1 provides a snapshot of the BHQR overall scores by tier and a distribution of scores by each Tier is provided in Appendix B. Consistent with results of FY 2016, scores remained generally lower for Tier 3 providers (83 percent) compared to the other tiers. However, there was an improvement from the previous year, having increased from 78 percent to 83 percent. Both Tier 1- and Tier 2- \n \n5 Policies regarding the implementation and definition of the DBHDD Community Behavioral Health Provider Network Structure can be found at DBHDD's PolicyStat website: https://gadbhdd.policystat.com \n19 \n \n Quality Management Annual Report FY 2017 \n \nlevel providers exceeded the overall average of 84 percent. Providers identified as a Tier 2+ \n \nprovider (N = 6) had the highest score which resulted in a mean of 86 percent for FY 2017 \n \n(compared to other tiers) yet decreased by two points from FY 2016. \n \nFiscal Year FY 2016 FY 2017 \n \nTable 1. BHQR Overall Scores by Tier \n \nTier 1 Tier 2 Tier 2+ Tier 3 \n \n86% (N = 24) \n85% (N = 28) \n \n85% (N = 90) \n85% (N = 115) \n \n88% (N = 5) 86% (N = 6) \n \n78% (N = 22) \n83% (N = 18) \n \nOverall Average \n84% (N = 141) \n84% (N = 167) \n \nTier 2 providers make up the largest number of providers in Georgia. 
Tier 2 providers may serve adults, children and adolescents, or both, but must have the capacity to provide the entire array of non-intensive services including both mental health and substance use disorder services. Tier 2 providers may also provide specialty services but are not required to do so. Tier 2+ providers (N = 6) scored (on average) 86 percent. \nTier 3 providers demonstrated the most improvement through an increased mean value of 78 percent in FY 2016 to 83 percent in FY 2017. Additionally, there was a five-percentage point increase in the mean comparing both Tier 1 and Tier 2 providers. Both Tier 1 and Tier 2 providers demonstrated consistent overall scores compared with previous year's results. See distributions for tier level Providers in Appendix B. \nBHQR Billing Validation \nThe billing validation score for each BHQR is the percent of justified billed dollars divided by the total paid/charged dollars for the reviewed claims. Billing scores are then averaged across the network of annual reviews to obtain an annual statewide average. Nine providers scored 100 percent, with an additional nine providers scoring 99 percent. See the distribution of BHQR billing scores in Appendix B. In FY 2016, three providers scored zero percent in billing validation, compared to zero providers for the fiscal year 2017. With the second year of reviews \n20 \n \n Quality Management Annual Report FY 2017 \ndemonstrating increases in billing category results, FY 2018 thresholds for reassessment are increasing from 69 percent to 79 percent. See Reassessment \"Frequency\" Review section on page 57 for more details. Figure 26 shows the total dollar amount reviewed through paid claims analysis across all providers during the BHQRs for FY 2017 ($2,934,560.52) as compared to FY 2016 ($3,417,902.28) and the dollar amount found to be unjustified, $463,049.93 and $807,050.16, respectively. 
In FY 2017, fewer funds being reviewed may be due to several factors including but not limited to provider sample size, limited claims availability, as well as the types of service being reviewed. Although the total funds reviewed for FY 2017 were fewer than reviewed in FY 2016, the amounts unjustified and susceptible for recoupment decreased from 24 percent to 16 percent. FY 2017 yielded a report of an additional $2,076,429.98 in Medicaid funds compared to the FY 2017 state funding amount of $429,065.27. In evaluating Medicaid versus state funded claims/encounters, 84 percent of claims reviewed were found justified compared to 85 percent of state-funded claims/encounters. \n6 The percent of justified versus unjustified dollar amounts, as depicted within Figure on the next page, are calculations of total dollar amounts reviewed divided by the total number of unjustified/justified dollar amounts. The annual statewide average billing score is not equivocal to the percent justified/unjustified depicted in the figure; statewide billing score is calculated based on averaging the individual provider scores at year-end. Statewide averages specific to the billing score are impacted by providers who receive an extreme low score and who may have greater amounts of dollars reviewed in proportion to the network total. \n21 \n \n Quality Management Annual Report FY 2017 \n \nFigure 2. 
BHQR Billing Validation Amount Reviewed by Fiscal Year \n \n$3,500,000.00 $3,000,000.00 $2,500,000.00 \n \n$807,050.16 24% \n \nTotal Amount Reviewed in FY 2016: $3,417,902.28 Total Amount Reviewed in FY 2017: $2,934,560.52 \n$463,049.93 16% \n \n$2,000,000.00 \n \n$1,500,000.00 $1,000,000.00 \n \n$2,610,852.12 76% \n \n$2,471,510.59 84% \n \n$500,000.00 \n \n$0.00 \n \nFY 2016 Total $ Justified \n \nFY 2017 Total $ Unjustified \n \nWhen a claim was found to be unjustified, assessors selected all applicable reasons a reviewed claim was identified as a discrepancy; therefore, one claim may have multiple discrepancy reasons identified. The most prevalent billing discrepancy reasons identified in FY 2017 remained consistent with those identified in FY 2016 and are as follows: \n Missing/incomplete service orders  Individuals not meeting admission criteria  Missing progress notes \nHowever, both service orders and individuals meeting criteria improved for the 2017 fiscal year with an evident reduction of 1,500 documented occurrences combined (see Table 2). Furthermore, in FY 2016, discrepancy results indicated there were 779 missing progress notes (2.5 percent) of the 31,213 claims reviewed. This improved slightly by nearly 9 percent in FY 2017 with 676 missing \n22 \n \n Quality Management Annual Report FY 2017 \nnotes or documentation, or 2.3 percent of claims reviewed. Although improvement has been made for these areas outlined above, they continue to remain the top reasons related to billing discrepancies. Most improved discrepancy reasons from FY 2016 to FY 2017 are printed name missing (92.6 percent improvement) and the record not submitted within required timeframe (90.5 percent fewer instances). Content not being unique to the individual was identified 137 times in FY 2016 compared to 243 in FY 2017, thereby demonstrating a decline by nearly 87 percent. 
Table 2 provides the number of occurrences for the remainder of all monitored discrepancy reasons and the change in percent from FY 2016 to FY 2017. A negative difference demonstrates improvement from year to year. Overall, 20 (87 percent) of the observed indicators (N = 23) resulted in improvement from the baseline year to FY 2017. \n \nTable 2. BHQR Billing Discrepancy Reasons \n \nFY 2016 \n \nFY 2017 \n \nOccurrences Occurrences \n \nClaims Reviewed \n \n31,213 \n \n29, 602 \n \nMissing/incomplete order \n \n1,612 (5.2%) 496 (1.7%) \n \nDoes not meet admission criteria \n \n1,132 (3.6%) 748 (2.5%) \n \nQuantitative \n \nOccurrences Occurrences \n \nProgress note is missing \n \n779 (2.5%) 676 (2.28%) \n \nCode is missing/different than code billed \n \n381 (1.2%) 379 (1.28%) \n \nStaff credential missing \n \n299 (1.0%) 295 (1.00%) \n \nSignature missing \n \n275 (0.9%) 194 (0.66%) \n \nDate of entry missing \n \n263 (0.8%) 185 (0.62%) \n \nUnits billed exceed time / units documented 236 (0.8%) 112 (0.38%) \n \nConsistency requirements missing \n \n202 (0.7%) 57 (0.19%) \n \nCredential not supported by documentation 199 (0.6%) 276 (0.93%) \n \nRecord not submitted within timeframe \n \n197 (0.6%) 19 (0.06%) \n \nLocation missing (out-of-clinic) \n \n166 (0.5%) 155 (0.52%) \n \nTime in / time out missing \n \n92 (0.3%) \n \n68 (0.23%) \n \nDate of service incorrect / missing \n \n63 (0.2%) \n \n42 (0.14%) \n \nPrinted name missing \n \n42 (0.1%) \n \n2 (0.01%) \n \nValue Change 1,611 -1,116 \n-384 Change \n-103 -2 -4 -81 -78 \n-124 -145 +77 -178 -11 -24 -21 -40 \n \nPercent Change -5.16% -67.5% -30.2% Change -8.6% \n4.9% 4.4% -25.1% -26.4% -49.7% -70.6% 45.9% -90.5% -2.2% -22.0% -30.6% -92.6% \n \n23 \n \n Quality Management Annual Report FY 2017 \n \nTable 2. 
BHQR Billing Discrepancy Reasons \n \nPerformance Standards \n \nFY 2016 Occurrences \nFY 2016 Occurrences \n \nFY 2017 Occurrences \nFY 2017 Occurrences \n \nValue Change Value Change \n \nContent does not match service definition \n \n735 (2.4%) 489 (1.7%) -246 \n \nContent does not support code billed \n \n428 (1.4%) 397 (1.3%) \n \n-31 \n \nContent does not support units billed \n \n375 (1.2%) 518 (1.8%) +143 \n \nIntervention outside staff's scope/practice \n \n348 (1.1%) \n \n79 (0.3%) \n \n-269 \n \nContent is not unique to the individual \n \n137 (0.4%) 243 (0.8%) +106 \n \nMultiple services billed at the same time \n \n95 (0.3%) \n \n79 (0.3%) \n \n-16 \n \nNon-billable activity Diversionary activities billed \n \n76 (0.2%) \n \n59 (0.2%) \n \n-17 \n \n48 (0.2%) \n \n26 (0.1%) \n \n-22 \n \nAreas highlighted in blue within table reflect significance at a p \u003c .05 and indicators with n \u003c 90 were not tested due to low volume. \n \nPercent Change Percent Change -29.9% -2.3% 45.7% -75.8% 86.8% -11.3% -17.9% -41.5% \n \nBHQR Assessment and Treatment Planning \n \nAssessment and treatment planning consisted of nine questions answered once per record \n \nreviewed in FY 2016. A change to the question of medical screening occurred for FY 2017 which \n \nwas previously combined to include measurement of both a behavioral health assessment and \n \nmedical screening to render satisfactory credit, thereby totaling 10 questions for FY 2017. The \n \nquestions and percent \"yes\" on each are presented in Table 3 below, and areas for growth include \n \nthe following: \n \n The lowest-scoring question in FY 2016 indicated co-occurring health conditions were often \n \nnot included in individuals' plans of care (64 percent) without any explanation. Similar \n \nresults were found for FY 2017 review for which 34 percent indicated co-occurring \n \nconditions were addressed in the Individual Recovery / Resiliency Plan (IRP). 
\n \n Where other needs were identified (housing, employment, childcare, higher education, \n \netc.), they were only addressed in 59 percent of the plans in FY 2016 and 48 percent in FY \n \n2017. \n \n24 \n \n Quality Management Annual Report FY 2017 \n \n Fifty-nine percent of individuals had goals, objectives, or interventions in their plans to address wellness in the baseline review year, compared to 56 percent in FY 2017. This area remains an area needing improvement. \n \nTable 3. BHQR Assessment \u0026 Treatment Planning Question Scores by Year \n \nQuestion \n \nFY 2016 FY 2017 \n \nAll assessed needs are addressed \n \n59% \n \n48% \n \nCo-occurring health conditions addressed in IRP \n \n36% \n \n34% \n \nCurrent behavioral health assessment \n \nN/A \n \n94% \n \nCurrent medical screening is present \n \n98% \n \n97% \n \nDischarge plan defines criteria \n \n80% \n \n58% \n \nGoals/objectives honor hopes, choice, preferences, outcomes \n \n91% \n \n89% \n \nIndividual meets admission criteria \n \n95% \n \n97% \n \nInterventions/objectives are goal-linked \u0026 service-consistent \n \n96% \n \n91% \n \nIRP is individualized in personalized language \n \n78% \n \n87% \n \nWhole health \u0026 wellness in IRP \n \n59% \n \n56% \n \nAssessment \u0026 Treatment Planning Result \n \n79% \n \n77% \n \n*Areas highlighted in blue within table reflect significance at a p \u003c .05. \n \nComparisons in scoring reflect a deficit in a comprehensive, whole-person, whole-health approach \n \nto recovery/resiliency planning with a number of the providers in the network. The average score \n \nfor this category was 77 percent, compared to the FY 2016 score of 79 percent. 
Individualized \n \nlanguage, an area identified for needed growth in FY 2016 (78 percent), increased significantly by \n \nnearly 10 points in FY 2017 (87 percent); however, discharge planning previously held a moderate \n \nscore of 80 percent in FY 2016, which declined significantly in FY 2017 to 58 percent. This decrease \n \nis likely due to a change in the question scoring criteria in FY 2017 to include specific clinical \n \nbenchmarks related to discharge criteria. \n \nData suggest that although records contained required assessment documentation, such as present medical screenings and current behavioral health assessments, a vast percent of recovery/resiliency plans lacked relevant goals, objectives, and interventions to address individuals' assessed needs (to include co-occurring health conditions) or plan for increased whole health and wellness. \n \n25 \n \n Quality Management Annual Report FY 2017 \n \nBHQR Compliance with Service Guidelines \nThe compliance with service guideline questions were answered once per individual record reviewed. The number of questions answered varied, specific to the service scored. The service reviewed for each individual record was based on which services were reflected in the billing claims sample for the respective individual; therefore, multiple services could be reviewed within one individual's record. A total of 18 services were reviewed across all providers in FY 2016, as shown in Table 4. An additional service, opioid maintenance therapy, had three reviews conducted and 69 records reviewed for FY 2017 and is not included in the table below. The (r) size in Table 4 represents the number of providers assessed for the service, based on the claims sample. Additionally, the total number of records reviewed for each service was included immediately following as the (n) size. For FY 2016, each provider was reviewed only once; thus, the n also represents the number of providers reviewed for each service. 
However, in FY 2017 multiple providers were reviewed more than once; thus, the n representation has been changed to reflect the number of reviews, which allows for consistency in the year-to-year comparisons. \n \nTable 4. BHQR Service Guidelines Scores by Service Type* \n \nService Type MH Peer Support  Individual Addictive Disease Support Services Assertive Community Treatment \n \nFY 2016 87% (r = 7, n = 119) 81% (r = 48, n = 224) 85% (r = 19, n = 314) \n \nFY 2017 95% (r = 6, n = 38) 85% (r = 69, n = 293) 88% (r = 20, n = 334) \n \nValue Percent Change Change \n+8 8.4% \n+4 4.7% \n+3 3.4% \n \nPsychosocial Rehabilitation Program \n \n90% (r = 75, n = 196) 93% (r = 26, n = 179) \n \n+3 3.2% \n \nIntensive Case Management Intensive Family Intervention \n \n95% (r = 10, n = 55) 86% (r = 30, n = 221) \n \n94% (r = 13, n = 53) 85% (r = 37, n = 185) \n \n-1 -1.2% -1 -1.2% \n \nCase Management \n \n85% (r = 69, n = 557) 84% (r = 90, n = 746) \n \n-1 -1.2% \n \nCommunity Support Community Support Team \n \n84% (r = 84, n = 484) 94% (r = 7, n = 23) \n \n83% (r = 114, n = 637) 92% (r = 7, n = 13) \n \n-1 -1.1% -2 -2.2% \n \n26 \n \n Quality Management Annual Report FY 2017 \n \nTable 4. 
BHQR Service Guidelines Scores by Service Type* \n \nService Type Individual Counseling \n \nFY 2016 \n \nFY 2017 \n \nValue Percent Change Change \n \n97% (r = 113, n = 1288) 94% (r = 150, n = 1980) -3 -3.3% \n \nPsychiatric Treatment Group Counseling/ Training \n \n95% (r = 91, n = 843) 93% (r = 63, n = 418) \n \n92% (r = 128, n = 950) 90% (r = 78, n = 561) \n \n-3 -3.3% -3 -3.2% \n \nFamily Counseling/ Training \n \n96% (r = 92, n = 667) 91% (r = 129, n = 904) \n \n-5 -6.2% \n \nNursing Assessment \u0026 Health MH Peer Support Program \n \n85% (r = 85, n = 884) 95% (r = 33, n = 234) \n \n80% (r = 116, n = 897) 86% (r = 25, n = 199) \n \n-5 -5.5% -9 -10.5% \n \nPsychosocial Rehabilitation - Individual 96% (r = 25, n = 649) 85% (r = 98, n = 904) \n \n-11 -12.9% \n \nPeer Support Whole Health \u0026 Wellness 92% (r = 7, n = 45) \n \n68% (r = 12, n = 67) \n \n-24 -35.3% \n \nAD Peer Support - Individual \n \n79% (r = 1, n = 7) \n \n48% (r = 1, n = 13) \n \n-31 -64.6% \n \n*\"r\" size represents number of reviews conducted for each service and the \"n\" size equates to the maximum number of records reviewed per service. Areas highlighted in blue within table reflect significance at a p \u003c .05. AD Peer Support  Individual service was not statistically tested due to low response volume. \n \nCompliance with service guidelines was the highest-scoring category of the four in FY 2016, with an average of 90 percent but has since declined to an average of 88 percent. Moreover, nearly twothirds (64.1 percent) of reviews resulted in a score above the 88 percent average in this category, with more than half (56.9 percent) exceeding 90 percent. \nMH Peer Support  Individual, Addictive Disease Support Services, Assertive Community Treatment, and Psychosocial Rehabilitation Program were the top-scoring services demonstrating the highest rate of compliance for FY 2017 compared to all services measured. 
The most-improved services are outlined in Table 5 and have listed the lowest scoring questions per service, which provide for areas of continued growth and opportunity. \n \n27 \n \n Quality Management Annual Report FY 2017 \n \nTable 5. FY 2017 Service Type Improvement Results \n \nService type and FY 2017 lowest scoring question(s): \n \nFY 2016 \n \nFY 2017 \n \nMental Health Peer Support  Individual \n \n87% (r = 7) \n \n95% (r = 6) \n \nProgress notes contain documentation of the individual's progress (or lack of) toward specific goals/objectives in the treatment plan. \nSubstance Use Disorder Support Services \n \n77% (n = 119) 71% (n = 199) 81% (r = 48) 85% (r = 69) \n \nCoordination with family and significant others is documented and with [adult] individual's permission. Answer \"yes\" if attempt is made, but permission is not given. (Review authorization period.) \nContact must be made with the individual receiving services at a minimum of twice each month, at least one face-to-face. (Review authorization period.) \nAssertive Community Treatment \n \n43% (n = 224) \n64% (n = 224) 85% (r = 19) \n \n44% (n = 292) \n66% (n = 292) 88% (r = 20) \n \nThere is evidence that the ACT Team is working with informal support systems/collateral contacts at least 2-to-4 times per month with or without the individual present (must be documented) to provide support and skills training to assist the individual in his/her recovery. (Review authorization period.) \nPsychosocial Rehabilitation Program \n \n43% (n = 314) 39% (n = 335) 90% (r = 75) 93% (r = 26) \n \nWeekly progress notes must document the individual's progress relative to functioning and skills related to the person-centered \n \n84% (n = 195) 62% (n = 178) \n \ngoals identified in his/her Individual Recovery Plan (IRP). \n \n*\"r\" size represents number of reviews conducted for each service and the \"n\" size equates to the maximum number of records reviewed per service. 
Areas highlighted in blue within table reflect significance at a p \u003c .05. \n \nData also suggests areas of improvement clearly identified through various service types which decreased from FY 2016 to FY 2017. These results are detailed in Table 6, with further analysis and trending included at the question level supplemented by the lowest scoring questions provided by service. \n \n28 \n \n Quality Management Annual Report FY 2017 \n \nTable 6. FY 2017 Service Type Declined Results* \n \nService type and FY 2017 lowest scoring question(s): \n \nFY 2016 \n \nFY 2017 \n \nAddictive Disease (AD) Peer Support - Individual \n \n79% (r = 1) \n \n48% (r = 1) \n \nProgress notes contain documentation of the individual's progress (or lack of) toward specific goals/objectives on the treatment plan. The staff interventions reflected in the progress notes are related to the staff interventions listed on the treatment plan. \nDocumentation supports that the individual has identified his/her own individual goals for recovery. \nPeer Support Whole Health and Wellness \n \n100% (n = 7) \n100% (n = 7) 100% (n = 7) 92% (r = 7) \n \n7% (n = 13) \n7% (n = 13) 15% (n = 13) 68% (r = 12) \n \nCollaboration with other health care providers to ensure that individual has access to needed services is documented. (One time per authorization.) \nThere is a minimum of one contact weekly either face-to-face or by phone. (Review authorization period.) \nThere is evidence in the documentation of an annual physical or at a minimum a discussion and encouragement to have an annual physical exam. \nPsychosocial Rehabilitation - Individual \n \n77% (n = 44) 89% (n = 45) 91% (n = 44) 96% (r = 25) \n \n50% (n = 67) 66% (n = 67) 68% (n = 66) 85% (r = 98) \n \nThere is a minimum of two contacts each month, and one is face-to-face. (Review authorization period.) \nProgress notes contain documentation of the individual's progress (or lack of) toward specific goals/objectives on the treatment plan. 
\nMental Health Peer Support Program \n \n74% (n = 638) 92% (n = 649) 95% (r = 33) \n \n66% (n = 903) 79% (n = 902) 86% (r = 25) \n \nProgress notes contain documentation of the individual's progress (or lack of) toward specific goals/objectives on the \n \n88% (n = 234) \n \n71% (n = 199) \n \ntreatment plan. \n \n*\"r\" size represents number of reviews conducted for each service and the \"n\" size equates to the maximum number of records reviewed per service. Areas highlighted in blue within table reflect significance at a p \u003c .05. \n \nThe lowest-scoring service remains substance use disorder peer support  individual, previously at 79 percent in FY 2016, which was 48 percent for FY 2017; however, only one provider was reviewed as providing this service (who had an overall compliance score of 55 percent) and differed from the \n \n29 \n \n Quality Management Annual Report FY 2017 \noriginal provider reviewed (overall compliance score of 90 percent) in FY 2016. Additionally, peer support whole health and wellness declined in scoring from FY 2016 (92 percent) to FY 2017 (68 percent) by approximately 24 percentage points with seven reviews conducted in FY 2016 and 12 reviews in FY 2017. The lower scores in both services were driven by a lack of documentation reflecting individuals' progress toward specific goals and objectives in their plans. Additionally, the individual identifying his/her own goals (substance use disorder peer support), collaboration with other care providers, and the requirement of staff making minimal monthly contacts (peer support whole health and wellness) were noted as areas for improvement. One of the 141 providers reviewed in FY 2016 was not reviewed for compliance with service guidelines because the provider only billed a service not reviewed programmatically at that time (opioid maintenance treatment). 
FY 2017 had two providers of the 167 reviews scored for service as it pertained to opioid maintenance therapy as, during that year, the service became a formally reviewed program. Results of the two providers' reviews, one of which was reviewed twice, scored 98 percent with all questions scoring at least 93 percent. \nBHQR Focused Outcome Areas (FOA) \nFocused outcome areas questions are answered once per record reviewed. Each FOA has a different number of questions for a total of 24 questions scored. Figure 3 provides the results of FY 2017 compared to FY 2016. \n30 \n \n Quality Management Annual Report FY 2017 \nFigure 3. BHQR Focused Outcome Areas Overall Scores by Year \n \n92% \n \n91% \n \n93% 87% \n \n80% 83% \n \n74% \n \n96% 92% \n \n90% 93% \n \n85% 89% \n \n63% \n \nWhole Health \n \nSafety \n \nPerson Community Centered Life Practices \n \nChoice \n \nRights FOA Score \n \nFY 2016 (N= 141) \n \nFY 2017 (N= 167) \n \nWhole Health questions address whether the records reviewed demonstrated individuals were treated holistically with their physical health needs being assessed, documented, and monitored. Of the six FOAs, Whole Health continues to remain the lowest scoring, yet results of FY 2017 reviews reflect the greatest amount of improvement statistically significant at p \u003c .05. Whole Health for FY 2017 was 74 percent compared to 63 percent in FY 2016. Medical conditions being assessed, monitored, and recorded continues to be the area most in need of improvement. \nSafety questions address whether providers were attending to certain risk factors for individuals, such as their tendency to experience a crisis, have suicidal or homicidal thoughts or actions, and whether individuals had received information and education about the risks and benefits of prescribed medications. 
This area represents the second-lowest-scoring area of the FOAs through both the initial fiscal year of implementation as well as the subsequent follow-up year but did demonstrate significant improvement in year-to-year result. For FY 2016, of the three questions in the subcategory, the one most often scored \"no\" indicated individuals (or their legal guardians) \n \n31 \n \n Quality Management Annual Report FY 2017 \noften had not signed medication consent forms along with the prescriber. This remained the case with slight improvement from FY 2016 (52 percent) to FY 2017 (58 percent). \nPerson Centered Practice questions are used to measure whether documentation shows that individuals have a voice and participate in creating their care plans and are active participants in modifying them as needed and desired. While the result of person-centered practices declined, it was not of a statistically significant value. All questions scored more than 80 percent for FY 2017, with one of the higher-scoring questions indicating that individuals were active participants in the planning and receiving of services (95 percent). \nCommunity Life questions address how individuals were engaged in their communities of choice and whether they held valued social roles. The five Community Life questions are used to measure whether individuals had been assessed for their need to make changes in their living, learning, working, and/or social environments (99 percent), and whether they had been assisted with establishing goals to address any needed changes (94 percent). Results in FY 2017 improved compared to results of FY 2016, with slight improvement in all questions noted and an overall significant six-point increase from FY 2016 (87 percent) to FY 2017 (93 percent). \nChoice questions address how, and if, individuals were provided with options of services and were encouraged to make educated choices concerning supports and services provided. 
When barriers to services were identified, assessors looked for documentation as to how the individual was engaged in addressing the identified barriers (96 percent). This is a four-point significant increase from the previous year and yielded the highest FOA score in FY 2017. \nRights questions address whether individuals had been apprised of their rights while in treatment, at the point of entry into the system and on an annual basis after, and whether they had been informed of their rights under Federal HIPAA laws. The increase in Rights from FY 2016 to FY 2017 was found to be significant at p \u003c .05. The area of greatest concern identified in FY 2016 (58 percent) continued to remain the highest concern based on data from FY 2017 (65 percent) and addresses whether individuals were apprised of their rights and responsibilities on an annual basis. \n32 \n \n Quality Management Annual Report FY 2017 \nThe remaining four questions exceeded 94 percent or above for FY 2017 and reference individuals' appraisal of rights at the onset of services, in a language understandable to the individual, as well as individuals having HIPAA Privacy rules and laws reviewed with evidenced signature by an individual (or legal guardian) of acknowledgement for being informed of rights. \nBHQR Staff and Individual Interviews \nThe interview questions were divided into the six FOAs. Individual interviews served to assess the individual's quality of life and the perception of care with the provider and services rendered. Staff interviews helped determine whether a person-centered approach was used in providing services and empowering individuals. \nData in Figure 4 represent the average scores for the individual FY 2017 interviews. Figure 5 represents average scores of staff interviews. 
It is notable all areas scored in the 90th percentile, demonstrating consistency in results from comparison to the FY 2016 results, which may indicate a high level of satisfaction for individuals who were served by the providers. Results of interviews identified the following: \n Personal outcome approaches being used in designing person-centered supports and services as well as the individual feeling free from abuse, neglect, and exploitation (99 percent). \n Staff interview responses seem consistent with individuals' general perception as it relates to person-centered and personalized approaches being used (98 percent). \nNotably, the most apparent discrepancy between individual and staff interviews for FY 2017 was specific to the question of health-related needs being addressed. Of the individuals interviewed, 89 percent felt their health needs were addressed while staff perceived this to be the case in 97 percent of the interviews conducted. Furthermore, although not directly assessed via the interview questions, provider staff frequently made statements which indicated a high level of employment satisfaction with the reviewed agency. \n33 \n \n Quality Management Annual Report FY 2017 \n \nFigure 4. BHQR Individual Interview Data by Fiscal Year \n \nWhole Health Safety PCP \nCommunity Life Choice Rights Overall \n \nFY 2016 (N = 737) \n \nFY 2017 (N = 735) \n \n92.7% 93.9% \n96.2% 96.3% 95.8% 96.7% 95.0% 94.7% 97.4% 97.8% 97.6% 97.8% 95.6% 96.2% \n \nFigure 5. BHQR Staff Interview Data by Fiscal Year \nWhole Health \n \nSafety \n \nPCP \n \nCommunity Life \n \nChoice \n \nRights \n \nOverall \n \nFY 2016 (N = 753) \n \nFY 2017 (N = 774) \n \n97.7% 97.4% \n97.7% 97.6% \n98.3% 97.9% \n99.2% 99.2% \n98.5% 98.5% \n99.4% 99.5% \n98.5% 98.1% \n \n34 \n \n Quality Management Annual Report FY 2017 \nAssertive Community Treatment \nQuality Management conducted reviews of 19 assertive community treatment (ACT) providers, embedded within the routine BHQR. 
For the 17 providers who offered both non-intensive outpatient and ACT, up to 15 additional individuals receiving ACT services were selected for review. The two providers who offered only ACT services had 30 individuals selected for each review. One ACT provider was reviewed for a second time within the fiscal year due to low initial scores in FY 2016. This provider had fifteen ACT records reviewed at each scheduled visit. Across all ACT providers, 332 records were reviewed to include up to 10 billing claims per record for a total of 3,221 claims. The data below represent findings from reviews of ACT services only. \nACT Quality Review Overall Scores \nFigure 6 provides ACT statewide averages by category for FY 2017. The overall score for FY 2017 resulted in 87 percent for all ACT reviews. \nFigure 6. FY 2017 ACT Statewide Averages \nBilling Validation FY16: 92% FY17: 90% \n \nService Guidelines \nFY16: 85% FY17: 88% \n \nOverall Score \nFY16: 88% (N = 19) FY17: 87% (N = 20) \n \nFocused Outcome \nAreas \nFY16: 88% FY17: 90% \n \nAssess. \u0026 Planning \nFY16: 85% FY17: 80% \n \n35 \n \n Quality Management Annual Report FY 2017 \nFigure 77 provides the result by category for BHQR reviews and ACT reviews for both FY 2016 and FY 2017. It should be noted three of the four category scores for ACT reviews, except for service guidelines (which equates to the same BHQR result of 88 percent), are higher than the corresponding BHQR scores for non-intensive services for FY 2017. Additionally, in FY 2017 and consistent with BHQR scores, assessment \u0026 treatment planning demonstrated a decrease while FOA scores demonstrated an increase. \n \nFigure 7. 
ACT and BHQR Category Score Results by Fiscal Year \n \n95% \n \n90% \n \n85% \n \n80% \n \n75% \n \n70% \n \n65% \nFY 2016 ACT Averages FY 2017 ACT Averages FY 2016 BHQR Averages FY 2017 BHQR Averages \n \nBilling \n92% 90% 81% 84% \n \nFOA \n \nAssessment/ Service Planning Guidelines \n \nOverall \n \n88% \n \n85% \n \n85% \n \n88% \n \n90% \n \n80% \n \n88% \n \n87% \n \n85% \n \n79% \n \n90% \n \n84% \n \n89% \n \n77% \n \n88% \n \n84% \n \nACT Billing Validation \nFigure 8 shows the total dollar amount reviewed through claims analysis during the ACT-specific BHQRs for FY 2016 compared to FY 2017, $306,628.32 and $340,428.04 respectively. The total dollar amount found to be unjustified in FY 2017 was two percentage points higher resulting in 9.6 percent of funds being unjustified ($32,707.80) when compared to the previous year. \n \n7 ACT review scores are inclusive of the BHQR review. For the purpose of evaluation, monitoring and analysis providers servicing specific to ACT services are reviewed individually as a group within this section. \n36 \n \n $350,000.00 $300,000.00 $250,000.00 $200,000.00 $150,000.00 $100,000.00 \n$50,000.00 $0.00 \n \nQuality Management Annual Report FY 2017 \nFigure 8. ACT Billing Validation by Fiscal Year \nTotal Amount Reviewed in FY 2016: $306,628.32 Total Amount Reviewed in FY 2017: $340,428.04 \n \n$23,422.57 8% \n \n$32,707.80 10% \n \n$283,205.75 92% \n \n$307,720.24 90% \n \nFY 2016 Total Amount Justified \n \nFY 2017 Total Amount Unjustified \n \nInformation in Table 7 indicates the specific billing discrepancy reasons found during the ACT reviews with 3,221 claims reviewed for FY 2017 compared to 3,029 claims reviewed in FY 2016. Assessors select each reason a reviewed claim was identified as a discrepancy; therefore, one claim may have multiple discrepancy reasons identified. 
The most prevalent billing discrepancy reasons for FY 2017 were the same as FY 2016 and were as follows: \n Staff credential missing  Location was missing out-of-network claims  Content did not support units billed \nThere was demonstrated improvement noted for both missing staff credential (previously 69 instances [2.3 percent] for FY 2016 versus 40 instances [1.2 percent] in FY 2017), as well as missing location for out-of-clinic claims (39 instances [1.3 percent] for FY 2016 compared to 32 instances [1.0 percent] in FY 2017). However, the billing discrepancy \"content did not support the units \n37 \n \n Quality Management Annual Report FY 2017 \nbilled\" nearly doubled from 27 instances (0.9 percent) in FY 2016 to 51 in FY 2017 (1.6 percent). Furthermore, while an additional 192 claims were reviewed for FY 2017 than in FY 2016, instances of discrepancy identification also increased overall from 8.3 percent to 9.9 percent. Several discrepancy reasons increased, negatively, from the previous year and are identified as follows: \n Signature missing  Date of entry missing  Date of service incorrect  Missing/incomplete service order  Time in/time out missing \n \nTable 7. 
ACT BHQR Billing Discrepancy Reasons by Year \n \nFY 2016 \n \nFY 2017 \n \nValue \n \nOccurrences Occurrences Change \n \nClaims Reviewed \n \n3029 \n \n3221 \n \n+192 \n \nNot meet admission criteria for service billed \n \n3 (0.1%) \n \n20 (0.6%) \n \n+17 \n \nMissing/incomplete order \n \n0 (0.0%) \n \n12 (0.4%) \n \n+12 \n \nQuantitative \n \nOccurrences \n \nChange \n \nStaff credential missing \n \n69 (2.3%) \n \n40 (1.2%) \n \n-29 \n \nLocation missing (out-of-clinic) \n \n39 (1.3%) \n \n32 (1.0%) \n \n-7 \n \nCode is missing / different than code billed \n \n25 (0.8%) \n \n19 (0.6%) \n \n-6 \n \nUnits billed exceed time / units documented \n \n17 0.6%) \n \n24 (0.7%) \n \n+7 \n \nProgress note is missing \n \n16 (0.5%) \n \n17 (0.5%) \n \n+1 \n \nDate of entry missing \n \n1 (0.03%) \n \n22 (0.7%) \n \n+24 \n \nSignature missing \n \n1 (0.03%) \n \n25 (0.8%) \n \n+21 \n \nTime in / time out missing \n \n0 (0.0%) \n \n10 (0.3%) \n \n+10 \n \nDate of service incorrect/missing Performance Standards \n \n0 (0.0%) \n \n3 (0.1%) \n \nOccurrences \n \n+3 Change \n \nContent does not support units billed \n \n27 (0.9%) \n \n51 (1.6%) \n \n+24 \n \nContent does not support code billed \n \n17 (0.6%) \n \n24 (0.8%) \n \n+7 \n \nIntervention outside staff's scope of practice \n \n12 (0.4%) \n \n0 (0.0%) \n \n-12 \n \nNon-billable activity \n \n9 (0.3%) \n \n8 (0.2%) \n \n-1 \n \nMultiple services billed at the same time \n \n7 (0.2%) \n \n2 (0.1%) \n \n-5 \n \n38 \n \nPercent Change \n\u003e100% N/A \nChange -45.5% -22.8% -28.5% 32.8% -0.1% \u003e100% \u003e100% \nN/A N/A Change -77.6% -32.8% 100% 16.4% 73.1% \n \n Quality Management Annual Report FY 2017 \n \nTable 7. 
ACT BHQR Billing Discrepancy Reasons by Year \n \nClaims Reviewed \n \nFY 2016 \n \nFY 2017 \n \nOccurrences Occurrences \n \n3029 \n \n3221 \n \nValue Change \n+192 \n \nContent does not match service definition \n \n5 (0.2%) \n \n5 (0.2%) \n \n0 \n \nContent is not unique to the individual \n \n4 (0.1%) \n \n5 (0.2%) \n \n+1 \n \n* Due to the low response volume, statistical difference in proportions testing was not conducted on the above table. \n \nPercent Change \n6.0% -17.5% \n \nThe billing validation score is the percent of justified billed dollars divided by the total paid/billed dollars for the reviewed claims. The statewide average ACT billing score of 92 percent was higher than the BHQR statewide average of 81 percent in FY 2016. This remained consistent with the FY 2017 result with an ACT billing score of 90 percent versus BHQR score of 84 percent demonstrating ACT providers excel in fewer funds being identified as unjustifiable compared to BHQR reviews overall. \n \nWhile the majority of scores exceeded 86 percent, more than 50 percent of reviews conducted in FY 2017 resulted in a billing score of 96 percent or greater. Additionally, the one provider who had been reviewed for a second time in FY 2017 scored at or below 80 percent for both reviews and consecutively declined in billing score from the initial review conducted in FY 2016. ACT billing scores decreased for this individual provider from 92 percent (FY 2016) to 80 percent (FY 2017 first review) and further to 74 percent (subsequent FY 2017 review). This is consistent with the steady decline of the BHQR billing scores for this provider by review period of 88, 85, and 76 percent respective to the time of initial review to the time of last review. Targeted technical assistance is conducted in cases such as this where scores continually decline. 
Ongoing education and frequency of those providers who fall below the 80 percent threshold will continue to be reviewed at two intervals in the upcoming year. \n \nACT Assessment and Treatment Planning \nFigure 9 provides the ACT assessment and treatment planning score results by specific question for both FY 2016 and FY 2017. Questions assessing whether co-occurring health conditions, whole health and wellness, and all assessed needs are being addressed on the IRP were the lower scoring \n39 \n \n Quality Management Annual Report FY 2017 \nquestions statewide. Additionally, discharge plans defining criteria significantly decreased from 90 percent in FY 2016 to 46 percent in FY 2017 for ACT reviews. Changes to both the ACT and BHQR question specific to discharge plan criteria in which assessors are required to identify additional factors to grant credit for this question may be the most likely reason for the decrease. However, IRPs within the ACT program, again in FY 2017, scored high regarding individualized language (97 percent) and increased significantly by five points from FY 2016 to FY 2017 for this subcategory. \n \nFigure 9. ACT Assessment and Treatment Planning Scores by Year \n \nIndividual meets admission criteria \nCurrent medical screening is present \nIRP is individualized in personalized language Interventions/objectives are goal-linked \u0026 service- \nconsistent Goals/objectives honor hopes, choice, preferences, \noutcomes Current behavioral health assessment N/A \nWhole health \u0026 wellness in IRP \nAll assessed needs are addressed \nCo-occurring health conditions addressed in IRP \nDischarge plan defines criteria \n \n100% 99% 100% 99% 92% 97% 99% 97% 94% 94% \n \n59% 51% 47% 49% \n46% \n \n91% 75% 76% \n90% \n \nFY 2016 (N = 19) \n \nFY 2017 (N = 20) \n \nACT Compliance with Service Guideline Scores \nThe ACT compliance with service guideline score contains 14 indicators. 
An additional question was added for the 2017 fiscal year and scored a result of 92 percent specific to documentation of the individual's response and a discussion related to the agreement of services identified in the \n \n40 \n \n Quality Management Annual Report FY 2017 \ntreatment planning/individual recovery planning. Table 8 below shows the item-level detail to illustrate both the percentage point and percent change across indicators as well as comparing fiscal year results within the ACT compliance with service guidelines. The ACT compliance with service guidelines average increased to 88 percent for FY 2017 compared to 85 percent in FY 2016. Documentation showed 100 percent of individuals received at least one symptom assessment and medication management contact per month. This remains consistent with FY 2016 findings. Important to note is results for all questions specific to compliance with service guidelines exceeded 90 percent, except for the following: \n Completion of a treatment plan by ACT staff before reauthorization of services (59 percent)  Evidence the ACT team is working with informal support/contacts at least 2-4 times per \nmonth (39 percent) \nThis is consistent when compared to FY 2016 review findings of 58 percent and 43 percent respectively. Thus, it is evident that, although ACT staff work closely with individuals and informal supports to identify and monitor progress and interventions as well as goals and objectives, continued effort is needed in the above two areas specific to the reauthorization period. \n \nTable 8. ACT Compliance with Service Guidelines Scores \n \nFY 2016 FY 2017 \n \nDocumentation shows that the individual meets admission or continuing stay criteria. \n \n100% \n \n99% \n \nThe ACT team completes a treatment plan review with the staff, \n \nthe individual, and his/her family/informal supports prior to the \n \n58% \n \n59% \n \nreauthorization of services. 
\n \nThere is documentation to support when substance use services are needed and are integrated into the treatment plan. \n \n94% \n \n93% \n \nThere is evidence the ACT team is working with informal support \n \nsystems/collateral contacts at least 2-4 times per month with or \n \nwithout the individual present (and it is documented) to provide 43% \n \n39% \n \nsupport and skills training to assist the individual in his/her \n \nrecovery. (Review specific to authorization period.) \n \nValue Percent Change Change \n \n-1 \n \n-1.0% \n \n+1 \n \n1.7% \n \n-1 \n \n-1.1% \n \n-4 -10.3% \n \n41 \n \n Quality Management Annual Report FY 2017 \n \nTable 8. ACT Compliance with Service Guidelines Scores \n \nFY 2016 FY 2017 \n \nThe ACT team is working with the individual toward educational or vocational needs, interests, per IRP (once per authorization). \nFollowing admission to a psychiatric facility, the ACT team is involved in each individual's discharge planning. \nThere is documentation of individual's involvement in transition planning. \nOne of the contacts per month addresses the symptom assessment and management of medications (once a month). \nThe ACT team has all required staff. \nFor discharged individuals, there are multiple documented attempts to locate and make contact with the individual prior to discharge (over a 45-day period). \nProgress notes contain documentation of the individual's progress (or lack of) toward specific goals/objectives on the treatment plan. \nThe staff interventions reflected in the progress notes are related to the staff interventions listed on the treatment plan. \nThe progress notes document individual response to the staff intervention provided. There is documentation of individual's responses and a discussion to the agreement of services identified in the treatment planning/individual recovery planning. 
\nACT Compliance with Service Guidelines Result \n \n95% 87% 82% 100% 76% 100% \n91% \n98% 99% \nN/A 85% (N = 19) \n \n97% 95% 90% 99.7% 91% 100% \n99% \n97% 99% \n92% 88% (N = 20) \n \nValue Percent Change Change \n \n+2 \n \n2.1% \n \n+8 \n \n8.4% \n \n+8 \n \n8.9% \n \n-.3 -0.3% +15 16.5% \n \n0 \n \n0.0% \n \n+8 \n \n8.1% \n \n-1 \n \n-1.0% \n \n0 \n \n0.0% \n \nN/A \n \nN/A \n \n+3 \n \n3.4% \n \nACT Focused Outcome Areas \nFocused outcome area (FOA) questions are answered once per record reviewed. Each FOA has a different number of questions for a total of 24 questions scored in this category. The overall score for ACT FOA is consistent from previous year's results equaling 90 percent. While there was improvement noted in the results specific to the lowest scoring indicators of whole health and safety from FY 2016, several indicators decreased slightly, including the categories of rights, \n42 \n \n Quality Management Annual Report FY 2017 \ncommunity, and person-centered practices. Figure 10 provides the ACT FOA results for each category by fiscal year. The greatest improvement in the FOA sub scores specific to ACT reviews was whole health, which increased from 76 percent in FY 2016 to 90 percent in FY 2017. \n \nFigure 10. ACT FOA Scores by Category by Year \n \n100% 90% 80% 70% \n \n90% 76% \n \n82% 78% \n \n99% 91% \n \n97% 94% \n \n97% 97% \n \n89% 87% \n \n90% 90% \n \n60% \n \n50% Whole Health \n \nSafety \n \nPerson Community Centered Life Practices \n \nChoice \n \nRights FOA Score \n \nFY 2016 (N = 19) FY 2017 (N = 20) \n \n43 \n \n Quality Management Annual Report FY 2017 \nCrisis Stabilization Unit Quality Reviews \nAt DBHDD's request, a review of all crisis stabilization unit (CSU) providers was initiated in January of FY 2016. 
The purpose of the CSU review was to assess the provider's overall practices and quality of service delivery, and to determine adherence to DBHDD standards through individual record reviews.8 Reviews were conducted in conjunction with a BHQR when the CSU providers also provided BHQR services but resulted in separate CSU scores and final assessment reports. \nReview questions are based on the DBHDD Provider Manual and DBHDD Policies, and were organized into three review categories: individual record review (IRR), compliance with service guidelines, and FOAs. The score for each category represents the percent of applicable questions met or present. The CSU overall score is calculated by averaging the three categories, with each category accounting for 33.3 percent of the overall score. \nDuring FY 2017, all 19 eligible CSU providers were reviewed. Like FY 2016, two were freestanding CSUs, and 17 occurred in conjunction with behavioral health quality reviews (BHQRs). Additionally, four CSUs had a second review during FY 2017 due to previous year's results falling below the threshold (79 percent overall score). Results for the reassessment reviews were included within the overall results incorporating results of 23 reviews. Five of the 23 reviews were conducted at CSU locations who provide services to adolescents and 18 reviews were conducted at CSUs providing services to adults only. \nCrisis Stabilization Unit Sample Method \nA random sample of 15 individuals who had received services within the three months preceding the review was selected for record reviews. When providers did not have an adequate number of individuals served in the three months preceding the review, samples were pulled from individuals served up to six months preceding the review. A total of 343 individual records were reviewed for \n8 Please refer to the following link to access a full description of the review process and review tools. 
http://georgiacollaborative.com/providers/prv-BH.html \n44 \n \n Quality Management Annual Report FY 2017 \nCSU providers in FY 2017. Two CSUs had only 14 records reviewed either because of limited claims available or the individual being on the CSU fewer than 24 hours. The sample for the interviews was selected and scored similarly to the BHQR process: individuals and staff were selected by the provider and quality assessors conducting the CSU review. Results from the interviews were not included in calculating the provider's overall CSU score. Quality assessors completed a minimum of five individual and five staff interviews per CSU review; however, the actual number fluctuated based on individual and staff availability, their agreement to participate in the interview process, the number of staff, and the number of individuals the provider served at the time of the review. Individuals selected for interviews were currently being seen at the CSU, and the staff selected was providing services on the CSU. If an individual or staff declined to be interviewed, assessors selected a different individual or staff to be interviewed. \nCSU Quality Review \nTable 9 shows the overall score for the 23 reviews performed in FY 2017 as well as the results of each category comparing each of the two fiscal years reviewed. The overall score mean was 86 percent for FY 2017 compared to 83 percent in FY 2016, demonstrating a three-point increase. Eighteen reviews (78 percent) scored within the 81-100 percent range compared to the FY 2016 total of thirteen (68 percent). Results of FY 2017 increased for all categories. Focused Outcome Areas remained the highest-scoring category in FY 2017 at 91 percent and again was an increase from FY 2016 result of 88 percent. In addition, consistent with the baseline year compliance with service guidelines followed at 87 percent, a five-point increase from the previous year (82 percent). 
The lowest scoring category remained as the individual record review (IRR), averaging 80 percent. However, IRR did also demonstrate an increase from the previous year by two percentage points (78 percent). Difference in proportions test conducted did not demonstrate statistical significance in year-to-year review related to CSUQR category scores. \n45 \n \n Quality Management Annual Report FY 2017 \n \nTable 9. CSU Overall Scores by Fiscal Year \n \n(FY 2016 N = 19/FY 2017 N = 23) \n \nCategory \n \nFY 2016 FY 2017 \n \nOverall \n \n83% \n \n86% \n \nIndividual Record Review \n \n78% \n \n80% \n \nService Guidelines \n \n82% \n \n87% \n \nFocused Outcome Areas \n \n88% \n \n91% \n \nFour CSU providers had been reviewed for a second time during FY 2017 due to low overall score results upon the initial FY 2016 review. The results of these four specific CSUQR reassessment reviews are graphically portrayed within the reassessment review section of this report in Figure 20 on page 61. \nCSU Individual Record Review (IRR) \nIndividual record review (IRR) questions were answered once per record reviewed. Each of the six subcategories had its own unique number of questions with 31 scored within the IRR category. Although no statistical significance was identified in analysis of each individual subcategory as a whole, overall improvement is apparent within the IRR reviews for the 23 reviews completed. The IRR Score in FY 2016 was 80 percent and has since increased to 82 percent for FY 2017. Additionally, it is evident there is room for improvement when reviewing subcategories and specific indicators or questions of the CSU IRR. Figure 11 identifies the subcategories and results for each by fiscal year. \n \n46 \n \n Quality Management Annual Report FY 2017 \n \nFigure 11. 
CSU Individual Record Review Category Scores by Year \n \nAssessment/Treatment Planning Admission/Initial Evaluation/Screening for Risk Crisis Stabilization Specific Treatment Planning \nCrisis Stabilization Course of Stay Documentation \nTransition/Discharge Planning \n \n86% 85% \n69% 69% \n70% 72% \n83% 86% \n87% 89% \n63% 62% \n \nFY 2016 (N = 19) \n \nFY 2017 (N = 23) \n \nThe CSU intake assessment data, similar to FY 2016, resulted in several positive findings such as the following: \n Almost all records, in FY 2017, 99 percent (99.6 percent, FY 2016) confirmed that individuals having met admission criteria. \n Ninety-eight percent were assessed by a physician or a physician extender within 24 hours of admission (99 percent, FY 2016). \n Individual records reflected daily status updates by a registered nurse (RN) in 95 percent of records. \n Ninety percent of records contained a correctly documented medication administration record (MAR). \n47 \n \n Quality Management Annual Report FY 2017 \nAlthough the results have decreased for daily RN status updates (significant at p \u003c .01) and a correctly documented MAR (no significance found) from FY 2016 in which results were 99 and 93 percent, respectively, CSU intake assessment data remains well above average. Furthermore, 92 percent of records had a confirmed admission order by the physician/extender to the CSU. \nOf the subcategories and individual questions, areas for growth remain in CSU treatment planning. These areas remain similar to areas identified within the BHQR assessment and treatment planning category in FY 2017. These areas include not incorporating all identified needs (primarily, cooccurring physical health issues) in the IRPs or nursing care plans (NCP). Addressing co-occurring physical health issues continued to remain an area of concern for FY 2017 due to 42 percent of records either minimizing or completely lacking evidence of including these co-occurring issues in the IRP or NCP. 
Moreover, 69 percent of records documented co-occurring disorders are assessed and addressed simultaneously, 74 percent incorporated medical needs within the IRP or NCP, and only 50 percent of records addressed safety issues when applicable. Furthermore, documentation of the American Society for Addiction Medicine (ASAM) patient placement criteria within the record having occurred in only 19 percent of records in FY 2016 remained low at 38 percent in FY 2017. Although transition/discharge planning continues to result in a relatively low score for FY 2017, the question of transition/discharge plans containing the needed documentation has significantly increased from the previous year's result from 56 percent to 62 percent. Moreover, only 58 percent of the records reviewed in FY 2017 had specific step-down service/activity/supports documented to meet the individualized needs. For FY 2017, decreases in initial bio-psychosocial assessments being present (82 percent) and comprehensive nursing assessments completed upon admission (73 percent) were evident compared to FY 2016 (84 percent and 78 percent, respectively). \nCSU Compliance with Service Guidelines \nThe 16 CSU compliance with service guidelines questions (14 scored and two non-scored) were answered once per review to assess the CSU program. CSU staffing requirements had been met for 96 percent of the reviews conducted. The one provider who had not met the requirement was reviewed for a second time and succeeded in meeting the requirement at the second review. Of the \n48 \n \n Quality Management Annual Report FY 2017 \nfour CSUs serving children and adolescents, all met the staffing requirements and ratios for FY 2017. An additional question was added to the FY 2017 review process specific to a psychiatrist consultation availability in cases in which the CSU physician(s) is/are not specialized in pediatric psychiatry, which scored at 100 percent. 
Only three service guidelines questions had decreased results when compared to the baseline year and are detailed in Figure 12. \n \nFigure 12. CSU Compliance with Service Guidelines Questions with Greatest Decrease in Score by Fiscal Year \n \nTherapeutic Blood Level Monitoring \nFY 2016 89% \nFY 2017 83% \n \nSeclusion and Restraint Policy \nAdherence \nFY 2016 95% \nFY 2017 91% \n \nProtocols for Handling Drugs \nFY 2016 95% \nFY 2017 87% \n \nThe lower-scoring questions of FY 2017 remained the same as the baseline year review, yet demonstrated some improvement. These questions included the following: \n The provider is adhering to current policy for the safe storage of medication, previously 53 percent for FY 2016 increased to 57 percent in FY 2017. \n Policies and procedures are present for adherence to required categories of crisis service plans for provision of crisis services to individuals who are deaf, deaf-blind, blind, and hard-of-hearing (53 percent in FY 2016) increased to 65 percent for FY 2017. \n Documentation in FY 2016 reflected CSUs did not have access to specialists such as addictionologists (67 percent) or pediatric psychiatrists (67 percent) when needed increased in FY 2017 to 96 percent and 83 percent respectively. \n49 \n \n Quality Management Annual Report FY 2017 \n CSU policies during the baseline year (FY 2016) often failed to identify a model for substance use treatment (69 percent) improved to 91 percent in FY 2017. \nResults for providers adhering to their policies on the notification of medication errors; previously identified as an area for growth, improved from 79 percent to 83 percent; a four-point increase. From FY 2016 to FY 2017, scores increased across the CSU providers specific to service guideline compliance with more than half of reviews scoring above 90 percent. The median score in FY 2017 was 91 percent compared to the median score in FY 2016 at 80 percent. 
Overall, the statewide average for compliance with service guidelines increased from 82 percent in FY 2016 to 87 percent in FY 2017. See Figure 13 for an annual comparative result of CSU review questions, mentioned above, specific to compliance with service guidelines which did not meet 100 percent for FY 2017. \n50 \n \n Quality Management Annual Report FY 2017 \n \nFigure 13. CSU Compliance with Service Guidelines Results by Question by Year \nCSU Staffing Requirements Met (FY16 n = 19, FY17 n = 23) \n \n100% 96% \n \nAccess to Addictionologist (FY16 n = 18, FY17 n = 23) \n \n67% 96% \n \nC\u0026A Psychiatrist (Non-scored) (FY16 n = 3, FY17 n = 6) \n \n67% 83% \n \nModel/Curriculum for SU treatment (Non-scored) (FY16 n = 16, FY17 n = 22) \n \n69% 91% \n \nDeaf, Deaf-Blind, Hard of Hearing Policies (FY16 n = 19, FY17 n = 23) \n \n53% 65% \n \nInfection Control Plan Adherence (FY16 n = 19, FY17 n = 23) \n \n84% 87% \n \nTherapeutic Blood Level Monitoring (FY16 n = 19, FY17 n = 23) \n \n89% 83% \n \nSeclusion \u0026 Restraint Policy Adherence (FY16 n = 19, FY17 n = 23) \n \n95% 91% \n \nMedication Storage Policy Adherence (FY16 n = 19, FY17 n = 23) \n \n53% 57% \n \nAdherence to Medication Notification Policy (FY16 n = 19, FY17 n = 23) \n \n79% 83% \n \nProtocols for Handling Drugs (FY16 n = 19, FY17 n = 23) \nFY 2016 (N = 19) \n \nFY 2017 (N = 23) \n \n95% 87% \n51 \n \n Quality Management Annual Report FY 2017 \nCSU Focused Outcome Area \nFocused outcome area (FOA) questions were answered once per record reviewed. Each FOA had its own unique number of questions for a total of 23 questions assessed overall. Please refer to the BHQR FOA section for a definition of the six FOAs on page 31 and 32. \nAll agencies reviewed in FY 2017 scored at or above 85 percent for all FOAs and as illustrated below in Figure 14. The overall score for CSU providers increased from 88 percent to 91 percent. 
Like FY 2016, results were highest in the areas of Choice (98 percent), Rights (92 percent), and Community Life (94 percent). Although both Choice and Rights decreased somewhat from the previous year's review by five and three percentage points, respectively, community life increased from 92 percent to 94 percent. Whole health (89 percent), Safety (85 percent), and Person Centered Practices (88 percent) increased from the baseline year with results of 83 percent (Whole Health), 76 percent (Safety), and 77 percent (Person Centered Practices). All categories except for Choice and Community Life demonstrated statistically significant improvement at the p \u003c .05 level. \nThe following were findings based on each individual FOA for FY 2017: \n Whole Health demonstrated consistent findings when compared with FY 2016 results as well as in line with what is noted in the IRR category and within the BHQRs, documentation lacked evidence whole health and wellness was consistently incorporated into treatment. While there remains improvement as noted when comparing the score results from FY 2016 to FY 2017, 83 percent to 89 percent respectively, data suggests documentation of current medical conditions being assessed, monitored or recorded remains a pronounced area for improvement (79 percent). \n Safety documentation demonstrated services were offered in an environment ensuring patient safety (96 percent) and provider's work with the individual to devise a crisis/safety plan as needed (95 percent). However, documentation of the individual (or other) being educated on the risks and benefits of their prescription medications, as evidenced by a \n52 \n \n Quality Management Annual Report FY 2017 \nsigned consent form, has been identified as an area for improvement due to the average result of 63 percent for FY 2017.  
Person Centered Practices documentation in FY 2016 did not support that individuals were active participants in CSU treatment planning; however, the result for FY 2017 depicts otherwise with 91 percent of records reviewed indicating the individual was an active participant in the planning of services and 83 percent actively participating in the modification of said plan or services.  Community Life documentation supported transition planning throughout service delivery (92 percent), informed choice of the individual specific to housing option (95 percent), discussion of aftercare placement (95 percent) and provider support in assisting individuals with after care placement (95 percent).  Choice reflected documentation supports the individual's preferences for treatment while at the CSU were honored (98 percent), and when barriers to treatment were identified, measures were taken to address the barriers (95 percent).  Rights supported privacy and confidentiality being reviewed with individuals (95 percent) yet demonstrated lower results with individuals being informed about their rights and responsibilities at the onset of services (88 percent). \n53 \n \n Quality Management Annual Report FY 2017 \nFigure 14. FY 2016 and FY 2017 CSU BHQR by FOA \n \nRights 97% \nChoice 99% \n \nWhole Health \n83% \n88% \n(N = 19) FY 2016 \nCommunity 92% \n \nSafety 76% \nPerson Centered Practices \n77% \n \nRights 92% \nChoice 98% \n \nWhole Health \n89% \n91% \n(N = 23) FY 2017 \nCommunity 94% \n \nSafety 85% \nPerson Centered Practices \n88% \n \n* The FOA subcategories are individually scored and are not averaged for the final overall FOA result at the review level. The final overall FOA result is calculated by adding all \"yes\" or \"present\" responses of all FOA subcategories and dividing by the total \"yes\" or \"no\" responses combined for each review. 
\nCSU Staff Interview (SI) and Individual Interview (II) \nThe CSU interview questions were similar to the BHQR interview questions and were divided into the six FOAs. Individual interviews were used to assess the individual's quality of life and the perception of care with the provider and services rendered. Staff interviews helped determine if a person-centered approach was used in providing services and empowering individuals. The data and anecdotal information gathered from individual and staff interviews supplemented the record review and enhanced the review process by providing a tailored perspective to illustrate the quality of care the agency provided using both a quantifiable and qualitative approach. \nData in Figure 15 illustrates the results of staff and individual interviews by fiscal year. Individual interview scores declined slightly from the baseline year to FY 2017 while staff interviews remained steady above 98 percent. \n \n54 \n \n Quality Management Annual Report FY 2017 \n \nFigure 15. CSU Individual and Staff Interview Results by Fiscal Year \n \nINDIVIDUAL INTERVIEW \n \nFY 2016 94.9% (N = 94) \nFY 2017 93.0% (N = 114) \n \nSTAFF INTERVIEW \n \nFY 2016 98.6% (N = 96) \nFY 2017 98.7% (N = 115) \n \nFigure 16 represents the average FOA scores for the individual interviews while Figure 17 provides results of Staff Interviews for both FY 2016 and FY 2017. Individual interview results remained consistent compared to FY 2016. Again, all but one FOA scored in the 90th percentile, indicating a high level of satisfaction for individuals served by the providers. The lowest-scoring FOA for IIs based on indicator level data remained whole-health (83 percent), which may indicate an increased need for CSUs to attend to and plan for individuals' co-occurring health conditions or health needs such as preventive services, dental services, or primary care. This issue was also highlighted in the IRR and FOA categories. 
\nAll SI questions answered for FY 2017 exceeded 95 percent except one safety question in which 79 percent felt adequately staffed related to acuity level of the individuals on the CSU. Additionally, CSU and BHQR SI scores seem to indicate knowledge regarding providing services that align with standards assessed by the FOA questions, as well as knowledge of the individuals they serve. \n \n55 \n \n Quality Management Annual Report FY 2017 \n \nFigure 16. CSU Individual Interview Subcategory Data by Year \n \nWhole Health Safety \nPerson-Centered Planning Community Life Choice Rights Overall \n \nFY 2016 (N = 94) \n \n84.6% 82.5% \n95.3% 96.2% 96.0% 93.8% \n99.4% 95.6% 96.8% 97.6% \n99.6% 98.8% 94.9% 93.0% \nFY 2017 (N = 114) \n \nFigure 17. CSU Staff Interview Subcategory Data by Year \n \nWhole Health Safety \nPerson Centered Planning Community Life Choice Rights Overall \n \n96.8% 98.1% 98.7% 98.1% 98.2% 99.5% 98.7% 98.8% 99.6% 99.6% 97.3% \n100% 98.6% 98.7% \n \nFY 2016 (N = 96) FY 2017 (N = 115) \n \n56 \n \n Quality Management Annual Report FY 2017 \nReassessment Frequency Review \nBHQR Reassessment Review Findings \nBeginning in FY 2017, the frequency of a BHQR and CSUQR was based on minimum scoring thresholds: less than 80 percent overall score or less than 70 percent billing validation. Providers scoring above the minimum threshold receive one BHQR/CSUQR per fiscal year; providers falling below the threshold receive two reviews. A total of 35 providers were evaluated twice during the 2017 fiscal year. However, two of these providers had not been previously reviewed in FY 2016 due to either being a new provider for FY 2017 or having insufficient claims use to justify an on-site review. 
Providers whose scores fell below the threshold were scheduled for a repeat review at least six months following their initial FY 2016 review allowing ample time for claims submission and documentation to reflect any changes made by the provider based on previous review findings. The average number of days between reviews for these providers was 195 days. The majority of reassessed providers in FY 2017 were Tier 2 (N = 29). Figure 18 displays the distribution of provider scores for the reassessed providers for scores obtained during the first review in FY 2016 and scores obtained during the first and second reviews conducted in FY 2017. Improvement in scores is clearly illustrated when comparing FY 2016 to FY 2017. In FY 2016, more than half of providers scored in the 71 to 80 percent range. The number of providers scoring in this range gradually declined through each subsequent review of FY 2017 while the number of providers scoring in a higher range, i.e., 81 to 90 percent and 91 to 100 percent range, steadily increased. \n57 \n \n Quality Management Annual Report FY 2017 \n \nFigure 18. BHQR FY 2017 Overall Score Distribution \n \nof Reassessed Providers \n70% \n \n60% \n \n58% \n \n50% \n \n46% 43% 40% \n \n40% 30% \n \n30% \n \n23% \n \n23% \n \n20% \n \n11% \n \n10% \n \n3% 3% 6% 6% \n \n6% 3% \n \n0% \n \nFY 2016 Review (N = 33, Mean = 80%, Median = 79%) First Review FY 2017 (N = 35; Mean = 79%; Median = 80%) Second Review FY 2017 (N = 35; Mean = 83%; Median = 86%) \nOverall Score: The average result for the reassessed providers at first review of FY 2017 was 79 percent compared to 83 percent later in the year. Overall score results demonstrated improvement for the 35 providers as a group based not only on the distribution of scores but also in reviewing those that fell below the established thresholds at either time. 
Further results show the following: \n Sixteen providers (9.6 percent), at the time of first FY 2017 review, fell below the 80 percent threshold for overall score compared to 11 providers or 6.6 percent at time of the subsequent review. \n Nine providers (25 percent) decreased in overall scores from initial FY 2017 to subsequent FY 2017 review with five declining by 10 percent or more. Of the providers who declined in overall score, the majority declined in at least three of the four categories with the category of billing validation demonstrating the greatest loss in percentage points. \n While 74 percent of all reviews demonstrated an increase in overall score, three providers decreased across all categories from first to second review. \n58 \n \n Quality Management Annual Report FY 2017 \nDistributions of reassessed providers based on each category can also be found in Appendix B. Those providers who fell below the established threshold at either of the two reviews during FY 2017 will be required to have an initial and subsequent review scheduled for the 2018 fiscal year regardless of improvement in one or both categories, overall score, and billing score. \nAssessment and Treatment Planning was the lowest scoring category for the reassessed providers in FY 2017. The 35 providers initially scored an average result of 70 percent at the time of first review in FY 2017 compared to 78 percent at time of the subsequent FY 2017 review demonstrating improvement. Furthermore, while 25 of the reassessed providers (71 percent) demonstrated improvement in FY 2017, several providers (N = 23) continued to score at or below 80 percent for this category at the time of their second FY 2017 review. However, of the 71 percent of providers demonstrating improvement in this category, more than half excelled by 10 points or more. 
Improvement is also noted against the statewide average of 77 percent as 21 (60 percent) providers scored at or above this result at the time of their second FY 2017 review, compared to only 10 (17 percent) providers at the time of their first FY 2017 review. \nBilling Score: Nearly half (N = 17) of the 35 providers demonstrated improvement from the initial FY 2017 billing score ranging from a one-point to 35-point difference; see Figure 19 below. Eight reviews fell below the threshold of 70 percent initially in FY 2017, yet five (63 percent) increased at the time of their subsequent score with four of the eight exceeding 90 percent. Twenty-seven providers (77 percent) exceeded the 70-percent threshold during the second review in FY 2017. The average billing score for the reassessed providers at both times of review in FY 2017 was 79 percent which is an increase from the average FY 2016 score of 75 percent for these providers; see figure below. As shown in the distribution below, approximately 63 percent of the providers had billing validation scores between 71 and 100 percent in FY 2016; these providers were placed on a reassessment schedule due to a low overall score. \n59 \n \n Quality Management Annual Report FY 2017 \n \nFigure 19. 
BHQR FY 2017 Billing Score Distribution \n \n45% \n \nof Reassessed Providers \n \n42% \n \n43% \n \n40% \n \n40% \n \n35% \n \n30% \n \n26% 26% \n \n25% \n \n21% \n \n20% \n \n15% 10% \n \n6% \n \n6%6%9% \n \n11% \n \n12% 11% \n \n9% \n \n9% \n \n5% \n \n3%3% 3%3% 3% 3% 3% \n \n3% \n \n0% \n \nFY 2016 Review (N = 33; Mean = 75%; Median = 79%) First Review FY 2017 (N = 35; Mean = 79%; Median = 84%) Second Review FY 2017 (N = 35; Mean = 79%; Median = 89%) \nCompliance with Service Guidelines: Although the behavioral health statewide average specific to compliance with service guidelines decreased from FY 2016 (90 percent) to FY 2017 (88 percent), this category demonstrated the most improvement in the number of providers increasing in score from review one to review two compared to any other. Of the reassessed providers, only eight (23 percent) had a decline in score from the time of the first FY2017 review. One provider remained at 100 percent for both reviews specific to compliance with service guidelines, and the remaining 26 (74 percent) demonstrated an increase ranging from one percentage point to 57 percentage points relative to their individual provider score between the two reviews. While 31 percent of providers scored at or below 80 percent at the time of their first review, only nine of the 35 providers (26 percent) fell below 80 percent for their second review score. Comparatively, only 26 percent of the reassessed providers scored above 90 percent at the time of their initial review while more than half (51 percent) reached or exceeded this score at the time of their second review. \n60 \n \n Quality Management Annual Report FY 2017 \n \nFocused Outcome Areas: Providers who were reviewed for a second time in FY 2017 demonstrated improvement overall in FOA scores, 86 percent to 90 percent respectively by first FY 2017 to second FY 2017 review. 
Twenty-five providers, over 71 percent (n = 25), demonstrated improvement in this area from the time of the first FY 2017 review to the second FY 2017 review while the remaining 29 percent (n = 10) resulted in a decline ranging from three percentage points to 24 points. Personcentered practices showed the greatest impact amongst the providers declining in score and remains an area for growth and improvement. \n \nCSU Reassessment Review Findings \nFour of the 19 CSU providers had two reviews during FY 2017. This was a direct result of low overall scores during the review that occurred in FY 2016 (below 80 percent). Figure 20 provides the overall score result by CSU provider at the time of initial FY 2017 review and subsequent FY 2017 review. Of the four CSU providers reviewed for a second time in the fiscal year, the average overall score for these providers at the time of first FY 2017 review was 86 percent. This decreased by two points to a score of 84 percent at the second review. Two CSU providers demonstrated an increase in results at the time of second review in FY 2017, yet one remained below the 80 percent threshold and, thus, is expected to be reviewed for a second time in FY 2018. \n \nFigure 20. CSU Overall Scores of Reassessed Providers \n \nFirst 2017 Review Score Second 2017 Review Score 2016 Review Score \n \n100% 90% 80% 70% 60% 50% \n \n88% 90% \n63% CSU 1 \n \n91% 81% \n78% \n \n91% 87% 72% \n \n73% 77% 76% \n \nCSU 2 \n \nCSU 3 \n \nCSU 4 \n \n61 \n \n Quality Management Annual Report FY 2017 \n \nTechnical Assistance/Exit Conference \nUpon completion of all BHQRs and CSUQRs, the lead assessor completed a formal exit conference. The exit conference supplied providers with tentative scores, provider strengths, and opportunities for growth. Providers received immediate, preliminary feedback of the BHQR and CSUQR findings at the time of the exit conference. 
Technical assistance was also provided during the exit conference and throughout the review process pertaining to opportunities for growth and areas of risk. The table below (Table 10) provides details on the technical assistance/exit conferences completed during FY 2017. \n \nTable 10. Technical Assistance/Exit Conference Details for FY 2017 \n \nFiscal Year 2017 \n \nQuarter 1 Quarter 2 Quarter 3 Quarter 4 \n \nTotal Year End \n \nTotal BHQR Exits \n \n45 \n \n40 \n \n35 \n \n29 \n \n149 \n \nTotal Minutes \n \n2,700 \n \n2,448 \n \n1,995 \n \n1,737 \n \n8,880 \n \nTotal Attendees \n \n356 \n \n306 \n \n217 \n \n207 \n \n1,086 \n \nTotal CSU Exit Total CSU Minutes Total CSU Attendees \n \n2 \n \n4 \n \n6 \n \n11 \n \n23 \n \n195 \n \n235 \n \n467 \n \n760 \n \n1,657 \n \n14 \n \n40 \n \n123 \n \n183 \n \n360 \n \nTotal Exits \n \n47 \n \n44 \n \n41 \n \n40 \n \n172 \n \nA total of 172 exit conferences were completed in FY 2017. The total number of attendees was more than 1,400 with conferences incorporating over 10,500 minutes. Exit conferences and technical assistance will continue to be provided throughout FY 2018. \n \n62 \n \n Quality Management Annual Report FY 2017 \nSummary of Findings and Recommendations for Behavioral Health Providers \nQuality Management continually reassesses processes to ensure we are capturing results and analyzing outcomes that give us the best information and truest picture of service quality in Georgia. This includes reviewing of feedback received from both providers and individuals, as well as revision of review tools as needed to ensure accurate and measurable reporting of results. \nSystem Strengths \nJune 2017 marked the completion of the second year of the Quality Management contract. Quality assessors completed 167 BHQRs, an additional 18 ACT service reviews, and 23 CSUQRs in FY 2017. A total of 172 exit conferences were conducted across all review types. 
Exit conferences may have occurred jointly across the BHQR, CSUQR, and ACT reviews dependent on whether the provider rendered more than one of those services. Thirty-five providers were reviewed for a second time in FY 2017 due to initial FY 2016 low scores in overall, billing, or both. The overall score for all three review types was greater than 80 percent for the second year: BHQR  84 percent, ACT  87 percent, and CSU  86 percent (Table 11), and FOAs implemented in FY 2016 demonstrated improvement across all three review types in FY 2017. The overall score for BHQRs remained static at 84 percent for both FY 2016 and FY 2017. This is due to the lower scoring categories of FY 2016 increased in FY 2017 while the higher scoring categories of FY 2016 decreased in FY 2017. \n63 \n \n Quality Management Annual Report FY 2017 \n \nTable 11. BHQR, ACT, CSU Overall Averages by Year \n \nBilling \n \nValidation \n \nFOA \n \nBHQR FY 2016 \n \n81% \n \n85% \n \nAssessment/ Planning \n79% \n \nService Guidelines \n90% \n \nBHQR FY 2017 \n \n84% \n \n89% \n \n77% \n \n88% \n \nOverall 84% \n84% \n \nACT FY 2016 \n \n92% \n \n88% \n \n85% \n \n85% \n \n88% \n \nACT FY 2017 \n \n90% \n \n90% \n \n80% \n \n88% \n \n87% \n \nCSUQR FY 2016 \n \nNA \n \n88% \n \nIRR  78% \n \n82% \n \n83% \n \nCSUQR FY 2017 \n \nNA \n \n91% \n \nIRR  80% \n \n87% \n \n86% \n \nThe table below, Table 12, summarizes the FOAs by review type and year. The overall score for the FOAs were above 80 percent both years; the CSUs scored close to 100 in FY 2017. Whole health, continually the lowest score for BHQR, has greatly improved from FY 2016 to FY 2017 for both CSU and behavioral health reviews. However, it remains an area for improvement across all providers providing behavioral health routine services. Both safety and person-centered practices, falling below 80 percent for CSU reviews, had a substantial increase to 98 and 99.5 percent, respectively. 
To summarize, areas suffering the greatest deficit in FY 2016 have all improved based on FY 2017 results across all review types, with CSUs demonstrating the largest impact with an increase in each subcategory to 98 percent or above. \n \n64 \n \n Quality Management Annual Report FY 2017 \n \nTable 12. BHQR and CSUQR FOA Scores \n \nFocused Outcome Areas \n \nBHQR BHQR CSUQR FY 2016 FY 2017 FY 2016 \n \nWhole Health \n \n63% \n \n74% \n \n83% \n \nSafety \n \n81% \n \n83% \n \n76% \n \nPerson Centered Practices \n \n92% \n \n91% \n \n77% \n \nCommunity \n \n87% \n \n93% \n \n92% \n \nChoice \n \n92% \n \n96% \n \n99% \n \nRights \n \n90% \n \n93% \n \n97% \n \nOverall Score \n \n85% \n \n89% \n \n88% \n \nCSUQR FY 2017 \n98% 98% 99.5% 99% 99.6% 100% \n99% \n \nWhile reassessments supply lower-scoring providers the opportunity for improvement within the fiscal year, anticipation of increased scores are generally expected. Specific to reassessed providers, Table 13 reflects the number of providers who improved from the first FY 2017 review to the second review within the year. While assessment and treatment planning, as well as compliance with service guidelines, both decreased in statewide BHQR average from FY 2016 to FY 2017, each of these categories rendered results demonstrating the greatest improvement amongst the reassessed providers. Providers who obtained low FY 2016 overall or billing scores were scheduled for review and reassessment in FY 2017. While many of the reassessed providers demonstrated improvements on an individual level from first FY 2017 review to second FY 2017 review, those that continued to fall below the established thresholds will be scheduled for an additional two reviews in FY 2018. Additionally, providers are supplied with technical assistance during reviews to assist with performance improvement. \n \n65 \n \n Quality Management Annual Report FY 2017 \n \nTable 13. 
Reassessment Review Results \n \nNumber of Providers Who Increased in Score from \n \nFY 2017 Review #1 to FY 2017 Review #2 \n \nBHQR CSU \n \nCategory \n \n(N = 35) (N = 4) \n \nBilling \n \n18 (51%) \n \nN/A \n \nIRR \n \nN/A \n \n2 (50%) \n \nCompliance w/Service Guidelines 26 (74%) 0 (0%) \n \nFocused Outcome Areas \n \n25 (71%) 2 (50%) \n \nAssessment \u0026 Treatment Planning 25 (71%) \n \nN/A \n \nOverall Scores \n \n26 (74%) 2 (50%) \n \nOpportunities for Improvement \nThe focus of billing validation is on compliance with standards for ensuring claims are justified in accordance with the DBHDD Provider Manual, which remains the second-lowest score of the four categories through FY 2017. In FY 2016, Quality Management launched a Quality Training Series with the first module being billing validation. Although the billing category remains second lowest, a three-point increase resulting in 84 percent for FY 2017 demonstrates improvement across the network. Thus, increasing the threshold to 79 percent from 69 percent for billing thresholds is recommended for FY 2018 specific to reassessment scheduling in anticipation of aligning with the key performance indicators in FY 2019. Billing compliance trends in FY 2018 will be used to determine whether additional training or technical assistance to providers is indicated. \nWhile several providers remained consistent or demonstrated improvements in their results from FY 2016 to FY 2017, the areas of compliance with service guidelines, as well as assessment and treatment planning, both declined overall in results. A specific treatment planning training via in person and available via webinar was introduced in FY 2017 as part of the Quality Training Series. Additionally, Quality Management conducted a specific psychosocial rehabilitation versus case management training to aid in alleviating provider confusion specific to the services and their \n66 \n \n Quality Management Annual Report FY 2017 \nrequirements. 
Quality Management shall continue including topics related to lower scoring categories and questions within its annual Quality Training Series. \nRecommendation I: Based on data analysis from both FY 2016 and FY 2017, modifications to and the addition of some questions or tools are anticipated to occur and may provide additional information about quality as reviews begin in FY 2018. Some of these modifications and the tools affected include the following: \n Discharge summary/note documentation includes: 1) Individuals' status at discharge, 2) Specific instructions for ongoing care including appointment date/time, 3) Living situation at the time of discharge, and 4) Date, time, and method of discharge. Must meet all factors for question answered as \"yes\" (Assessment and Treatment Planning). \n Separation of a medical screening and the behavioral health assessment as two separate questions (Assessment and Treatment Planning). \n The addition of opioid maintenance therapy as a measurable service (Compliance with Service Guidelines). \n Question alteration to include annual diagnosis verification in the individual meeting admission criteria (Assessment and Treatment Planning). \n Staff Interview additions and alterations such as the following: 1. Staff can identify the individual's warning signs and triggers prior to a crisis to promote early intervention. 2. Person-Centered Practices: Repeat Staff Interview: Staff can describe any new procedures the agency has adopted to encourage individual involvement in the development and redevelopment of IRPs no matter their age or perceived competency. 3. Community Life: Repeat Interview: Staff can describe how they have assisted the individual in accessing opportunities for community involvement in the past year (per individual preferences). \n67 \n \n Quality Management Annual Report FY 2017 \n4. Staff has received training by the agency in providing whole-health informed services. 
\nRecommendation II: Continue the Quality Training Series in FY 2018 based on data analysis from FY 2017, for all behavioral health and CSU agencies to include the following: \n CSU Transition/ discharge planning  CSU Service Guidelines focusing on medication procedures  BHQR Compliance with Service Guidelines  DBHDD Provider Manual  Safety/Crisis Planning \nRecommendation III: CSU review tool(s) should be incorporated into the electronic review system allowing for efficient and effective data gathering used to assess quality within the CSU programs. \nRecommendation IV: Access to Addictionologists specific to compliance with service guidelines for CSU providers was scored based on verbal affirmation from the provider in FY 2016 and FY 2017. However, to ensure access to Addictionologists is in fact available to individuals, Quality Management is recommending the provider supply documented evidence to assessors during the review for FY 2018. \nRecommendation V: Develop and implement an electronic quality of care process to collect, monitor, and report various issues to DBHDD, specific to provider environmental atmospheres, staffing, or quality of services that are outside of the scope of the quality reviews. \nRecommendation VI: Sample sizes for FY 2018 BHQRs will be based on provider size and individuals served. See the table below (Table 14) for provider size and sample size information. Providers with ACT and CSU will continue to have 15 individuals sampled and will have no change to the sampling method. Quality Management will oversample individual records for BHQRs and CSUQRs to ensure targeted sample sizes are met in FY 2018. \n68 \n \n Quality Management Annual Report FY 2017 \n \nTable 14. 
Sample Size Determination for BHQR in FY 2018 \n \nSize of Provider \nSmall Medium \nLarge \n \nSample Size \n5 - 10 20 30 \n \n# of Individuals Served (within six months) \n\u003c 50 51-100 \u003e 101 \n \nRecommendation VII: Beginning in FY 2018, changes to the quality review procedure specific to voided/adjusted claims and encounters will be altered. During FY 2016 and FY 2017, assessors excluded claim(s)/encounter(s) that had been voided or adjusted prior to the beginning of the review. To prevent providers voiding extensive numbers of claims just prior to scheduled BHQRs, providers may no longer void or adjust any claims/encounters following their notification of BHQR/CSUQR. Providers may resume voiding and adjusting claims/encounters after their review has been completed. Voids/adjustments to claims appearing in the sample will be considered at the time of review only and must include the date adjusted. \nRecommendation VIII: Although the billing category remains second lowest for the BHQR, a threepercentage point increase resulting in 84 percent for FY 2017 demonstrates improvement across the network. Thus, increasing the billing threshold for reassessment to 79 percent from 69 percent is recommended for FY 2018. \n \n69 \n \n Quality Management Annual Report FY 2017 \nSection 4: Intellectual and Developmental Disabilities \nBackground \nPerson Centered Reviews (PCR) and Quality Enhancement Provider Reviews (QEPR) are used to assess the extent to which individuals with intellectual and developmental disabilities are satisfied with their services and achieve outcomes important to them, and to evaluate provider systems. The purpose of the PCR is to assess the individual's quality of life as well as the effectiveness of and the satisfaction individuals have with the service delivery system. 
The purpose of the QEPR is to review providers' systems and practices to ensure they meet requirements set forth by the Medicaid waiver and DBHDD, and to evaluate the effectiveness of their service delivery system. Follow-up review activities provide technical assistance to help providers improve service delivery systems through Quality Technical Assistance Consultation (QTAC). \nQuality assessors use various tools to collect data from interviews, observations, and record reviews to compile a well-rounded picture of the individual's circle of supports, how involved the individual is in the decisions and plans developed for that individual, as well as the quality of services provided. Individuals sampled for the PCR or QEPR participate in the Individual Interview (II) and Individual Service Plan Quality Assurance Checklist (ISP QA). Both review processes also include a Provider Record Review (PRR) and the Developmental Disability Service Specific (DDSS) requirements for each service received by the individual, a Staff Interview (SI) with a sample of direct support providers, and onsite Observation of day or residential programs. \nDuring the PCR, the Support Coordinator Record Review (SCRR) and Support Coordinator Interview (SCI) tools are completed for the support coordinator working with the individual. During the QEPR, each provider organization receives one administrative review to monitor compliance with requirements through the Qualifications and Training (Q\u0026T) component of the review. The Q\u0026T includes a review of a sample of personnel/staff records to determine if staff has the necessary \n70 \n \n Quality Management Annual Report FY 2017 \nqualifications specific to services rendered, and whether required training was received within specified timeframes. \nIn this section of the report, results for both the QEPR and PCR are presented by each of these review tools. 
Results are also presented by Focused Outcome Area (FOA), as presented in the BHQR and CSU sections. The overall average scores for intellectual and developmental disability reviews are a weighted average, based on the total number of standards scored [total met / (total met + total not met)]. Except for the Q\u0026T and DDSS tools, indicators within each tool are grouped into six FOAs, areas of the individual's life important to achieve and maintain: \n Whole Health--individuals are healthy, aware of their health-related needs, and direct their own health care regimen \n Safety--individuals are safe in their home and work environments and in their communities; they understand or are learning how to self-preserve in all environments \n Person-Centered Practices--supports and services are being provided based on the individual's preferences and direction \n Community Life--individuals are actively participating and developing social roles in their communities as desired \n Choice--individuals have information they need to make informed choices on life decisions, such as where to live, where to work, and which supports, services and providers to use \n Rights--individual rights are upheld and information and education is provided to ensure understanding of their rights \nSampling Method \nPerson-Centered Review (PCR) \nThe PCR used a random sample of 481 individuals receiving services from providers selected for a QEPR. The PCR sample was stratified by region and sampled proportionate to the regions. Therefore, the number of PCRs per region are proportionate to the number of individuals receiving \n71 \n \n Quality Management Annual Report FY 2017 \nservices within the region. The PCR sample was selected from individuals, age 18 and over, who had not received a PCR during the previous year and were receiving services. Table 15 shows the number and percent of individuals receiving services across the state, as well as the number of PCRs completed within each region. 
\n \nTable 15. State Population and PCR Sample \n \nRegion \n1 2 3 4 5 6 Total \n \nPopulation \n \nN \n \n% \n \n2,750 \n \n20.5% \n \n2,252 \n \n16.8% \n \n3,436 \n \n25.6% \n \n1,415 \n \n10.5% \n \n1,726 \n \n12.8% \n \n1,853 \n \n13.8% \n \n13,342 100.0% \n \nPCR Sample \n \nN \n \n% \n \n96 \n \n20.0% \n \n90 \n \n18.7% \n \n122 \n \n25.3% \n \n51 \n \n10.6% \n \n63 \n \n13.1% \n \n59 \n \n12.3% \n \n481 \n \n100.0% \n \nQuality Enhancement Provider Review (QEPR) \nDuring FY 2017, a stratified random sample of 93 providers who did not receive a QEPR in FY 2016 and rendered services to eligible individuals was selected for a QEPR.9 10 Based on the number of eligible individuals, providers were stratified into three categories by size: \"Large,\" \"Medium,\" and \"Small.\" One support coordination agency and one crisis services provider were randomly selected for review as well. In addition, DBHDD selected five additional providers for review, including three crisis providers that resulted in a sample size of 100 providers (Table 16). The number of providers by size is listed in Table 16 for both FY 2016 and FY 2017. \n \n9Providers offering ineligible services at the time of sample selection were excluded from the population. Ineligible services include: environmental accessibility adaptation, financial support services, Georgia Crises Response System (GCRS), hospital residential services, individual directed goods and services, natural support training services, specialized medical equipment, specialized medical supplies, transition services, and vehicle adaptations. 10 Adults who were receiving review-eligible services at the time of sample selection and who were not reviewed as part of a PCR in FY 2016 were considered eligible for review in FY 2017. \n72 \n \n Quality Management Annual Report FY 2017 \n \nTable 16. 
QEPR Provider Sample by Size \n \nFY 2016 \n \nFY 2017 \n \nProvider Size \n \nNumber \n \nSmall (caseload  30) \n \n46 \n \n52 \n \nMedium (30 \u003c caseload \u003c 100) \n \n36 \n \n19 \n \nLarge (caseload  100) \n \n16 \n \n24 \n \nSupport Coordination Agency \n \n1 \n \n1 \n \nCrisis Services \n \n1 \n \n4 \n \nTotal \n \n100 \n \n100 \n \nReview Processes \nOnsite Reviews (PCR and QEPR) \nThe focus of the PCR is on the individual's quality of life and quality of services received. The focus of the QEPR is on the provider's overall practices, quality of services offered to all individuals served, and level of compliance with Medicaid waiver and state requirements. Both the PCR and QEPR use the individual observation staff assessment (IOSA), which includes an interview with individuals, interviews with their staff and onsite observation(s) (OBS) at residential and day programs, as applicable. In addition to the IOSA, the PCR and QEPR include an evaluation of the Individual Service Plan (ISP QA), a review of the provider's records, as well as compliance with service delivery requirements using the DDSS review tool. The number of PRR and DDSS reviews completed depends upon the number of services received by the individual. \nWhile the PCR and QEPR share most of the same tools, there are a few exceptions. The PCR also includes an interview with the individual's Support Coordinator and a review of the record in the consumer information system (CIS) maintained by the support coordinator for the individual. The QEPR has an additional review tool, qualifications and training, which is used to review a sample of records from all staff ensuring required training and other state requirements are current and \n \n73 \n \n Quality Management Annual Report FY 2017 \ndocumented (e.g., background screenings, level of education). The total number of records or interviews completed this year for the PCR and QEPR is listed in Table 17, for each review tool.11 \n \nTable 17. 
Number of Records by Review Tool and Review Type \n \nReview Tool \n \nPCR N \n \nQEPR N \n \nTotal \n \nIOSA - Individual Interview \n \n481 \n \n440 \n \n921 \n \nIOSA - Observation \n \n225 \n \n473 \n \n698 \n \nIOSA - Staff Interview \n \n304 \n \n398 \n \n702 \n \nISPQA Checklist \n \n481 \n \n440 \n \n921 \n \nSupport Coordinator Interview (SCI) \n \n481 \n \n3 \n \n484 \n \nSupport Coordinator Record Review (SCRR) \n \n481 \n \n20 \n \n501 \n \nProvider Record Review (PRR) \n \n894 \n \n1,356 2,250 \n \nStaff Qualifications and Training (Q\u0026T) \n \nN/A \n \n947 \n \n947 \n \nDDSS - Behavioral Supports Consultation \n \n0 \n \n11 \n \n11 \n \nDDSS - Community Access (Group) \n \n80 \n \n610 \n \n690 \n \nDDSS - Community Access (Individual) \n \n12 \n \n193 \n \n205 \n \nDDSS - Community Living Support \n \n21 \n \n129 \n \n150 \n \nDDSS - Community Residential \n \n60 \n \n313 \n \n373 \n \nDDSS - Crisis \n \nNA \n \n46 \n \n46 \n \nDDSS - Occupational Therapy \n \n0 \n \n12 \n \n12 \n \nDDSS - Physical Therapy \n \n0 \n \n8 \n \n8 \n \nDDSS - Prevocational \n \n10 \n \n138 \n \n148 \n \nDDSS - Respite \n \n0 \n \n11 \n \n11 \n \nDDSS - Support Coordination \n \n481 \n \n36 \n \n517 \n \nTotal Number of PCRs/QEPRs \n \n481 \n \n96 \n \n577 \n \nWhen a PCR is completed, a report is provided that identifies the strengths of the individual's team. It evaluates the supports and services provided and makes recommendations for the individual and \n \n11 To alleviate administrative burden on providers, PCRs were completed at the same time as the QEPR. Beginning in Year 2, the observations and staff interviews completed as part of the PCR were attached to the QEPR. As a result, the number completed for the PCR appears to be lower; however, these PCR review components are incorporated into the QEPR. \n74 \n \n Quality Management Annual Report FY 2017 \nthe support team (including the support coordinator, provider, and family). 
A provider who participates in a QEPR receives a comprehensive report that identifies strengths of the service delivery systems, recommendations for improvement, and several performance scores. These scores include the overall score, qualifications and training score, and DDSS score. \nQuality Technical Assistance Consultation (QTAC) \nThe QTAC is an additional review process that may be conducted 90 days after completion of the QEPR. This additional review is based on any service concerns identified during the PCR or QEPR, or if the provider requests technical assistance. Using findings from the QEPR, technical assistance is provided to support providers and to offer suggestions and guidance to help improve their service delivery systems. The process used a consultative approach to address specific issues and concerns related to an individual receiving services or systems and practices needing improvement. The QTAC supplements the PCR and QEPR processes by affording contracted providers the opportunity to solicit technical assistance for specific needs within the service delivery milieu. \nPerson-Centered Review \nPCR Scores by Tool \nFigure 21 shows the average score for each tool used during the PCR, comparing FY 2016 and FY 2017. Findings each year show a similar pattern, with scores for interviews with individuals, staff, and support coordinators higher than scores for provider or support coordinator documentation (record reviews). Scores across all tools have decreased somewhat, with the greatest differences from Year 1 to Year 2 in the provider record review (down 8.8 points), the support coordinator interview (down 7.5 points) and support coordinator record review (down 6.2 points).12 \n12 All three changes were statistically significant at p\u003c.001. However, the test was completed with large denominators, based on all the indicators scored. \n75 \n \n Quality Management Annual Report FY 2017 \n \nFigure 21. 
PCR Scores by Tool and Year \n \n100% 90% 80% 70% \n \n95.1% 91.9% \n \n98.4%96.8% \n \n96.4% 94.3% \n \n79.0% \n \n79.9% \n \n70.2% \n \n73.7% \n \n90.8% 83.3% \n \n60% \n \n50% \n \nIndividual Observation \n \nStaff \n \nInterview \n \nInterview \n \nFY 2016 (N = 484) \n \nProvider Record Review \n \nSupport \n \nSupport \n \nCoordinator Coordinator \n \nRR \n \nInterview \n \nFY 2017 (N = 481) \n \nDeclines in the PRR, SCRR, and SCI scores were driven by decreases on some indicators within certain FOAs. \n The PRR decline was attributed mostly to the FOA choice, for which scores fell 18.9 points, from 71.6 percent in FY 2016 to 53 percent in FY 2017. The greatest differences were related to a lack of documentation demonstrating how individuals were provided choices regarding their living environments and living situations. \n The greatest impact for the SCRR was within the rights FOA, for which scores declined 19.9 points, from 73.4 percent to 53.5 percent. Differences were primarily related to documentation regarding the review of: o Individuals' rights and responsibilities o The complaints/grievances policy o The HIPAA privacy and security rules o Informed consent from individuals for taking psychotropic medications \n76 \n \n Quality Management Annual Report FY 2017 \nThe SCI tool also saw a decline within the rights FOA, for which scores fell 10.2 points, from 86.3 percent to 76.3 percent, and on some safety standards. 
There was a statistically significant decline in the percent of support coordinators who were aware of: \n Individuals' restrictive interventions  How to address identified rights restrictions with the individual  The individuals' triggers related to behavioral health (i.e., behavioral issues, \ndecompensation, or relapse)  Individualized techniques for implementing behavior, safety and emergency plans, or if the \nindividual has one of these plans when needed \nPCR Scores by Focused Outcome Area (FOA) \nAll the PCR tools (II, SI, OBS, PRR, SCI, and SCRR) are designed to measure the six basic FOAs, with many indicators within each FOA. Results for each FOA, based on all six tools, are shown in figure 22, by year. Scores have decreased in each area, particularly for Choice (down 12 points), Community Life (down 9.5 points), and Whole Health (down 8.9 points). \n77 \n \n Quality Management Annual Report FY 2017 \n \n100% 90% 80% 70% \n \nFigure 22. PCR Scores by FOA and Year \n \n93.6% 84.7% \n \n96.5% 90.3% \n \n89.1% 82.6% 80.6% \n \n90.7% \n \n95.4% 90.2% \n \n78.7% \n \n71.1% \n \n60% \n \n50% Whole Health \n \nSafety \n \nPerson Centered Practices \n \nCommunity Life \n \nChoice \n \nFY 2016 (N = 484) FY 2017 (N = 481) \n \nRights \n \nPCR Scores by Tool and Focused Outcome Area (FOA) \nIn this section, PCR results for FY 2017 are presented by FOA and tool (Figure 23). Findings for the review components varied across each FOA and indicate the following: \n Service provider documentation was the lowest scoring component across all FOAs, except for Rights (82.8 percent). Conversely, Support Coordinators' documentation was the lowest scoring component in Rights (53.5 percent), while they performed best documenting standards in Whole Health (93.7 percent) and Safety (92.5 percent). \n Both Support Coordinators and service providers were least likely to be in compliance with areas of Community Life and Choice. 
\n Interview findings were at least 75 percent or higher, across all FOAs.13 \n \n13 Beginning in FY 2017, staff interviews were only conducted for staff who offer services to the individual but are not connected to the provider who is participating in a QEPR. Therefore, only the individual's \"B\" providers who were not receiving a QEPR were interviewed. This is a different process than in FY 2016 so comparisons to that year should be made with caution. \n78 \n \n Quality Management Annual Report FY 2017 \n Individual and Support Coordinator interview responses were similar regarding Community Life, Whole Health and Person Centered Practices but lower than information obtained from staff or the onsite observations. \nFigure 23. FY 2017 PCR Scores by Tool and FOA \nOpportunities for Growth by FOA \nEach PCR tool is organized around the FOAs and each FOA uses several standards, or indicators, to measure how well the area is being addressed. As indicated in this report, most findings are positive, with relatively high scores across all perspectives of the PCR, i.e., interviews, observations and documentation areas all averaging over 70 percent. However, there are specific areas within \n79 \n \n Quality Management Annual Report FY 2017 \neach FOA where indicator level scoring is relatively low or has decreased significantly since FY 2016. These are presented in this section by FOA. \nWhole Health The average Whole Health score in FY 2017 was 84.7 percent, down from 93.6 percent in FY 2016. An important aspect of each individual's life is the use of various types of medications, particularly psychotropic and anticonvulsant medication. Indicators across several review components suggest a statistically significant decrease in some areas of medication administration, particularly surrounding aspects of education. Some of the lower scoring indicators also decreased since FY 2016 (Table 18). \n \nTable 18. 
Low Scoring Whole Health Indicators (PCR) \n \nIndicator \nIndividual Interview The individual was often not aware of: \nWhat medications he/she is taking Why medications are prescribed The side effects of the medications Staff Interview Staff could not always describe: How to support the individual to learn about medications The side effects of medications taken Provider Record Review Documentation did not demonstrate how: Education is provided to the individual on the risks and side effects of the medication \nIndividuals and families are provided education on all prescribed medications \n* Areas highlighted in blue within table reflect significance at a p \u003c .05. \n \nFY 2016 \n \nFY 2017 \n \n78.3% (n = 428) 86.0% (n= 351) 74.3% (n = 350) \n \n68.3% (n = 435) 68.5% (n= 410) 52.4% (n = 410) \n \n81.0% (n = 357) 86.3% (n = 388) \n \n67.7% (n = 195) 78.7% (n = 211) \n \n60.5% (n = 332) 33.9% (n = 610) 56.6% (n = 327) 25.7% (n = 723) \n \nIn addition to issues surrounding education about medications, data indicated providers often did not have the individual's preventive health care report in the record for the male and female preventive screening(s): mammogram, bone density, hearing evaluation/supports, wheelchair \n80 \n \n Quality Management Annual Report FY 2017 \nevaluation, vision evaluation/supports, or dental care - each of these areas showing compliance of less than 50 percent. \n \nSafety Scores were relatively high in areas surrounding Safety, an average of 90.3 percent in FY 2017, down just over six points from 96.5 percent in FY 2016. Some safety indicators showed relatively low scores for FY 2017, and some decreased significantly since FY 2016. Most providers were not documenting how they address abuse, neglect and exploitation or offer education on how individuals can self-preserve. Support Coordinators were not always aware of restrictive interventions, needed behavior or crisis plans, and some triggers related to behavior health. \n \nTable 19. 
Low Scoring Safety Indicators(PCR) \n \nIndicator Provider Record Review Documentation did not demonstrate: \n \nFY 2016 \n \nOngoing evidence of identifying, addressing, and seeking prevention of abuse, neglect, and exploitation How they offer education to the individual on how to self-preserve or develop effective resiliency skills according to the individual's learning style \n \n53.7% (n = 499) 52.5% (n = 549) \n \nSupport Coordinator Interview Support Coordinator was not aware: \n \nOf restrictive interventions/plans in place Of needed crisis plans when the individual had used Georgia Crisis Response services \nIf the individual had a behavior plan Of the individualized techniques for following the behavior plan \nOf specific triggers related to behavior health issues: \nDecompensation Relapse \n* Areas highlighted in blue within table reflect significance at a p \u003c .05. \n \n87.8% (n= 181) 84.7% (n= 118) 83.2% (n= 143) 79.5% (n = 132) \n87.4% (n = 174) 88.9% (n = 180) \n \nFY 2017 \n22.9% (n = 893) 41.1% (n = 893) \n47.8% (n = 92) 53.6% (n= 97) 56.2% (n= 105) 55.8% (n = 104) \n58.2% (n = 117) 60.5% (n = 114) \n \n81 \n \n Quality Management Annual Report FY 2017 \nPerson Centered Practices Person Centered Practices showed an average score of 82.6 percent, down from 89.1 percent in FY 2016. This is not the lowest scoring FOA, but indicators accounting for much of the change showed a 10 to 20-point decrease since FY 2016 (Table 20). Individuals were not as likely to identify new experiences or something new they had learned, and providers were less likely to document progress toward goals or the unique strengths and talents of people they serve. \n \nTable 20. 
Low Scoring Person-Centered Practices Indicators(PCR) \n \nIndicator \n \nFY 2016 \n \nFY 2017 \n \nIndividual Interview The individual was often unable to identify something, in the past 6 months: \n \nNew or experienced \n \n70.4% (n = 479) 60.3% (n = 471) \n \nThat had been learned \n \n78.8% (n = 480) 65.8% (n = 474) \n \nProvider Record Review Documentation did not reflect: \n \nHow providers regularly review, with the individual, progress toward and benefit of goals \n \n51.4% (n = 552) 40.4% (n = 891) \n \nThe individual's talents \n \n48.9% (n = 552) 38.9% (n = 894) \n \nThe individual's hopes and dreams \n \n53.3% (n = 552) 42.0% (n = 893) \n \nThe individual's strengths \n \n57.0% (n = 553) 48.9% (n = 894) \n \nSupport Coordinator Record Review Documentation did not always demonstrate: \n \nHow support coordinators regularly review, with the individual, progress toward and benefit of goals \n \n52.0% (n = 483) 56.1% (n = 472) \n \nProgress notes that describe progress toward goals including the individual's response to the intervention or activity, based on data \n \n63.9% (n = 482) 57.8% (n = 481) \n \nThe individual's talents \n \n49.2% (n = 480) 28.8% (n = 479) \n \nThe individual's strengths \n \n56.4% (n = 479) 41.0% (n = 480) \n \n* Areas highlighted in blue within table reflect significance at a p \u003c .05. \n \nCommunity Life Community Life, the degree to which individuals were interacting with and integrated in their surrounding community, showed the lowest average score among the FOAs for both FY 2016 and FY \n \n82 \n \n Quality Management Annual Report FY 2017 \n2017 - 80.6 percent and 71.1 percent respectively. Information from the face-to-face interviews is shown in Table 21. Findings indicate from the three different interview perspectives (individual, support coordinator, staff), individuals were often not developing, or being supported to develop, valued social roles or exploring community employment options. \n \nTable 21. 
Low Scoring Community Life Interview Indicators (PCR) \n \nIndicator \n \nFY 2016 \n \nFY 2017 \n \nIndividual Interview The individual was often not: \n \nA member of community clubs (i.e., athletic, arts/craft, photography, YMCA), as desired \n \n57.7% (n = 414) 34.0% (n = 400) \n \nA member of neighborhood associations, as desired \n \n65.3% (n = 236) 46.4% (n = 140) \n \nExposed to new community activities (in the past 6 months) \n \n71.4% (n = 472) 63.2% (n = 473) \n \nAble to describe any new community experiences \n \n77.8% (n = 472 69.1% (n = 475) \n \nProvided opportunities to develop new social roles \n \n75.4% (n = 476) 69.4% (n = 477) \n \nProvided opportunities to learn about social roles in the community \n \n81.1% (n = 476) \n \n71.5% (n = 478) \n \nActively pursuing preferences related to goals of employment (not a readiness model) \n \n82.4% (n = 335) 70.4% (n = 338) \n \nSupport Coordinator Interview The Support Coordinator often was not: \n \nAware of the individual's new community experiences 73.0% (n = 467) 62.0% (n = 471) \n \nAware of how the individual is provided opportunities to develop new valued social roles in the community \n \n71.2% (n = 463) 66.9% (n = 475) \n \nAble to define or explain how the individual is supported to uphold valued social roles \n \n79.5% (n = 472) 72.6% (n = 475) \n \nStaff Interview Staff was unable to describe how the individual is provided opportunities to: \n \nDevelop new social roles \n \n59.3% (n = 791) 60.4% (n = 298) \n \nDevelop community employment \n \n71.4% (n = 795) 58.3% (n = 235) \n \n* Areas highlighted in blue within table reflect significance at a p \u003c .05. 
\n \nDocumentation compliance from Support Coordinator and Provider Record Reviews indicates most providers, both service providers and Support Coordinators, were not documenting how they \n \n83 \n \n Quality Management Annual Report FY 2017 \nsupport individuals to develop social roles, seek employment, or how individuals participate in the community. Documentation results for Community Life, from record reviews, are shown in Table 22. \n \nTable 22. Low Scoring Community Life Record Review Indicators (PCR) \n \nIndicator \n \nFY 2016 \n \nFY 2017 \n \nDocumentation did not demonstrate: How the individual is supported to learn about, explore and experience the community Support Coordinator Record Review Provider Record Review \n \n48.7% (n = 476) 44.3% (n = 532) \n \n39.0% (n = 480) 39.5% (n = 881) \n \nHow the individual is supported to have or has responsibilities in the community as desired \nSupport Coordinator Record Review \nProvider Record Review \nHow the individual is supported to/able to participate in community activities and employment the same as individuals without disabilities \n \n58.5% (n = 480) 62.8% (n = 530) \n \n59.5% (n = 477) 42.8% (n = 883) \n \nSupport Coordinator Record Review Provider Record Review Development of social roles and natural supports that reflect the individual's interests Support Coordinator Record Review Provider Record Review \n \n76.9% (n = 455) 79.1% (n = 460) \n \n62.2% (n = 458) 40.4% (n = 854) \n \n48.5% (n = 480) 42.9% (n = 532) \n \n46.4% (n = 481) 25.4% (n = 881) \n \nOpportunities to seek employment in competitive integrated settings \n \nSupport Coordinator Record Review Provider Record Review \n \n48.2% (n = 363) 55.2% (n = 364) \n \n41.4% (n = 394) 24.9% (n = 794) \n \n* Areas highlighted in blue within table reflect significance at a p \u003c .05. \n \nChoice Choice is the second-lowest scoring FOA showing an average score of 78.7 percent for FY 2017. 
Compared to last year's score of 90.7 percent, this FOA had the greatest decrease  a 12-point drop. Scores within this FOA are most negatively affected by 10 to 50-point decreases on standards from the provider and Support Coordinator Record Reviews. Findings showed a lack of documentation \n84 \n \n Quality Management Annual Report FY 2017 \nfor how individuals were making meaningful and informed choices in different aspects of their lives, including living environments, living situations, competitive employment and community participation. \n \nTable 23. Low Scoring Choice Indicators (PCR) \n \nIndicator \n \nFY 2016 \n \nFY 2017 \n \nProvider Record Review Providers did not always demonstrate through documentation how individuals: \n \nMake informed choices about competitive or supported employment options \n \n54.6% (n = 379) \n \n30.8% (n = 827) \n \nMake informed choices about or options to change living environments \n \n76.7% (n = 257) 23.0% (n = 708) \n \nAre provided a choice of living situations \n \n74.9% (n = 267) 26.4% (n = 708) \n \nAre provided with information to make informed choices (education, exploration, and experiences) \n \n40.7% (n = 548) 27.4% (n = 891) \n \nMake informed choices about community participation and social interaction \n \n54.8% (n = 540) 42.8% (n= 888) \n \nSupport Coordinator Record Review Support Coordinators did not always demonstrate through documentation how individuals: \n \nAre offered employment or educational options \n \n55.4% (n = 401) 49.3% (n = 423) \n \nAre provided choices of living situations \n \n75.0% (n = 324) 47.1% (n = 433) \n \nMake meaningful choices about community participation \n \n56.5% (n = 480) 50.3% (n = 441) \n \nAre provided with education, exploration and experiences \n*Areas highlighted in blue within table reflect significance at a p \u003c .05. 
\n \n41.9% (n = 482) 31.2% (n = 481) \n \nRights Similar to Safety, scores surrounding Rights were relatively high, showing an average score of 90.2 percent in FY 2017 -- down approximately five points since FY 2016 (95.4 percent met). While the overall score for Rights remains relatively high, up to 40-point decreases were seen on standards within the Support Coordinator Interview and record review tools. Interview results showed a 28point decrease in the percent of Support Coordinators who address identified rights restrictions \n \n85 \n \n Quality Management Annual Report FY 2017 \n(90.6 percent to 62.0 percent met) and a 40-point decrease in the percent of coordinators who are aware of the individual's restrictive interventions/plans (87.8 percent to 47.8 percent met). \nSupport Coordinator documentation did not always show evidence of a signed informed consent form for psychotropic medications or of a signature on the notification of rights and responsibilities. Documentation was much less likely to show evidence the complaints/grievance policy was being shared with the individual /guardian in a manner accommodating the individual's learning style. \n \nTable 24. 
Low Scoring Rights Indicators (PCR) \n \nIndicator \n \nFY 2016 \n \nSupport Coordinator Interview Support Coordinators were often not: \n \nAware of restrictive interventions/plans in place 87.8% (n = 181) \n \nAddressing identified rights restrictions \n \n90.6% (n = 223) \n \nSupport Coordinator Record Review Support Coordinators did not always demonstrate evidence: \n \nThe individual or legal guardian's signature on notification that all individuals are informed about their rights and responsibilities at least annually \n \n58.4% (n = 473) \n \nHIPAA privacy and security rules are specifically reviewed with individuals \n \n72.1% (n = 480) \n \nOf an informed consent with the individual/guardian signature for taking psychotropic medications prescribed by a psychiatrist or psychiatric nurse practitioner \n \n49.1% (n = 281) \n \nThe complaints/ grievance policy is being shared with the individual/guardian in a manner accommodating the individual's learning style \n \n61.5% (n = 478) \n \n*Areas highlighted in blue within table reflect significance at a p \u003c .05. \n \nFY 2017 47.8% (n = 92) 62.0% (n = 129) \n22.7% (n = 476) 55.2% (n = 478) 35.5% (n = 251) 34.7% (n = 479) \n \n86 \n \n Quality Management Annual Report FY 2017 \nIndividual Service Plan Quality Assurance Checklist (ISP QA) \nDuring the PCR, assessors review the content of the individual's plan to assess compliance with requirements and the extent to which the plan specifically addresses the individual's needs, goals, and desires. \nISP Written to Support a Meaningful Life The ISP QA checklist provides an overall rating for each service plan, based upon the degree to which the ISP is written to provide a meaningful life for the individual receiving services. There are three different categories for each ISP. \nService Life: The ISP supports a life with basic paid services and paid supports. Needs \"important for\" the individual are addressed, such as health and safety. 
However, there is not an organized effort to support an individual in obtaining other expressed desires \"important to\" the individual, such as getting a driver's license, having a home, or acting in a play. The individual is not connected to the community and has not developed social roles, but expresses a desire to do so. \nGood but Paid Life: The ISP supports a life with connections to various supports and services (paid and non-paid). Expressed goals \"important to\" the individual are present, indicating the individual is obtaining goals and desires beyond basic health and safety needs. The individual may go out into the community but with only limited integration into community activities. For example, the individual may go to church but not have the opportunity to participate in Sunday school or the choir. Community connections are lacking, and the individual indicates a desire to achieve more. \nCommunity Life: The ISP supports a life with the desired level of integration in the community and in various settings preferred by the individual. The individual has friends and support beyond providers and family members. The individual has developed meaningful social roles, such as belonging to a Red Hat club or a book club, or having employment in a competitive rather than segregated environment. Rather than just going to church, the individual may be an usher at the church or sing in the choir. Relationships developed in the community are reciprocal. The ISP is \n87 \n \n Quality Management Annual Report FY 2017 \nwritten with goals that help support the individual in moving toward a community life, as she or he chooses. \nOverall findings are presented in Figure 24 and show very similar results for FY 2016 and FY 2017. A majority of ISPs reviewed each year was written to support a \"Good but Paid Life.\" The smallest percent supported a service life, while 16.6 percent (FY 2016) to 17.3 percent (FY 2017) supported a community life. 
The ISP results support other findings in the report - there is opportunity
The expectation is rated on a scale from zero to four, zero meaning the section is blank or did not address the requirements for any of the indicators and four meaning 100 percent of the indicators or requirements in the section are addressed in the ISP. \nData in Table 26 show the percent of ISPs at each rating on the 12 different expectations for FY 2017. For example, 52.0 percent of ISPs reviewed in FY 2017 had all four indicators present (addressed) for the expectation regarding the communication chart. Data indicate: \n 58.7 percent of all ISPs reviewed addressed all elements in each section.  On average, over 85 percent of ISPs reviewed had three or four indicators present.  Expectations regarding the rights, psychotropic medications, or behavior supports and the \nhealth and safety sections showed the highest rates. All four indicators present in 94.2 percent of ISPs. \n89 \n \n Quality Management Annual Report FY 2017 \n The ISP expectations measure how well the individual's hopes and dreams are addressed and whether goals are person-centered. These measures were most likely to have zero or one indicator addressed, 17.9 percent and 15.3 percent respectively. \n \nTable 26. 
ISP QA Checklist Ratings by Expectation (FY 2017; N = 481) \nRatings \n \nISP QA Checklist Description \n \n0 \n \n1 \n \n2 \n \n3 \n \n4 \n \nRelationship Map \u0026 discussion on ways to develop relationships \nCommunication Chart Person-centered Important to/For Hopes and Dreams Service Summary Rights, Psychotropic Medications, Behavior Supports Section Meeting Minutes SIS completed and support needs are addressed in the ISP Health and Safety Review Section completed accurately and thoroughly Goals are Person Centered Training Goal Action Plan Action Plans/Objectives \nTotal \n \n0.8% \n0.8% 0.8% 13.1% 1.9% \n0.8% \n2.7% \n0.8% \n0.8% \n6.2% 1.5% 0.8% 2.6% \n \n2.3% \n1.7% 0.2% 4.8% 4.2% \n0.0% \n6.0% \n0.6% \n0.0% \n9.1% 1.0% 1.0% 2.6% \n \n12.5% \n5.0% 2.9% 8.3% 13.9% \n0.2% \n16.2% \n1.5% \n \n36.0% \n40.5% 28.1% 18.5% 25.8% \n4.8% \n21.2% \n32.8% \n \n48.4% \n52.0% 68.0% 55.3% 54.3% \n94.2% \n53.8% \n64.2% \n \n0.4% \n19.3% 10.8% 22.5% 9.5% \n \n4.6% \n21.4% 50.5% 36.2% 26.7% \n \n94.2% \n43.9% 36.2% 39.5% 58.7% \n \nAverage Rating \n3.3 \n3.4 3.6 3.0 3.3 \n3.9 \n3.2 \n3.6 \n3.9 \n2.9 3.2 3.1 3.4 \n \nTable 27 shows the average rating (0  4) by expectation for FY 2016 and FY 2017. The average rating for both years was 3.4 and showed very little change by year for each expectation. Expectations measuring how well the individual's hopes and dreams are addressed and whether goals are person centered were the lowest rated expectations both years. Expectations regarding the rights, psychotropic medications, or behavior supports and the health and safety sections showed the highest average ratings, 3.9 for FY 2016 and FY 2017. \n \n90 \n \n Quality Management Annual Report FY 2017 \n \nTable 27. 
Average Rating by Expectation (0 - 4) \n \nFY 2016 \n \nISP QA Checklist \n \n(N = 484) \n \nRelationship Map \u0026 discussion on ways to develop relationships \n \n3.4 \n \nCommunication Chart \n \n3.5 \n \nPerson-centered Important to/For \n \n3.7 \n \nHopes and Dreams \n \n3.0 \n \nService Summary \n \n3.2 \n \nRights, Psychotropic Medications, Behavior Supports Section \n \n3.9 \n \nMeeting Minutes \n \n3.2 \n \nSIS completed and support needs are addressed in the ISP \n \n3.6 \n \nHealth and Safety Review Section completed accurately and thoroughly \n \n3.9 \n \nGoals are Person Centered \n \n3.0 \n \nTraining Goal Action Plan \n \n3.2 \n \nAction Plans/Objectives \n \n3.2 \n \nOverall Average Rating \n \n3.4 \n \nFY 2017 (N = 481) \n3.3 3.4 3.6 3.0 3.3 3.9 3.2 3.6 \n3.9 \n2.9 3.2 3.1 3.4 \n \nPCR Results by Service \nDuring the PCR, a record review is conducted with every provider who offers eligible services to the individual. Information in Table 28 shows results for record reviews completed during the PCRs, by service and year. The N is the number of records reviewed for each service and the percent met is based on the total number of indicators reviewed. \nThe decline in PCR scores, from FY 2016 to FY 2017, was seen across all services as well. Among the services with at least 45 records reviewed: \n With the exception of Support Coordination, Supported Employment showed the least amount of change over the two-year period, with the highest score in FY 2017 (82.3 percent) and close to the highest score in FY 2016 (77.8 percent). \n Community Living Supports showed the greatest decline, close to 17 points.  Prevocational service scores dropped by 11 points and with the exception of Respite (only \none record reviewed) showed the lowest score in FY 2017. \n91 \n \n Quality Management Annual Report FY 2017 \n \nTable 28. 
Record Review Results by Service and Year \n \nFY 2016 \n \nFY 2017 \n \nService \n \nN \n \n% Met \n \nN \n \n% Met \n \nProvider Record Review \n \n558 \n \n79.0% \n \n894 \n \n70.2% \n \nBehavioral Supports \n \n- \n \n- \n \n2 \n \n91.7% \n \nCommunity Access(Group) \n \n233 \n \n77.1% \n \n378 \n \n67.9% \n \nCommunity Access(Individual) \n \n60 \n \n79.7% \n \n95 \n \n72.9% \n \nCommunity Living Supports \n \n47 \n \n83.8% \n \n72 \n \n66.8% \n \nCommunity Residential \n \n91 \n \n80.4% \n \n195 \n \n73.4% \n \nPrevocational \n \n70 \n \n75.9% \n \n74 \n \n64.9% \n \nRespite \n \n1 \n \n87.5% \n \n6 \n \n61.8% \n \nSupported Employment \n \n54 \n \n82.3% \n \n71 \n \n77.8% \n \nTransportation \n \n2 \n \n84.7% \n \n- \n \n- \n \nNursing Services \n \n- \n \n- \n \n1 \n \n87.8% \n \nSupport Coordination Record Review \n \n484 \n \n79.9% \n \n481 \n \n73.7% \n \nThe following table displays scores by FOA for the services reviewed this year during the PCR. Providers of respite (n = 6) scored relatively low in areas of Whole Health, Community Life and Choice, and Prevocational provider records also showed relatively low scores in Community Life and Choice. Support Coordination was the lowest scoring service area relative to Rights. \n \n92 \n \n Quality Management Annual Report FY 2017 \n \nTable 29. 
FY 2017 PCR Record Review Results by FOA and Service \n \nService \n \nWhole Health Safety PCP \n \nCom \n \nAverag \n \nLife Choice Rights \n \ne \n \nBehavioral Supports (N=2) Community Access (Grp) (N=378) Community Access (Ind) (N=95) Community Living (N=72) Community Residential (N=195) Prevocational (N=74) Respite (N=6) Supported Employment (N=71) Nursing Services (N=1) Support Coordination (N=481) \n \n81.3% 59.5% 63.6% 66.0% 83.9% 56.1% 47.8% 53.3% 100.0% 93.7% \n \n96.0% 75.2% 75.9% 71.5% 76.5% 76.1% 73.1% 79.6% 90.9% 92.5% \n \n96.9% 75.1% 80.9% 70.5% 72.6% 73.2% 81.6% 83.4% 76.5% 78.6% \n \n100.0% 61.8% 69.9% 59.9% 62.5% 53.3% 45.5% 86.3% 100.0% 63.5% \n \n93.3% 50.0% 59.8% 52.8% 51.4% 45.3% 46.2% 72.4% 100.0% 65.1% \n \n80.0% 84.2% 85.9% 77.0% 79.9% 84.4% 70.8% 90.2% 69.2% 53.5% \n \n91.7% 67.9% 72.9% 66.8% 73.4% 64.9% 61.8% 77.8% 87.8% 73.7% \n \nPCR Strengths and Recommendations \nDuring each PCR, assessors identify strengths about services offered to the individual and provide recommendations to help improve services and the individual's quality of life. Displayed in Table 30 and 31 display strengths and recommendations identified in at least 40 percent of the PCRs completed in FY 2017. Staff strengths most often cited include an awareness of the individual's unique safety needs, achievements, and how the individual communicates choices. The individual felt valued in approximately 70 percent of PCRs completed. Recommendations include using web resources to assist with rights education, community life, informed choice, and safety profiles. \n \n93 \n \n Quality Management Annual Report FY 2017 \n \nTable 30. Strengths Most Often Identified During a PCR \n \nStrength \n \nTimes Noted \n \nStaff is aware of the unique safety needs of the individual. \n \n383 \n \nStaff acknowledges the individual's achievements. \n \n341 \n \nThe individual feels valued. 
\n \n336 \n \nStaff has a clear understanding how the individual communicates choice making in everyday decisions. \n \n324 \n \nStaff is knowledgeable of and provided examples of how an \n \nindividual's preferences for exercising rights are actively being \n \n238 \n \nsupported. \n \nThe individual is aware of how to self-preserve in all settings. \n \n205 \n \nStaff consistently promotes independence. \n \n197 \n \nStaff demonstrates the use of person-centered values and approaches in everyday interactions with the individual. \n \n194 \n \nStaff is aware of the addendum process and when a request can be made. \n \n193 \n \nPercent of PCRs 79.6% 70.9% 69.9% 67.4% \n49.5% \n42.6% 41.0% 40.3% \n40.1% \n \nTable 31. Recommendations Most Often Identified During a PCR \n \nRecommendation \n \nTimes Percent Noted of PCRs \n \nUtilize a video to educate individuals on Rights at http://www.youthforhumanrights.org. \n \n253 52.6% \n \nHelp foster learning that shares knowledge relating to a community \n \nlife using The International Learning Community website, \n \n239 49.7% \n \nhttp://www.learningcommunity.us. \n \nConduct \"what if\" scenarios to determine the individual's skills in various safety situations. \n \n233 48.4% \n \nSupport the individual to explore other potential dreams. This can be done by using the 3 E's, education, exposure, and experience. \n \n219 45.5% \n \nProvide guidance in making informed choices using the following \n \nwebsite: \n \n217 45.1% \n \nhttp://mn.gov/mnddc/extra/publications/choice/Its_My_Choice.pdf \n \nCreates a safety profile for residential settings for first responders using the http://www.smart911.com website. \n \n213 44.3% \n \nEnsure daily schedules and activities promote exposure to new things ('new places and new faces') and are not stagnant in nature. \n \n208 \n \nIdentify ways to expose the individual to new experiences in his/her community. 
\n \n206 \n \n43.2% 42.8% \n \n94 \n \n Quality Management Annual Report FY 2017 \nQuality Enhancement Provider Review (QEPR) \nQEPR Scores by Tool \nSeveral different scores are calculated and presented to providers at the conclusion of the QEPR. The Overall Score is based on findings from the Individual Interviews, Staff Interviews, Observations and Provider Record Reviews (PRR). Findings are also calculated for the Administrative Qualifications and Training, based on a sample of staff rendering services, and the DDSS reviews. During FY 2017, four crisis providers participated in a QEPR. Because these services are very different, we present results for these four providers separately. In FY 2016, 100 providers participated in a QEPR; 46 Small, 36 Medium, 17 Large providers, and one QEPR for a crisis provider. The sample of providers in FY 2016 was randomly drawn from across the state. For FY 2017, providers were selected from the remaining providers who had not yet received a QEPR; 52 Small, 19 Medium, 25 Large providers and four crisis providers. Consequently, while scores for all the QEPR components appear to be lower in FY 2017, comparisons by year are not an appropriate measure as the FY 2017 sample was not selected to be representative of the state. Moreover, the measure was not selected from the entire population of providers and it is unclear of the underlying sources that may be causing the variation. See Figure 25 for distribution of QEPRs by provider size for FY 2016 and FY 2017. \n95 \n \n Quality Management Annual Report FY 2017 \nFigure 25. FY 2016 and FY 2017 QEPR Samples by Provider Size and Type \nOverall QEPR Score \nFigure 26 shows the average overall scores for providers reviewed in FY 2016 and FY 2017 (excluding crisis providers) and scores for each tool used in the review process. The overall score for FY 2017 was 83.7 percent, with the Observation and Staff Interviews showing the highest scores of just over 95 percent. 
As in FY 2016, the PRR reflected the lowest-scoring area (69.6 percent). Each tool used to calculate the overall score in the QEPR showed results lower than in FY 2016. While the pattern of scores is similar across the tools each year, it is important to note the QEPRs in FY 2017 used a sample not designed to be representative statewide but rather to guarantee all providers are reviewed within a three to three and a half year period. As a result, comparisons made to FY 2016 data are generally not appropriate and should be made with caution. \n96 \n \n Quality Management Annual Report FY 2017 \nFigure 26. Overall QEPR Scores by Tool and Year \nIndividual Interview FY16: 95.1% FY17: 90.7% \n \nProvider Record Review \nFY16: 73.6% FY17: 69.6% \n \nOverall Score \nFY16: 88.4% (N = 99) \nFY 17: 83.7% (N = 96) \n \nStaff Interview \nFY16: 96.9% FY17: 95.2% \n \nObservation FY16: 98.2% FY17: 95.7% \nOverall Crisis Provider Scores \nFive crisis providers were selected for a QEPR, one in FY 2016 and four in FY 2017. Figure 27 shows the overall scores for these providers and scores by tool and year. Compared to other providers reviewed with the QEPR, crisis providers showed scores somewhat lower for the interviews and Observations but close to 15 points higher on the PRR. \n \n97 \n \n Quality Management Annual Report FY 2017 \nFigure 27. Overall Crisis Provider Scores by Tool and Year \nIndividual Interview FY16: 83.4% FY17: 86.0% \n \nProvider Record Review \nFY16: 88.0% FY17: 84.1% \n \nOverall Score \nFY16: 85.1% (N = 1) \nFY17: 86.8% (N = 4) \n \nStaff Interview \nFY16: 81.3% FY17: 89.1% \n \nObservation FY16: 87.7% FY17: 92.5% \nQualifications and Training (Q \u0026 T) and Service Specific (DDSS) \nEvery QEPR includes a review of staff qualifications and training. In FY 2017, a sample of 947 staff records was reviewed. The sample was stratified by service to ensure all services offered by each provider were included in the review process. 
The primary purpose of the Q\u0026T record review is to confirm relevant staff information is accurate and up to date (e.g., driver's license, performance evaluations, background screening) and staff has received all required trainings specific to services provided. DDSS reviews are completed to ensure services are provided as specified by DBHDD (Figure 28). \n \n98 \n \n Quality Management Annual Report FY 2017 \nFigure 28. QEPR Qualifications and Training and DDSS Results by Year \n \nQEPR Providers \n \nCrisis Providers \n \nQ \u0026 T \n FY 2016: 80.6% (N = 1,003)  FY 2017: 82.6% (N = 947) \n \nQ \u0026 T \n FY 2016: 92.8% (N = 11)  FY 2016: 91.8% (N = 51) \n \nDDSS \n FY 2016: 99.9% (N = 1,389)  FY 2017: 99.8% (N = 1,667) \n \nDDSS \n FY 2016: 100% (N = 8)  FY 2017:100% (N = 48) \n \nQualifications \u0026 Training Opportunities for Improvement \nProvider staff, particularly for crisis providers, appears to do relatively well maintaining compliance with required training and qualifications. Most staff (94.8 percent) followed DBHDD's policy 04-104 for Criminal Records Checks; most were properly licensed (97.2 percent); and 100 percent of crisis providers ensured crisis response system staff (mobile team members and intensive support staff) had participated in training and passed an examination demonstrating competence in all crisis protocols and requirements. 
For providers reviewed this year, several areas present opportunities for improvement in staff training, including the following: \n Within the first 60 days of hire, many staff did not have training on: o Use of the Georgia Crisis and Access Line (43.0 percent met) o How to work with individuals with co-occurring diagnoses (47.4 percent met) \n99 \n \n Quality Management Annual Report FY 2017 \no Suicide prevention skills (51.5 percent met) o Holistic care of the individual (66.4 percent met) o Medical, physical, behavioral, and social needs and characteristics of individuals \nserved (68.9 percent met)  Required training for Developmental Disability Professionals did not always include: \no Supports Intensity Scale overview (54.5 percent met) o Individual service planning training (67.9 percent met)  The following required topics were not always included in the annual training: o Specific individual medications and their side effects (54.9 percent met) o Emergency and disaster plan procedures (68.8 percent met) \nThe Q\u0026T review component uses a sample of employees to determine compliance with standards. The number of employees per provider varies based upon number of individuals served and services rendered. Therefore, while documentation for a majority of staff may support a \"met\" on any given standard, fewer providers may have all employees in compliance with the standard. For example: \n While 73.2 percent of staff had documented evidence of training on person centered values, principles, and approaches, only 55.3 percent of providers had documentation all employees had received the training, i.e., at least one staff was not in compliance with the standard. \n Approximately 95 percent of staff records reviewed were in compliance with background screening requirements standards. 
However, only about 75 percent of providers reviewed this year had all sampled staff in compliance with the standards, i.e., at least one staff was not compliant with a background screening standard for approximately 25 percent of the providers. \n 73.4 percent of employees had accurate and up to date annual work performance evaluations, while only 38.3 percent of providers were compliant across all staff reviewed. \n100 \n \n Quality Management Annual Report FY 2017 \n 82.6 percent of staff were current on annual tuberculous testing, but only 46.3 percent of providers were compliant across all staff reviewed. \nQEPR Scores by Provider Size14 \nFigure 29 displays the distribution of the QEPR Overall and Q\u0026T scores by size of the organization and year.15 There is very little difference across provider size on the overall scores, ranging from 82.7 percent to 86.1 percent in FY 2017. However, the 19 Medium-sized providers reviewed in FY 2017 appear to have scored somewhat lower on the Q\u0026T component (76.0 percent) compared to Small (81.5 percent) or Large (86.4 percent) providers. All providers, regardless of size, scored above 98 percent on the DDSS tool (not shown in the figure). \n14 Crisis provider scores were excluded. There were one large, one medium and two small crisis providers reviewed in FY 2017 and one small crisis provider reviewed in FY 2016. 15 See Figure 25 on page 96 for details regarding provider size categories. \n101 \n \n Quality Management Annual Report FY 2017 \nFigure 29. Overall and Qualifications and Training Score by Provider Size and Review Year \nQEPR Scores by Focused Outcome Area (FOA) \nThe Overall score for each review is divided into six FOAs. Each FOA score is calculated with the combined results from the Individual Interview, Staff Interview, Observation (IOSA), and Provider Record Review.16 Results for the non-crisis QEPRs are shown by FOA and year in Figure 30. Scores within each FOA declined from FY 2016. 
The lowest scoring areas each year were Community Life, Choice and Person Centered Practices. Measures surrounding Safety and Rights remained relatively high, showing scores of approximately 91 percent each year. \n16 FOAs calculated for the PCR also included the Support Coordinator Interview and SCRR, which are not part of the QEPR process. Comparisons between the PCR and QPER are not appropriate. \n102 \n \n Quality Management Annual Report FY 2017 \nFigure 30. Overall QEPR Scores by FOA and Year \n \nRights FY16: 92.4% FY17: 91.2% \nChoice FY16: 86.2% FY17: 75.9% \n \nWhole Health FY16: 89.1% FY17: 83.2% \nOverall Score FY16: 88.4% \n(N = 99) FY 17: 83.7% \n(N = 96) \nCommunity Life \nFY16: 78.0% FY17: 70.4% \n \nSafety FY16: 93.9% FY17: 90.9% \nPerson Centered Practices FY16: 82.6% FY17: 81.6% \n \nQEPR Scores by Tool and FOA \nIn this section, results by FOA for the QEPR are presented for each tool used in calculating the Ooverall score for the QEPR process--Individual Interview, Observation, Staff Interview and PRR (see Figure 31).17 Findings for the providers reviewed this year indicate the following: \n \n Provider documentation (PRR) is the lowest scoring tool across all the FOAs, ranging from a low of 51.3 percent for measures surrounding Choice to 82.3 percent for Rights. \n Observations and staff interviews reflected relatively high Community Life scores, 94.7 percent and 85.2 percent respectively, but the individual's perspective in this FOA was somewhat lower (75.9 percent), and providers did not document this as well (63.9 percent). \n Community Life was the lowest-scoring FOA from the individual's perspective. \n \n17 See Table 16 in the Onsite Review Section for the number of interviews and records completed for each QEPR component. 103 \n \n Quality Management Annual Report FY 2017 \n Individual and staff interviews and observations all showed scores over 90 percent in the FOAs of Safety, Choice, and Rights. 
\n Observation scores were approximately 95 percent or higher in all the FOAs with the exception of Person Centered Practices (88.8 percent). \n \nFigure 31. FY 2017 QEPR Scores by Tool and Focus Outcome Area \n \nWhole Health Safety \nPerson Centered Practices Community Life Choice Rights 0% \n \n66.5% \n \n87.4% 96.5% \n94.4% \n \n74.8% \n \n93.0% 98.4% 97.3% \n \n86.6% 88.8% 94.4% \n75.4% \n \n75.9% 94.7% \n85.2% 63.9% \n \n51.3% \n \n94.2% 95.0% 93.1% \n \n96.8% 92.7% \n97.5% 82.3% \n \n25% \n \n50% \n \n75% \n \n100% \n \nIndividual Interview (N = 440) Staff Interview (N = 398) \n \nObservations (N = 473) Provider Record Review (N = 1,356) \n \n104 \n \n Quality Management Annual Report FY 2017 \nQEPR Scores by FOA and Provider Size \nThere is little variation by FOA across the size of the provider organization (See Figure 32). One exception is small providers appear to have performed better in Whole Health than other providers reviewed in FY 2017. \nFigure 32. FY 2017 QEPR Provider Scores by FOA and Size \n \nWhole Health Safety \nPerson Centered Practices Community Life Choice Rights 50% \n \n81.1% 80.5% \n \n89.2% \n \n91.4% 91.6% 90.2% \n \n81.0% 81.4% 82.0% \n \n70.2% 70.8% 70.2% \n \n78.8% 75.9% 74.5% \n \n91.6% 90.5% \n91.4% \n \n60% \n \n70% \n \n80% \n \n90% \n \n100% \n \nSmall (N = 52) \n \nMedium (N = 19) \n \nLarge (N = 25) \n \n105 \n \n Quality Management Annual Report FY 2017 \nQuality and Technical Assistance Consultation (QTAC) \nA total of 208 QTACs were completed, of which 147 were at the provider level and 61 at the individual level. Due to the ability to check more than one referral type, totals do not sum to 147. Most QTACs were requested at the provider level (70.7 percent) and most had an internal referral source (75.5 percent). The majority of QTACs (64.6 percent) were generated from the QEPR. \n \nTable 32. 
QTACs by Referral Source and Type \n \nProvider (N = 147) Individual (N = 61) \n \nReferral Source \n \nNumber Percent Number Percent \n \nHealth Quality Manager \n \n1 \n \n0.7% \n \n9 \n \n15.0% \n \nInternal \n \n109 \n \n73.8% \n \n51 \n \n83.3% \n \nProvider \n \n37 \n \n25.5% \n \n1 \n \n1.7% \n \nReferral Type: \n \nNew Provider \n \n1 \n \n0.7% \n \n- \n \n- \n \nProvider Request \n \n40 \n \n27.2% \n \n1 \n \n2% \n \nSupport Coordinator Monitoring \n \n1 \n \n0.7% \n \n10 \n \n16% \n \nAlert (PCR/QEPR) \n \n13 \n \n8.8% \n \n50 \n \n82% \n \nQEPR QTAC \n \n97 \n \n64.6% \n \n- \n \n- \n \nThe following table provides a list of detailed reasons for the QTACs completed in FY 2017. The most frequently cited reasons were related to alert follow-ups and the need/request for Person Centered Thinking training. \n \nTable 33. QTAC Referral Reasons: FY 2017 \n \nReason \n \nN % \n \nAlert follow-up \n \n60 27.0% \n \nPerson-Centered Thinking training needed or requested \n \n27 12.2% \n \nOther \n \n19 8.6% \n \nReview of person-centered documentation \n \n14 6.3% \n \nTracking health related issues \n \n7 3.2% \n \nGoals not being tracked \n \n4 1.8% \n \nFollow up to previous QTAC \n \n4 1.8% \n \nMedication errors \n \n3 1.4% \n \nSafety concern for the person \n \n2 0.9% \n \n106 \n \n Quality Management Annual Report FY 2017 \n \nReason \n \nTable 33. QTAC Referral Reasons: FY 2017 \n \nAssistance needed for KPI reporting and data collection Lack of Behavior Support Plan Lack of financial tracking Assistance with criminal background checks Rights training \n \nTotal \n \nN % \n2 0.9% 1 0.5% 1 0.5% 1 0.5% 1 0.5% \n146 100.0% \n \nTechnical assistance is provided at every QTAC. The most common type of technical assistance offered for provider level QTACs was group discussion with the provider and staff (61.9 percent) and the most common type offered at the individual level was individual discussion (59.0 percent) with staff. 
The dispostion of most of the QTACs was \"closed,\" indicating no additional follow-up was necessary. \n \n107 \n \n Quality Management Annual Report FY 2017 \n \nIntellectual and Developmental Disability Summary of Findings and Recommendations \nBeginning in FY 2016, the Collaborative implemented revised review tools to evaluate the quality of intellectual and developmental disability services and supports--the QEPR and PCR. Most of the tools are based upon the six FOAs identified throughout this report and indicators to address a wide range of requirements and best practices within each FOA. June 2017 marked the completion of the second year of the Collaborative contract. Quality assessors completed 100 QEPRs, 481 PCRs, and 208 QTACs. \n \nSystem Strengths \nIn general, findings from reviews were quite positive. Strengths in the system included providers and staff who are aware of individuals' unique safety needs and achievements. Individuals are aware of how to self-preserve, and they feel valued, in part because most direct service providers have a clear understanding of each individual's unique communication styles and skills. In many PCRs, staff promotes independence as well as the use of person centered values and practices. \n \nFindings on many of the tools for both the PCR and QEPR showed scores of over 90 percent (see Table 34). Similar to FY 2016, compliance surrounding documentation is the lowest scoring area, for both service providers and Support Coordinators. \n \nTable 34. 
FY 2017 Summary by Tool and Review Type \n \nTool \n \nPCR \n \nQEPR Crisis \n \n(N = 481) (N = 96) (N = 4) \n \nIOSA - Individual Interview \n \n91.9% 90.7% 86.0% \n \nIOSA - Observation \n \n96.8% 95.7% 92.5% \n \nIOSA - Staff Interview \n \n94.3% 95.2% 89.1% \n \nProvider Record Review \n \n70.2% 69.6% 84.1% \n \nSC Record Review \n \n73.7% \n \nNA \n \nNA \n \nSC Interview \n \n83.3% \n \nNA \n \nNA \n \nAdmin Q\u0026T \n \nNA \n \n82.7% 91.8% \n \nDDSS \n \n99.8% 99.8% 100% \n \n108 \n \n Quality Management Annual Report FY 2017 \nFOA scores from both review types suggest the service delivery system appears to do fairly well across the six critical areas, showing scores of at least 70 percent. Scores from both review types were over 90 percent for Safety and Rights. Providers seem to have effective systems and practices in place to help individuals be safe in their environments and exercise their rights, and individuals have indicated they do feel safe and their rights are being upheld. \nWhole Health scores for both individuals who participated in a PCR and providers reviewed this year were approximately 85 percent. Support Coordinators did very well on the health-related sections of the ISP in accurately completing all components of the rights, psychotropic medications and behavior supports section and the health and safety review section. \nOpportunities for Improvement \nDespite the overall positive findings for this fiscal year, scores from the PCR showed decreases across most review indicators when compared to FY 2016, including the average score on all the tools and all the FOAs. While overall FOA scores remained relatively high, drill down to the standards/indicators measuring each FOA provides insight into opportunities for developing quality improvement initiatives or training programs. \nWhole Health The overall PCR FOA score for Whole Health was 84.7 percent, down from 93.6 percent in FY 2016. 
Issues surrounding medication, particularly education, were the lowest scoring areas. Many individuals were not aware of the reasons they were prescribed medications or the potential side effects. Providers often did not demonstrate through documentation how they provide education on the risks and side effects of medications. Staff was often unaware of the side effects of medications prescribed to individuals they supported and was often unable to describe how individuals learned about their medications. In addition, providers were not always receiving training required on the specific medications individuals are prescribed and their side effects. Just over 31 percent of staff reviewed did not have this training in place for providers reviewed in FY 2017. In FY 2016, approximately 45 percent of \n109 \n \n Quality Management Annual Report FY 2017 \nstaff records indicated this training was missing. Individuals receiving Respite and Supported Employment were least likely to have Whole Health standards present. \nRecommendation IX: Quality Management recommends adding to the DBHDD provider policy a requirement that individuals and families be provided education on their medications, to promote independence and an ability to help self-direct health care. Techniques on how to help educate individuals/families on medications could be developed by the ASO, and these techniques can be offered to providers to assist in implementing the training programs. \nSafety Information from the individuals' interviews indicates people feel safe, are free from abuse, neglect, and exploitation, and most (75 percent or more) feel they receive education in these areas. It is critical for providers to help individuals identify, address, and seek prevention from abuse, neglect, and exploitation (ANE). While providers may be helping most individuals understand safety issues surrounding ANE, only 22.9 percent of providers have this documented. 
Fewer than half of staff records included how education is provided to help the individual with self-preservation. In addition, only 43.4 percent of staff records reviewed this year, and 32.6 percent reviewed in FY 2016, documented staff/provider training on the Georgia Crisis System. \nInformation from Support Coordinator interviews appears to point to a need for a better understanding of some issues surrounding behavioral health. Advocating for the health and safety of individuals is critical and requires an understanding of each individual's health and safety needs. However, 50 percent to 60 percent of SCs were unaware of needed crisis plans or behavior support plans, the individualized techniques needed to follow a plan for the individual, or specific triggers that may be the catalyst to behaviors. \nRecommendation X: Some providers have already implemented practices using various techniques to focus on a specific area for training each month, such as \"The Right of the Month.\" Training by Quality Management could be developed and implemented to support providers to initiate \n110 \n \n Quality Management Annual Report FY 2017 \ntechniques specific to training on abuse, neglect, and exploitation and education on selfpreservation. \nPerson Centered Practices A key area of focus for the Centers for Medicare and Medicaid Services (CMS) and DBHDD is to ensure services and supports are provided using person centered practices and planning. To the extent possible, the individual should be at the center of all decisions, plans, and goals. Providers need to understand who the individual is and what he or she may want, hope for, and dream. Unfortunately, many ISPs did not ensure all the individual's goals were person centered or their hopes and dreams were appropriately addressed throughout the plan, approximately 44 percent and 55 percent respectively. 
\nIn addition, 44.7 percent of providers have staff who had not received training on person centered values, principles, and approaches. Most individuals receiving services seem to understand their own talents, but fewer than 40 percent of providers (38.9 percent) and Support Coordinators (28.8 percent) have documentation that reflects the individual's talents. \nRecommendation XI: DBHDD will be providing training on the changes to the ISP being implemented in FY 2018. It is recommended this training include a section regarding writing person centered goals. \nCommunity Life According to CMS standards and DBHDD's vision, individuals with intellectual and developmental disabilities should be able to participate in their communities in the same manner as individuals who do not have a disability. Community Life was the lowest scoring area in the PCR data for both FY 2016 and FY 2017 and for providers reviewed both years through the QEPR. During FY 2017, information from interviews with the individual suggests about 30 percent of individuals receiving services had not been given the opportunity to develop new social roles in the community. Findings across all the interview data (individuals and staff/providers), as well as from documentation reviews, indicate individuals are often not participating in their communities as desired, seeking and \n111 \n \n Quality Management Annual Report FY 2017 \nfinding competitive employment, having new community experiences, or participating in community activities and employment. \nThe ISP should be written to help ensure the individual is able to integrate into the community in various settings, as desired. Approximately 17 percent of ISPs met this level. Individuals receiving Prevocational and Respite were least likely to have Community Life standards present. 
\nRecommendation XII: Develop training that includes practical, hands-on assignments to help staff and Support Coordinators understand how to connect an individual to the community, such as searching the web or calling organizations who would support individuals with intellectual and developmental disability. The practical experiences and actual application of the session's philosophy, and steps on how to connect people in the community, will better equip staff and Support Coordinators to actively apply lessons learned for people being supported. \nRecommendation XIII: Provide a training on person centered goals and approaches to assist Support Coordinators in developing ISPs consistent with a Community Life. \nRecommendation XIV: As part of the DBHDD training on the revised ISP, provide training that includes specific approaches to assist Support Coordinators in developing ISPs consistent with an integrated life in the community. \nChoice Informed choice is the cornerstone of helping individuals understand and achieve meaningful goals and direct their own supports and services. Results for Choice have decreased more since FY 2016 than for any other FOA. This is evident in the PCR data, and for providers reviewed in FY 2016 compared to providers reviewed in FY 2017. Information from documentation indicates records are often lacking evidence informed choice is provided to the individual for competitive or supported employment, living situations and environments, community participation or social interactions, educational opportunities, or the manner in which services are provided. \n112 \n \n Quality Management Annual Report FY 2017 \nRecommendation XV: Quality Management could develop a webinar to specifically address how direct support staff and Support Coordinators can document how individuals are offered and making informed choices in various areas of their lives. \nRights The proper use of restrictive interventions is strictly monitored and enforced by DBHDD. 
Individuals have not indicated any violations regarding the improper use of any type of restrictive intervention or any unauthorized restrictions in their home, community, work, or day program. However, records maintained by providers and support coordinators, as well as interviews with the Support Coordinators, may indicate some issues regarding rights restrictions. \nThere has been a decrease in the percent of Support Coordinators who address identified rights restrictions or who are even aware of a plan for the restrictive intervention. Individuals are not always informed about their rights and responsibilities on an annual basis, and consent for psychotropic medication is often not present and signed by the individual. \nRecommendation XVI: Ensure education about rights and rights restrictions is a focus of the Support Coordinator Training noted in Recommendation XIV. \nRecommendation XVII: Support Coordinators should help provide education to individuals and families on the importance of giving and receiving copies of signed consent forms for psychotropic medications. Through this, Support Coordinators could ensure consent forms are being completed and subsequently document this in their own support notes. \nProvider Documentation Throughout the findings in this report, documentation by providers and Support Coordinators has shown consistently lower scores when compared across review tools or within the FOAs. Support Coordinators seem to do well documenting areas of health and safety, but not as well regarding rights, the only area in which providers scored higher. Provider documentation for providers reviewed this year was lowest in the area of choice (51.3 percent). 
\n113 \n \n Quality Management Annual Report FY 2017 \nRecommendation XVIII: Quality Management should develop a webinar that specifically addresses how direct support staff and Support Coordinators can improve documentation specific to areas identified throughout the intellectual and developmental disability section of this report. \n114 \n \n Quality Management Annual Report FY 2017 \nSection 5: Behavioral Health and Intellectual Developmental Disabilities: Overall Results by Focused Outcome Area \n \nQuality Management analyzed the Focused Outcome Areas (FOA), as described throughout this report, in both the behavioral health and intellectual and developmental disability review processes. While the FOAs are the same, there are some differences in the data collection methods. Data for behavioral health FOAs are taken only from record reviews; while for intellectual and developmental disabilities, indicators from the interviews, observations and record reviews are grouped into the six FOAs. Therefore, for intellectual and developmental disabilities, we use data from only the QEPR Provider Record Reviews to show a comparison of FOA scores, i.e., QEPR PRR and BHQR record reviews. \nFigure 33. Focused Outcome Areas by Review Type \n \nBHQR \nFOA Overall: 89% \nWhole Health 74% Safety 83% \nPerson Centered 91% Community 93% Choice 96% Rights 93% \n \nQEPR \nFOA Overall: 69.6% \nWhole Health 56.5% Safety 74.8% \nPerson Centered 75.4% Community 63.9% Choice 51.3% Rights 82.3% \n \nResults (Figure 33) indicate behavioral health providers appear to perform better than providers of intellectual and developmental disability services in documenting how FOAs are addressed for individuals receiving services, with average scores of 89 percent and 70 percent, respectively. This \n115 \n \n Quality Management Annual Report FY 2017 \nis a continued trend from FY 2016 to FY 2017 with providers of intellectual and developmental disability services showing lower scores on each FOA. 
The greatest differences are in Choice and Person Centered Practices, on which providers of intellectual and developmental disability services documentation is 28 and 21 percentage points lower, respectively. Choice is the highest behavioral FOA in behavioral health (96 percent) but the lowest for intellectual and developmental disability (51.3 percent). Rights appears to be a higher scoring FOA for both. In review of specific indicators in both the behavioral health and intellectual and developmental disability reviews, direct comparisons could not be identified. Although both reviews measure the same areas, behavioral health and intellectual and developmental disability questions were not initially designed to be compared. Furthermore, behavioral health and intellectual and developmental disability providers are required to record information using different provider manuals, requirements by waiver program and national standards. Recommendation XIX: Quality Management should review the behavioral health and intellectual and developmental disability record review tools to determine if any modifications can be made in order to draw future comparisons. \n116 \n \n Section 6: Feedback Survey Results \n \nQuality Management Annual Report FY 2017 \n \nFollowing completion of on-site reviews for both behavioral health and intellectual and developmental disabilities, providers are offered the opportunity to complete a feedback survey. For intellectual and developmental disability services, individuals who participate in the interview are also offered the opportunity to provide feedback about the process. Surveys are optional and may be completed by the same agency more than once in a fiscal year. In addition, it is not known who completes the survey, as individuals and staff are not required to submit their name or provider agency. A five-point Likert scale is used: strongly agree, somewhat agree, neither agree nor disagree, somewhat disagree, or strongly disagree. 
\n \nThe following table (Table 35) provides information from providers for data entered into the system during the year, July 2016 through June 2017. Response rates were generally low, but findings were overwhelmingly positive across all the review types and from both providers and individuals. The percent is calculated as follows: (Strongly Agree + Somewhat Agree) / (Strongly Agree + Somewhat Agree + Disagree + Strongly Disagree). Additionally, two surveys were linked to the joint review process through the provider submission of name with the survey responses. The results of the two surveys, using the calculation mentioned previously, was 100 percent. \n \nTable 35. The Collaborative Provider Feedback Surveys \n \nPercent: Strongly Agree + Somewhat Agree/ Total Responses \n \nIDD \n \nSurvey Questions \n \n(N=74) \n \nOverall, you are satisfied with the review/consultation process. \n \n98.6% \n \nThe Collaborative staff interacted with you and your staff in a professional manner. \n \n100.0% \n \nThe Collaborative staff interacted with the individuals you support in a professional manner. \n \n100.0% \n \nThe Collaborative staff answered your questions and concerns clearly and \n \nconsistent with DBHDD manual. (If you disagree, please explain at the end of 98.6% \n \nthe survey.) \n \nThe Collaborative staff facilitated an environment which was collaborative and positive. \n \n98.6% \n \nBHQR (N=43) 88.4% 97.7% 97.7% \n90.7% \n90.7% \n \n117 \n \n Quality Management Annual Report FY 2017 \n \nTable 35. The Collaborative Provider Feedback Surveys \n \nPercent: Strongly Agree + Somewhat Agree/ Total Responses \n \nIDD \n \nSurvey Questions \n \n(N=74) \n \nYou would contact the Collaborative staff for technical assistance, training, and resource support, if needed. \n \n100.0% \n \nThe process provided constructive feedback. \n \n100.0% \n \nThe process helped identify the strengths of your supports and services. 
\n \n100.0% \n \nThe feedback you received will help provide supports and services that meet the desired outcomes of the individuals you support. \n \n98.6% \n \nThe recommendations generated from this process can be used to make a positive contribution to the individuals served. \n \n98.6% \n \nThe recommendations generated from this process can be used to make a positive contribution to your organization overall. \n \n98.6% \n \nThe feedback provided will assist your organization with making quality improvements to systems and practices. \n \n98.6% \n \nBHQR (N=43) 95.4% 97.7% 97.7% 100.0% \n95.4% \n100.0% \n100.0% \n \nIndividuals are provided a survey after they complete the interview during either the PCR or QEPR. Only 17 surveys were received during FY 2017. All responses (100 percent) to the following questions were either Strongly Agree or Agree: \n1. Overall, I am pleased with how the interview went. 2. The purpose of the interview was explained to me. 3. I was treated with respect. 4. The person who interviewed me seemed interested in what I said. 5. The person who interviewed me was pleasant. 6. My questions were answered. 7. The length of the interview was good. \n \nQuality Management will continue to regularly review its processes, including information obtained from these surveys, to ensure providers are equipped with the necessary tools and opportunities to \n118 \n \n Quality Management Annual Report FY 2017 \nbest serve individuals across the state. Quality Management shall also tailor its quality training both to its internal staff as well as for Georgia providers to promote a cooperative and constructive partnership in the quality of services provided in the state of Georgia specific to the individual's needs. \n119 \n \n Quality Management Annual Report FY 2017 \nSection 7: Conclusion \nFY 2017 marked the second complete year of the Collaborative Quality Management review process. 
The tools and standards used in the behavioral health quality reviews (BHQR) differ from the intellectual and developmental disability quality reviews, but Quality Management assessors have worked collaboratively to best serve the needs of all individuals in the services they receive. \nDuring FY 2017, 789 reviews were completed for both behavioral health and intellectual and developmental disability reviews. The overall score for BHQRs for FY 2017 was 84 percent, consistent with FY 2016. Billing validation increased from baseline year to 84 percent. Focused outcome areas also increased from FY 2016 to FY 2017. Assessment and treatment planning and compliance with service guidelines both minimally decreased by approximately two points. Each of the four categories are equally weighted and comprise 25 percent of the overall score, thus decrease in two categories counter the increase of the other two categories when comparing FY 2016 to FY 2017. While there is demonstrated success noted in the different BHQRs categories, there are opportunities for growth specifically related to addressing all assessed needs, co-occurring issues being assessed and addressed, as well as appropriate discharge planning criteria documentation. \nAdditionally, FY 2017 marked the first year reassessments were completed for providers who met the scoring criteria. Thirty-five providers were reviewed for a second time during the fiscal year with many having individual success and improvement in scores. In reviewing scores for these specific providers at the time of their first FY 2017 review compared to their second FY 2017 review, providers increased their scores in all areas. For example, overall scores went from 79 percent to 83 percent. Furthermore, while it is anticipated the reassessment of providers will lead to elevated results from year-to-year, declines in two categories and increases in the remaining two categories counter balance within the overall score. 
\nIntellectual and developmental disability assessors completed 100 QEPRs, 481 PCRs, and 208 QTACs. Findings on many of the tools for both the PCR and QEPR showed scores of over 90 \n120 \n \n Quality Management Annual Report FY 2017 \npercent. Providers appear to have effective systems and practices in place to support individuals to be safe in their environments and to exercise their rights, and at the same time individuals have indicated they do feel safe and their rights are being upheld. Similar to FY 2016, compliance surrounding documentation within the record review is the lowest scoring area, for both service providers and Support Coordinators. \nWhere possible, Quality Management conducted joint BHQR and QEPRs to reduce administrative burden to providers. Using such a method promotes not only additional education to the network but also amongst assessors in the field, further strengthening expertise and collaboration. Each joint review lends valuable information and feedback through joint discussions and exit conferences, for providers and assessors alike. Additionally, providers are supplied an additional opportunity to complete a formalized survey following the process. Of the two identified surveys obtained regarding the joint review processes, both received 100 percent in terms of positive responses related to collaborative staff, its policies, and processes related to the review. \nFeedback obtained from the satisfaction surveys provides a qualitative approach to individual and staff perceptions related to review processes and yielded positive results in FY 2017. Quality Management will continue to use feedback from the surveys to review processes and provide professional, effective, and constructive approaches to ensure providers are equipped with the necessary tools and opportunities they need to best support individuals. 
Additionally, as the collaboration and partnership continues, changes to measurement tools to further align indicators are both recommended and required for ongoing smooth review processes.
\nPerson Centered Review Provider Record Review Quality Enhancement Provider Review Qualifications and Training Quality Technical Assistance Consultation Registered Nurse Support Coordinator Support Coordinator Interview Support Coordinator Record Review Staff Interview Service Provider Substance Use Technical Assistance \n \nQuality Management Annual Report FY 2017 \n \n123 \n \n Appendix B: Score Distributions \n \nQuality Management Annual Report FY 2017 \n \nGraphical distributions for each category show the percent of providers who scored within the specified range of scores, as indicated on the horizontal/x-axis. With each distribution, the median, mean, and mode may be provided. The median represents the score that falls in the middle of distribution (50th percentile) and the mean represents the average score. Standard deviation refers to the amount of variation or dispersion there is in a distribution of scores, or how much scores tend to spread-out from the mean. Standard deviations are sometimes used to determine significant differences between scores within a distribution; however, this application requires a distribution to be normally distributed (similar to a bell curve). Most of the distributions presented in this report do not meet the requirements of a normal distribution, meaning they do not look like a normal bell curve; therefore, standard deviations have been excluded from the report at this time. 
\n \nTable of Contents (Links available by clicking on Figure Name) \nBehavioral Health Quality Review Distributions \nBHQR Overall Score Distribution BHQR Tier 1 Overall Score Distribution BHQR Tier 2 Overall Score Distribution BHQR Tier 2+ Overall Score Distribution BHQR Tier 3 Overall Score Distribution BHQR Billing Score Distribution BHQR Assessment \u0026 Treatment Planning Distribution BHQR Compliance with Service Guidelines Score Distribution BHQR Focused Outcome Areas Score Distribution \nAssertive Community Treatment (ACT) Quality Review Distributions \nACT Overall Score Distribution ACT Billing Score Distribution ACT Assessment \u0026 Treatment Planning Distribution ACT Compliance with Service Guidelines Score Distribution ACT Focused Outcome Areas Score Distribution \nCSU Quality Review Distributions \nCSU Overall Score Distribution \n124 \n \n Quality Management Annual Report FY 2017 \nCSU Compliance with Service Guidelines Score Distribution CSU Focused Outcome Areas Score Distribution CSU Individual Record Review Score Distribution \nReassessment BHQR Distributions \nBHQR Overall Score Distribution of Reassessed Providers BHQR Billing Score Distribution of Reassessed Providers BHQR Assessment \u0026 Treatment Planning Distribution of Reassessed Providers BHQR Compliance with Service Guidelines Score Distribution of Reassessed Providers BHQR Focused Outcome Areas Score Distribution of Reassessed Providers \nPCR Distributions: Scores by Tool and Year \nPCR Individual Interview Scores by Year PCR Staff Interview Scores by Year PCR Support Coordinator Interview Scores by Year PCR Observation Scores by Year PCR Provider Record Review Scores by Year PCR Support Coordinator Review Scores by Year \nQEPR Distributions: Scores by Tool and Year \nFY 2017 Overall QEPR Score FY 2017 QEPR Individual Interview Scores FY 2017 QEPR Staff Interview Scores FY 2017 QEPR Observation Scores FY 2017 QEPR Provider Record Review Scores FY 2017 QEPR Qualifications and Training 
Scores FY 2017 QEPR Service Discrepancy Scores FY 2017 QEPR Overall Scores by Provider Size FY 2017 QEPR Qualifications and Training Scores by Provider Size \n125 \n \n Quality Management Annual Report FY 2017 \nBHQR Overall Score Distribution by Year \n \n60% \n \n51%49% \n \n50% \n \n40% \n \n30% \n \n26%25% \n \n20% \n \n16%19% \n \n10% \n \n1% 0% 1% 1% 2% 2% 3% 5% \n \n0% \n \nFY 2016 (N = 141; Mean = 84%; Median = 87%) FY 2017 (N = 167; Mean = 84%; Median = 86%) \n \nBHQR Tier 1 Overall Score Distribution by Year \n \n80% \n \n70% \n \n68% \n \n60% \n \n58% \n \n50% \n \n40% \n \n30% 20% \n \n17%21% \n \n25% 11% \n \n10% \n \n0% \n \nFY 2016 (N = 24; Mean = 86%; Median = 88% ) FY 2017 (N = 28; Mean = 85%; Median = 85%) \n \n126 \n \n Quality Management Annual Report FY 2017 \nBHQR Tier 2 Overall Score Distribution by Year \n \n60% \n \n51%50% \n \n50% \n \n40% \n \n30% \n \n24%24% \n \n19% 19% 20% \n \n10% \n \n1% \n \n2% 4% 5% \n \n0% \n \nFY 2016 (N = 90; Mean = 85%; Median = 87%) FY 2017 (N = 115; Mean = 85%; Median = 86%) \n \nBHQR Tier 2+ Overall Score Distribution by Year \n \n90% \n \n83% \n \n80% \n \n80% \n \n70% \n \n60% \n \n50% \n \n40% \n \n30% \n \n20% \n \n20% \n \n17% \n \n10% \n \n0% \n \nFY 2016 (N = 5; Mean = 88%; Median = 87% ) FY 2017 (N = 6; Mean = 86%; Median = 85%) \n \n127 \n \n Quality Management Annual Report FY 2017 \nBHQR Tier 3 Overall Score Distribution by Year \n \n60% \n \n56% \n \n50% \n \n41% \n \n40% \n \n30% \n \n27% \n \n20% 10% \n \n5% \n \n14% 5% 6% 6% \n \n11% 9% 11% \n \n11% \n \n0% \n \nFY 2016 (N = 22; Mean = 78%; Median = 83%) FY 2017 (N = 18; Mean = 83%; Median = 91%) \n \nBHQR Billing Score Distribution by Year \n \n60% \n \n50% \n \n48% 42% \n \n40% \n \n33% \n \n30% \n \n28% \n \n20% 10% 4% 1% 1% 2% 1% 1% 1% 1% 2% 2% 1% 4% 5% 5% 10%8% \n0% \n \nFY 2016 (N = 141; Mean = 81%; Median = 88%) FY 2017 (N = 167; Mean = 84%; Median = 90%) \n \n128 \n \n Quality Management Annual Report FY 2017 \nBHQR Assessment \u0026 
Treatment Planning Distribution by Year \n \n35% \n \n31% 31%31% \n \n30% \n \n30% \n \n25% \n \n23% \n \n20% \n \n16% \n \n17% \n \n15% 10% \n \n9% 4% 5% \n \n5% \n \n1%1% 1%1% \n \n0% \n \nFY 2016 (N = 141; Mean = 79%; Median = 79%) FY 2017 (N = 167; Mean = 77%; Median = 78%) \n \nBHQR Compliance with Service Guidelines Score Distribution by Year \n \n70% \n \n60% \n \n58% 53% \n \n50% \n \n40% \n \n34%31% \n \n30% \n \n20% \n \n10% \n \n7% 6% 7% \n \n1% \n \n1% 1% 1% 1% \n \n0% \n \nFY 2016 (N = 140; Mean = 90%; Median = 92%) FY 2017 (N = 167; Mean = 88%; Median = 91%) \n \n129 \n \n Quality Management Annual Report FY 2017 \n \nBHQR Focused Outcome Areas Score Distribution by Year \n \n60% 50% \n \n47% \n \n50% \n \n41% \n \n40% \n \n30% \n \n30% \n \n20% \n \n17% \n \n10% \n \n2% \n \n4% 2% \n \n8% \n \n0% \n \nFY 2016 (N = 141; Mean = 85%; Median = 87%) FY 2017 (N = 167; Mean = 89%; Median = 90%) \n \nACT Overall Score Distribution by Year \n \n60% \n \n50% \n \n47% \n \n50% \n \n42% \n \n40% \n \n30% \n \n26% 25% \n \n20% 11% \n10% \n \n0% \n \nFY 2016 (N = 19; Mean = 88%; Median = 91%) FY 2017 (N = 20; Mean = 87%; Median = 91%) \n \n130 \n \n Quality Management Annual Report FY 2017 \n \nACT Billing Score Distribution by Year \n \n70% 60% \n \n58% 60% \n \n50% \n \n40% \n \n37% \n \n30% 20% \n \n15% 20% \n \n10% \n \n5% 5% \n \n0% \n \nFY 2016 (N = 19; Mean = 92%; Median = 96%) FY 2017 (N = 20; Mean = 90%; Median = 96%) \n \nACT Assessment \u0026 Treatment Planning Score Distribution by Year \n \n70% 60% \n \n58% \n \n50% 40% \n \n35% 40% \n \n30% 20% \n \n26% \n \n20% \n \n16% \n \n10% \n \n5% \n \n0% \n \nFY 2016 (N = 19; Mean = 85%; Median = 80%) FY 2017 (N = 20; Mean = 80%; Median = 80%) \n \n131 \n \n 60% 50% 40% 30% 20% 10% \n0% \n60% 50% 40% 30% 20% 10% \n0% \n \nQuality Management Annual Report FY 2017 \nACT Compliance with Service Guidelines Score Distribution by Year \n \n50% \n \n32% \n \n21% \n \n11% 5% \n \n10% \n \n37% 35% \n \nFY 2016 (N = 
19; Mean = 85%; Median = 89%) FY 2017 (N = 20; Mean = 88%; Median = 89%) \n \nACT FOAs Score Distribution by Year \n \n55% 47% 37% 35% \n \n16% 10% \n \nFY 2016 (N = 19; Mean = 88%; Median = 91%) FY 2017 (N = 20; Mean = 90%; Median = 91%) \n132 \n \n Quality Management Annual Report FY 2017 \n \nCSU Overall Score Distribution by Year \n \n50% \n \n47% \n \n45% \n \n43% \n \n40% \n \n35% \n \n35% \n \n30% \n \n26% \n \n25% \n \n21% \n \n20% \n \n17% \n \n15% \n \n10% 5% \n \n5% 4% \n \n0% \n \nFY 2016 (N = 19; Mean = 83%; Median = 83%) FY 2017 (N = 23; Mean = 86%; Median = 88%) \n \nCSU Compliance with Service Guidelines Score Distribution by Year \n \n70% \n \n61% \n \n60% \n \n50% \n \n40% \n \n32% 32% \n \n30% \n \n20% 10% \n \n4% \n \n11%9% \n \n11% 4% \n \n17%16% 4% \n \n0% \n \nFY 2016 (N = 19; Mean = 82%; Median = 80%) FY 2017 (N = 23; Mean = 87%; Median = 91%) \n133 \n \n Quality Management Annual Report FY 2017 \n \nCSU Focused Outcome Areas Overall Distribution by Year \n \n60% \n \n57% \n \n50% \n \n47% \n \n40% \n \n37% 35% \n \n30% \n \n20% \n \n16% \n \n10% \n \n9% \n \n0% \n \nFY 2016 (N = 19; Mean = 88%; Median = 88%) FY 2017 (N = 23; Mean = 91%; Median = 92%) \n \nCSU Individual Record Review Score Distribution by Year \n \n70% \n \n65% \n \n60% \n \n50% \n \n42% \n \n40% \n \n37% \n \n30% \n \n26% \n \n20% \n \n11% \n \n10% \n \n5% 4% 5% 4% \n \n0% \n \nFY 2016 (N = 19; Mean = 79%; Median = 77%) FY 2017 (N = 23; Mean = 82%; Median = 82%) \n \n134 \n \n Quality Management Annual Report FY 2017 \n \nBHQR Overall Score Distribution \n \nof Reassessed Providers \n \n50% \n \n43% \n \n46% \n \n40% \n \n40% \n \n30% \n \n23% \n \n23% \n \n20% \n \n11% \n \n10% \n \n3% 3% \n \n6% \n \n3% \n \n0% \n \nFirst Review (N = 35; Mean = 79%; Median = 80%) Second Review (N = 35, Mean = 83%, Median = 86%) \n \nBHQR Billing Score Distribution \n \nof Reassessed Providers \n \n45% \n \n40% \n \n43% \n \n40% \n \n35% \n \n30% \n \n26% 26% \n \n25% \n \n20% \n \n15% \n 
PCR Support Coordinator Interview Scores by Year
\n40% \n \n33.1% \n \n30% \n \n20% \n \n10.6% \n \n10% 5.0% 0% \n \n0.4%0.0% \n \n0.4% 0.0% \n \n7.3% 7.1% \n0.2%0.2%0.2%0.0%0.2%1.0%1.5%2.5% \n \nFY 2016 (N = 477; Mean = 90.8%; Median = 98.4%) FY 2017 (N = 479; Mean = 83.3%; Median = 96.7%) \n \n138 \n \n Quality Management Annual Report FY 2017 \nPCR Observation Scores by Year \n \n60% \n \n51.9% \n \n50% \n \n48.5% 48.8% \n \n40% \n \n38.1% \n \n30% \n \n20% \n \n10% \n \n3.9%2.7%6.1% \n \n0% \n \n0.0% \n \nFY 2016 (N = 412; Mean = 98.4%; Median = 99.5%) FY 2017 (N = 181; Mean = 96.8%; Median = 98.7%) \n \nPCR Provider Record Review Scores by Year \n \n35% \n \n30% \n \n29.3% 26.6% \n \n25% \n \n23.9% 22.5% \n \n20% \n \n18.3% \n \n17.7%18.3% \n \n15% \n \n14.1% \n \n10% 5% 0% \n \n0.0%0.2% \n \n3.4%5.0% 6.2% 0.3%1.2% \n \n5.6%7.3% 0.2% \n \nFY 2016 (N = 356; Mean = 79.0%; Median = 79.1%) FY 2017 (N = 478; Mean = 70.2%; Median = 70.5%) \n \n139 \n \n Quality Management Annual Report FY 2017 \n \nPCR Support Coordinator Review Scores by Year \n \n30% \n \n25% \n \n24.3% 22.9% \n \n20.2%21.1% \n \n20% \n \n17.0% \n \n15% \n \n15.8%15.3% \n \n14.5% \n \n11.4% \n \n10% \n \n9.6% \n \n6.8% \n \n5.0%5.4% \n \n5% 0.8%0.2%0.4%0.8%1.7%1.5%0.2%2.7% \n \n2.5% \n \n0% \n \nFY 2016 (N = 484; Mean = 79.9%; Median = 84.1%) FY 2017 (N = 481; Mean = 73.7%; Median = 76.4%) \n \nFY 2017 Overall QEPR Score \n \n60% \n \n52.6% \n \n50% \n \n40% \n \n30% \n \n20.6% \n \n23.7% \n \n20% \n \n10% \n \n1.0% 2.1% \n \n0% \n \n(N = 96; Mean = 83.7%; Median = 85.7%) \n \n140 \n \n 70% 60% 50% 40% 30% 20% 10% \n0% \n100% 80% 60% 40% 20% 0% \n \nQuality Management Annual Report FY 2017 \nFY 2017 QEPR Individual Interview Scores \n57.7% 35.1% \n \n1.0% \n \n1.0% 4.1% \n \n1.0% \n \n(N = 96; Mean = 90.7%; Median = 85.8%) \nFY 2017 QEPR Staff Interview Scores \n \n83.3% \n \n2.1% 8.3% \n \n6.3% \n \n(N = 95; Mean = 95.2%; Median = 96.0%) \n \n141 \n \n Quality Management Annual Report FY 2017 \n \nFY 2017 QEPR Observation Scores \n \n80% \n 
FY 2017 QEPR Provider Record Review Scores
Department of Behavioral Health and Developmental Disabilities, issuing body."],"dcterms_spatial":["United States, Georgia, 32.75042, -83.50018"],"dcterms_creator":["Georgia. Department of Behavioral Health and Developmental Disabilities. Office of Quality Management"],"dc_date":["2016"],"dcterms_description":["Quality management annual report"],"dc_format":["application/pdf"],"dcterms_identifier":null,"dcterms_language":["eng"],"dcterms_publisher":["Atlanta, Ga. : Georgia. Department of Behavioral Health and Developmental Disabilities. Office of Quality Management"],"dc_relation":null,"dc_right":["http://rightsstatements.org/vocab/InC/1.0/"],"dcterms_is_part_of":null,"dcterms_subject":["Mental health services--United States--Georgia","Developmental disabilities--United States--Georgia"],"dcterms_title":["Annual quality management report 2016"],"dcterms_type":["Text"],"dcterms_provenance":["University of Georgia. Map and Government Information Library"],"edm_is_shown_by":["https://dlg.galileo.usg.edu/do:dlg_ggpd_y-ga-bb400-b-pa15-b2016-belec-p-btext"],"edm_is_shown_at":["https://dlg.galileo.usg.edu/id:dlg_ggpd_y-ga-bb400-b-pa15-b2016-belec-p-btext"],"dcterms_temporal":null,"dcterms_rights_holder":null,"dcterms_bibliographic_citation":null,"dlg_local_right":null,"dcterms_medium":["annual reports"],"dcterms_extent":null,"dlg_subject_personal":null,"iiif_manifest_url_ss":null,"dcterms_subject_fast":null,"fulltext":"FY 2016 QUALITY MANAGEMENT \nANNUAL REPORT \n \n TABLE OF CONTENTS \nSection 1: Executive Summary_________________________________________________________4 Section 2: Introduction _____________________________________________________________10 Section 3: Behavioral Health Quality Reviews ____________________________________________14 \nSection 3a: Sample selection ________________________________________________________________ 15 Individual Records and Billing Review _______________________________________________________ 15 Individual and Staff Interviews 
_____________________________________________________________ 17 \nSection 3b: BHQR Overall Review Scores _______________________________________________________ 17 BHQR Overall Scores _____________________________________________________________________ 19 BHQR Overall Scores by Tier _______________________________________________________________ 21 BHQR Billing Validation Scores _____________________________________________________________ 25 BHQR Assessment and Treatment Planning Scores _____________________________________________ 28 BHQR Compliance with Service Guidelines Scores _____________________________________________ 30 BHQR Focused Outcome Areas (FOA) Scores__________________________________________________ 33 BHQR Staff and Individual Interview Scores __________________________________________________ 36 \nSection 3c: Assertive Community Treatment ___________________________________________________ 38 ACT BHQR Billing Validation Scores _________________________________________________________ 41 ACT BHQR Assessment and Treatment Planning Scores _________________________________________ 43 ACT BHQR Compliance with Service Guideline Scores __________________________________________ 45 ACT BHQR Focused Outcome Areas Score ____________________________________________________ 48 \nSection 3d: Crisis Stabilization Unit Quality Reviews _____________________________________________ 50 Sample Selection________________________________________________________________________ 51 CSU Quality Review Overall Scores__________________________________________________________ 51 CSU Individual Record Review Scores _______________________________________________________ 53 CSU Compliance with Service Guidelines Scores _______________________________________________ 56 \n \nPage 1 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n CSU Focused Outcome Areas Scores ________________________________________________________ 58 CSU Staff and 
Individual Interview Scores ____________________________________________________ 60 Section 3e: Behavioral Health Summary of Findings and Recommendations __________________________ 62 Summary of Findings ____________________________________________________________________ 62 Recommendations ______________________________________________________________________ 64 \nSection 4: Intellectual and Developmental Disabilities ___________________________________ 66 \nSection 4a. Background (IDD) ________________________________________________________________ 66 Section 4b: Sample Selection ________________________________________________________________ 68 \nPerson Centered Review (PCR) _____________________________________________________________ 68 Quality Enhancement Provider Review (QEPR) ________________________________________________ 68 Section 4c: Review Processes ________________________________________________________________ 69 Section 4d. Overall Review Scores ____________________________________________________________ 71 Person Centered Review (PCR) _____________________________________________________________ 73 Quality Enhancement Provider Review (QEPR) ________________________________________________ 75 Section 4e. 
PCR Scores by Focused Outcome Area (FOA)
\nOverall Results by Focused Outcome Area--Behavioral Health and IDD _____________________________ 109 Record Review Results by Indicator/Question and FOA __________________________________________ 111 Individual and Staff Interviews  IDD and Behavioral Health Comparison ____________________________ 112 \nSection 6: Conclusion______________________________________________________________115 Appendix A: Abbreviations and Acronyms _____________________________________________116 \n \nPage 3 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n SECTION 1: EXECUTIVE SUMMARY \nOn June 30, 2016, The Georgia Collaborative Administrative Services Organization (ASO) completed the first year of the new and innovative quality management contract with the Georgia Department of Behavioral Health and Developmental Disabilities (DBHDD). The ASO was tasked with incorporating disparate service delivery systems into one collaborative and integrated quality program: behavioral health, crisis stabilization, and intellectual and developmental disability services. Throughout the implementation period (September 2014 to June 2015), ASO staff worked collaboratively with DBHDD to create an integrated quality management approach, sharing ideas across programs to develop and coordinate tools and processes used to evaluate provider systems and assess the quality of services for individuals receiving behavioral health and intellectual and developmental disability services. \n \nThere are many similarities between the behavioral health and intellectual and developmental disability program. Therefore, similar tools and processes are used to monitor quality and compliance within each area including interviews with individuals and providers/staff and record reviews maintained by providers for individuals receiving services. 
Similarities include, for example, review of the individual support plan (ISP) for intellectual and developmental disabilities and the individualized recovery plan (IRP) for behavioral health. In both systems, focused outcome areas (FOA) critical to a person's quality of life are assessed: person centered practices, choice, community, rights, whole health, and safety. However, there are some distinct differences as well. Intellectual and Developmental Disability (IDD) reviews include onsite observations of licensed residential and day programs, as well as administrative review of staff qualifications and training requirements, while Behavioral Health (BH) reviews include extensive claims review to identify and report billing validation and discrepancies. \n \nPage 4 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n During the first year of the contract, the ASO completed behavioral health quality reviews (BHQR) for 141 providers. At the request of DBHDD, Assertive Community Treatment (ACT) was analyzed separately from other behavioral health services. BHQRs are used to measure provider performance through four different components or categories of the review process. Findings are shown in the following table for BH and ACT reviews and indicate that ACT results were generally higher. BH showed lower compliance with assessment and planning standards than in other categories. \n \nBHQR and ACT Results by Category \n \nBilling \n \nAssessment \n \nService \n \nValidation FOA and Planning Guidelines \n \nBH \n \n81% \n \n85% \n \n79% \n \n90% \n \nACT \n \n92% \n \n88% \n \n85% \n \n85% \n \nOverall 84% 88% \n \nDetailed findings of the lowest-scoring areas within the broader categories may direct quality improvement initiatives for the ASO. 
These include: \n Whole Health was the lowest-scoring FOA for both BH and ACT reviews with some specific indicators that included: o Ongoing assessment to determine the need for external referrals for health services, supports, and treatment o Communication with external referral sources to determine results of testing and treatment o Medical conditions assessed, monitored, and recorded o Documented safeguards utilized for medications \n \nPage 5 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n  Co-occurring health conditions were only included in 36 percent of the IRPs reviewed and generally without explanation as to why \n Specific billing issues most often cited included: o Missing/incomplete service orders o Individuals not meeting admission criteria o Progress notes missing \nIn January 2016, the ASO began review of Georgia's 19 crisis stabilization units (CSU), reviewing 285 records. The focus was primarily to assess compliance with DBHDD and Medicaid requirements. CSUs showed an overall score of 83 percent, similar to BH reviews, with the FOAs showing the highest scoring area (88%). Findings suggest relatively high compliance with documentation of admission criteria, timely assessments, daily status updates, and required staffing and physician access. However, documentation lacked evidence that individuals routinely participate in the treatment planning process throughout their course of stay and treatment plans are not always tailored to the individual's presenting needs. \nDuring the year, the ASO completed 484 Person Centered Reviews (PCR) and 100 Quality Enhancement Provider Reviews (QEPR) as part of the IDD quality assurance reviews for intellectual and developmental disabilities. The focus of the QEPR is to monitor providers to ensure they meet requirements set forth by the Medicaid waiver and DBHDD and to evaluate the effectiveness of their service delivery system. 
The focus of the PCR is to assess the individual's quality of life, as well as the effectiveness of and satisfaction with the service \n \nPage 6 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n delivery system. Data indicate relatively high scores on most aspects of both processes. The following table lists results by tool and shows: \n Relatively high compliance rates across all tools  Documentation results (Provider Record Review [PRR] and Support Coordinator \nRecord Review [SCRR]) are lower than findings from observations and interviews  Qualifications and Training requirements is the second lowest scoring tool which may \nsuggest the need for additional training for providers in this area \n \nIDD PCR and QEPR Results by Tool \n \nTool \n \nQEPR PCR \n \nIndividual Interview \n \n95.1% 95.1% \n \nStaff Interview \n \n96.9% 96.4% \n \nProvider Record Review \n \n73.6% 79.0% \n \nObservation \n \n98.2% 98.4% \n \nSupport Coordinator RR \n \nN/A 79.9% \n \nSupport Coordinator Interview \n \nN/A 90.8% \n \nQualifications \u0026 Training \n \n82.9% N/A \n \nOverall \n \n88.4% 93.8% \n \nMore detailed findings indicate lower scoring areas in community, choice and person centered practices. 
 Adding the Quality Technical Assistance Consultation to the BH reviews as needed 
If someone told me life could be this way  I would not believe it.\" \n \nPage 8 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n The observations, findings, and recommendations in this report will be presented to leadership of DBHDD for consideration in identifying issues that need additional analysis, investigation, and interpretation to inform quality improvement initiatives. The responsibility for the use of the information within this report is that of the directors of Divisions of Performance Management and Quality Improvement, Behavioral Health, Intellectual and Developmental Disabilities, and Accountability and Compliance. The division directors will consider these and other quality data and recommendations collected by the department to improve the quality of services. DBHDD's reorganization provides a platform for clarified roles and responsibilities in addressing quality of services, including further analysis, implementation of targeted action steps, and determination of the impact of selected activities. The programmatic divisions of Behavioral Health and Intellectual and Developmental Disabilities will drive improvement strategies and be supported by the Divisions of Accountability and Compliance, and Performance Management and Quality Improvement, along with the resources and support of the Georgia Collaborative. \n-DBHDD, Division of Performance Management and Quality Improvement \n \nPage 9 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n SECTION 2: INTRODUCTION \nThe Georgia Department of Behavioral Health and Developmental Disabilities (DBHDD) created the Georgia Collaborative Administrative Services Organization (ASO) to integrate disparate systems for data, service delivery, finance, and quality oversight into one, cohesive, unified organization. 
Historically, quality reviews for both behavioral health providers and providers offering services for individuals with intellectual and developmental disabilities had been completed by different contracted organizations. The ASO's Quality Department was tasked with incorporating these two distinct systems (IDD and BH) into one collaborative and integrated program. \nJuly 2015 marked the implementation of the new quality reviews developed by the ASO, in collaboration with and approved by DBHDD. In the first year, several goals were accomplished, including: \n Coordinating and integrating, where applicable, BH and IDD review tools and processes  Incorporating feedback from providers, individuals, families, advocates and \nstakeholders into the review process  Minimizing and alleviating some provider administrative burden by: \no Conducting joint BH and IDD reviews simultaneously when possible and appropriate \no Conducting person-centered reviews (PCR) during the quality enhancement provider reviews (QEPR) for providers rendering services for the intellectual and developmental disabilities population \n \nPage 10 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n o Including a quality review of crisis stabilization unit (CSU) services during the behavioral health quality reviews (BHQR), if offered by the provider \n Incorporating the Substance Abuse Mental Health Services Administration's (SAMHSA) National Behavioral Health Quality Framework recommendations into behavioral health review tools; and most importantly \n Implementing a stronger focus on quality so that individuals receiving services from the provider network have easy access to high-quality care that leads to a life of recovery and independence \n \nBy aligning IDD and BH review tools and processes, the ASO's Quality Department created a more seamless and uniform system to assist DBHDD with oversight of their programs. 
Through the joint BH/IDD review process, the ability to share best practices across the historically disparate service systems was enhanced. Other important enhancements made under the new Georgia Collaborative ASO system include the following: \n The Division of Developmental Disabilities adopted a more aggressive review schedule, similar to the BH reviews, to review more providers each year \n For the intellectual and developmental disabilities reviews, an overall score for the provider is calculated, similar to the behavioral health reviews, to provide a quantitative way to measure overall provider performance \n Behavioral Health adopted individual and staff interviews, similar to the IDD reviews, to hear directly from individuals receiving services as well as providers offering services \n Behavioral Health incorporated focused outcome areas (FOAs) into the review process, similar to the IDD reviews, to provide the ability to measure congruent outcomes across populations. \n \nPage 11 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n This report is the first annual quality performance report completed for the new and innovative ASO Quality Management System. The report includes BH, IDD and CSU findings as well as a detailed explanation of the review process, analysis of the network performance across providers, and information from individuals about the services received. While there are similarities in the review processes, there are also distinct differences. These differences primarily relate to the needs of the IDD and BH communities and the demands of the service delivery system (e.g., service array, diagnoses, types and levels of care, national reporting standards, and accounting for and reporting of billing and claims); therefore, BH, IDD, and CSU results are reported separately. 
\nThe body of the report is divided into sections:  Behavioral Health Quality Reviews, including o Assertive Community Treatment  Crisis Stabilization Unit Quality Reviews  Intellectual and Developmental Disabilities o Person Centered Reviews o Quality Enhancement Provider Reviews o Quality Technical Assistance Consultations  Behavioral Health and Developmental Disabilities Comparisons \n \nPage 12 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Table 1 depicts the overall number of Quality Reviews, records reviewed and interviews completed. The Quality Department conducted more than 700 reviews during the first year that included review of almost 6,000 individual records. The voices of more than 3,600 individuals and staff were heard and for the first time, including almost 1,500 individuals and staff from the behavioral health reviews. \n \nReview \nType \nBHQR CSU PCR QEPR TOTAL \n \nTable 1. Overview of Review Data \n \nNumber of \n \nRecords \n \nIndividuals \n \nReviews \n \nReviewed \n \nInterviewed \n \n141 \n \n3,687 \n \n19 \n \n285 \n \n484 \n \n558 \n \n100 \n \n1,337 \n \n744 \n \n5,867 \n \n737 94 483 667 1,981 \n \nStaff \nInterviewed \n753 96 447 353 1,649 \n \nAt the conclusion of each section, a summary of findings and evidence-based recommendations are offered to address areas that may need improvement or training/education programs to help improve the quality of services provided to Georgians. \n \n\"I got in the program. 
It was one of the best decisions I ever made - a life changing experience.\" \n \nPage 13 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n SECTION 3: BEHAVIORAL HEALTH QUALITY REVIEWS \nThe purpose of the BHQR is to determine adherence to DBHDD standards and to assess the quality of the service delivery system through individual record and claims reviews.1 Review questions are based on DBHDD and Medicaid requirements, and are organized into four categories or review components: billing validation, assessment and treatment planning, compliance with service guidelines, and focused outcome areas (FOAs). The score for each category represents the percent of applicable questions that were met or present. The BHQR overall score is calculated by averaging the four categories. Each category accounts for 25 percent of the overall score. \nWhen a BHQR is completed, the lead assessor convenes the team to analyze data gathered and plan the exit conference. The exit conference is designed to give immediate, preliminary feedback of the BHQR findings to the provider. A report of these preliminary findings is left with the provider in the form of an exit conference report which outlines the provider's identified strengths and any areas of concern from the four primary categories of the review. Assessors also include any items of concern that fall outside the parameters of the review yet are determined to be an area of concern or risk. \nWithin 30 days of completion of a BHQR, a final assessment report is posted to the Georgia Collaborative website, and the provider is notified via electronic mail of the posting along with their final scores. Like the exit conference report, the final assessment identifies strengths and \n \n1 Please refer to the following link to access a full description of the review process and review tools. 
http://georgiacollaborative.com/providers/prv-BH.html \n \nPage 14 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n opportunities for growth in the four categories. Additionally, the final assessment includes recommendations for improvement. Much like the exit conference report, the final assessment highlights any areas of concern or risk that fall outside the scope or scoring of the BHQR in the form of additional comments on practices. \nSection 3a: Sample Selection \nIndividual Records and Billing Review \nDuring the 2016 fiscal year, all 141 providers eligible for review, as determined by DBHDD, received a behavioral health quality review (BHQR). A sample of individuals was selected for the record reviews and a sample of those individuals' claims was used for the Billing Review. To be eligible for the sample, each individual must have had at least three claims in the three months (or six months, if necessary) preceding the BHQR. \nTo select the individuals as part of the record review, an unduplicated list of individuals receiving services from the provider was stratified by payer source: Medicaid, state contracted, and fee-for-service claims submissions. From this list, a stratified random sample of individuals receiving services from the provider was selected, proportionate to the provider's ratio of individuals served by payer source. The sample for each provider consisted of up to 30 individuals for non-intensive and specialty service providers, with an additional 15 individuals for providers who also offered assertive community treatment. The number of individuals selected per provider was based on the number of individuals served. For FY 2016, a total of 3,687 individuals were sampled for record reviews, an average of 26 per provider. \n \nPage 15 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Table 2. 
Records Reviewed per Provider \n \nMedian \n \nMean \n \nStandard Deviation \n \n30 \n \n26 \n \n10.1 \n \nFor each individual chosen for record review, a random sample of up to 10 paid claims was selected for a billing review. The number of claims reviewed per person was based on services provided and claims submissions. The total number of claims reviewed for FY 2016 was 31,213, an average of 221 claims per provider. When providers did not have adequate claims submissions in the three months preceding their review, individuals and claims samples were pulled from claims submissions up to six months preceding the review. \n \nTable 3. Claims Reviewed per Provider \n \nMedian \n \nMean \n \nStandard Deviation \n \n240 \n \n221 \n \n100.6 \n \nAt the direction of DBHDD, the BHQRs focused on specific services. The services reviewed are listed in Figure 10 on page 29 of this report. All providers were reviewed once during the fiscal year with at least one occurrence per billed service included in the claims review, ensuring the complete array of services provided and billed were included in the BHQR. Per DBHDD request, Assertive Community Treatment (ACT) was included in the approved services and is highlighted separately in the report. \n \nPage 16 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Individual and Staff Interviews \nSamples used for the Individual and Staff Interviews were selected by the provider and quality assessors conducting the BHQR; services received or provided were not considered in the selection of interviewees. Quality assessors attempted to complete a minimum of five individual and five staff interviews per BHQR; however, the actual number interviewed fluctuated based on individual and staff availability, their agreement to participate in the interview process, the number of staff, and the number of individuals the provider served at the time of the review. 
If an individual or staff declined to be interviewed, assessors selected an additional individual or staff to be interviewed. \nIn FY 2016, the Quality Department completed 737 individual interviews and 753 staff interviews during the course of the BHQRs; this is not inclusive of the interviews conducted during the Crisis Stabilization Unit (CSU) reviews which are discussed later in the report. Results from the interviews conducted as part of the BHQR are not included in the BHQR overall scores. \nSection 3b: BHQR Overall Review Scores \nIn this report, data are aggregated and presented by overall provider scores, by category (billing validation, assessment and treatment planning, compliance with service guidelines, focused outcome areas [FOAs], and individual and staff interview). Each focused outcome \n \nPage 17 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n area subcategory (choice, person centered practices, whole health, safety, rights, and community life) also has scores displayed and discussed.2 \nGraphical distributions for each category show the number of providers who scored within the specified range of scores, as shown on the horizontal/x-axis. With each distribution, the median, mean, and standard deviation (SD) are provided. The median represents the score that falls at the middle of a distribution (50th percentile). The mean represents the average score. The standard deviation refers to the amount of variation or dispersion there is in a distribution of scores, or how much scores tend to spread-out from the mean. A smaller standard deviation (relative to the mean score) indicates the majority of scores tended to be very close to the mean. In this case, scores may cluster around the mean (or average) score, with only a few scores farther away from the mean (outliers). 
By contrast, a larger standard deviation (relative to the mean score) indicates that scores were more widely spread-out from the average score (mean). 3 \n \n2 Unless otherwise stated, category scores were determined by dividing all the \"Yes\" answers by the sum of the \"Yes\" and \"No\" answers in the category. Questions scored as \"Not Applicable\" (N/A) were not factored into the overall category score. Each subcategory's score was determined similarly based on the questions contained within each subcategory. \n3 Standard deviations are sometimes used to determine significant differences between scores within a distribution; however, this application requires a distribution to be normally distributed (similar to a bell curve). Most of the distributions presented in this report do not meet the requirements of a normal distribution, meaning they do not look like a normal bell curve; therefore, standard deviations may be used to determine the variation of scores around a mean but not to determine significant differences between scores, or between the score and the mean. \n \nPage 18 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n BHQR Overall Scores \nFigure 1 shows the average overall score for the 141 providers who received a BHQR and the scores for each category. Table 4 shows the mean, median and standard deviation for each. The mean of overall scores was 84 percent. The mean of compliance with service guidelines scores was the highest at 90 percent, followed by focused outcome areas (85%), and billing validation (81%). The lowest scoring category was assessment and treatment planning, averaging 79 percent. The following are highlights of the overall category scores: \n The lower assessment and treatment planning scores were primarily driven by the lack of documentation supporting the incorporation of whole health and wellness goals and objectives into individual resiliency plans (IRPs). 
\n The billing validation score indicates that approximately 20 percent of claims reviewed were unjustified and subject to recoupment. \n The focused outcome areas represent a new area of review and contain questions for which providers had not previously been assessed; however, these reflected higher scores for most providers. \n \nPage 19 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Figure 1. FY 2016 BHQR Overall Review Scores (N = 141) \n \nCategory Overall Billing Validation Assessment \u0026 Tx Planning Service Guidelines Focused Outcome Areas \n \nTable 4. Overall Scores Data \n \nNumber of Reviews \n \nMedian \n \n141 \n \n87% \n \n141 \n \n88% \n \n141 \n \n79% \n \n140 \n \n92% \n \n141 \n \n87% \n \nMean 84.1% 81.4% 79.3% 90.3% 85.1% \n \nStandard Deviation \n10.4% 23.0% 11.7% 10.7% 12.5% \n \nPage 20 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Figure 2: BHQR Provider Overall Review Scores Distribution (N = 141) \n \n40 \n \n32 \n \n31 \n \n17 \n \n1 \n \n2 \n \n12316 \n \n5 \n \nMedian 87% \n \nMean 84.1% \n \nStandard Deviation 10.4% \n \nBHQR Overall Scores by Tier \nIn July 2014, DBHDD implemented the Community Behavioral Health Provider Network Structure in which providers were classified using a three-tiered structure.4 Tiers are defined as Tier 1: Comprehensive Community Providers, Tier 2: Community Medicaid Providers, and Tier 3: Specialty Providers. A more detailed definition of each tier is provided below, along with a distribution graph of the overall scores for each tier. \nTable 5 provides a snapshot of the BHQR overall average review scores by tier. Scores were generally lower for Tier 3 providers (78%), with the highest average score for Tier 1 providers (86%), which is higher than the State Average of 84 percent. The distributions of providers across scores, by Tier, are shown in Figures 3-5. 
\n \n4 Policies regarding the implementation and definition of the DBHDD Community Behavioral Health Provider Network Structure can be found at DBHDD's PolicyStat website: https://gadbhdd.policystat.com \n \nPage 21 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Tier 1 (N = 24) \n86% \n \nTable 5. BHQR Overall Scores by Tier \n \nTier 2 \n \nTier 3 \n \nOverall Average \n \n(N = 96) \n \n(N = 21) \n \n(N = 141) \n \n85% \n \n78% \n \n84% \n \nTier 1 providers function as the safety net for behavioral health services in the state. These providers serve individuals from multiple payer sources (Medicaid, state contracted, and fee-for-service), must provide mental health and addiction services for adults, adolescents, and children; must provide at least one intensive in-clinic service, such as psychosocial rehabilitation - group or mental health peer support - group; and must provide at least one intensive out-of-clinic service, such as assertive community treatment (ACT) or community support team (CST). Tier 1 providers may also provide crisis services such as CSU and specialty services such as intensive family intervention (IFI), but are not required to do so. \n \nFigure 3. Tier 1 Overall Provider Scores Distribution \n \n(N = 24) \n \n9 \n \n45 \n \n6 \n \nMedian 88% \n \nMean 86.3% \n \nStandard Deviation 4.8% \n \nTier 2 providers make up the largest number of providers in Georgia. There are 96 Tier 2 providers, compared to 45 Tier 1 and Tier 3 providers combined. Tier 2 providers may serve \n \nPage 22 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n adults, children and adolescents, or both, but must have the capacity to provide the entire array of non-intensive services including both mental health and addictive disease services. Tier 2 providers also have the opportunity to provide specialty services, but are not required to do so. \n \nFigure 4. 
Tier 2 Overall Provider Scores Distribution \n \n(N = 96) \n \n29 \n \n22 \n \n20 \n \n13 \n \n1 \n \n314 \n \n3 \n \nMedian 87% \n \nMean 84.9% \n \nStandard Deviation 8.8% \n \n\"The treatment has helped me a lot, I love my counselors. They are nice and friendly. They are strict about recovery and that is why I am here.\" \n \nPage 23 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Tier 3 providers provide an array of specialty services to individuals from multiple payer sources (depending on the service provided). Demographics of individuals who receive services from Tier 3 providers may also fluctuate depending on the specialty service provided. \n \nFigure 5. Tier 3 Overall Provider Scores Distribution (N = 21) \n6 \n \n4 \n \n2 \n \n2 \n \n2 \n \n2 \n \n1 \n \n1 \n \n1 \n \nMedian 83% \n \nMean 77.7% \n \nStandard Deviation 17.8% \n \nPage 24 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n BHQR Billing Validation Scores \nFigure 6 shows the total dollar amount reviewed through claims analysis across all providers during the BHQRs ($3,417,902.28) and the dollar amount found to be unjustified ($807,050.16). Information in Table 6 indicates the specific billing discrepancy reasons found during the BHQRs. When a claim was found to be unjustified, assessors select all applicable reasons a reviewed claim was identified as a discrepancy; therefore, one claim may have multiple discrepancy reasons identified. The most prevalent billing discrepancy reasons were: \n Missing/incomplete service orders (1,612) \n Individuals did not meet admission criteria (1,132) \n Progress note missing (779) \n \nFigure 6. 
BHQR Billing Validation Amount Reviewed \n \n$3,500,000.00 $3,000,000.00 $2,500,000.00 $2,000,000.00 $1,500,000.00 $1,000,000.00 \n$500,000.00 $0.00 \n \n$807,050.16 $2,610,852.12 \n \nTotal $ Justified Total $ Unjustified \n \nTotal Amount Reviewed in FY 2016: $3,417,902.28 \n \nPage 25 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Table 6. BHQR Billing Discrepancy Reasons \nEligibility Missing/incomplete order Does not meet admission criteria Quantitative Progress note is missing Code is missing/different than code billed Staff credential missing Signature missing Date of entry missing Units billed exceed time / units documented Consistency requirements missing Credential not supported by documentation Record not submitted within timeframe Location missing (out-of-clinic) Time in / time out missing Date of service incorrect / missing Printed name missing Performance Standards Content does not match service definition Content does not support code billed Content does not support units billed Intervention outside staff's scope/practice Content is not unique to the individual Multiple services billed at the same time Non-billable activity Diversionary activities billed Mutually exclusive services billed \n \nOccurrences 1,612 1,132 \nOccurrences 779 381 299 275 263 236 202 199 197 166 92 63 42 \nOccurrences 735 428 375 348 137 95 76 48 2 \n \nPage 26 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n The billing validation score for each BHQR is the percentage of justified billed dollars divided by the total paid/billed dollars for the reviewed claims. The distribution of providers across billing validation scores is shown in Figure 7. Four providers scored 100 percent and an additional eight providers scored 99 percent. 
Thirty-five providers scored below the FY 2016 DBHDD Performance Monitoring Report threshold of 80 percent and three providers scored 0 percent in billing validation.5 \n \n322 \n \nFigure 7. BHQR Billing Score Distribution \n \n(N = 141) \n \n25 31 28 21 \n \n9 \n \n111121134 \n \n5 \n \nMedian 88% \n \nMean 81.4% \n \nStandard Deviation 23.0% \n \n5 DBHDD's Performance Monitoring Report billing validation score threshold increased to 85% effective August 22, 2016. For more information, please refer to CCP Standard 13 - Administrative Services Organization and Audit Compliance, 01-213 and CMP Standard 9 - Administrative Services Organization (ASO) \u0026 Audit Compliance, 01239 on DBHDD's PolicyStat website: https://gadbhdd.policystat.com. \n \nPage 27 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n BHQR Assessment and Treatment Planning Scores \nThe assessment and treatment planning category consisted of nine questions that were answered once per record reviewed. The questions and percent \"yes\" on each are presented in Figure 8 and the distribution of the 141 providers across scores is shown in Figure 9. \nThe lowest-scoring question indicated that co-occurring health conditions were often not included in the individuals' plans of care (36% yes); therefore, in 64 percent of the plans, co-occurring health conditions were omitted from planning and without any explanation. Likewise, where other needs were identified (housing, employment, childcare, higher education, etc.), \"needs assessed are addressed,\" were included in 59 percent of the plans. Additionally, 59 percent of individuals had goals, objectives, or interventions in their plans to address wellness. \nThese trends in scoring reflect a deficit in a comprehensive, whole-person, whole-health approach in treatment planning with a number of the providers in the network. The average score for this category was 79.3 percent. 
Although scores for individualized language and discharge planning were higher than those already discussed (78% and 80%, respectively), data suggest areas for growth as all individuals require highly-individualized and tailored plans to meet their unique needs. \n \nPage 28 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Figure 8. BHQR Assessment \u0026 Treatment Planning (N = 141) \n \nCore Customer Criteria Medical Screening \nIndividualized Language Goals honor achievement of the individual and/or family \nInterventions/objectives are related to goals Needs assessed are addressed \nWhole-health wellness goals and interventions Co-occurring health conditions included Discharge plan has step-down service \n \n95% 98% 78% 91% 96% 59% 59% 36% 80% \n \nFigure 9. Assessment \u0026 Treatment Planning Scores Distribution \n \n(N = 141) \n \n32 \n \n10 12 11 \n \n24 20 \n14 10 \n \n1 \n \n1 \n \n33 \n \nMedian 79% \n \nMean 79.3% \n \nStandard Deviation 11.7% \n \nPage 29 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n BHQR Compliance with Service Guidelines Scores \nThe Compliance with Services Guideline questions were answered once per individual record reviewed. The number of questions answered varied, specific to the service scored. The service reviewed for each individual record was based on which services were reflected in the billing claims sample for that individual; therefore, multiple services could be reviewed within one individual's record. There were a total of 18 services reviewed across all providers in FY 2016, as shown in Figure 10. The n size in Figure 10 represents the number of providers who were reviewed for that service, based on the claims sample. \nThe Compliance with Service Guidelines is the highest scoring category of the four, with an average of 90 percent. 
The most frequently provided services scored the highest (e.g., Individual Outpatient Services n = 113, and Family Outpatient Services, n = 92). The Specialty Service of Psychosocial Rehabilitation-Group was also a higher scoring service (95%) but was not reviewed as frequently (n = 25). The lowest scoring service was Addictive Disease (AD) Peer Support  Individual at 79 percent; however, only one provider reviewed was providing this service. Other lower scoring services included Addictive Disease Support Services (ADSS) at 81 percent and Community Support - Individual (84%). The lower scores in both of these services were driven by a lack of documentation reflecting coordination of care with natural supports and needed community resources and lack of documentation reflecting staff making required monthly contacts. \nOne of the 141 providers reviewed in FY 2016 was not reviewed for Compliance with Service Guidelines because the provider only billed a service that was not reviewed programmatically \n \nPage 30 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n (Opioid Maintenance Treatment). Over half (57.9%) of providers reviewed scored above 90 percent in this category (Figure 11). \n \nFigure 10. 
BHQR Service Guidelines Scores by Service (N = 140) \n \nAD Peer Support - Individual (n = 1) Addictive Diseases Support Services (n = 48) \nAssertive Community Treatment (n = 19) Case Management Services (n = 69) \nCommunity Support - Individual (n = 84) Community Support Team (n = 7) \nFamily Outpatient Services (n = 92) Group Outpatient Services (n = 63) Individual Outpatient Services (n = 113) Intensive Case Management (n = 10) Intensive Family Intervention (n = 30) MH Peer Support - Group (n = 33) MH Peer Support - Individual (n = 7) Nursing Assessment \u0026 Health Services (n = 85) Peer Support - Whole Health \u0026 Wellness (n = 7) \nPsychiatric Treatment (n = 91) Psychosocial Rehab - Group (n = 25) Psychosocial Rehab - Individual (n = 75) \n \n79% 81% 85% 85% 84% \n94% 96% 93% 97% 95% 86% 95% 87% 85% 92% 95% 96% 90% \n \nPage 31 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Figure 11: Frequency Distribution of Compliance Scores \n \n(N = 140) \n \n45 \n \n36 \n \n28 \n \n20 \n \n1 \n \n136 \n \nMedian 92% \n \nMean 90.3% \n \nStandard Deviation 10.7% \n \n\"I am beyond satisfied and I am working on becoming a Certified Peer Specialist\" \n \nPage 32 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n BHQR Focused Outcome Areas (FOA) Scores \nFocused outcome areas questions are answered once per record reviewed. Each subcategory has a different number of questions for a total of 24 questions scored in this category. \nFigure 12. FY 2016 BHQR Focused Outcome Areas Overall Scores (N = 141) \n \nRights 90% \nChoice 92% \n \nWhole Health \n63% \n \nSafety 81% \n \n85% \nCommunity 87% \n \nPerson Centered Practices \n92% \n \nChoice questions address how and if individuals receiving care were provided with options of services and were encouraged to make educated choices with respect to supports and services provided. 
When barriers to services were identified, Assessors looked for documentation as to how the individual was engaged in addressing the identified barriers (93%). \n \nPage 33 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Person Centered Practices questions are used to measure whether documentation shows individuals have a voice and participate in creating their care plans and are active participants in modifying them as needed and desired. One of the higher-scoring questions indicated individuals were active participants in the planning and receiving of services (94%). \nWhole Health questions address whether the records reviewed demonstrated that individuals were treated as a whole person with their physical health needs being assessed, documented, and monitored, as evidenced by documented communication with outside healthcare providers. Of the six FOA subcategories, Whole Health was the lowest-scoring (63%), which is consistent with the scoring of questions related to Whole Health and Wellness in the Assessment and Treatment Planning category. \nAlthough item-level detail is not comprehensively provided in this report, The Georgia Collaborative determined that it was imperative to include the four questions from the Whole Health subcategory as it was the lowest FOA score (Table 7). \n \nTable 7. 
FOA Whole Health Questions: 63% (N = 141) \n \nOngoing assessment to determine need for external referrals for health services, supports and treatment \n \n71% \n \nCommunication with external referral sources to determine results of testing and treatment \n \n61% \n \nMedical conditions assessed, monitored and recorded \n \n64% \n \nDocumented safeguards used for medications \n \n71% \n \nSafety questions address whether providers were attending to certain risk factors for individuals, such as their tendency to experience a crisis, have suicidal or homicidal thoughts or actions, and whether individuals had received information and education about the risks \n \nPage 34 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n and benefits of prescribed medications. This area represents the second lowest-scoring area of the FOAs. Of the three questions in the subcategory, the one most often scored \"No\" indicated individuals (or their legal guardians) often had not signed medication consent forms along with the prescriber (52%). \nRights questions address whether individuals had been apprised of their rights while in treatment, at the point of entry into the system and on an annual basis thereafter, and whether they had been informed of their rights under Federal HIPAA laws. The area of greatest concern in this group of five questions was whether individuals were apprised of their rights and responsibilities on an annual basis (58%). \nCommunity Life questions address how individuals were engaged in their communities of choice and whether they held valued social roles. The five questions in this subcategory are used to measure whether individuals had been assessed for their need to make changes in their living, learning, working, and/or social environments (96%), and whether they had been assisted with making these changes (92%). \n\"The groups I facilitate are like a mini-United Nations. 
I present the topic and then interpreters sit with small groups of people \nwho speak languages like Arabic, Farsi, and Swahili. They discuss and then the interpreters relay the information back to \nthe rest of the large group.\" \n \nPage 35 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Figure 13: Focused Outcome Areas Score Distribution (N = 141) 41 \n \n30 25 \n \n17 \n \n12 \n \n1 \n \n3 \n \n147 \n \nMedian 87% \n \nMean 85.1% \n \nStandard Deviation 12.5% \n \nBHQR Staff and Individual Interview Scores \nThe interview questions were divided into the six FOA subcategories. Individual Interviews served to assess the individual's quality of life and also the perception of care with the provider and services rendered. Staff Interviews helped determine whether a person-centered approach was used in providing services and empowering individuals. The data and anecdotal information gathered from individual and staff interviews supplemented the record review and enhanced the review process by providing a tailored perspective to illustrate the quality of care the agency provided. \nData in Figure 14 and Table 8 represent the average scores for the individual and staff interviews. It is notable that all areas scored in the 90th percentile which may indicate a high level of satisfaction for individuals who were served by the providers. Likewise, provider staff \n \nPage 36 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n interview scores seem to indicate knowledge regarding providing services that align with standards assessed by the FOAs as well as knowledge of the individuals with whom they work. While not directly assessed via the interview questions, provider staff frequently made statements that indicated a high level of employment satisfaction with the reviewed agency. 
\n \nFigure 14: BHQR Interview Subcategory Data \n \nOverall \nCommunity Life \nRights \nSafety \nWhole Health Person Centered \nPractices Choice Staff Interviews (N = 753) \n \n98.1% 95.6% \n99.2% 95.0% \n99.5% 97.6% \n97.6% 96.2% \n97.4% 92.7% \n97.9% 95.8% \n98.5% 97.4% \nIndividual Interviews (N = 737) \n \nPage 37 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Table 8. BHQR Interview Subcategory Data \n \nIndividual Interviews \n \nStaff Interviews \n \nFocused Outcome Area Median Mean \n \nSD Median Mean \n \nSD \n \nChoice \n \n100% 97.4% 10.2% 100% 98.5% 9.2% \n \nPerson-Centered Practices 100% 95.8% 11.8% 100% 97.9% 9.2% \n \nWhole Health \n \n95% 92.7% 11.5% 100% 97.4% 9.9% \n \nSafety \n \n100% 96.2% 10.3% 100% 97.6% 9.4% \n \nRights \n \n100% 97.6% 9.4% 100% 99.5% 9.0% \n \nCommunity Life \n \n100% 95.0% 13.5% 100% 99.2% 9.0% \n \nOverall \n \n97% 95.6% 9.6% \n \n99% 98.1% 8.9% \n \nSection 3c: Assertive Community Treatment \nAt the direction of DBHDD, The Georgia Collaborative conducted reviews of 19 Assertive Community Treatment (ACT) providers, embedded within the routine behavioral health quality reviews (BHQR). For the 17 providers who offered both non-intensive outpatient and ACT, up to 15 additional individuals receiving ACT services were selected for review, and for the two providers who offered only ACT services, 30 individuals were selected for each review. This totaled 314 ACT records reviewed; and with up to 10 billing claims per record reviewed, a total of 3,029 claims were analyzed across all ACT reviews. The data below represents findings from reviews of ACT services only. \n \n\"ACT showed me that there was a reason to live. They never gave up.\" \n \nPage 38 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Table 9. ACT Records Reviewed per Provider \n \nMedian \n \nMean \n \nStandard Deviation \n \n15 \n \n16.5 \n \n4.8 \n \nTable 10. 
ACT Claims Reviewed per Provider \n \nMedian \n \nMean \n \nStandard Deviation \n \n148 \n \n159 \n \n46.7 \n \nFigure 15. ACT Statewide Averages (N = 19) \n \nPage 39 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n It should be noted that all four category scores for ACT reviews are higher than the corresponding BHQR scores for non-intensive services. \n \nTable 11. FY 2016 BHQR vs. ACT FY 2016 Average \n \nAssessment/ Service \n \nBilling \n \nFOA \n \nPlanning Guidelines \n \nBHQR Averages \n \n81% \n \n85% \n \n79% \n \n90% \n \nACT Averages \n \n92% \n \n88% \n \n85% \n \n85% \n \nOverall 84% 88% \n \nTable 12. ACT Overall Scores Data (N = 19) \n \nCategory \n \nMedian Mean SD \n \nOverall \n \n90% 87.6% 4.6% \n \nBilling Validation \n \n92% 92.3% 6.7% \n \nAssessment \u0026 Tx Planning \n \n84% 85.2% 6.0% \n \nService Guidelines \n \n89% 84.6% 9.1% \n \nFocused Outcome Areas \n \n90% 87.6% 7.6% \n \nFigure 16: ACT Overall Score Distribution \n \n(N = 19) \n \n8 \n \n4 \n \n5 \n \n2 \n \nMedian 90% \n \nMean 87.6% \n \nStandard Deviation 4.6% \n \nPage 40 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n ACT BHQR Billing Validation Scores \nFigure 17 shows the total dollar amount reviewed through claims analysis during the ACT BHQRs ($306,628.32) and the dollar amount found to be unjustified ($23,422.57). Information in Table 13 indicates the specific billing discrepancy reasons found during the BHQRs. Assessors select each reason a reviewed claim was identified as a discrepancy; therefore, one claim may have multiple discrepancy reasons identified. The most prevalent billing discrepancy reasons were as follows: \n Staff credential missing (69 progress notes)  Location was missing for out-of-clinic claims (39 progress notes)  Content did not support the units billed (27 progress notes) \nFigure 17. 
ACT Billing Validation (N = 254) \n \nPage 41 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Table 13. ACT BHQR Billing Discrepancy Reasons \n \nEligibility \n \nOccurrences \n \nMissing/incomplete order \n \n3 \n \nQuantitative \n \nOccurrences \n \nStaff credential missing \n \n69 \n \nLocation missing (out-of-clinic) \n \n39 \n \nCode is missing/different than code billed \n \n25 \n \nUnits billed exceed time / units documented \n \n17 \n \nProgress note is missing \n \n16 \n \nDate of entry missing \n \n1 \n \nSignature missing \n \n1 \n \nPerformance Standards \n \nOccurrences \n \nContent does not support units billed \n \n27 \n \nContent does not support code billed \n \n17 \n \nIntervention outside staff's scope of practice \n \n12 \n \nNon-billable activity \n \n9 \n \nMultiple services billed at the same time \n \n7 \n \nContent does not match service definition \n \n5 \n \nContent is not unique to the individual \n \n4 \n \nThe billing validation score is the percentage of justified billed dollars divided by the total paid/billed dollars for the reviewed claims. The statewide average ACT billing score of 92 percent was higher than the BHQR statewide average of 81 percent. \n \nPage 42 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Figure 18. Frequency Distribution of ACT Billing Scores \n \n(N = 19) \n \n7 \n \n5 \n \n4 \n \n1 \n \n2 \n \nMedian 92% \n \nMean 92.3% \n \nStandard Deviation 6.7% \n \nACT BHQR Assessment and Treatment Planning Scores \nThe ACT assessment and treatment planning scores were similar to the BHQR statewide averages and reflected a deficit in a comprehensive, whole person, whole health approach in treatment planning. Questions assessing whether co-occurring health conditions, whole health and wellness, and all assessed needs were addressed on the IRP were also the lower scoring questions statewide. 
However, IRPs within the ACT program scored higher than for the BHQRs regarding individualized language and discharge planning (92% and 90%, compared to 78% and 80% respectively). \n \nAn ACT staff member stated that she really enjoys being able to \"encompass the whole health of an individual\" and link individuals to the necessary \nresources in the community. \n \nPage 43 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Figure 19. ACT Assessment and Treatment Planning Scores (N = 19) \n \nCore Customer Criteria Medical Screening \nIndividualized Language Goals honor achievement of the individual and/or family \nInterventions/objectives are related to goals Needs assessed are addressed \nWhole-health wellness goals and interventions Co-occurring health conditions included Discharge plan has step-down service \n \n100% 100% 92% 94% 99% 59% 75% 47% 90% \n \nFigure 20. ACT Assessment \u0026 Tx Planning Distribution (N = 19) \n \n4 \n \n6 \n \n5 \n \n2 \n \n1 \n \n1 \n \nMedian 84% \n \nMean 85.2% \n \nStandard Deviation 6.0% \n \nPage 44 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n ACT BHQR Compliance with Service Guideline Scores \nThe ACT compliance with service guidelines overall average is 90 percent. Documentation supported that 100 percent of the 314 individual records reviewed met admission or continuing stay criteria. Additionally, 100 percent of individuals received at least one symptom assessment and medication management contact per month. The lower-scoring questions regarding working with natural supports monthly and treatment plan reviews (43% and 58%, respectively) indicate a deficit in providing person-centered, holistic care that included supports from the individual's environment. Also of note is that the question assessing the completeness of the ACT team scored 76 percent, indicating that many ACT teams were lacking one or more required team members. 
\nThe ACT compliance with service guideline score is calculated using 13 questions or indicators. Table 14 shows the item-level detail to illustrate the degree of variation across indicators within the ACT compliance with service guidelines mean score of 84.6 percent. \n \nPage 45 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Table 14. ACT Compliance with Service Guidelines Scores (N = 19) \n \nDocumentation supports that the individual meets admission or continuing stay criteria. \n \n100% \n \nThe ACT team completes a treatment plan review with the staff, the individual, and his/her family/informal supports prior to the reauthorization of services. \n \n58% \n \nThere is documentation to support when substance use services are needed and are integrated into the treatment plan. \n \n94% \n \nThere is evidence that the ACT team is working with informal support \n \nsystems/collateral contacts at least 2-4 times per month with or without the individual present (and it is documented) to provide support and skills training to \n \n43% \n \nassist the individual in his/her recovery. (Review authorization period.) \n \nThe ACT team is working with the individual towards educational or vocational needs, interests, per IRP (once per authorization). \nFollowing admission to a psychiatric facility, the ACT team is involved in each individual's discharge planning. \nThere is documentation of individual's involvement in transition planning. \nOne of the contacts per month addresses the symptom assessment and management of medications (once a month). \n \n95% \n87% 82% 100% \n \nThe ACT team has all required staff. \n \n76% \n \nFor discharged individuals, there are multiple documented attempts to locate and make contact with the individual prior to discharge (Over a 45 day period). \n \n100% \n \nProgress notes contain documentation of the individual's progress (or lack of) toward specific goals/objectives on the treatment plan. 
\n \n91% \n \nThe staff interventions reflected in the progress notes are related to the staff interventions listed on the treatment plan. \n \n98% \n \nThe progress notes document individual response to the staff intervention provided. 99% \n \nPage 46 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Figure 21. ACT Compliance with Service Guidelines \n \nScores Distribution \n \n(N = 19) \n \n7 \n \n5 \n \n2 \n \n3 \n \n1 \n \n1 \n \nMedian 89% \n \nMean 84.6% \n \nStandard Deviation 9.1% \n \nPage 47 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n ACT BHQR Focused Outcome Areas Score \nFocused Outcome Area (FOA) questions are answered once per record reviewed. Each FOA has a different number of questions for a total of 24 questions scored in this category. The overall score for ACT FOA is 90 percent. Similar to the BHQR statewide scores, the lowest category scores for ACT are Whole Health (76%) and Safety (78%). Person Centered Practices received the highest score (99%). \nFigure 22. FY 2016 ACT BHQR Focused Outcome Area Overall Scores (N = 19) \n \nPage 48 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Figure 23. ACT FOAs Score Distribution \n \n(N = 19) \n6 \n \n2 \n \n1 \n \n4 \n \n3 \n \n3 \n \nMedian 90% \n \nMean 87.6% \n \nStandard Deviation 7.6% \n \nPage 49 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Section 3d: Crisis Stabilization Unit Quality Reviews \nAt DBHDD's request, a special review of all Crisis Stabilization Unit (CSU) providers was initiated during FY 2016, as routine quality reviews of this specialty service had not been conducted previously. 
The purpose of the CSU review was to assess the provider's overall practices and quality of service delivery, and to determine adherence to DBHDD standards through individual record reviews.6 While these reviews were conducted in conjunction with a BHQR when the CSU provider also provided essential routine services, the CSU reviews resulted in separate scores and final assessment reports. \nReview questions are based on DBHDD and Medicaid requirements, and were organized into three categories or review components: Individual Record Review (IRR), Compliance with Service Guidelines and FOAs. The score for each category represents the percent of applicable questions that were met or present. The CSU overall score is calculated by averaging the three categories. Each category accounts for 33.3 percent of the overall score. Billing validation was not a component of the CSU reviews for FY 2016. \nDuring FY 2016, all 19 eligible CSU providers were reviewed. Two were freestanding CSUs and 17 occurred in conjunction with behavioral health quality reviews (BHQRs). Of the CSUs reviewed, three were conducted jointly with a BHQR and a QEPR at one provider location to reduce administrative burden. Three of the CSUs provided services to adolescents and 18 of the CSUs provided services to adults. \n \n6 Please refer to the following link to access a full description of the review process and review tools. http://georgiacollaborative.com/providers/prv-BH.html \n \nPage 50 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Sample Selection \nA random sample of 15 individuals who had received services within the three months preceding the review was selected for record reviews. 
When providers did not have an adequate number of individuals served in the three months preceding the review, samples were pulled from individuals served up to six months preceding the review; therefore, all CSU providers had 15 individuals selected, for a total of 285 individual records reviewed. \nThe sample for the interviews was selected and scored similar to the BHQR process: individuals and staff were selected by the provider and Quality Assessors conducting the CSU Review. Results from the interviews were not included in calculating the provider's overall CSU score. Quality Assessors completed a minimum of five individual and five staff interviews per CSU review; however, the actual number fluctuated based on individual and staff availability, their agreement to participate in the interview process, the number of staff, and the number of individuals the provider served at the time of the review. The individuals selected for interviews were currently being served at the CSU, and the staff selected was currently providing services on the CSU. If an individual or staff declined to be interviewed, Assessors selected an additional individual or staff to be interviewed. \nCSU Quality Review Overall Scores \nFigure 24 shows the average overall score for the 19 providers who received a CSU quality review, and the scores for each category. Table 15 shows the mean, median and standard deviation for each. The overall score mean was 83 percent. Focused outcome areas was the highest-scoring category at 88 percent, followed by compliance with service guidelines (82%). \n \nPage 51 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n The lowest scoring category was the Individual Record Review (IRR), averaging 78 percent. 
The following are highlights of the overall category scores: \n Based on the higher scored questions within the IRR and compliance with service guidelines categories, individuals and staff have needed access to physicians or physician extenders, and individuals have regular contact with nursing staff \n Much like the BHQRs, the lower scores in the treatment and discharge planning subcategories within the IRR reflect a lack of documentation supporting personcentered, whole-health and wellness approaches to developing IRPs \n Also in line with the BHQRs, the FOAs reflected higher scores for most providers \nFigure 24. FY 2016 CSU BHQR Overall Review Scores (N = 19) \n \nPage 52 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Table 15. CSU Overall Scores (N = 17) \n \nCategory \n \nMedian \n \nMean \n \nOverall \n \n83% \n \n82.9% \n \nIndividual Record Review \n \n77% \n \n78.3% \n \nService Guidelines \n \n80% \n \n82.1% \n \nFocused Outcome Areas \n \n88% \n \n87.7% \n \nSD 8.1% 8.5% 13.2% 6.0% \n \nFigure 25. CSU Overall Score Distribution \n \n(N = 19) \n \n5 \n \n44 \n \n3 \n \n2 \n \n1 \n \nMedian 83% \n \nMean 82.9% \n \nStandard Deviation 8.1% \n \nCSU Individual Record Review Scores \nIndividual Record Review (IRR) questions were answered once per record reviewed. Each of the six subcategories had a variable amount of questions with a total of 31 scored within the IRR category. \nThe CSU intake assessment data indicate 99.6 percent of individuals met admission criteria and 99 percent were assessed by a physician or a physician extender within 24 hours of admission. Additional supporting evidence of this was that documentation in 99 percent of \n \nPage 53 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n individuals' records reflected daily status updates completed by a Registered Nurse (RN) and 93 percent of records contained a Medication Administration Record (MAR). 
\nAlternatively, data suggest areas for growth in CSU treatment planning that mirror deficits identified in the BHQR Assessment and Treatment Planning category--such as not incorporating all identified needs (primarily, co-occurring physical health issues) in the IRPs or Nursing Care Plans (NCPs). Inattention to co-occurring physical health issues was also found in progress notes, as they often lacked evidence that physical illnesses were monitored during an individual's course of stay. Additionally, documentation lacked evidence that individuals were offered an opportunity to participate in the treatment team meetings to develop their own plan of care. Discharge planning is also an area of growth for the CSUs, as discharge and aftercare plans were incomplete in 43.7 percent of records reviewed. \nInitial bio-psychosocial assessments were present and comprehensive nursing assessments were generally completed upon admission (84% and 78%, respectively). Documentation lacked evidence that individuals met American Society for Addiction Medicine (ASAM) criteria for medically monitored residential withdrawal management when admitted for detoxification (18.8% scored \"Yes\"). Records lacked evidence of the required assessment of individuals against ASAM criteria for service intensity and placement. \n \nPage 54 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Figure 26. Individual Record Review Category Scores (N = 19) \n \nAssessment/Treatment Planning Admission/Initial Evaluation/Screening for Risk Crisis Stabilization Specific Treatment Planning \nCrisis Stabilization Course of Stay Documentation \nTransition/Discharge Planning \n \n86% 69% 70% \n83% 87% \n63% \n \n\"When people leave or graduate from this program, they know what recovery is and how to apply it to their life.\" \n \nPage 55 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Figure 27. 
CSU Individual Record Review Score Distribution (N = 19) 7 \n5 \n \n22 \n \n1 \n \n1 \n \n1 \n \nMedian 77% \n \nMean 78.3% \n \nStandard Deviation 8.5% \n \nCSU Compliance with Service Guidelines Scores \nThe 15 CSU compliance with service guidelines questions (13 scored and 2 non-scored) were answered once per review to assess the CSU program as a whole. \nThe CSUs had all required staff including access to a physician. Additionally, the three CSUs serving adolescents met all applicable staff-to-individual ratio requirements. \nThe lower scoring questions included the following: \n The provider is adhering to its current policy for the safe storage of medication (53%)  Policies and procedures are present for adherence to required components of crisis \nservice plans for provision of crisis services to individuals who are deaf, deaf-blind, blind, and hard-of-hearing (53%) \n \nPage 56 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Documentation reflected that CSUs did not have access to specialists such as Addictionologists (67%) or a Pediatric Psychiatrists (67%) when needed. CSU policies often failed to identify a model for substance use treatment (69%), and scoring for providers adhering to their own policies on the notification of medication errors also indicated an area for growth (79%). \n \nFigure 28. 
CSU Compliance with Service Guidelines (N = 19) \n \nCSU Staffing Requirements Met (n = 19) C\u0026A Minimum Staff Present (n = 3) C\u0026A Staff Ratio Met (n = 3) C\u0026A Nursing Staff Ratio (n = 3) \nAdherence to Medication Notification Policy (n = 19) Protocols for Handling Drugs (n = 19) Safe Storage of Medication (n = 19) \nInfection Control Plan Adherence (n = 19) Seclusion \u0026 Restraint Policy Adherence (n = 19) \nTherapeutic Blood Level Monitoring (n = 19) Deaf, Deaf-Blind, Hard of Hearing Policies (n = 19) Model/Curriculum for SU treatment (Non-scored) (n = 16) \nPhysician Availability (n = 19) Access to Addictionologist (n = 18) C\u0026A Psychiatrist (Non-scored) (n = 3) \n \n100% 100% 100% 100% 79% 95% 53% 84% 95% 89% 53% 69% 100% 67% 67% \n \nPage 57 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Figure 29. CSU Service Guideline Score Distribution (N = 19) \n \n6 \n \n5 \n \n11 \n \n2 \n \n1 \n \n3 \n \nMedian 80% \n \nMean 82.1% \n \nStandard Deviation 13.2% \n \nCSU Focused Outcome Areas Scores \nFocused Outcome Area questions were answered once per record reviewed. Each subcategory had a different number of questions for a total of 23 questions assessed in the FOA category. Please refer to the BHQR FOA section for a definition of the six FOA subcategories. \nAs illustrated below in Figure 30, agencies scored in the 90th percentile in the areas of Choice (99%), Rights (97%), and Community Life (92%). The high score in Choice reflected that documentation supports that the individual's preferences for treatment while at the CSU were honored and, when barriers to treatment were identified, measures were taken to address the barriers. Almost all of the records reviewed contained documentation that individuals' rights and responsibilities for treatment, as well as confidentiality, were reviewed with them upon admission. 
\n \nPage 58 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n In line with what is noted in the IRR category and within the BHQRs, documentation generally lacked evidence that whole health and wellness was incorporated into treatment. Also, documentation did not support that individuals were active participants in treatment planning. Data suggest this is a significant area for growth as all individuals require highly individualized and tailored plans to meet their needs regardless of length of stay. \nFigure 30. FY 2016 CSU Focused Outcome Area Overall Scores (N = 19) \n \nPage 59 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Figure 31. CSU Focused Outcome Areas Overall Distribution (N = 19) \n \n65 \n \n33 \n \n2 \n \n00000 \n \nMedian 88% \n \nMean 87.7% \n \nStandard Deviation 6.0% \n \nCSU Staff and Individual Interview Scores \nThe CSU interview questions were similar to the BHQR interview questions and were divided into the six FOA subcategories. Individual interviews served to assess the individual's quality of life and also perception of care with the provider and services rendered. Staff Interviews helped determine if a person-centered approach was used in providing services and empowering individuals. The data and anecdotal information gathered from individual and staff interviews supplemented the record review and enhanced the review process by providing a tailored perspective to illustrate the quality of care the agency provided. \nData in Figure 32 and Table 16 represent the average scores for the Individual and Staff Interviews. It is notable that all but one FOA scored in the 90th percentile, which seems to indicate a high level of satisfaction for individuals who were served by the providers. 
The lowest-scoring subcategory for Individual Interviews was Whole-Health (85%) which may \n \nPage 60 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n indicate an increased need for CSUs to attend to and plan for individuals' co-occurring health conditions. This issue was also highlighted in the IRR and FOA categories. Similar to the BHQR staff interviews, CSU staff interview scores seem to indicate knowledge regarding providing services that align with standards assessed by the FOA questions, as well as knowledge of the individuals with whom they work. \nFigure 32. CSU Interview Subcategory Data \n \nChoice Person Centered Practices \nWhole Health Safety Rights \nCommunity Life Overall \nIndividual Interviews (N = 94) \n \n96.8% 99.6% \n96.0% 98.2% \n84.6% 96.8% \n95.3% 98.7% \n99.6% 97.3% \n99.4% 98.7% \n94.9% 98.6% \nStaff Interviews (N = 96) \n \nPage 61 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Table 16. CSU Interview Subcategory Data \n \nIndividual Interviews (N = 94) \n \nStaff Interviews (N = 96) \n \nFOA \n \nMedian Mean \n \nSD Median Mean SD \n \nChoice \n \n100% 96.8% \n \n4.6% \n \n100% 99.6% 1.3% \n \nPerson Centered Practices \n \n97% \n \n96.0% \n \n4.0% \n \n100% 98.2% 5.7% \n \nWhole Health \n \n86% \n \n84.6% \n \n6.8% \n \n99% 96.8% 4.6% \n \nSafety \n \n96% \n \n95.3% \n \n4.1% \n \n100% 98.7% 2.1% \n \nRights \n \n100% 99.6% \n \n1.3% \n \n100% 97.3% 3.7% \n \nCommunity Life \n \n100% 99.4% \n \n1.9% \n \n100% 98.7% 4.7% \n \nOverall \n \n95% \n \n94.9% \n \n2.0% \n \n99% 98.6% 2.7% \n \nSection 3e: Summary of Findings and Recommendations for Behavioral Health Providers \nSummary of Findings \nAlthough many impactful and life-changing things have been captured and recorded by assessors throughout the last fiscal year, we believe that quality improvement is an ongoing activity. 
The Georgia Collaborative continually reassess our processes to ensure we are capturing results and analyzing outcomes that give us the best information and truest picture of service quality in Georgia. \nThe table below illustrates the overall scores for the three review types, BHQR, ACT, and CSU. The overall statewide average for all three review types is above 80 percent: BHQR  84 percent, ACT  88 percent and CSU  83 percent. The Assessment and Treatment Planning category (IRR for CSU) was the lowest score of all three review types. The CSU review did not include Billing Validation. \n \nPage 62 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n BHQR ACT CSU \n \nTable 17. BHQR, ACT, CSU Overall Averages \n \nBilling \n \nAssessment/ Service \n \nValidation FOA \n \nPlanning Guidelines \n \n81% \n \n85% \n \n79% \n \n90% \n \n92% \n \n88% \n \n85% \n \n85% \n \nNA \n \n88% IRR  78% \n \n82% \n \nOverall 84% 88% 83% \n \nThe FOAs represent a new area of review and contains questions on which providers had not previously been assessed. The table below summarizes the three review types highlighted in this report by FOA. Whole Health is the lowest score of the subcategories for both BHQR and ACT reviews and is a trend identified as an area for improvement in CSU reviews, as well. Person Centered Practices is a higher-scoring subcategory for the ACT and BHQRs; however, it is the second lowest subcategory found in the CSU reviews. Initial CSU data reflect individuals do not routinely participate in the treatment planning process throughout the course of stay. Additionally, treatment plans are not tailored to the individual's presenting needs. \n \nTable 18. 
BHQR FOA Scores \n \nFocused Outcome Areas \n \nBHQR ACT \n \nCSU \n \nSafety \n \n81% \n \n78% \n \n76% \n \nPerson-Centered Practices \n \n92% \n \n99% \n \n77% \n \nCommunity \n \n87% \n \n97% \n \n92% \n \nChoice \n \n92% \n \n97% \n \n99% \n \nRights \n \n90% \n \n89% \n \n97% \n \nWhole Health \n \n63% \n \n76% \n \n83% \n \nOverall Score \n \n85% \n \n90% \n \n88% \n \nPage 63 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Recommendations \nThe focus of Billing Validation is compliance with standards for ensuring that claims are justified in accordance with the DBHDD Provider Manual. As this is the second-lowest score of the four categories, we determined that technical assistance to providers was needed. In FY 2016, the Quality Department launched a Quality Training Series with the first module being Billing Validation. We will continue to analyze billing compliance trends to determine whether additional training or technical assistance to providers is indicated. \nThe three remaining categories of questions representing Assessment and Treatment Planning, Compliance with Service Guidelines, and Focused Outcome Areas will be analyzed throughout FY 2017 for their validity and effectiveness in yielding the data needed to ensure that quality is more accurately captured. As an example, the category of Compliance with Service Guidelines is primarily focused on compliance with measurable requirements of service definitions in the DBHDD Provider Manual. Our initial data analysis indicates a need to expand our focus to encompass a more qualitative look at services provided. \nBased on FY 2016 data, the Quality Department and DBHDD partnered to develop the BHQR frequency schedule for providers. The FY 2017 frequency of reviews is based upon providers' FY 2016 overall BHQR score and billing score and is based on providers achieving the benchmarks scores of 80 percent overall and 70 percent for billing validation. 
\nRecommendations: \n Based on data analysis, consideration will be given to modifying questions within the remaining categories that may provide additional information about quality. \n \nPage 64 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n  Additional training opportunities may become available through quality technical assistance consultations (QTAC). \n We will continue the Quality Training Series in FY 2017 based on data analysis from FY 2016, for all behavioral health and CSU providers. \n As FY 2016 is the baseline year for routine CSU reviews, we will continue to gather and analyze data and processes to improve the reviews to assess the quality within the CSU programs. \n \nPage 65 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n SECTION 4: INTELLECTUAL AND DEVELOPMENTAL DISABILITIES Section 4a. Background \nPerson Centered reviews (PCR) and Quality Enhancement Provider Reviews (QEPR) are used to assess the extent to which individuals with intellectual and developmental disabilities are satisfied with their services and achieve outcomes important to them, and to monitor provider systems.7 Follow-up review activities provide technical assistance to help providers improve service delivery systems (Quality Technical Assistance Consultation (QTAC)). \nThe purpose of the QEPR is to monitor providers to ensure they meet requirements set forth by the Medicaid waiver and DBHDD, and to evaluate the effectiveness of their service delivery system. The purpose of the PCR is to assess the person's quality of life as well as the effectiveness of and the satisfaction individuals have with the service delivery system. The Georgia Collaborative quality assessors use interviews, observations and record reviews to compile a well-rounded picture of the individual's circle of supports, how involved the person is in the decisions and plans laid out for that person, and the quality of services provided. 
\nIndividuals from both the PCR and QEPR samples participate in the individual interview and Individual Support Plan Quality Assurance Checklist (ISP QA). Both processes also include a Provider Record Review (PRR) for each individual selected in the sample, a Staff Interview (SI) with a sample of direct support providers, and onsite observation of day or residential programs. During the PCR, a Support Coordinator Record Review (SCRR) and Interview (SCI) are completed for the support coordinator working with the individual. During the QEPR, each provider organization receives one administrative review to monitor compliance with \n \n7 Please refer to the following link to access a full description of the review processes and review tools: http://www.georgiacollaborative.com/providers/prv-IDD.html \n \nPage 66 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n required Qualifications and Training (Q\u0026T). The Q\u0026T includes a review of a sample of personnel records to determine if staff has the necessary qualifications, specific to services rendered, and if the training was received within required timeframes. \nIn this section of the report, results for both the QEPR and PCR are presented by each of these review tools. Results are also presented by Focused Outcome Areas (FOAs). 
Indicators within each tool are grouped into six FOAs, areas of the person's life that are important to achieve and maintain: \n Person Centered Practices--supports and services are being provided based on the person's preferences and direction \n Choice--individuals have information they need to make informed choices on life decisions, such as where to live, where to work, and which providers to use \n Community Life--individuals are actively participating in their communities as desired  Rights--rights are upheld for individuals and they are provided information and \neducation to ensure they understand and know how to exercise their rights  Whole Health--individuals are healthy, aware of their health-related needs, and direct \ntheir own health care regimen  Safety--individuals are safe in their home and work environments and in their \ncommunities; they understand or are learning how to self-preserve in all environments \n \nPage 67 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Section 4b: Sample Selection \nPerson Centered Review (PCR) \nDuring the 2016 fiscal year, 484 individuals were selected for a PCR. A stratified random sample of individuals was selected by region across Georgia so the number of PCRs per region was proportionate to the number of individuals receiving services within that region. The PCR sample was selected from individuals who had not received a PCR during the previous two years, were actively receiving eligible waiver or state funded services, and were 18 years of age or older. Table 19 shows the number of PCRs completed by Region. \n \nTable 19. 
PCR Sample by Region \n \nRegion \n \nN \n \n1 \n \n93 \n \n2 \n \n87 \n \n3 \n \n122 \n \n4 \n \n55 \n \n5 \n \n58 \n \n6 \n \n68 \n \n% \n19.2% 18.0% 25.2% 11.4% 12.0% 14.0% \n \nTotal \n \n484 \n \n100% \n \nQuality Enhancement Provider Review (QEPR) \nA stratified random sample of 98 providers was selected from the list of providers rendering services to the 484 individuals selected for a PCR. Based on the number of eligible individuals in their case load, providers were stratified into three categories by size: \"Large,\" \"Medium,\" and \"Small.\" In addition, DBHDD selected one crisis service provider and one support \n \nPage 68 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n coordinator agency to be included in the sample. Table 20 displays the number of providers selected for a QEPR by size. \n \nTable 20. QEPR Provider Sample by Size \n \nProvider Size \n \nN \n \nSmall (caseload  30) \n \n47 \n \nMedium (30 \u003c caseload \u003c 100) \n \n36 \n \nLarge (caseload  100) \n \n17 \n \nTotal \n \n100 \n \nSection 4c: Review Processes \nThe focus of the PCR is on the individual's quality of life and quality of the services received. The focus of the QEPR is on the provider's overall practices, quality of services offered to all individuals served and level of compliance with state requirements. Both the PCR and the QEPR use the individual observation staff assessment (IOSA). This tool includes an interview with individuals, interviews with their staff (one per service), and onsite observation(s) at residential and day programs, as applicable. In addition to the IOSA, the PCR and QEPR include an evaluation of the individual's service plan (ISP QA), a review of the provider's records (PRR), as well as the compliance with service delivery requirements using the Developmental Disability Service Specific (DDSS) review. The number of PRR and DDSS reviews depends on the number of services received by the person. 
\nWhile the PCR and QEPR share most of the same tools, there are a few exceptions. The PCR also includes an interview with the individual's support coordinator and a review of the record in the Consumer Information System (CIS) maintained by the support coordinator for the \n \nPage 69 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n person. The QEPR has an additional review tool, qualifications and training, which is used to review a sample of records from all staff ensuring they have received proper training and are current on all state requirements (e.g., background screenings, level of education). The total number of records or interviews completed for the PCR and the QEPR is listed in Table 21, for each review tool. \n \nTable 21. Number of Records by Review Tool and Review Type \n \nReview Tool \n \nPCR \n \nQEPR \n \n(N = 484) (N = 100) Total \n \nIOSA  Individual interview \n \n484 \n \n667 \n \n1,150 \n \nIOSA  Observation \n \n578 \n \n409 \n \n987 \n \nIOSA - Staff interview \n \n833 \n \n353 \n \n1,186 \n \nISPQA Checklist \n \n484 \n \n656 \n \n1,140 \n \nSupport Coordinator Interview (SCI) \n \n484 \n \n8 \n \n492 \n \nSupport Coordinator Record Review (SCRR) \n \n484 \n \n20 \n \n504 \n \nProvider Record Review (PRR) \n \n558 \n \n1,377 \n \n1,935 \n \nStaff Qualifications and Training (Q\u0026T) \n \nN/A \n \n1,041 \n \n1,041 \n \nDDSS - Behavioral Supports Consultation \n \n2 \n \n9 \n \n11 \n \nDDSS - Community Access (Group) \n \n243 \n \n489 \n \n732 \n \nDDSS - Community Access (Individual) \n \n65 \n \n209 \n \n274 \n \nDDSS - Community Living Support \n \n51 \n \n183 \n \n234 \n \nDDSS - Community Residential Alternative \n \n91 \n \n267 \n \n358 \n \nDDSS  Crisis \n \n0 \n \n8 \n \n8 \n \nDDSS  Prevocational \n \n74 \n \n117 \n \n191 \n \nDDSS  Respite \n \n1 \n \n7 \n \n8 \n \nDDSS - Support Coordination \n \n467 \n \n20 \n \n487 \n \nDDSS - Supported Employment \n \n58 \n \n80 \n \n138 \n \nDDSS  
Transportation \n \n2 \n \n8 \n \n10 \n \nPage 70 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n When a PCR is completed, a report is provided that identifies the strengths of the person's team, providing supports and services as well as recommendations for the person and the support team, including the support coordinator, provider(s), and family. A provider who participates in a QEPR receives a comprehensive report which identifies strengths of the service delivery systems and recommendations for improvement, with a number of performance scores. These scores include the overall score, qualifications and training score, and DDSS score. \nThe QTAC is an additional review process conducted 90 days after completion of the QEPR. Using findings from the QEPR, technical assistance is provided to support providers and to offer suggestions and guidance to help improve their service delivery systems. This process utilizes a consultative approach with providers to address specific issues and concerns related to an individual receiving services or systems and practices that need improvement. \nProviders may also receive a QTAC through a referral based upon other monitoring systems, such as support coordination monitoring, critical incidents or complaints. The QTAC supplements the PCR and QEPR processes by affording DBHDD and contracted providers the opportunity to solicit technical assistance for specific needs within the service delivery milieu. \n \nSection 4d. Overall Review Scores \nData are aggregated into overall provider scores, scores by tool, and scores by focused outcome areas (FOAs). The overall score is the combined score for the IOSA (Individual Interview, Staff Interview and Observation) and the PRR. 
Each indicator in the overall score is \n \nPage 71 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n categorized into one of six FOAs: Person Centered practices (PCP), Community Life (CL), Choice, Safety, Rights, and Whole Health. The FOAs are constructed to measure the quality of services, as well as individual outcomes related to these six central aspects of a healthy and well-rounded life. \nThe graphic distributions presented in this report show the number of providers who scored within the specified range of scores as shown on the horizontal/x-axis. With each distribution, we provide the median, mean, and standard deviation (SD). The median represents the score that falls at the middle of a distribution (50th percentile). The mean represents the average score. The standard deviation refers to the amount of variation or dispersion there is in a distribution of scores, or how much scores tend to spread out from the mean. A small standard deviation (relative to the mean score) indicates the majority of scores tend be very close to the mean. In this case, scores may cluster around the mean (or average) score, with only a few scores farther away from the mean (outliers). By contrast, a large standard deviation (relative to the mean score) indicates that scores are more widely spread-out from the average score (mean).8 \n \n8 Standard deviations are sometimes used to determine significant differences between scores within a distribution; however, this application requires a distribution to be normally distributed (similar to a bell curve). Most of the distributions presented in this report do not meet the requirements of a normal distribution, a normal bell curve. Therefore, standard deviations may be used to determine the variation of scores around a mean but should not be used to determine significant differences between scores, or between the score and the mean. 
\n \nPage 72 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Person Centered Review (PCR) \nFigure 33 shows the overall score for the 484 PCRs as well as the score for each of the tools completed during the PCR process. The overall score for the PCR was 93.8 percent. Like the QEPR overall score, the PCR score uses results from four tools: the Individual Interview, Staff Interview, Observation, and PRR. The Observation score was highest at just over 98 percent, followed by the Staff (96.4%) and Individual (95.1%) Interviews. The PRR was the lowest-scoring area, averaging 79 percent. The Support Coordinator Interview showed a higher average score than the Support Coordinator Record Review, 90.8 percent and 79.9 percent respectively. The distribution in Figure 34 shows that approximately 79 percent of PCR scores (N=382) were 90 percent or greater. \n \nPage 73 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Figure 33. PCR Scores, Overall and by Tool (N = 484) \nIndividual Interview \n95.1% \n \nObservation \n98.4% \n \nOverall Score 93.8% \n \nStaff Interview \n96.4% \n \nSupport Coordinator \nInterview \n90.8% \n \nPRR 79.0% \n \nSupport Coordinator \nRecord Review \n79.9% \n \nFigure 34. PCR Overall Scores \n \n(N = 484) \n \n354 \n \n88 \n \n10 \n \n32 \n \nPage 74 \n \nMedian 95.2% \n \nMean 93.8% \n \nStandard Deviation 5.2% \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Quality Enhancement Provider Review (QEPR) \nFigure 35 shows the average overall score for the 100 providers who participated in a QEPR, and scores for each tool used in the review process. The overall score was 88.4 percent. The Observation score was highest at just over 98 percent, followed by the Staff (96.9%) and Individual (95.1%) Interviews. The lowest-scoring area was the PRR (73.6%). 
In addition to the overall score, providers also received a score for Staff Qualifications and Training (82.4%) and the DDSS review (99.9%). \nFigure 35. QEPR Scores, Overall and by Tool (N = 100) \nIndividual Interview \n95.1 % \n \nObservation \n98.2% \nQualifications \u0026 \nTraining 82.4% \n \nOverall Score 88.4% \nPRR 73.6% \n \nStaff Interview \n96.9% \nDD Service Specific 99.9% \n \nPage 75 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Figure 36 shows the distribution of the 100 QEPRs/providers across scores. The mean overall score was 88.4 percent with a standard deviation of 6.1 percent, with 49 providers showing a score of 90 percent or higher. \n \nFigure 36. QEPR Overall Scores \n \n(N = 100) \n \n45 49 \n \nMedian 89.7% \n \n15 \n \nMean 88.4% \n \nStandard Deviation 6.1% \n \nPage 76 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Section 4e. Person Centered Review \nPCR Scores by Focus Outcome Area (FOA) \nThe overall score for each review is divided into six FOAs. Each FOA score is calculated with the combined results from the Individual Interview, Staff Interview, Observation (IOSA), and PRR.9 Results by FOA are shown in Figure 37. \n \nFigure 37. PCR FOA and Overall Score (N = 484) \n \nRights 96.6% \nChoice 93.3% \n \nWhole Health 93.7% \nOverall Score 93.8% \nCommunity Life \n83.0% \n \nSafety 97.1% \nPersonCentered Practices \n90.6% \n \n9 The Support Coordinator Record Review and Interview are also categorized into the six FOAs. However, we exclude these in the PCR FOA Score calculations to more closely resemble the QEPR calculations. Those results are shown separately. \n \nPage 77 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Table 22 displays the median and mean scores for each FOA, as well as the standard deviation for the PCRs. 
Mean scores for each FOA ranged from a low of 86.5 percent for Community Life to a high of 98.1 percent for Safety. The community life data suggest that individuals' needs and expectations related to community integration and the development of valued social roles for the individuals may need to be addressed. \n \nTable 22. PCR Scores by FOA \n \nTool Name \nPerson Centered Practices Community Life Choice Rights Whole Health Safety \n \nMedian \n93.4% 86.5% 96.3% 98.1% 95.4% 98.1% \n \nMean \n90.6% 83.0% 93.3% 96.6% 93.7% 97.1% \n \nStandard Deviation \n9.0% 16.1% 7.5% 3.8% 7.2% 3.4% \n \nPCR Scores by Tool \nEvery PCR uses multiple interviews, observations, and record reviews. Table 23 displays, for each tool of the PCR, the number of interviews or record reviews completed, as well as the median and mean scores and standard deviations. Provider documentation (PRR) and the interview with the support coordinator showed the lowest mean scores, 79.0 percent and 79.9 percent respectively. Distributions of the number of PCRs by score for each of these tools are displayed in Figures 38-43.10 \n \n10 Of the 484 PCRs completed there were 484 individual interviews, and 412 individuals eligible for one or more observation for a total of 578 observations, 833 staff interviewed, etc. Only individuals receiving residential or day services receive an observation. The N sizes displayed in Table 23 are the number of \"tools\" completed for \n \nPage 78 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Table 23. 
PCR Scores by Tool \n \nTool Name \n \nN Median Mean \n \nIndividual Interview \n \n484 \n \n97.1% \n \n95.1% \n \nObservation \n \n578 \n \n99.5% \n \n98.4% \n \nStaff Interview \n \n833 \n \n97.7% \n \n96.4% \n \nPRR \n \n558 \n \n79.1% \n \n79.0% \n \nSupport Coordinator Record Review 484 \n \n98.4% \n \n90.8% \n \nSupport Coordinator Interview \n \n477 \n \n84.1% \n \n79.9% \n \nStandard Deviation \n5.5% 2.9% 4.3% 14.4% 22.2% 19.2% \n \neach component of the review and may differ from the number of PCRs shown in Figures 38-43, which show the distribution of the number of PCRs by score and by tool. \n \nPage 79 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Page 80 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n PCR Scores by Tool and Focused Outcome Area (FOA) \nAs described previously, the indicators used in each review tool are divided into six FOAs. In this section, results by FOA for the PCR are presented for each tool used in calculating the overall score for the PCR process--Individual Interview, Observation, Staff Interview and PRR (see Figure 44). The SCRR and SCI are also divided into FOAs. The results for these tools are in Figure 45. \nIndividual Interview As part of the PCRs completed this year, 484 individuals were interviewed. Scores for the PCR Individual Interview ranged from 83.6 percent for Community Life to 98.7 percent for Rights, with a mean of 95.1 percent. Individuals were least likely to be participating in their communities or provided opportunities to develop meaningful social roles. \n \nPage 81 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Observations Observations were completed at 412 locations (residential or day services), with an average score of 98.2 percent. Nearly 90 percent of the Observations showed scores of above 95 percent. There was very little variation across the different Focused Outcome Areas. 
\nStaff Interviews PCRs included 833 staff interviews, with an average score of 96.4 percent. Ninety percent of the staff scored above 90 percent. Consistent with previous findings, the Staff Interviews reflect high scores for all FOAs, with Community Life the only area scoring under 95 percent (86.6%). \nProvider Record Reviews The PCRs included 558 provider record reviews. Results by FOA ranged from a low of 71.6 percent for Choice to a high of 84.3 percent for Rights, with all PRR FOAs showing lower results than for the Interviews or Observations. The lowest scores were in the areas of Person Centered practices, Choice, and Community Life. The lower score for choice suggests that providers may not be consistently documenting efforts of providing individuals with information to make informed choices regarding supports and services, community participation, and daily activities. \n \nPage 82 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Figure 44. PCR Scores by Tool and Focused Outcome Area \n \nPerson Centered Practices \nCommunity Life \n \nChoice \n \nRights \n \nWhole Health \n \nSafety \n \nPCR Sample \n \n0% \n \n25% \n \nIndividual Interview (N = 483) Staff Interview (N = 833) \n \n77.7% \n \n92.3% 95.2% 96.1% \n \n83.6% 95.1% \n86.6% 73.7% \n \n71.6% \n \n98.0% 96.9% 96.3% \n \n84.3% \n \n98.7% 98.9% 98.3% \n \n80.2% \n \n93.8% 98.5% \n95.5% \n \n83.8% \n \n96.6% 99.8% 98.3% \n \n79.0% \n \n95.1% 98.4% \n96.4% \n \n50% \n \n75% \n \n100% \n \nObservations (N = 578) Provider Record Review (N = 558) \n \nPage 83 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Support Coordinator Record Review and Interview For the 484 PCRs completed this year, 484 SCRRs and 477 SCIs were completed with a mean score of 79.9 percent and 90.8 percent respectively. Results by FOA are presented in Figure 45. 
The lowest-scoring areas for the SCRR were in Community Life (67.3%), Rights (73.4%), and Choice (74.1%). The lowest-scoring SCI areas were in Community Life (82.3%) and Rights (86.3%). With the exception of Safety, documentation appears to be consistently lower in providing evidence of supporting individuals, than interviews with the support coordinator might otherwise indicate. \n \n100% 90% 80% \n \nFigure 45. Support Coordinator Record Review and Interview Scores by Focused Outcome Areas \n \n90.8% \n \n91.1% \n \n79.9% \n \n81.1% \n \n92.3% 82.3% \n \n94.1% \n \n92.7% \n \n94.6% 92.6% \n \n86.3% \n \n74.1% \n \n73.4% \n \n70% \n \n67.3% \n \n60% \n \n50% \n \nPCR Sample Person Community \n \nCentered \n \nLife \n \nPractices \n \nChoice \n \nRights \n \nWhole Health \n \nRecord Review (N = 484) Interview (N = 477) \n \nSafety \n \nPage 84 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Section 4f: Quality Enhancement Provider Review (QEPR) \nQEPR Scores by Provider Size \nFigure 46 displays the distribution of the QEPR Overall Scores and the Qualification and Training (Q\u0026T) scores by size of the organization.11 Scores were generally lower on the Q\u0026T component of the QEPR than for the overall scores but there is little variation across the size categories within any of the components of the QEPR. \nLarge providers scored below the average for all QEPRs on the Overall Score (86.1% compared to 88.4%) and on Q\u0026T (79.8% compared to 82.4%). Small and medium providers scored at or above the average. Small providers had the highest average Overall Score (90.4%), and medium providers had the highest average score on Q\u0026T (82.2%). All providers, regardless of size, scored above 99 percent on the DDSS tool (not shown in the figure). \n \n11 See Table 20 on page 69 for details regarding provider size categories. \n \nPage 85 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Figure 46. 
QEPR Scores by Provider Size \n \nOverall Score \n \n88.4% 90.4% \n89.0% 86.1% \n \nQualifications \u0026 Training 50% \n \n60% \n \n80.9% 80.0% \n82.2% 79.8% \n \n70% \n \n80% \n \n90% \n \n100% \n \nQEPR Sample (N = 100) \n \nSmall (N = 47) \n \nMedium (N = 36) \n \nLarge (N = 17) \n \nThe distributions of QEPRs/providers across scores, both the Overall Score and for Q\u0026T, are shown in Figures 47-52. The standard deviations for providers of all sizes are greater for Q\u0026T than for the overall scores, indicating a broader dispersion across responses. \n \nPage 86 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Figure 47. Overall Score: \n \nSmall Providers \n \n(N = 47) \n \n29 \n \n16 \n \nMedian 91.5% \n \n11 \n \nMean 90.4% \n \nStandard Deviation \n6.3% \n \nFigure 48. Overall Score: \nMedium Providers \n(N = 36) \n16 17 \n \n3 \n \nMedian 89.6% \n \nMean 89.0% \n \nStandard Deviation \n6.3% \n \nFigure 49. Overall Score: Large Providers (N = 17) 13 \n \nPage 87 \n \n3 1 \n \nMedian 86.9% \n \nMean 86.1% \n \nStandard Deviation \n4.0% \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Figure 50. Qualifications and Training \n \nScore: Small Providers \n \n(N = 47) \n \n15 \n \n12 \n \n6 5 \n4 3 \n \n1 \n \n1 \n \nMedian 87.2% \n \nMean 80.0% \n \nStandard Deviation \n16.1% \n \nFigure 51. Qualifications and Training Score: Medium Providers (N = 36) \n \n8 \n \n8 \n \n7 \n \n5 \n \n5 \n \n3 \n \nMedian 86.0% \n \nMean 82.2% \n \nStandard Deviation \n14.2% \n \nPage 88 \n \nFigure 52.Qualifications and Training \n \nScore: Large Providers \n \n(N = 17) \n \n6 \n \n5 \n \n3 2 \n1 \n \nMedian 81.4% \n \nMean 79.8% \n \nStandard Deviation \n8.7% \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n QEPR Scores by Focused Outcome Area (FOA) \nThe Overall Score for each review is divided into six FOAs. 
Each FOA score is calculated with the combined results from the Individual Interview, Staff Interview, Observation (IOSA), and PRR. Results for the 100 QEPRs are shown by FOA in Figure 53. \n \nFigure 53. QEPR by FOA and Overall Score (N = 100) \n \nRights 92.4% \nChoice 86.2% \n \nWhole Health 89.1% \nOverall Score 88.3% \nCommunity Life \n78.0% \n \nSafety 93.9% \nPersonCentered Practices \n82.6% \n \nTable 24 displays the median and mean scores for each FOA as well as the standard deviation. Mean scores ranged from a low of 78.0 percent for Community Life to a high of 93.9 percent for Safety. Community Life data suggest providers may not always meet the needs and \n \nPage 89 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n expectations related to community integration and the development of valued social roles for the individuals being served. Community Life also shows the greatest degree of variation from the mean, with a standard deviation of 14.6 percent. \n \nTable 24. QEPR Scores by FOA \n \nTool Name \nPerson-Centered Practices Community Life Choice Rights Whole Health Safety \n \nMedian \n85.9% 80.3% 88.7% 94.2% 91.4% 95.0% \n \nMean \n82.6% 78.0% 86.2% 92.4% 89.1% 93.9% \n \nStandard Deviation \n9.7% 14.6% 9.3% 5.9% 8.7% 4.3% \n \nPage 90 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n QEPR Review Scores by Tool \nEvery QEPR contains multiple interviews, observations, and record reviews. Table 25 displays the number of interviews or record reviews completed, as well as the median and mean scores and the standard deviation. On average, the mean PRR score (73.6%) based on documentation review, was lower than mean scores reflected for interviews or observations, each over 95 percent. The distributions of providers by score for each of these tools are displayed in Figures 54-59. \n \nTable 25. 
QEPR Scores by Tool \n \nTool Name \nIndividual Interview Observation Staff Interview PRR Qualifications and Training DDSS \n \nN12 \n667 409 353 1,377 1,044 1,397 \n \nMedian \n96.2% 98.6% 97.9% 77.3% 84.5% 100.0% \n \nMean \n95.1% 98.2% 96.9% 73.6% 80.9% 99.9% \n \nStandard Deviation \n4.7% 3.4% 3.6% 13.2% 14.4% 0.4% \n \n12 Each QEPR consists of multiple interviews, observations, and record reviews. Also, if a provider does not offer residential or day services, no observations are completed for that QEPR. Therefore, in our sample, of the 100 QEPRs, 92 providers had one or more observations completed, for a total of 409 observations. The N sizes displayed in Table 25 may differ from the number of reviews shown in Figures 53-58, which are based on the average scores per QEPR to show the distribution of providers across scores. \n \nPage 91 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Distribution of the Number of QEPRS by Score and by Tool \n \nFigure 54. QEPR Individual Interview Scores (N = 100) 86 \n \nFigure 55. QEPR Observation Scores (N = 92) 72 \n \n11 \n \n2 \n \n1 \n \nMedian 96.2% \n \nMean 95.1% \n \nStandard Deviation \n4.7% \n \nFigure 56. QEPR Staff Interview Scores (N = 99) 84 \n \n6 \n \n9 \n \nMedian 97.9% \n \nMean 96.9% \n \nStandard Deviation \n3.6% \n \n18 11 \n \nMedian 98.6% \n \nMean 98.2% \n \nStandard Deviation \n3.4% \n \nFigure 57. QEPR Provider Record Review Scores (N = 99) 34 \n26 \n18 11 \n1144 \n \nMedian 77.3% \n \nMean 73.6% \n \nStandard Deviation \n13.2% \n \nPage 92 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Figure 58. QEPR Qualifications and Training Scores (N = 100) 25 26 \n20 \n \n11 9 7 \n \n1 \n \n1 \n \nFigure 59. 
QEPR DDSS Scores (N = 100) 91 \n9 \n \nMedian 84.5% \n \nMean 80.9% \n \nStandard Deviation \n14.4% \n \nMedian 100.0% \n \nMean 99.9% \n \nStandard Deviation \nN/A \n \nAdministrative Qualifications and Training Every QEPR includes a review of staff qualifications and training. A sample of 1,041 staff records was reviewed, ensuring that all services were included. The primary purpose of the record review is to confirm that relevant staff information is accurate and up to date (e.g., driver's license, performance evaluations), and that staff have received all required trainings. Figure 58 shows the distribution of QEPRs/providers across Q\u0026T scores. The mean score was 80.9 percent. This tool does not include results by FOA. \n \nPage 93 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Developmental Disability Service Specific (DDSS) Review The average score on the DDSS tool was close to perfect, with a mean score of 99.9 percent. Only nine of the 100 providers scored below 100 percent in this area (Figure 59). \nQEPR Scores by Tool and FOA \nAs described previously, the indicators used in each review tool are divided into six FOAs. In this section, results by FOA for the QEPR are presented for each tool used in calculating the Overall score for the QEPR process--Individual Interview, Observation, Staff Interview and PRR (see Figure 60). \nIndividual Interview As part of the 100 QEPRs completed this year, 667 individuals participated in the Individual Interview, with a mean score 95.1 percent. Each of the FOAs scored above 90 percent, with the exception of Community Life (84.1%). \nObservations Observations are completed for providers providing residential and day supports (92 of the 100 providers). The objective of the observation is to see how services are being provided and to identify strengths or areas needing improvement regarding quality, health, or safety. 
For the 92 providers offering at least one of the eligible services, 409 Observations were completed with a mean score of 98.2 percent. All but two QEPRs received scores higher than 90 percent. Data indicate there was minimal variation across FOAs (Figure 60), scores ranging from a low of 95.3 percent for Person-Centered Practices to a high of 99.4 percent for Safety. \n \nPage 94 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Staff Interview As part of the QEPR, 353 Staff Interviews were completed, with an average score of 96.9 percent. Similar to the Individual Interview, Staff Interview results were lowest for Community Life (87.9%), approximately 10 percentage points below the average for all QEPRs. Provider Record Review Every QEPR includes the PRR, a review of the provider's record for the individuals selected as part of the sample, ensuring that each service the provider offers is represented in the sample. Over the course of the year, 1,377 records were reviewed as part of the 100 QEPRs. The scores for each FOA range from a low of 64.2 percent for Choice to a high of 79.5 percent for Safety. Evidence of supporting individuals appears to be consistently lower in provider documentation than from interviews and observations. \n\"[I'm] doing things that I haven't done in 18 years; things I never thought I would be able to do again, like attending football games\". 
\n \nPage 95 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Figure 60.QEPR Scores by Tool and Focused Outcome Areas \n \nPerson Centered Practices \nCommunity Life \n \nChoice \n \nRights \n \nWhole Health \n \nSafety \n \nQEPR Sample \n \n0% \n \n25% \n \nIndividual Interview (N = 667) Staff Interview (n = 353) \n \n71.4% \n \n91.9% 95.3% 96.7% \n \n70.2% \n \n84.1% 95.4% \n87.9% \n \n64.2% \n \n97.6% 97.1% 97.3% \n \n79.2% \n \n98.4% 97.9% 98.4% \n \n75.4% \n \n94.1% 98.7% \n96.3% \n \n79.5% \n \n96.7% 99.4% 98.4% \n \n73.6% \n \n95.1% 98.2% 96.9% \n \n50% \n \n75% \n \n100% \n \nObservations (N = 409) Provider Record Review (n = 1,377) \n \nPage 96 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Section 4g: ISP QA Checklist \nISP Written to Support \nThe ISP Quality Assurance (QA) checklist is used to provide an overall rating for each service plan, based upon the degree to which the ISP is written to provide a meaningful life for the individual receiving services. There are three different categories for each ISP. \nService Life: The ISP supports a life with basic paid services and paid supports. Needs that are \"important for\" the person are addressed, such as health and safety. However, there is not an organized effort to support a person in obtaining other expressed desires that are \"important to\" the person, such as getting a driver's license, having a home, or acting in a play. The individual is not connected to the community and has not developed social roles, but expresses a desire to do so. \nGood but Paid Life: The ISP supports a life with connections to various supports and services (paid and non-paid). Expressed goals that are \"important to\" the person are present, indicating the person is obtaining goals and desires beyond basic health and safety needs. The person may go out into the community but with only limited integration into community activities. 
For example, the person may go to church or participate in Special Olympics; however, real community connections are lacking, and the person indicates that he/she wants to achieve more. \nCommunity Life: The ISP supports a life with the desired level of integration in the community and in various settings preferred by the person. The person has friends and support beyond providers and family members. The person has developed social roles that are meaningful to him/her, such as belonging to a Red Hat club or a book club, or having employment in a competitive, rather than segregated, environment. Rather than just going to church, the \n \nPage 97 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n person may be an usher at the church or sing in the choir. Relationships developed in the community are reciprocal. The ISP is written with goals that help support the person in moving toward a community life, as he/she chooses. \nResults in Figure 61 indicate that a majority of ISPs reviewed, 78.6 percent, were written to support a \"Good but Paid Life.\" The smallest percent, 9.2 percent, supported a service life, while 12.2 percent supported a community life. The ISP results support our findings within each of the tools: although most individuals' needs regarding health and safety are being addressed, there is opportunity for improvement to achieve integration in the community or in various settings. \n \nFigure 61. ISP QA Life question (N = 1,137) \n \n100% 75% \n \n78.6% \n \n50% \n \n25% \n9.2% \n \n12.2% \n \n0% \n \nService Life \n \nGood But Paid Life Community Life \n \nPage 98 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n ISP Expectations \nQuality assessors reviewed 12 different sections in the ISP. Each section represents an expectation, listed in Table 26, and has four indicators that must be addressed in the plan. 
The expectation is rated on a scale from zero to four, zero meaning the section is blank or inadequately addresses the requirements for any of the indicators and four meaning that 100 percent of the indicators or requirements in the section are adequately addressed in the ISP. \nData in Table 26 show the percent of ISPs at each rating on the 12 different expectations. For example, results show that 44.5 percent of ISPs reviewed had all four indicators present (addressed) for the expectation regarding the communication chart expectation. Data indicate 54.0 percent of all ISPs reviewed addressed all elements for all of the sections within the ISP. The expectations measuring whether the person's hopes and dreams and goals are person centered and whether goals are person centered were most likely to have zero or one indicator addressed, 17.6 percent and 20.1 percent respectively; however, most ISPs addressed all four requirements for rights, psychotropic medications, behavior supports and the health and safety review section, 93.1 percent and 92.3 percent respectively. \n \nPage 99 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Table 26. ISP QA Checklist Ratings by Expectation \n \nN = 1,139 \n \nRatings \n \nISP QA Checklist Description \n \n0 1 2 3 \n \nRelationship Map \u0026 discussion on ways to develop relationships. \n \n0.3% 4.6% 18.6% 34.7% \n \nCommunication chart Person-centered important to/for Hopes and Dreams: if you could be, do or include anything in your life what would it be? 
\n \n0.6% 0.8% 9.2% 44.9% 0.3% 0.1% 4.2% 24.6% \n9.9% 7.7% 13.0% 22.7% \n \nService summary Rights, psychotropic medications, behavior supports section Meeting minutes \n \n0.9% 4.7% 12.2% 22.2% \n0.1% 0.0% 0.8% 6.0% 1.6% 6.9% 15.6% 23.0% \n \nSIS completed and support needs are addressed in the ISP \n \n0.1% 0.1% 3.0% 42.7% \n \nHealth and safety review section completed accurately and thoroughly \n \n0.3% 0.0% 0.2% 7.2% \n \nGoals are Person Centered Training goal action plan \n \n5.7% 14.4% 20.5% 25.9% 1.1% 6.0% 7.2% 58.7% \n \nAction plans/objectives \n \n0.1% 5.8% 21.3% 42.0% \n \nTotal \n \n1.7% 4.3% 10.5% 29.5% \n \n4 \n41.9% 44.5% 70.8% \n46.6% 60.0% \n93.1% 52.9% \n54.1% \n92.3% 33.5% 26.9% 30.7% 54.0% \n \nSection 4h: Quality and Technical Assistance Consultation (QTAC) \nThere were 238 QTACs completed, of which 99 were completed at the provider level and 139 at the individual level. Other than QEPR follow-ups, the most common referral reason at the provider level was the \"request for two-day Person Centered thinking training\" (N = 10). At the individual level, the most common referral reasons were: \"medication error(s)\" (n = 53), \"health concern for the person(s)\" (n = 21), and \"goals not being tracked\" (n = 21). \n \nPage 100 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Table 27. 
QTAC Referral Reasons \n \nProvider Level: \n \nN \n \nQEPR follow-up \n \n72 \n \nRequest for two-day Person Centered thinking training \n \n10 \n \nReview of person centered documentation \n \n7 \n \nOverall score on QEPR \n \n4 \n \nOther \n \n4 \n \nPerson centered training needed \n \n2 \n \nRed Alert follow up \n \n2 \n \nIndividual Level: \n \nMedication error(s) \n \n53 \n \nGoals not being tracked \n \n21 \n \nHealth concern for the person(s) \n \n21 \n \nLack of bowel movement tracking \n \n20 \n \nLack of financial tracking \n \n20 \n \nLack of HRST tracking \n \n18 \n \nSafety concern for the person(s) \n \n14 \n \nLack of community exposure \n \n8 \n \nLack of AIMS testing \n \n7 \n \nRed alert follow up \n \n7 \n \nDoctor's orders not being followed \n \n6 \n \nEnvironmental concern(s) \n \n5 \n \nHealth concern for the person(s) \n \n5 \n \nLack of behavior support plan \n \n5 \n \nLack of seizure tracking \n \n5 \n \nRights concern for the person(s) \n \n5 \n \nLack of blood pressure tracking \n \n4 \n \nLack of bowel movement protocol \n \n4 \n \nLack of fire drills or other safety drills \n \n4 \n \nFollow up to previous QTAC \n \n3 \n \nLack of blood sugar tracking \n \n3 \n \nSafety practice concern(s) for the provider \n \n3 \n \nAssistance with tracking practices \n \n2 \n \nBehavior support plan not followed or supported by staff \n \n2 \n \nPage 101 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Human rights practice concern(s) for the provider \n \n2 \n \nLack of behavior support plan tracking \n \n2 \n \nLack of blood pressure protocol \n \n2 \n \nLack of medical follow up after a hospital visit \n \n2 \n \nSeizure protocol not followed or supported by staff \n \n2 \n \nLack of assessment(s) \n \n1 \n \nLack of blood sugar protocol \n \n1 \n \nLack of person centered documentation \n \n1 \n \nPerson centered training needed \n \n1 \n \nRequest for two-day Person Centered thinking training \n \n1 \n 
\nReview of person centered documentation \n \n1 \n \nTable 28 provides information about the QTAC region, the focused outcome area, referral source and referral type, and technical assistance provided. \n \nTable 28. QTACs by Region and Referral Source/Type \n \nQTAC Type \n \nProvider Level \n \nIndividual Level \n \nN \n \n% \n \nN \n \n% \n \nRegion: \n \n1 \n \n12 \n \n12.1 \n \n9 \n \n6.5 \n \n2 \n \n11 \n \n11.1 \n \n29 \n \n20.9 \n \n3 \n \n41 \n \n41.4 \n \n65 \n \n46.8 \n \n4 \n \n6 \n \n6.1 \n \n4 \n \n2.9 \n \n5 \n \n16 \n \n16.2 \n \n13 \n \n9.4 \n \n6 \n \n13 \n \n13.1 \n \n19 \n \n13.7 \n \nTotal \n \n99 \n \n100.0 \n \n139 \n \n100.0 \n \nReferral Source: \n \nGeorgia Collaborative \n \nASO \n \n82 \n \n82.8 \n \n10 \n \n7.2 \n \nProvider \n \n14 \n \n14.1 \n \n0 \n \n0.0 \n \nDBHDD Regional Staff \n \n3 \n \n3.0 \n \n129 \n \n93.5 \n \nPage 102 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Table 28. QTACs by Region and Referral Source/Type \n \nQTAC Type \n \nProvider Level \n \nIndividual Level \n \nN \n \n% \n \nN \n \n% \n \nTotal \n \n99 \n \n100.0 \n \n139 \n \n100.0 \n \nReferral Type: \n \nQEPR Score \n \n78 \n \n78.8 \n \n0 \n \n- \n \nProvider Request \n \n19 \n \n19.2 \n \n0 \n \n- \n \nRed Alert (PCR/QEPR) \n \n3 \n \n3.0 \n \n8 \n \n5.7 \n \nSupport Coordinator \n \nMonitoring \n \n0 \n \n- \n \n128 \n \n92.0 \n \nComplaint/Grievances \n \n0 \n \n- \n \n1 \n \n.07 \n \nTotal13 \n \n100 \n \n137 \n \nTechnical assistance is provided at every QTAC. The percent of each type of technical assistance provided is shown in Figure 62 for QTACs completed at the provider versus the individual level. The most common type of technical assistance offered for provider level QTACs was role play (56.6%) and the most common type offered at the individual level was individual discussion (77.9%). 
\n \n13 Totals do not sum to 100% because some QTACS had multiple referral types while some did not have a referral type recorded. \n \nPage 103 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Figure 62. Technical Assistance Provided for QTAC \n100% \n \n77.9% 75% \n \n56.6% \n \n49.5% \n \n50% \n \n39.7% \n \n42.4% \n \n40.4% 36.0% \n \n25% \n \n19.9% \n \n23.2% \n \n14.0% \n \n4.0% \n \n2.2% \n \n7.1% \n \n0% \n \n1:1Training Brainstorming Group \n \nIndividual \n \nGroup \n \nRole Play \n \nOther \n \nTraining Discussion Discussion \n \nProvider Individual \n \nSection 4i: IDD Summary of Findings \nBeginning in fiscal year 2016, the Georgia Collaborative implemented revised review tools to evaluate the quality of intellectual and developmental disabilities services and supports--the QEPR and PCR. The tools are based upon the six FOAs identified throughout this report. Expectations, measured with a various number of indicators, are based on how services and systems should be delivered. Indicators address a wide range of requirements and best practices within each FOA. \nDuring the year, 100 QEPRs and 484 PCRs were completed. Overall FOA scores from both review types indicate three areas in which the service delivery system appears to do well: \n \nPage 104 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n safety, rights and whole health. These areas are the foundation of supports provided to people with intellectual and developmental disabilities, and help protect them from potential risk (Table 29). Providers seem to have effective systems and practices in place to support individuals being safe in their environments, exercising rights, and maintaining their health. \nOn average, QEPR results are lower than PCR results on each FOA. QEPR results for Choice, Person Centered Practices, and Community Life were lower than shown for Safety, Rights, or Health. 
PCR results were also lowest for Community Life (Table 29).14 These areas seem to be more challenging for providers to meet the expectations of offering informed choice, providing person centered practices, and successfully integrating people into communities as desired. \n \nTable 29. Summary FOA Scores \n \nFOA \n \nQEPR \n \nPerson Centered Practices Choice \nCommunity Life Safety Rights Health \n \n82.6% 86.2% 78.0% 93.9% 92.4% 89.1% \n \nPCR \n90.6% 93.3% 83.0% 97.1% 96.6% 93.7% \n \nIn general, findings from reviews are quite positive, as indicated in the overall results in Table 29; however, there are some specific areas within the FOAs that could benefit from additional attention or analysis to identify trends or areas where quality initiatives may be directed. \n \n14 The FOA scores include results from the Individual and Staff Interviews, Observations, and PRRs. \n \nPage 105 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Community Life \nAccording to recent Center for Medicaid and Medicare Services (CMS) standards, individuals with intellectual and developmental disabilities should be able to participate in their communities in the same manner as people who do not have a disability. Therefore, helping providers develop systems to address this goal is imperative. During both the PCR and QEPR, individuals indicated they are often not: \n Getting new experiences in the community  Supported to join clubs, social groups, or other community groups, such as the YMCA \nor neighborhood association  Learning about, identifying or developing new social roles \nThe lowest-scoring indicators for Community Life in the Observations for both review types indicate individuals are not provided opportunities to develop social roles. 
Approximately 30 percent of staff interviewed could not describe how the person is: \n Provided opportunities to develop social roles  Exposed to new community experiences  Provided opportunities to develop community employment \nIn addition, provider documentation lacks evidence to demonstrate how the person is supported to experience the community, develop social roles and natural supports, engage in the community, or have opportunities to seek employment. \n \nPage 106 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Choice \nInformed choice is the cornerstone of helping individuals understand and achieve meaningful goals and direct their own supports and services. Without informed choice, person centered practices are not possible. Data indicate the following: \n The person (from interview) is not always involved in life's decisions  Staff (from interview) is not always providing options of competitive \nemployment/supported employment  Individual Interview results indicate the person's preferences related to goals of \nemployment are not being actively pursued (not a readiness model)  Providers are often not documenting how informed choice is provided to individuals, \nparticularly regarding competitive employment and community participation/social interaction \n \nPerson Centered Practices \nAnother key area of focus for CMS is to ensure that services and supports are provided using person centered practices and planning. To the extent possible, the person should be at the center of all decisions, plans, and goals. This means that providers need to understand who the person is and what he or she may want, dream and hope for. 
Data indicate the following: \n Individual Interview results suggest the person often is unable to identify something new or experienced, or something learned \n Provider documentation often does not reflect the person's dreams and hopes, talents or strengths \n Providers do not always adequately describe the person's progress toward goals or the person's response to services or treatment \n \nPage 107 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n  Approximately 25 percent of staff interviewed were not formally reviewing, with the person, the person's goals and objectives \n \nProvider Documentation \nProvider documentation through the PRR is consistently lower when results are compared across the review tools and with the FOAs. While individuals and staff may indicate services are provided to meet a particular need or goal, documentation does not always reflect this. \n \nSection 4j: Recommendations \nBased upon a review of data, we provide the following recommendations:  Pursue the development of training for direct support professionals and support coordinators to understand how to connect individuals to their communities in meaningful ways, i.e., based on their interests, talents, strengths, desires and what is important to and for them.  To enhance competitive employment, The Georgia Collaborative, in partnership with DBHDD and the Georgia Vocational Rehabilitation Services, could develop a training module that targets support coordination, to promote an understanding of employment options and resources available to people with intellectual and developmental disabilities. Training content may include how to present this information to people and their families, as well as success stories from people who are in competitive employment.  
The Georgia Collaborative could coordinate with the quality councils to develop training focused on problematic documentation areas, to help providers improve their overall documentation efforts. \n \nPage 108 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n SECTION 5: BEHAVIORAL HEALTH AND INTELLECTUAL DEVELOPMENTAL DISABILITIES COMPARISONS \nOverall Results by Focused Outcome Area--Behavioral Health and Intellectual and Developmental Disabilities \nThe Georgia Collaborative uses the focused outcome areas (FOAs), as described throughout this report, in both the BH and IDD review processes. While the concepts are the same, there are some differences in the data collection methods. Data for BH FOAs are taken only from record reviews; while for intellectual and developmental disability providers, indicators in the interviews, observations and provider/support coordinator record reviews are categorized into the six FOAs. Therefore, for comparison to the BH FOA data, we use IDD data from only the provider record reviews. Figure 63 demonstrates the overall scores for the focused outcome areas derived from the QEPR provider record reviews and BHQR record reviews. \nResults (Figure 63) indicate that behavioral health providers appear to perform somewhat better than intellectual and developmental disability providers in documenting how FOAs are addressed for individuals receiving services, with average scores of 85 percent and 74 percent respectively.15 Intellectual and developmental disability providers show lower scores on each FOA with the exception of health. The greatest differences are in Choice and Person Centered Practices, on which intellectual and developmental disability provider documentation is 28 and 21 percentage points lower, respectively. \n \n15 As shown in the report, IDD data from the IOSA show higher results for all of the FOAs than the PRR/documentation data indicate. 
\n \nPage 109 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Figure 63. Intellectual and Developmental Disabilities and Behavioral Health FOA Score Comparisons \n \nQEPR \n \nFOA Overall Score: 74% \n Person Centered : 71%  Community: 70%  Choice: 64%  Rights: 79%  Whole Health: 75%  Safety: 81% \n \nBHQR \n \nFOA Overall Score: 85% \n Person Centered: 92%  Community: 87%  Choice: 92%  Rights: 90%  Whole Health: 63%  Safety: 80% \n \nPage 110 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Record Review Results by Indicator/Question and FOA \nTable 30 displays a sample of questions used in the record reviews, from the QEPR and BHQR. These specific questions from each of the six FOAs are identical in both processes (BH and IDD) and are shown to demonstrate areas of congruence and areas for improvement between BH and IDD. Because review protocols between the two programs may differ, as well as the type of information reviewed and procedures for collecting the data, comparative analysis should proceed with caution. \n \nResults between the two programs are similar in terms of developing and reassessing plans \n \nbased on the person's need (Table 30). However, intellectual and developmental disability \n \nproviders appear to be less likely to document informed choice, how education is provided on \n \nthe risk and benefits of medication, how they ensure that rights forms are signed by \n \nindividuals, and how community involvement for individuals is achieved. Behavioral health \n \nproviders were less likely to document how they ensure that rights and responsibilities are \n \nreviewed annually with individuals receiving services. \n \nTable 30. 
Comparison Behavioral Health and IDD Record Review \n \nIndicators/Questions \n \nFOA \n \nRecord Review Questions (excerpt) \n \nIDD BH \n \nChoice \n \nInformed choice/options of supports and services \n \n75% \n \n94% \n \nPerson Centered \n \nPlan is developed and reassessed based on needs \n \n92% \n \n91% \n \nCommunity \n \nCommunity involvement is occurring according to the person's preferences \n \n34% \n \n93% \n \nWhole Health \n \nMedication education on risk/benefits Coordination/communication of services and supports \n \n43% 91% \n \n71% 61% \n \nSafety \n \nSafety/crisis plan development and monitoring \n \n89% \n \n85% \n \nPage 111 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Table 30. Comparison Behavioral Health and IDD Record Review \n \nIndicators/Questions \n \nFOA \n \nRecord Review Questions (excerpt) \n \nIDD BH \n \nRights \n \nRights/responsibilities reviewed annually with individual \n \n92% \n \n58% \n \nRights signed by individual or legal guardian \n \n67% \n \n94% \n \nIndividual and Staff Interviews  Intellectual and Developmental \nDisabilities and Behavioral Health Comparison \nThe Georgia Collaborative conducted individual and staff interviews for both intellectual and developmental disability and behavioral health providers during the onsite review processes. Intellectual and developmental disability provider interviews used in this comparison are from individuals and staff selected as part of a QEPR sample. The IDD interview sample is randomly selected and stratified by service to ensure each service rendered by the provider is represented. The BH interview samples are selected by the provider and assessors during the quality review. Both interviews focus on the quality of the organization's systems and practices. However, due to the differences in sample selection, comparisons across the programs should be made with caution. 
\n \nFindings indicate individuals receiving intellectual and developmental disability services are less likely to report they are involved in the routine review of progress toward goals or to develop new social roles (Table 31). Individuals receiving behavioral health services appear to be less likely to visit a dentist annually. Individuals appear equally likely to be satisfied with supports and services, feel they are treated with respect and participate in safety/crisis plan development. \n \nPage 112 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Table 31. Selected Questions Individual Interview: IDD and BH \n \nIDD \n \nBH \n \n(N = (N = \n \nFOA \n \nIndividual Interview Questions \n \n667) 737) \n \nChoice \n \nSatisfied with supports and services \n \n99% \n \n98% \n \nPerson Centered \n \nInvolved in the routine review of progress toward goals \n \n81% \n \n96% \n \nCommunity Opportunity to develop new social roles \n \n81% \n \n97% \n \nSeen a primary care physician in the last 12 months \n \n99% \n \n91% \n \nWhole Health Seen a dentist in the last 12 months  if they so choose \n \n88% \n \n71% \n \nSafety \n \nIndividual/person participated in the development of or review the safety/crisis plan \n \n88% \n \n88% \n \nRights \n \nFeels they are treated with respect and dignity by staff (including physicians) \n \n99.9% 99.5% \n \nStaff Interview results (Table 32) are shown to be similar for behavioral health and intellectual and developmental disability providers on the selected Choice, Whole Health, and Safety indicators. Staff is equally likely: to be aware of desired changes for supports or services; to be able to describe how health is addressed; and to be aware of cultural practices related to rights. 
Intellectual and developmental disability providers appear to be less likely to do a formal review of progress on goals with the person or to describe how the person is provided opportunities to develop social roles. However, sample methods differ and lacking statistical tests, these results may simply point to areas for further investigation. \n \nPage 113 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n Table 32. Selected Questions Staff Interview: IDD and BH \n \nIDD \n \nFOA \n \nStaff Interview Questions \n \n(N = \n \n351) \n \nChoice \n \nStaff is aware of what to do if the individual/person wants to change a support or service \n \n99.7% \n \nPerson Centered \n \nStaff formally reviews progress on goals/objectives with the individual/person \n \n77% \n \nCommunity \n \nStaff describes how the individual/person is provided opportunities to develop new social roles \n \n65% \n \nStaff is able to describe how the whole health of the Whole Health individual/person is addressed through coordination 99% \nof services \n \nSafety \n \nStaff is aware of the crisis/safety plan and can describe how it is reviewed with the individual/person, when appropriate \n \n100% \n \nRights \n \nStaff is aware of any cultural practices and beliefs related to rights and exercising those rights \n \n100% \n \nBH (N = 753) \n99.5% 98% 99% \n95% \n93% \n99.8% \n \nPage 114 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n SECTION 6: CONCLUSION \nInformation in this report reflects findings from the first year of the ASO quality reviews conducted for the new quality management system, created through extensive collaborative efforts between DBHDD and The Georgia Collaborative, to bring together various service delivery systems, databases, and quality oversight. The review processes incorporated new and revised review tools, new procedures, review of crisis stabilization units, and integrated BH and IDD reviews. 
Anecdotal information from feedback surveys indicates there are many satisfied individuals served by the system; people feel their lives have been positively affected; and they have experienced high levels of support from Georgia's network of providers. Data from this first year have yielded a baseline of network performance with which to refine and refocus efforts to continuously improve the quality of services in Georgia. As the quality management system moves into the second year of the contract, comparisons to the baseline data will help focus and drive effective and efficient quality improvement initiatives and continuous quality improvement for individuals receiving services across the state. \n \nPage 115 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n APPENDIX A: ABBREVIATIONS AND ACRONYMS \n \nAcronym \nACT AD ASAM ASO BH BHQR CIS CMS CL CST CSU DBHDD DDSS FOA II IDD IFI IOSA IRR IRP ISP QA MAR N NA NCP PCP PCR PRR QEPR Q\u0026T \n \nDefinition \nAssertive Community Treatment Addictive Diseases American Society for Addiction Medicine Administrative Services Organization Behavioral Health Behavioral Health Quality Review Consumer Information System Center for Medicaid and Medicare Services Community Life Community Support Team Crisis Stabilization Unit Department of Behavioral Health and Developmental Disabilities Developmental Disability Service Specific Review Focused Outcome Area Individual Interview Intellectual and Developmental Disability Intensive Family Intervention Individual Observation Staff Assessment Individual Record Review Individual Recovery / Resiliency Plan Individual Support Plan Quality Assurance Checklist Medication Administration Record Number in sample Not Applicable Nursing Care Plan Person Centered Practices Person Centered Review Provider Record Review Quality Enhancement Provider Review Qualifications and Training \n \nPage 116 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management 
Annual Report \n \n QTAC RN SAMHSA SCI SCRR SD SI TA YMCA \n \nQuality Technical Assistance Consultation Registered Nurse Substance Abuse Mental Health Services Administration Support Coordinator Interview Support Coordinator Record Review Standard Deviation Staff Interview Technical Assistance Young Men's Christian Association \n \nPage 117 \n \nThe Georgia Collaborative ASO | FY 2016 Quality Management Annual Report \n \n "},{"id":"dlg_ggpd_y-ga-bb400-b-pa15-b2012-belec-p-btext","title":"Annual quality management report January 2012-December 2012","collection_id":"dlg_ggpd","collection_title":"Georgia Government Publications","dcterms_contributor":["Georgia. Department of Behavioral Health and Developmental Disabilities, issuing body."],"dcterms_spatial":["United States, Georgia, 32.75042, -83.50018"],"dcterms_creator":["Georgia. Department of Behavorial Health and Developmental Disabilities"],"dc_date":["2012"],"dcterms_description":["Description based on: January 2013/December 2013 (harvested August 20, 2015 from dbhdd.georgia.gov); title from PDF title page (Georgia Government Publications database, viewed October 16, 2015)","Latest issue consulted: January 2014/December 2014 (harvested on March 25, 2015 from dbhdd.georgia.gov) (Georgia Government Publications database, viewed October 19, 2015)"],"dc_format":["application/pdf"],"dcterms_identifier":null,"dcterms_language":["eng"],"dcterms_publisher":["[Georgia?] : Georgia Collaborative ASO ; [Atlanta, Georgia] : DBHDD, [2016]-"],"dc_relation":null,"dc_right":["http://rightsstatements.org/vocab/InC/1.0/"],"dcterms_is_part_of":null,"dcterms_subject":["Georgia. 
Department of Behavioral Health and Developmental Disabilities--Evaluation","Mental health services--Georgia--Evaluation","Developmentally disabled--Services for--Georgia--Evaluation"],"dcterms_title":["Annual quality management report January 2012-December 2012","Georgia Department of Behavioral Health \u0026 Developmental Disabilities"],"dcterms_type":["Text"],"dcterms_provenance":["University of Georgia. Map and Government Information Library"],"edm_is_shown_by":["https://dlg.galileo.usg.edu/do:dlg_ggpd_y-ga-bb400-b-pa15-b2012-belec-p-btext"],"edm_is_shown_at":["https://dlg.galileo.usg.edu/id:dlg_ggpd_y-ga-bb400-b-pa15-b2012-belec-p-btext"],"dcterms_temporal":null,"dcterms_rights_holder":null,"dcterms_bibliographic_citation":null,"dlg_local_right":null,"dcterms_medium":["annual reports","state government records"],"dcterms_extent":["1 online resource"],"dlg_subject_personal":null,"iiif_manifest_url_ss":null,"dcterms_subject_fast":null,"fulltext":"Georgia Department of Behavioral Health \u0026 Developmental Disabilities \nANNUAL QUALITY MANAGEMENT REPORT January 2012 - December 2012 \nPrepared by the DBHDD Office of Quality Management February 2013 \n \n TABLE OF CONTENTS \nIntroduction Activities of the Quality Councils Stakeholder Involvement in Quality Management The Status of the Quality Management Work Plan Goals Key Performance Indicators and Outcomes DBHDD Quality Management Training Program Other Quality Management Activities Appendices Attachments \n \nPage 3 Page 3 Page 6 Page 7 Page 8 Page 24 Page 24 Page 37 \n \n2 \n \n INTRODUCTION \nThe State of Georgia Department of Behavioral Health and Developmental Disabilities (DBHDD) is committed to developing and implementing policies, protocols, and fidelity assurance mechanisms to support generally accepted professional standards with regard to the care of individuals served within the DBHDD system. 
\nThe DBHDD Quality Management Program was established in response to the Department's commitment to the continuous improvement of the quality of its services. The purpose of the Quality Management Program is to monitor and evaluate DBHDD programs/services in order to continuously improve the quality of care for all consumers served in the DBHDD system. \nThe DBHDD Office of Quality Management was formally established in August of 2011. The Department's Quality Management Plan was developed and implemented in December 2011 and was updated in June of 2012. This plan established guidelines for the structure of a DBHDD system-wide quality management program encompassing hospital and community based services. \nReaders are encouraged to refer to the Quality Management Plan for detailed information about the organizational structure of the Quality Management Program and a detailed description of the Executive and Program Quality Councils and the goals and objectives of each council. \nThis report provides a summary of pertinent and significant modifications that have been made to the Quality Management Plan and the Quality Councils along with detailed information about the quality management activities that have taken place between January 2012 and December 2012. \nACTIVITIES OF THE QUALITY COUNCILS \nExecutive Quality Council \nA Quality Council, made up of Departmental leadership, created the organizational structure necessary for the Department's quality management system operations. The Department's first Quality Council meeting was held in January 2012. In April of 2012, a Department-wide DBHDD Executive Quality Council (EQC) was created, which replaced the DBHDD Quality Management Council. \nThe EQC was tasked with providing leadership for the consistent, systematic review and improvement of the DBHDD services provided within the service system. Council membership is based upon strategic position and expertise within the organization. 
Executive Quality Council membership is composed of the Department's Commissioner, key Department leaders and key Program Quality Council (PQC) members. It is the responsibility of the Executive Quality Council co-chairs to review the annual DBHDD QM report and present the findings and recommendations to the Program Quality Councils and the DBHDD Board of Directors. \n3 \n \n During 2012, the EQC met quarterly and the council's activities and actions included:  Supporting and guiding implementation of the quality management plan such as: o reviewing and approving the DBHDD QM goals, objectives and plan.  Creating and defining three Program Quality Councils: o the Hospital System, Community Behavioral Health, and Developmental Disabilities (originally formed in November 2008).  Establishing and supporting specific quality improvement initiatives such as: o encouraging a partnership between the Division of DD with the Georgia Department of Public Health to increase access to dental services for individuals with DD.  Receiving and reviewing reports of data/performance improvement activities such as; o reviewing Program Quality Council key performance indicator data and making recommendations to the Program Quality Councils regarding areas of concern and setting priorities for performance improvement initiatives  Addressing systemic issues that affect quality such as: o Identifying the need for QM training and directing the development of a QM training program, o reviewing and making recommendations regarding the analysis of community based incidents and premature mortality trending.  Setting priorities such as: o receiving information about and discussing challenges related to quality data/information management storage, retrieval and analysis resulting in the allocation of money for data system development. \nA new DBHDD Commissioner was appointed in August 2012 and a number of changes were made in the executive management team. 
As a result, the membership of the EQC will be updated in the January 2013 meeting. The EQC membership roster can be found in Appendix A. In 2013, the EQC meeting frequency will increase to six times per year. \nProgram Quality Councils \nThe scope and content of the quality management program is comprehensive, involving evaluating and improving quality of care and services in all settings and for individuals receiving services for behavioral health issues and/or developmental disabilities. To provide adequate infrastructure to support the Department's quality management program, a less centralized and more focused structure was deemed necessary and the three Program Quality Councils were defined in April 2012 (See Appendix B). These supporting Program Quality Councils (PQCs) report to the EQC and they function as the systems to continuously screen and review information about quality issues and to identify potential quality improvement projects and system improvement needs. \nEach of the Program Quality Councils is responsible for identifying, analyzing and periodically reviewing key performance indicators and other information relevant to quality in the program area (See Appendix C). Additionally each of the Program Quality Councils is responsible for presenting systems problems and solutions, trends and patterns and other issues requiring senior leadership guidance and direction to the EQC. \n4 \n \n Program Quality Council (PQC) membership is composed of key program level leaders and staff and is detailed in Appendix A. The Hospital System PQC and Community Behavioral Health (mental health and addictive diseases) PQC meet monthly. The DD PQC meets quarterly with specific workgroups meeting more often, as needed. \nAll Program Quality Council meetings follow an agenda designed to encourage discussion, provide feedback, make recommendations and assignments, and ensure appropriate followthrough. 
The Council Chair (or his/her designee) has responsibility for presiding at meetings, assisting with agenda preparation, reviewing meeting minutes, and assisting with preparation of required documents. Meeting minutes are taken during every quality council meeting and distributed to its membership. \nExamples of information provided to and actions taken by the PQCs include the following: \nCommunity BH Program Quality Council  In December 2011, identified the key performance indicators (KPI) for 2012 and reviewed data for these indicators during the year.  In December of 2012, the CBH PQC initiated the annual review of the KPI and the target thresholds established in the previous year, made adjustments to the target thresholds and discussed retention or replacement of KPI indicators for the following year. \nHospital Program Quality Council  Reviewed data on KPI throughout the year and made recommendations for performance improvement initiatives to address areas where targets were not achieved identified the need to perform root cause analysis training to hospital leadership and hospital QM staff; this was completed in August of 2012. \nDD Program Quality Council  Determined that statewide data obtained during FY 2011 - 2012 demonstrated that access to supported employment services and community connections as areas needing improvement. As a result, the DD Program Quality Council developed a Supported Employment brochure, the intent of which was to reach individuals who were currently receiving waiver/state-funded services but not supported employment services. The brochure has been given to support coordinators to review with those individuals and their families (if necessary).  
Created a Supported Employment Guide to: explain why employment is important; to illustrate through real life examples the difference work makes in people's lives; answer common questions about pay and health benefits when you work and have an intellectual and/or developmental disability; provide employment resources for people with or without services; and provide information and resources on Supported Employment programs in Georgia. \nIn November 2012 an Assistant Commissioner was hired whose responsibilities will include assisting DBHDD in advancing its quality management framework. The committee membership of the PCQs will be reviewed and updated at the January 2013 EQC meeting. \n5 \n \n STAKEHOLDER INVOLVEMENT IN QUALITY MANAGEMENT \nAn important component of our quality management program is to focus attention on the needs of our customers. Customer feedback and stakeholder input is essential. Opportunities for stakeholder and customer input/feedback during the year were varied and include, but are not limited to: \n1. A community-based needs assessment that was distributed by DBHDD service region. Target stakeholders included: a. Individuals receiving services b. Family members of individuals receiving services c. Advocates \n2. A discussion regarding the DBHDD Quality Management System with: a. The Behavioral Health Advisory Committee on December 12, 2011 b. Regional Planning Board Leadership on January 27, 2012 c. Advocacy groups on April 11, 2012 d. Statewide provider meeting on May 24, 2012 \n3. Community-based consumer satisfaction surveys through the Georgia Mental Health Consumer Network (GMHCN). This is a review of thousands of individuals who receive DBHDD behavioral health services. \n4. Consumer satisfaction feedback from individuals receiving community-based behavioral health services received through quality management audits which follows the treatment and assesses satisfaction of individuals in service. \n5. 
Consumer satisfaction from individuals in DBHDD hospitals - The hospitals have historically used a variety of surveys and measures of consumer satisfaction. Effective February 2012, all the hospitals began participating in a nationally recognized system of survey methodology for consumers of mental health services with the NASMHPD (National Association of State Mental Health Program Directors) Research Institute. The Hospital System PQC receives reports from each of the hospitals on their efforts to improve consumer satisfaction. The strategies of the hospitals vary and information about those strategies is shared in the PQC meetings, for the purpose of contributing to other hospitals' strategies. Hospitals have encouraged the increased use of peer specialists, offering additional food choices and utilization of committees whose purpose is to support a \"recovery-oriented\" culture in their hospitals. \n6. Consumer satisfaction feedback from consumers with DD and their families/guardians  The Division of Developmental Disabilities gathers feedback on the quality of and satisfaction with services through the Person-Centered Review process and participation in the National Core Indicator Survey (NCIS). The NCIS gathers satisfaction data from both the individual receiving services and their family members or guardians. Annually, 480 individuals who are receiving services participate in the Consumer Survey of the NCIS. The Consumer Survey is conducted face to face with the individual receiving services. Additionally, 1600 Family and Guardian Surveys are mailed to family members or guardians of individuals receiving services. The response rate to the mail out survey is approximately 32 percent annually. The response rate allows for statistically significant data analysis. \n7. 
The DDD also incorporates individual and family participation in various councils and workgroups such as the six regional and one statewide quality improvement councils, the \n6 \n \n Individual Service Plan (ISP) restructuring workgroup, and the community inclusion planning workgroup. \na. The Regional and Programmatic Quality Councils developed various quality improvement projects such as training programs to help educate individuals and providers on the various aspects of community inclusion \nb. The ISP restructuring group developed an entirely new ISP which: i. is a meaningful person centered service plan that visually, and in words, tells the person's story; ii. includes opportunities for self advocacy and long term goals. Reflects the past but focuses on the present and future; iii. is an electronic document that can be accessed by individuals, family members and authorized Division staff and readily updated, changed, and amended. \n8. The Division of Developmental Disabilities regularly communicates with the Georgia Service Providers Association for Developmental Disabilities (SPADD) and the Georgia Association of Community Care Providers (GACCP) as a way of sharing current information on the Division and to garner provider input on policies, procedures, etc. \n9. The Division of Developmental Disabilities has created the DD Advisory Council which consists of representatives from the provide community, support coordination agencies, self advocates, and family members. The Advisory Council will take a pivotal role in providing input and guidance in all areas of DD operations. \n10. The Divisions of Behavioral Health and Developmental Disabilities meets monthly with Georgia's Medicaid authority, the Department of Community Health (DCH), to discuss issues concerning the NOW and COMP waivers and community behavioral health care services. Standing agenda items include: a. Policy Changes b. Provider issues \nc. Program Integrity \n11. 
The DDD participates in quarterly meetings with DCH to discuss quality assurance and improvement efforts related to the waivers. Standing agenda items include: a. Support Coordination Reports b. Mortality Reports c. Remediation Reports d. Letter of Agreement Deliverables e. Waiver Performance Measures \nSTATUS OF QUALITY MANAGEMENT WORK PLAN GOALS \nEach Quality Council develops a work plan to guide the quality management activities within its area of responsibility. The EQC defines the work plan for the Department and the Program Quality Councils develop program-specific work plans for the hospital system, the community behavioral health and developmental disabilities service delivery systems. \nBelow are descriptions of the status of progress toward achieving the work plan goals for each Quality Council: \n7 \n \n DBHDD QM Work Plan The foundation structure for the DBHDD QM program was identified and implemented and includes an Executive Quality Council with DBHDD wide decision making authority as well as three Program Quality Councils. The work plan goal of improving access to data relevant for QM initiatives and reporting via the development of a data warehouse is progressing but at a pace slower than anticipated. A centralized method of data collection is being developed and will be piloted in 2013. The goal of increasing DBHDD employee knowledge about performance improvement is an ongoing project and pilot testing of the first three training modules of the QM training program will began in January 2013. \nHospital System Work Plan  All goals that were created for the 2012 work plan have been achieved with the exception of the data analysis training goal, which has been partially accomplished. Continued training will be done in order to bring staff up to the desired level of competence. Much of the training is being conducted on a \"just-in-time\" basis and is measure specific. 
Training is targeted to those staff who have responsibilities associated with each respective measure (QMs, Mentors, Incident Managers, etc.). \nCommunity BH Work Plan  All goals created have been met or are on target with the exception of tracking health status indicators which is on hold as the Department works to identify a way of capturing and analyzing such information. In 2012, a community behavioral health quality management infrastructure and system was set up via the leadership of the Community Behavioral Health Program Quality Council. A reduction in the number of readmissions for consumers who have been admitted more than three times to a State Psychiatric Hospital is ongoing and data systems should be modified by 3/31/2013 to allow collection and trending of data that is currently being tracked. Suicide prevention activities and Cognitive Therapy best practice implementation was started in 2012 and will continue into 2013. KPI have been identified and will continue to be analyzed going forward. \nDevelopmental Disabilities (DD) QM Work Plan - The DD Quality Work plan is based on the state's fiscal year (July 1, 2012 through June 30, 2013). All goals for the period of July 1, 2012 through December 31st) have been met or are ongoing. An example of an ongoing goal would be the Mortality Review Committee meetings which take place quarterly. All other goals in the plan are on target for completion by June 30, 2013. \nKEY PERFORMANCE INDICATORS AND OUTCOMES \nKey performance indicators (KPI) are used to assist in defining and measuring progress towards organizational goals. KPI are quantifiable measurements that reflect the critical success factors of the Department and vary depending upon the program or service. The 2012 key performance indicators are program-specific but in 2013 the setting of DBHDD wide key performance indicators will be discussed. 
\nIn 2012 KPIs were selected and tracked along with their associated outcomes to cumulatively provide a picture of service delivery in each of the identified areas. The following subsections discuss those key indicators. \nEach of the three program areas (hospitals, community behavioral health and developmental disabilities) are in different phases of data collection and analysis. The hospital system has had a \n8 \n \n quality management system for many years. Since the formation of the new Department, DBHDD has built upon the individual quality management programs that have existed for years in all the State hospitals and moved towards a better integrated quality program that has enjoyed the benefits of economies of scale and has developed improved system wide information and communication systems. These developments have enabled the hospital system to develop improved performance measurements and reporting capabilities and used them to improve important areas of patient care and safety such as increasing consumer participation in the planning of their own treatment. \nThe Community BH quality system was developed in 2011 and implemented in 2012. As Community Behavioral Health program data collection was initiated in January 2012, DBHDD recognizes that performance patterns must be established over time to effectively make recommendations for program changes and indicators must be evaluated for their effectiveness in accurately portraying the intended outcome. When adverse or deleterious aggregate data is identified, the findings are referred for immediate review, discussion, and/or correction (as appropriate) at the program and/or executive levels. \nThe Department's Division of DD quality system was created in 2008 in response to the Centers for Medicare and Medicaid (CMS) requirement that all waiver programs have a quality assurance plan. 
The DDD Georgia Quality Management System (GQMS) was developed in order to assist in the evaluation of the quality of supports and services rendered to individuals with developmental disabilities. The Division of DD uses this system to evaluate the quality of supports and services, create initiatives, and identify areas needing improvement for the State's service delivery system. \nThe DD QM plan addresses all of the outcomes identified in the CMS Quality Framework for the Home and Community Based Services. To ensure this occurs, stakeholder workgroups, along with the guidance of the Division's External Quality Review Organization (EQRO), participated in the development of the Person Centered Review (PCR) and Quality Enhancement Provider Review (QEPR) processes. Since its inception, the GQMS has released four Annual Quality Assurance Reports, four Quality Improvement Studies, and various ad hoc quality reports. \nA. Hospital Quality System The DBHDD Hospital System maintains a quality management program chaired by the Director of Hospital Operations. The Hospital PQC is responsible for implementing the provisions of the DBHDD Quality Management Plan within the six (6) hospital system. \nIn addition, each of the six hospitals in the DBHDD hospital system has its own quality management plan and Quality Council that is responsible for overseeing the quality improvement activities within their respective hospitals and who report to the Hospital System Program Quality Council. Quality management activities and results are communicated to the members of the Hospital System PQC during the monthly meetings. Reports are captured in meeting minutes and recordings. In addition to the four key performance measures identified in this report, the Hospital System has a number of other measures that it uses to monitor its overall performance. Those measures are focused in three main areas: safety, consumer satisfaction and recovery orientation. 
\nOne of the major focus areas for the hospital quality management program is a performance dashboard that is comprised of a number of performance measures that are intended to reflect \n9 \n \n the priority areas of concern within the hospital system. Performance measures are changed as new priorities are determined. The Hospital System also sets evaluation thresholds that are used as aids in determining progress towards meeting performance expectations. Key hospital performance indicator data is collected and reviewed monthly by the Hospital PQC where hospitals report on improvement activities, and identify any issues or obstacles they have encountered. See Appendix D for the hospital system dashboard. \nThe four performance measures that are currently being monitored for the DBHDD Hospital System are: \n1. Continuing Care Plan Created Overall Definition: Percent of adult mental health patients discharged from a hospital-based inpatient psychiatric setting with a continuing care plan that contains all of the following: reason for hospitalization, principal discharge diagnosis, discharge medications and next level of care recommendations. (NASMHPD Research Institute) \nResults Summary and Analysis: The Hospital System has averaged 96% on this measure for calendar year 2012. The target threshold of 95% has been met or exceeded in 4 out of the 12 months, with 3 of those in the first 6 months and 1 in the last six months. \n2. Individualized Recovery Plan - Quality Definition: Percent of criteria that were met on quality audits performed on chart reviews of adult mental health patients. \nResults Summary and Analysis: The Hospital System has achieved consistent improvement from January (52%) through December (87%). The target percentage is 95%. These improvements reflect the results of a substantial amount of training resources committed to improving the performance of Individual Recovery Teams in developing care plans. \n3. 
Consumer Satisfaction/Outcome of Care Domain Definition: Percent of adult mental health clients at discharge or at annual review who respond positively to the outcome of care domain on the Inpatient Consumer Survey. \nResults Summary and Analysis: The last two months for which data is currently available (Oct. \u0026 Nov. 2012), show a significant improvement in this area compared to the previous eight months. While the Department's eight-month average score prior to October was 75%, this score is similar to the national average for similar facilities. The Department's goal of 95 % has not yet been achieved. Improvement efforts are being focused on more effective utilization of treatment mall programming and improving the methodology utilized in obtaining valid feedback from consumers. \n4. Consumer Satisfaction/Empowerment Domain Definition: Percent of adult mental health clients at discharge or at annual review who respond positively to the empowerment domain on the Inpatient Consumer Survey. \nResults Summary and Analysis: The last two months for which data is currently available (Oct. \u0026 Nov. 2012), show a slight improvement (82% \u0026 80%) in this area compared to the previous 8 months (77% average). While the scores fall within the average of similar facilities nationally, the goal of 95 % has not yet been achieved. \n10 \n \n Improvement efforts are being focused on \"Respect\" training for staff and on improvements in the involvement of consumers in the recovery planning process . \nB. Community Behavioral Health (CBH) Programs The CBH key performance indicators were vetted with nationally recognized subject matter experts prior to implementation in 2012. Twenty-three indicators were chosen. Data are collected monthly or quarterly, depending on the indicator, and reviewed quarterly by the Community Behavioral Health PQC (see Appendix E for the CBH dashboard): \n1. The Georgia Housing Voucher Program (GHVP) (2 indicators) a. 
Definition: Percent of individuals with vouchers who remain in stable housing for \ngreater than 6 months. Results Summary and Analysis: A review of the data currently indicates that the supports provided to voucher recipients is effective at assisting those individuals in maintaining stable housing. The Department's results (92%) exceeded the HUD standard for the Shelter Plus Care Program (77%) for a similar population profile by an average of 15%. Additionally, it confirms the referral application process of describing the individuals community support needs and how they will be met. This information is broken out by Region and provided to Regional Office staff, who then review it down to the provider level in order to determine provider effectiveness in monitoring housing stability. \nb. Definition: The percent of individuals who have left stable housing under unfavorable circumstances and have been re-engaged and reassigned vouchers as indicated. \nResults Summary and Analysis: There are reasons for leaving the program, such as reunification with family or moving out of state, that would not call for re-engagement efforts. This measure focuses only on those who left the program due to an unfavorable outcome and the effectiveness of re-engagement efforts by service providers. The target for the indicator was set at 10% based on consensus of the council membership. There is no local or national benchmark for this indicator. A review of the data suggests that reentry into the voucher program, even after eviction or termination of a voucher, is possible and the percentage who were re-engaged ranged from 17% to 30% in 2012. This information is reviewed by the Regional Office staff that are charged with oversight responsibility and follow up with providers who update the status of those individuals that can be re-engaged. 
\nBaseline data was obtained during 2012 and as a result of the annual review by the CBH program quality council these key PI indicators will continue to be tracked during 2013. \n2. Supported Employment (SE) (2 Indicators) a. Definition: The percent of providers that meet an average staff to consumer caseload \nratio of 1:20. Results Summary and Analysis: A low staff to consumer ratio of no more the 1:20 is desired for this service to ensure that each participant has the opportunity to receive the service at the intensity needed to produce good outcomes.. During 2012, the target threshold was met six out of the twelve months. From January to June, only data from the three settlement funded SE providers was collected for this indicator. Starting in July, all Adult Mental Health contracted SE providers supplied data for this indicator. The increase \n11 \n \n in providers may have influenced the overall percentage because those newly added providers were still in a data submission and fidelity learning curve. \nEach month this indicator is reviewed with the providers during the Supported Employment Coalition Meeting. Barriers that keep providers from meeting this indicator are discussed and shared. Barriers that providers have cited include difficultly retaining qualified staff and obtaining referrals from external sources. Coalition Meetings also included program highlights from providers that were successful in creating relationships with other agencies to increase the number of referrals to Supported Employment. DBHDD brought in a consultant to provide a session related to staff burn-out at the Community Mental Health Training and Technical Assistance Event in January 2013 in Macon, GA. \nDBHDD also noted a selection of providers whose ratios are much smaller than expected and therefore were not counted as meeting this measure. 
The Community Behavioral Health PQC discussed this during its annual review of key performance indicators and it was agreed that the indicator needed refinement during 2013 since a lower ratio would not be expected to be associated with low service intensity for individual consumers and therefore, unrelated to the purpose of the indicator. This indicator will be changed to capture the percentage of providers whose staff to consumer ratio falls in between 1:15 and 1:20. \nb. Definition: The percent of unduplicated individuals who had first contact with a competitive employer within 30 days of enrollment. \nResults Summary and Analysis: Engagement in rapid job search is desired for persons enrolled in this service. During 2012, the target threshold 50% was met each quarter it was evaluated. From January to June, only data from the three settlement-funded SE providers was collected for this indicator. Starting in July, all Adult Mental Health contracted SE providers supplied data for this indicator. Data from July to September 2012 demonstrated that 70.4% of settlement consumers enrolled during that time frame were able to have first contact with potential employers within 30 days. \nFrom July to September 2012, several providers new to the data collection had difficulty calculating the number of full-time employees (FTEs) that were devoted solely to consumers with mental illnesses. This impacted DBHDD's ability to calculate an accurate consumer to staff ratio. In response to this difficulty, DBHDD provided ongoing individual technical assistance to these providers to ensure that accurate data collection methods were in place. \nThis indicator is discussed at every Supported Employment Coalition meeting. Providers have cited barriers related to the economy/poor job market, difficulty finding employers that are receptive to hiring individuals with mental health diagnoses, and transportation barriers. 
Coalition Meetings have provided the opportunity for sharing successful experiences in building positive partnerships and relationships with employers. \nAs this data must be collected with a one month delay, data from second quarter of FY13 (October 2012 to December 2012) was unavailable at the time of this report. \nAs a result of the annual review by the CBH program quality council, these key PI indicators for Supported Employment will continue to be tracked during 2013. \n12 \n \n 3. Assertive Community Treatment (ACT) (3 indicators) Note: In July 2012 DBHDD began to collect this data from all ACT Teams regardless of funding source. Initially, only teams under contract to receive state funds were included but Medicaid-funded teams were added in July 2012 to provide a better overall picture of ACT service outcomes. This changed the population by including more consumers that otherwise would not have been included in this indicator. \na. Definition: The percent of consumers who are enrolled within three days of referral. Results Summary and Analysis: Rapid enrollment of referred consumers is desired for this service. The target for this measure is 70% and the 2012 results ranged from 28.9% to 71.9%. The increased percentages during October and November may be related to DBHDD's transition of individuals from terminating State Funded ACT teams to newly established teams that were ready to enroll individuals. \nData is discussed with providers during ACT Coalition meetings. Providers are engaged in discussions of the barriers to meeting targets, solutions are strategized. Technical assistance is provided. Providers that consistently score high on this indicator are invited to share what positively impacts rapid enrollment. Barriers have included newly added providers being in a data submission learning curve. DBHDD has been providing ongoing technical assistance to these new providers to ensure accurate data reporting. 
DBHDD will also be investigating the effect of high volume of referrals on this measure. It is possible that differences between providers' results may be more related to differences in volume rather than provider-controlled factors. \nb. Definition: The percent of ACT consumers admitted to psychiatric hospitals within the past month. \nResults Summary and Analysis: Lower utilization of psychiatric hospitals by persons participating in this service is desired. The target for this measure is no more than 7% per month and the 2012 results ranged from 6.3% to 9.9%. \nData is discussed with providers during ACT Coalition meetings. Providers are engaged in discussions of the barriers to meeting targets and solutions are strategized. Technical assistance is provided. Providers have not cited any barriers that they are experiencing related to this indicator. However, conversations at Coalition meetings have focused on the needs of high level of care consumers and ways to reduce length of stay when a hospitalization is necessary. Many teams noted building relationships with area law enforcement entities as a way to help educate the community on alternative mental health interventions and reduce the utilization of psychiatric hospitalizations. \nc. Definition: The average number of jail/prison days utilized per enrolled consumer. Results Summary and Analysis: Decreased incarceration for persons participating in this service is desired. The target for this measure is less than 1.0 days per month and the 2012 results ranged from 0.559 to 1.032 days. \nAs with the other ACT indicators, data is discussed with providers during ACT Coalition meetings. The slight decrease in November and December 2012 was discussed at the ACT Coalition Meeting in January 2013. Several teams cited that the colder weather has kept many of their consumers from loitering in public areas. The teams also state that \n13 \n \n loitering increases police contacts which impacts jail utilization. 
Conversation at the meeting was then focused on other ways for teams to impact the amount of time their consumers loiter (e.g. assisting consumers in finding other meaningful daily activities by teams building relationships with local Supported Employment providers, building consumer's informal networks, locating community recreational programs). DBHDD will be monitoring this indicator in the future to determine if there is a correlation between seasonal changes and jail utilization. \nBaseline data was obtained during 2012 and as a result of the review by the CBH program quality council these KPIs will continue to be tracked during 2013. \n4. Intensive Case Management (ICM) (3 indicators) a. Definition: The percent of ICM consumers with a Psychiatric inpatient admission within \nthe past month. Results Summary and Analysis: Decreased psychiatric re-admission for persons participating in this service is desired. A review of the 2012 data indicates that the target threshold of not exceeding 10% was met every month. \nData is discussed with providers during Coalition meetings similar to the method used during the ACT Coalition meetings described above. \nSince 2012 was designed to collect baseline data and the target was met every month, the target for this indicator will be lowered to 5% during 2013. \nb. Definition: The percent of individuals housed (non-homeless as defined in the programmatic report) within the past month. \nResults Summary and Analysis: Increased housing stability for persons participating in this service is desired. A review of 2012 data indicates that the target of 90% was met nine out of the twelve months. It is hypothesized that the decrease in percentages may have been impacted by the increase in the number of new consumers served and an associated higher number who entered without housing. \nData is discussed with providers during Coalition meetings. 
Providers are engaged in discussions of the barriers to meeting targets and solutions are strategized. \nc. Definition: The average number of jail/prison days utilized per enrolled consumer. Results Summary and Analysis: Decreased incarceration for persons participating in this service is desired. A review of 2012 data indicates that the target of 0.5 days was met every month. \nData is discussed with providers during Coalition meetings using the same method as the other indicators in this section. \nSince 2012 was designed to collect baseline data and the target was met every month, the target for this indicator will be lowered to 0.25 days during 2013. \n5. Community Support Teams (CST) (3 indicators) a. Definition: The percent of CST consumers with a Psychiatric inpatient admission within \nthe past month. 14 \n \n Results Summary and Analysis: Decreased psychiatric re-admission for persons participating in this service is desired. A review of 2012 data indicates CST providers met this target of 10% eight months out of the year. The increase in percentages in June and September 2012 were greatly influenced by the number of new consumers that received services during those months. \nData is discussed with providers during Coalition meetings. Providers are engaged in discussions of the barriers to meeting targets and solutions are strategized. \nb. Definition: The percent of individuals housed (non-homeless as defined in the programmatic report) within the past month. \nResults Summary and Analysis: Increased housing stability for persons participating in this service is desired. A review of 2012 data indicates the target of 90% was met every month. Data is discussed with providers during Coalition meetings. \nDefinition: The average number of jail/prison days utilized per enrolled consumer. Results Summary and Analysis: Decreased incarceration for persons participating in this service is desired. 
A review of 2012 data indicates CST providers met this target of 0.75 days nine months out of the year. The increase in utilization in July 2012 was likely impacted by the number of consumers utilizing the service during the month. The discussion regarding this measure during a Coalition Meeting revealed that the 21 days were primarily utilized by one consumer due to a specific personal event. \nData is discussed with providers during Coalition meetings. Providers are engaged in discussions of barriers to meeting target, solutions are strategized. \nBaseline data was obtained during 2012 and as a result of the review by the CBH PQC these key PI indicators will continue to be tracked during 2013. \n6. Case Management (CM) (3 indicators) a. Definition: The percent of CM consumers with a Psychiatric inpatient admission within \nthe past month. Results Summary and Analysis: Decreased psychiatric re-admission for persons participating in this service is desired. A review of 2012 data indicates Case Management providers met the target of 10% every month. Data is discussed with providers during Coalition meetings as described above. \nSince 2012 was designed to collect baseline data and the target was met every month, the target threshold for this indicator will be revised to 5% during 2013. \nb. Definition: The percent of individuals housed (non-homeless as defined in the programmatic report) within the past month. \nResults Summary and Analysis: Increased housing stability for persons participating in this service is desired. A review of 2012 data indicates Case Management providers met this target of 90% every month. Data is discussed with providers during Coalition meetings as described above. \nc. Definition: The average number of jail/prison days utilized per enrolled consumer. \n15 \n \n Results Summary and Analysis: Decreased incarceration for persons participating in this service is desired. 
Jail utilization data was not collected and tracked during State FY12 (July 2011-June 2012) for consumers receiving CM services. Data collection processes were implemented in July 2012. A review of 2012 data indicates CM providers met the target of 0.25 days twice out of the six months data was collected. \nData is discussed with providers during Coalition meetings as described above. Discussions during Coalition Meetings have focused on the relatively higher jail utilization by CM consumers compared to higher intensity community services such as ICM and CST. One theory is that the intensity of service of ICM and CST services has a greater impact on jail utilization but these services are not always available in the same region as CM . A possible implication of this for CM providers is that they may be serving consumers that are in need of higher intensity services. DBHDD and providers have discussed the option of referring those consumers to other higher level services in their areas for which the consumers may be eligible, such as ACT services, until such time that ICM and CST services are available statewide. Another option that will be considered is to lower the caseload expectations for case managers in these areas to allow for higher frequency of contact. \nBaseline data was obtained during 2012 and as a result of the review by the CBH PQC these key PI indicators will continue to be tracked during 2013. \n7. Addictive Disease Services (2 Indicators) a. Definition: Percent of Adult Addictive Disease consumers who abstain from use or \nexperience a reduction in use while in treatment. Results Summary and Analysis: This measure is self reported but assists providers in understanding the behaviors of those in services as it relates to reducing the harmful consequences of their substance use while engaging in clinical care. 
This indicator is a required NOMS (National Outcome Measures) for SAMHSA and has not been used to drill down to the provider level. \nA review of the most recent available statewide data (FY 2012) suggests that 45% of adult participants in treatment report a reduction in use or abstinence from alcohol and drugs, which is slightly higher than data from previous years. Over the four year period of data collection, we were able to see providers meet the targets set for the coming year demonstrating an incremental improvement in the performance indicator each year from a statewide perspective. \nAlthough reduction in use and abstinence is important for programs to measure, during this last year of QM development and implementation, our evaluation of this indicator determined the need to adopt new indicators that will measure engagement and retention, which are critical quality components of a successful substance abuse treatment program. These PIs will be discussed in further detail below. \nb. Definition: Percent of Youth Addictive Disease consumers who abstain from use or experience a reduction in use while in treatment. \nResults Summary and Analysis: This measure is self reported but assists providers in understanding the behaviors of those in services as it relates to reducing the harmful consequences of their substance use while engaging in clinical care. \n16 \n \n A review of the most recent available statewide data (FY 2012) suggests that 58% of youth participants in treatment report a reduction in use or abstinence from alcohol and drugs. Over the four year period the state has collected data on this indicator, we were able to see providers meet the targets set for the coming year which demonstrated an incremental improvement in the performance indicator from a statewide perspective. 
\nAlthough reduction in use and abstinence is important for programs to measure, during this last year of QM development and implementation, our evaluation of this indicator determined the need to adopt new indicators that will measure engagement and retention, which are critical quality components of a successful substance abuse treatment program. \nThese key PI indicators will be replaced with new indicators in 2013. The new key Addictive Disease (AD) Service performance indicators will focus on consumers discharged from crisis/detoxification who receive follow-up services and clients remaining active in treatment for 90 days after beginning non-crisis stabilization services. The new PI indicators will be drafted by the AD program staff and will be presented to the CBH PQC for review and approval. \n8. Customer Satisfaction from the Quality Management Audits, Community Behavioral Health Programs. Face-to-face interviews were conducted with adult consumers who were in the target population. Target thresholds for each of the satisfaction indicators were set at 90%. \nDefinitions:  Percent of individuals that are satisfied with services they are receiving. (80.4%)  Percent of individuals which feel their quality of life has improved as a result of services. (80%)  Percent of individuals which feel the location of services is convenient for them. (84.6%)  Percent of individuals which feel staff treats them with respect. (86.5%)  Percent of individuals which state they regularly discuss goals with staff. (78%) \nResults Summary and Analysis: Currently, the available data for these KPIs is limited to 6 months worth of data collection and will continue to be collected in order to establish baseline information. Results ranged from a low of 78% (consumers state they regularly discuss their goals with their service provider staff) to a high of 86.5% (consumers who state service provider staff treat them with respect). 
The Department will continue to collect customer satisfaction data through the Quality Management audits/reviews and will summarize patterns over time, as applicable. \nAdult Mental Health Fidelity Reviews Assertive Community Treatment (ACT) Fidelity Reviews are conducted annually for all 22 state contracted ACT teams. In the current fiscal year a total of 8 Fidelity Reviews have been completed using the 28-item DACTS model for Fidelity. Once the DBHDD ACT Fidelity Review Team completes the review, results of the Fidelity Review are given to the ACT team, the regional office in which the team operates, the DBHDD Adult Mental Health Director and other departmental leadership, and results are provided to the ACT Subject Matter expert hired as part of the DOJ Settlement. \n17 \n \n Review items that are found to be outside of the acceptable scoring range result in a Corrective Action Plan (CAP) which each team develops and submits for acceptance to the regional and state office. Of the 8 teams that have received a Fidelity Review, all are operating within a good to very good range of Fidelity with evidence of serving the appropriate population, maintaining an acceptable caseload, delivery of the service with intended frequency and intensity, provision of crisis response, effective daily team meeting discussion of consumers and consistent delivery of 80% of the teams services in the community. Some of the areas of needed attention are, increasing team involvement in hospital admissions and discharges, strengthening delivery and documentation of contacts with consumer's informal support system, and more out-of-clinic delivery of services by the psychiatrist on \n \nSupported Employment (SE) Fidelity Reviews are conducted annually for all 21 state contracted SE providers. 
Fidelity Reviews were completed in late 2011 for the 3 state contracted SE providers and as a result of the FY'2013 expansion from 3 funded providers to 21 funded providers, SE reviews will resume within the next month. \n \nAll teams will receive a Fidelity Review using the 25-item IPS model supported employment scale. Once the SE Fidelity Review is complete, results will be given to the SE provider, the regional office in which the team operates, the DBHDD Adult Mental Health Director and other departmental leadership and results will be provided to the SE Subject Matter expert hired as part of the DOJ Settlement. Review items that are found to be outside of the acceptable scoring range will result in a Corrective Action Plan (CAP) which each team develops and submits for acceptance to the regional and state office. \n \nQM Audits: Quality Service Reviews of Adult Behavioral Health Community Providers As a component of DBHDD's quality management system, a quality audit of a sample of individuals meeting settlement agreement criteria and who were enrolled in settlement funded services was created and implemented. The audit was designed to follow the care of an individual throughout the system of care as they transitioned between services and as they received multiple ongoing services. \n \nThe audits included interviews with individuals served and with provider leadership and staff, direct observation, reviews of treatment records, and reviews of provider's performance improvement systems. By following individuals through the system of care, a more holistic picture of the functioning of the overall system could be identified while allowing for site-specific feedback towards improving services for individuals served. Audits were performed October 2011 through October 2012 in all six regions. From April 2012 through October 2012, individuals interviewed were also asked about their overall satisfaction with services and their quality of life. 
Consumer satisfaction continues to be an identified key performance indicator. \n \nEach of the following ADA related services were included in the sampling of ADA individuals as \n \noutlined below: \n \nServices \n \nCycle 1- Reviews \n \nCycle 2- Reviews \n \nCompleted \n \nCompleted \n \nAssertive Community Treatment 4 Providers / 9 Teams \n \n4 Providers / 8 Teams \n \nIntensive Case Management \n \n1Provider / 1 Team \n \n1 Provider / 1 Team \n \nSupported Employment \n \n3 Providers / 5 Teams \n \n3 Providers / 4 Teams \n \nPeer Support/Peer Mentoring 1 Provider / 1 Team \n \n1 Provider / 5 Teams \n \n18 \n \n Community Hospital Beds \n \n4 Providers \n \n7 Providers \n \nCrisis Stabilization Units \n \n7 Providers \n \n13 Providers / 15 CSUs \n \nSingle Point of Entry \n \n1 Provider \n \n1 Provider \n \nBridge Funding \n \n7 Providers / 8 Locations *N/A \n \nMH Supported Housing \n \n4 Providers / 6 Locations N/A \n \nDD Support Coordination \n \n4 Providers / 9 Teams \n \nN/A \n \nDD Mobile Crisis Teams \n \n4 Providers / 6 Teams \n \nN/A \n \nDD Crisis Respite \n \n1 Provider \n \nN/A \n \nDD Congregate Living Homes 9 Providers \n \nN/A \n \nDD Host Homes \n \n4 Providers \n \nN/A \n \nCycle One= Number of Providers/Teams reviewed October 2011  December 2011 \n \nCycle Two= Number of Providers/Teams reviewed January 2012  December 2012 \n \n*N/A = No Review this cycle \n \nSample Selection: Individuals were considered for selection when they utilized any Settlement Agreement services within the six months prior to the audit of a region. Individuals were selected for the audits based on the following: enrollment in multiple services, random selection and clinical reasons that included length of stay, time of enrollment, or inclusion in a previous audit. \n \nReviews/Audits Completed Between October 2011 and the end of October 2012, the Quality Management audit team completed a total of 142 provider/site audits of ADA Settlement Agreement providers. 
The services individuals were enrolled in included: Assertive Community Treatment, Intensive Case Management, Supported Employment, Peer Support, Peer Mentoring, Community Hospital Beds, Crisis Stabilization Units, Bridge Funding, Georgia Housing Voucher Program, Support Coordination, DD Mobile Crisis Teams, DD Crisis Respite Homes, DD Congregate Living Homes, and DD Host Homes. The Single Point of Entry and Georgia Crisis Access Line services were also included in the continuity of care review. During those audits, 842 charts were reviewed, 150 consumers received face-to-face interviews, and 347 provider staff were interviewed. Providers were given copies of their audit results and expected to utilize that information in their internal QM/QI processes to correct any concerns or issues identified. Individuals placed in Mental Health Supportive Housing and individuals with Developmental Disabilities were included only in the first set of reviews which occurred between October 2011 and December 2011. \n \nTrends Identified: At the end of each quality management audit cycle, a summary report was developed and shared with DBHDD senior leadership, the Regional Coordinators, and key Central Office program leadership. The leadership in the areas that have been identified in the report as having possible deficient practices or which require further analysis/review were expected to follow up and or correct and modify program and procedures as deemed necessary and appropriate. The top two recurring themes identified in these review reports include: \n \n \n \nAn absence of a master DBHDD database which easily and accurately identifies and \n \ntracks consumers across their continuum of care. \n \n \n \nThat individualized Recovery Plans (IRP) did not consistently include the individual's input \n \nor involvement, nor were they always individualized or comprehensive (i.e. medical needs \n \nmissing, substance abuse issues not addressed, etc). 
\n \n19 \n \n Summary and Recommendations for Community Behavioral Health The sections above reference the current status of the KPIs as well as the Department's other quality management activities. The past year, 2012, provided a base line for these indicators. The BHQC has made modifications as well as changes to the targets and indicators to improve the sensitivity of the indicators to more accurately reflect the program outcomes. The Department will continue through 2013 to evaluate the quality of these indicators, improve our performance and continue to search for measures that have objective or national benchmarks that might guide this process. In addition to the Department's review, input will be utilized from consultants to evaluate our KPIs and include indicators that would provide us with outcome information for our entire population. \nDBHDD has begun a collaboration with the Department of Community Health to better coordinate activities of our respective External Review Organizations. This will provide a more systemic and comprehensive view of the community and behavioral health provider network. A significant goal is to reduce duplication and reallocate resources to gather information not currently available. \nHealth Status Indicators, Community Behavioral Health Programs Physical health is often neglected when dealing with psychiatric or addictive disease concerns and individuals with behavioral health issues are known to be more likely to have other chronic diseases as well as poorer health outcomes compared to those without behavioral health problems. Identifying and analyzing specific key health performance indicators has been discussed at length by the CBH PQC and the current challenges related to capturing, sending and storage prohibit its collection and analysis at this time. This issue will be included in discussions related to data systems and potential Administrative Service Organizations (ASO) or External Review Organizations (ERO). \nC. 
DBHDD Division of Developmental Disabilities The DDD utilizes an external Quality Improvement Organization (QIO) to carry out much of its data collection and analysis. The population for the DDD indicators (except for the crisis indicators) is adults with intellectual or developmental disabilities (IDD) who are currently receiving waiver services. Crisis services are available to adults and children ages 5-18 with I/DD regardless of whether they are currently receiving DD services. Therefore the PI indicators for crisis services are inclusive of this population. The QIO creates an annual quality report which contains data on key performance and quality indicators. The full report for FY12 can be found in GQMS Annual Report (Attachment 1). KPIs include (see Appendix F for Community DD dashboard): \n1. Individual Support Plans (ISP): The data for these key quality indicators is collected on an ongoing basis and is reported quarterly to the Director of Quality Assurance for the Division of Developmental Disabilities. The data is either collected using the ISP Quality Assurance (QA) checklist or through a Person-Centered Review. The ISP QA Checklist was developed by the State to ensure the ISP includes all necessary requirements as required by the State, to ensure what is \"important to\" and \"important for\" the individual is captured in the overall plan for that year, as well as to ensure the individual has a healthy, safe, and meaningful life. The purpose of the Person-Centered Review is to assess the effectiveness of and the satisfaction individuals have with the service delivery system. \n1.a The percent of ISPs written to support either a Service Life, Good but Paid Life, or Community Life. \n20 \n \n o Service Life means the individual uses paid supports and services and has little to no connection with the community. \no Good but Paid Life means the plan supports life in the community, but real community connections are lacking. 
The individual has both paid and unpaid supports. \no Community Life means the ISP is written to move people toward a community life as the person chooses. \n1.b Percentage of individuals reporting they are involved in the development of their annual ISP. Results Summary and Analysis: 1.A ISP QA Checklist results indicate the proportion of ISPs written to support a Community Life saw a steady decline over the last four years of the contract, with a small increase in FY 12. The decline may be related to the faltering economy which resulted in a decrease in available community supports and funding. Additionally, the numbers of DD community providers increased a great deal in 2009. The capacity of these new providers to support a Community Life, may need to be increased through training and technical assistance. \n1.B In FY12, 88% of individuals reporting being involved in developing their annual ISP which is an increase from 83% in FY11. This positive increase may be a result of increased training and emphasis on the importance of individuals and families being included in the planning of their services. \n2. Crisis Response System: The Georgia Crisis Response System for Developmental Disabilities provides crisis supports to children ages 5  18 and adults regardless of receipt of waiver services. The data for the crisis response system is collected monthly and are reviewed by the Director of Quality Assurance for the Division of Developmental Disabilities: 2.a The percent of Mobile Crisis Team (MCT) dispatches. 2.b Average Mobile Crisis Team response time. 2.c The percent of crisis incidents that resulted in intensive in-home supports. 2.d The percent of crisis incidents that resulted in placement of the individual in a crisis home. Results Summary and Analysis: 2. A. A review of the data for indicator 2.a currently suggests that 48% of crisis calls result in the Mobile Crisis Team being dispatched. 
This number will probably increase over the next year, as the Division takes steps to reduce telephonic resolution of crisis episodes. 2. B. The data for indicator 2.b suggests MCT are on average meeting the required response time of 1.5 hours. The average for FY12 was 93 minutes. FY13 data is showing a decrease in response time to 60 minutes on average. The most likely reason for the positive result is that crisis providers are becoming more familiar with their service area and the crisis system as a whole. 2. C. The data for indicator 2.c suggests that 18% of mobile crisis team dispatches result in the individual needing additional intensive in-home support beyond the initial crisis resolution. It is a goal for the crisis system to keep the individual in their home environment through the provision of in-home crisis supports rather than remove them to an unfamiliar setting. The Division has provided \n21 \n \n additional technical assistance and training to crisis providers in order to increase the use of in-home crisis supports. 2. D. The data for indicator 2.d suggests that 14% of mobile crisis team dispatches result in the individual needing to be moved to a crisis home. The DDD continues to work closely with its crisis providers to evaluate and strengthen the crisis system. Mobile Crisis Teams are now able to be dispatched to state hospitals and jails in order to assess individuals in these locations for possible DD crisis supports. The Mobile Crisis Teams do not transport individuals to jails, but may transport individuals to a State hospital if it is determined an individual needs Behavioral Health supports. The Division is also developing protocols for Mobile Crisis Team dispatch to Crisis Stabilization Units. \n3. Health and Safety: This data is collected annually and reported bi-annually through the National Core Indicator Survey. 3.a Percentage of individuals who had a routine dental exam in the past year. 
3.b Percentage of individuals who had a flu vaccine in the past year. 3.c Percentage of individuals who had a Pap Test in the past 3 years. 3.d Percentage of Individuals who had a PSA test in the past 5 years. 3.e Percentage of Individuals who feel safe in their home. 3.f Percentage of Individuals who feel safe in their neighborhood. 3.g Percentage of Individuals who feel safe at work or day program. Results Summary and Analysis 3.A 2009-2010 data shows that 72% of individuals reported having a routine dental screening which was well below the national average of 83%. 2010-2011 data shows an increase in examinations to 78% but Georgia still remains below the national average of 80%. Many dental procedures are not covered by either Georgia's State Medicaid plan or the Medicaid waivers. Efforts are underway to partner with the Georgia Department of Public Health to increase access to dental services. 3.B 2009-2010 data shows that 63% of individuals reported having a flu vaccine which was below the national average of 77%. 2010-2011 data shows a slight increase in vaccinations to 65% but Georgia still remains significantly below the national average of 75%. Standard vaccinations are covered under the State Medicaid plan. Discussions with providers and families have suggested that transportation to and from a physician's office is an issue. The Division will investigate possible partnerships with the Department of Public Health to address this issue. 3.C 2009-2010 data shows that 77% of individuals reported having a Pap Test in the past 3 years which was within the national average of 76%. 2010-2011 data shows a slight decrease in testing to 74% but Georgia remains within the national average of 71%. 3.D 2009-2010 data shows that 50% of individuals reported having a PSA test in the past year which was lower than the national average of 57%. 2010-2011 data shows a decrease in testing to 45% but Georgia remains below the national average of 56%. 
The Division will survey families and providers in an attempt to determine why Georgia is consistently below the national average. 3.E 2009-2010 data shows that 91% of individuals reported feeling safe in their homes which was higher than the national average of 84%. 2010-2011 data shows a decrease to 86% but Georgia remains above the national average of 83% reporting they feel safe in their homes. \n22 \n \n 3.F 2009-2010 data shows that 92% of individuals reported feeling safe in their neighborhood which was higher than the national average of 86%. 2010-2011 data shows a slight decrease to 89% but Georgia remains within the national average of 86% reporting they feel safe in their neighborhood. \n3.G 2009-2010 data shows that 96% of individuals reported feeling safe at work or in their day program which was higher than the national average of 85%. 2010-2011 data shows no change but Georgia remains above the national average of 89% reporting they feel safe at work or in their day program. \n4. Rights and Choice: This data is collected annually either through the National Core Indicator Survey or the Person-Centered Review. 4.a Percentage of Individuals reporting that they are educated and assisted to learn about and fully exercise their rights. 4.b Percentage of Individuals reporting their home was entered without their permission. 4.c Percentage of individuals reporting they are allowed to use the phone or internet when they want to. 4.d Percentage of individuals reporting that their mail is opened without permission. 4.e Percentage of individuals reporting that they are treated with respect and dignity. 4.f Percentage of individuals reporting they have a choice of support and services. 4.g Percentage of individuals reporting that they decide how to spend free time. Results Summary and Analysis: 4.A In FY12, 83% of individuals reported being educated on and able to fully exercise their rights. This is a slight increase from 81% in FY11. 
4.B 2009-2010 data shows that 6% of individuals reported that their home had been entered without their permission, which is below the national average of 10%. 2010-2011 data shows only a slight increase to 7% with Georgia still remaining below the national average of 10%. 4.C 2009-2010 data shows that 96% of individuals reported being able to use the phone or internet when they wanted to. This was within the national average of 92%. 2010-2011 data shows a slight decrease to 95% but Georgia ranks top in all the states participating in the National Core Indicator Survey, with the national average being 91%. 4.D 2009-2010 data shows that 6% of individuals reported their mail was opened without their permission which was lower than the national average of 10%. 2010-2011 data shows a significant increase, 13%, with Georgia slightly above the national average of 12%. The Division regularly trains individuals/families and providers on individual rights. In future trainings, the Division will stress the importance that an individual be allowed to open their own mail if they are physically able to do so. 4.E In FY12, 97% of individuals reported that they are treated with respect and dignity. This is a slight increase from 96% in FY11. 4.F In FY12, 95% of individuals reported that they have a choice in supports and services. This is an increase from 91% in FY11. 4.G 2009-2010 data shows that 98% of individuals reported they decide how to spend their free time, which was higher than the national average of 91%. 2010-2011 data shows a decrease to 94%, however Georgia remains above the national average of 92%. \n23 \n \n The Division of Developmental Disabilities continues to focus its efforts on transitioning individuals from the State hospitals to the community. The Division will continue to rigorously monitor its key quality data to ensure the health, safety, and success of all individuals receiving DDD services and supports in the community. 
\nDBHDD QUALITY MANAGEMENT TRAINING PROGRAM \nThe need for training in quality management principles was identified for the Department's staff as a whole and the DBHDD Quality Management Training Program has been initiated. The training program is a collaborative effort between the Quality Management team and the DBHDD University staff. It incorporates webinars, an e-learning modular structure, and/or classroom style trainings as appropriate. \nThe first three e-learning modules (see list below) have been completed and converted into an elearning format by DBHDD University. These first three modules started pilot testing during January 2013. The target audience for the e-training modules is all DBHDD staff. Participation in the training program content will vary depending upon role, responsibility, and program or service within the Department. In the future, training modules will be developed and made available to community providers. \nIt is currently anticipated that the QM trainings will include but will not be limited to: \n Customer Focus  Introduction to Quality  DBHDD Quality Management Program  Introduction to Project Selection  Introduction to Quick Wins \u0026 Rapid Improvement Events  Project Documentation  Voice of the Customer \u0026 Stakeholder Analysis  Introduction to Establishing Measures  Root Cause Analysis  Data Analysis and Interpretation  How to Complete a PI Project  Case Management  Health Status Indicators \nOn August 1, 2012 the first classroom style training session on performing a Root Cause Analysis was completed for hospital based quality management staff and senior leadership who work in the inpatient setting. Additional training will take place in 2013 based on identified need. \nOTHER QUALITY MANAGEMENT ACTIVITIES \n1. 
Community and Hospital Incident Data  Summary Review for 2012 Background: It is the policy of DBHDD to ensure that individuals who receive services in state hospitals and in a variety of community settings do so in a safe and humane environment and \n24 \n \n that they are protected from abuse, neglect and exploitation. To accomplish this, DBHDD's Office of Incident Management and Investigations (OIMI) is responsible for receiving reports of deaths and critical incidents, reviewing the reports, and ensuring that investigations are conducted according to DBHDD policies. \nHospital and Community Incidents: The number and types of incidents required to be reported to OIMI differ for community settings and state hospitals; however, providers in both settings are required to self report critical incidents as defined by policy. Incidents may be investigated by the provider or by OIMI Investigators. \nThe following incident review covers death reports and critical incident reports received during 2012. The information is reported in numbers of incidents for state hospitals and community settings. Community settings are further categorized by incidents in behavioral health services and developmental disabilities services. \nHospital Incident Data State Hospitals reported almost 9,000 critical incident types for CY 2012. (Note: A single critical incident report may include multiple incident types.) The incident types reported most frequently were (1) aggressive acts to another individual-physical, (2) aggressive act to staff-physical, (3) accidental injuries, (4) falls, and (5) aggressive acts to self. \nThese five incident types account for almost 75% of the total hospital incident types. The aggressive acts and falls are tracked monthly through the Hospital System's aggregated Triggers and Thresholds Report for trending and intervention, as indicated. Additionally, each hospital maintains a Triggers and Thresholds report for their respective hospitals. 
Each month they analyze their data, address those areas for which interventions are appropriate, and report on those analyses and associated activities during each Hospital's QC meeting and also in the Hospital System Quality Council meetings. The aggregated version of the Triggers and Thresholds report also offers comparative analyses of data and opportunities for the hospitals to benchmark with one another. Strategies for critical incident reduction are also discussed and developed within the DBHDD Medical Executive Committee meetings during the quality management portion of those meetings. \nCommunity Incident Data  Behavioral Health Services Community behavioral health providers reported almost 1,300 critical incident types for 2012. The incident types requiring an investigation and reported most frequently were (1) hospitalization of an individual in a community residential program, (2) individual who is unexpectedly absent from a community residential program or day program, and (3) incident occurring in the presence of staff which required the intervention of law enforcement services. The Program and Executive Quality Councils began reviewing and analyzing this data in October 2012 and will regularly review this data going forward. \nCommunity Incident Data  Developmental Disabilities Services Community developmental disabilities providers reported almost 2,350 critical incident types for CY 2012. The types of incidents reported most frequently were (1) hospitalization of an individual in a community residential program, (2) individual injury requiring treatment beyond first aid, (3) incident occurring in the presence of staff which required intervention of law enforcement services, (4) alleged individual abuse-physical, and (5) alleged neglect. The Executive Quality Council began reviewing and analyzing this data in October 2012 and will regularly review this \n25 \n \n data going forward. 
\nFor both behavioral health services and developmental disabilities, the Department is implementing an additional level of review of premature mortality of individuals receiving community services. The review includes suicides and unexpected deaths. The review team is comprised of the Medical Director, a DBHDD Hospital Physician, the Director of Quality Management, the DD Director of Quality Assurance, the Director of Incident Management and Investigations, the Addictive Disease Services Assistant Executive Director, and a registered nurse. This review process was developed and implemented in October of 2012. Results of reviews will be reviewed by the CBH and DD Program Quality Councils starting in 2013. \n2. Complaints and Grievances In 2012, the Office of Public Relations (OPR) (formerly the Office of External and Legislative Affairs) received 280 complaints/grievances requiring the attention of state office, regional office and/or regional hospital staff. The cases were triaged and tracked for review, response and/or resolution. \nDepending on the nature of the concern, a case is assigned to either the state office, a regional office or to a regional hospital. The state office was assigned 20% of the 280 cases. Sixty-four percent (64%) were addressed by the regional offices and 16% were handled by the regional hospitals. \nComplaints and grievances received by OPR were initiated by a variety of stakeholders. Nineteen percent (19%) were forwarded to OPR by the Governor and Lt. Governor's Offices. Requests initiated by members of the Georgia General Assembly accounted for 16% of the cases. Approximately 38% of the reported concerns were initiated by families, consumers, friends, advocates or providers. 
\nOf the 280 complaints received in 2012, there were 59 issue categories that included addictive diseases; administration; community care; developmental disabilities; financial services; fraud and abuse; health care-personal care; general information about DBHDD programs; investigations; mental health; DBHDD contracts; medical records request; personal care homes; personnel; provider services; transportation and issues that were referred to another agency. \nComplaints and grievances included issues related to access to behavioral treatment and habilitation services; problems related to service delivery and supports; eligibility; abuse and neglect; self direction; prior authorization; exceptional rate funding; forensic services; inpatient treatment and evaluation; provider application, certification and enrollment. Approximately 44% of the constituent concerns pertained to developmental disability services and 41% to mental health services. Seventeen percent (17%) were categorized as other (e.g. provider network management and other state offices). \nThe top three primary issues of concern were related to developmental disabilities and mental health. The first category of concerns was related to eligibility for the New Options Waiver (NOW) and the Comprehensive Supports (COMP) Waiver. Fifty-five percent (55%) were received from family members, friends and legislators inquiring about waiver services for their loved one or constituent. The second category of concern was developmental disabilities self-directed services. Nineteen percent (19%) of 280 cases were attributed to family members experiencing difficulty understanding and managing their loved ones' waiver budgets. These cases were triaged \n26 \n \n to state office disability staff as well as regional staff to address each individual's concern. The third category of concern was in the mental health area and accounted for 18% of the complaints and grievances received. 
Individuals and family members complained about the need for mental health services for their loved one and the need for additional long term mental health services in their communities. All issues were triaged to the regional office where staff reviewed each case and addressed each individuals concern. \nIn 2013, the Office of Public Relations will identify other tools to enhance its database to allow for more robust reporting. OPR anticipates that data collected through the constituent services process will be used to identify trends and efficiencies within our service delivery system. \n3. Provider Network Analysis Results The Department engages in community behavioral health planning and Developmental Disability service planning that encompasses an array of services that will assist individuals in living a life in the community. This service array provides levels of care for individuals' who are identified as the target population as well as those who meet eligibility criteria for state supported services. Service planning is unique to the needs of each community and includes significant input from community members and service recipients. \nDuring 2012 each Region performed a Region specific analysis that identified current services. In addition, each Region identified specific service needs/gaps based upon community input. Each Region will be working towards minimizing their identified gaps. Examples of those gaps include: \"the need for a wider array of community services (adults, children and addictive diseases), expansion of service capacity, funding (both increased and flexible), improved coordination and communication between programs \u0026 services, and system enhancements.\" \n4. Implementation and Results of Practice Guidelines: \n Beck Initiative Overview The Beck Initiative is a collaborative clinical, educational and administrative partnership between the Aaron T. 
Beck Psychopathology Research Center of the University of Pennsylvania (PENN) and Georgia's Department of Behavioral Health and Developmental Disabilities (DBHDD) to disseminate Cognitive Therapy (CT) training and consultation throughout the DBHDD network. Through intensive workshops and ongoing consultation, tangible tools are placed in the hands of those working with people in recovery across the network to provide quality care. A continuity of care system, informed by CT, will be developed across the state of Georgia to help those in recovery to integrate back into their communities and have an increased quality of life. The initiative began in Region 4 on July 1, 2012 with a series of trainings and consultations. Additionally, it is expected that an evaluation plan will be finalized in February 2013 wherein the outcome measures will be agreed upon to determine the effectiveness of the training. \n Suicide Prevention Program Best Practice Initiative: During 2012, efforts were initiated to institute a DBHDD statewide suicide prevention program with a focus on adult community behavioral health providers (CBHP). \n27 \n \n The Department's Suicide Prevention Program, reviewed and analyzed suicide deaths from July 2010 through 2011 looking for systemic issues that needed to be addressed. Prominent among the issues identified were: \n lack of awareness of risk of suicide,  lack of common language to describe and report suicidal behavior,  consistent screening, and  safety planning and monitoring consumers at high risk of suicide between programs \nand systems of care. The Suicide Prevention Program (SPP) then identified evidence based or best practice models to tackle each of these systemic issues. The SPP is in the process of developing best practice policies and a training program for DBHDD and DBHDD providers. 
Specific best practices that will be utilized are the Uniform Definitions related to suicide from the CDC, the Columbia Suicide Severity Rating Scale (C-SSRS) and Drs. Stanley and Brown's Suicide Prevention Safety Plan and monitoring model. \nThe SPP in collaboration with the DBHDD Division of Training and Organizational Development is working with Drs. Posner and Stanley from Columbia University Medical School to develop training that can be disseminated statewide on these best practices. Training has been piloted throughout Georgia. Additionally, onsite consultation is offered and provided to any community provider who has had a consumer who died by suicide. \n5. Behavioral Health Contracted External Review Organization APS Healthcare is the External Review Organization for DBHDD's behavioral health services. Many of the functions and products provided by this vendor contribute to the Department's quality management of the Provider Network. These elements include training, technical assistance, prior authorization for services, provider audits, and provider billing and service provision data. The information that most informs management of the network is the onsite audits. These are onsite provider audits conducted approximately twice per year for each community service provider. Audits are conducted by licensed clinicians and review provider's documentation specific to: \n Assessments \n Treatment planning \n Programmatic integrity \n Documentation \n Billing \nThe overall result of the audit is a provider-specific summary of audit findings. The audit summary provides both strengths and areas of improvement for the provider. The summaries are posted publicly on APS's website, www.apsero.com. Each of the audit summaries is shared with the provider, Department staff, and staff from The Department of Community Health. 
The findings of these audits are tracked and used in several ways: \n APS uses the audit findings to offer direct technical assistance with the provider while on site for the audit. \n Providers are expected to incorporate the findings into their own internal quality assurance and improvement systems. \n Aggregate audit trends are used by the Department to target specific needs for training and technical assistance. \n APS monitors reoccurring issues and findings in subsequent audits to provide direct feedback to the provider. 28 \n \n  Repeated poor audit scores are used to manage the provider's contract compliance through adverse action or termination if indicated. \nIn addition to audits, APS provides a variety of reports that reflect the utilization of services by provider, region, and statewide. This information is also available for public view on the APS website. The Department's State and Regional offices use this information to identify and address providers or services that are outliers by over or under usage. This information informs the Department of service areas that may need stronger utilization review and tighter management. It also informs us of geographical areas that are both rich and poor with regards to service access and availability. \nAPS Healthcare authorizes services in their Care Management Office; this function also impacts quality of services delivered through these various roles: \n The Ambassador Program is an opportunity for new providers to spend both online and telephonic time with a licensed clinician (Care Manager) who provides orientation to the new providers and new agency staff. This includes support in navigating the DBHDD Provider Manual and policies to provide guidance on consumer eligibility, prior authorization requests, and appropriate documentation of service delivery. 
\n Care Managers review a random sample of authorization requests as well as those flagged or due to a disparity between assessment results and the services requested. This review often includes a phone conversation with the provider staff to provide a clinical review and technical assistance regarding the appropriate services, intensity, and documentation. \n Some high intensity services such as Assertive Community Treatment and Inpatient Hospitalization require a manual review by a Care Manager for all authorizations. This review offers the opportunity to provide one-on-one technical assistance to support the provider in serving the right person with the right service and encourages an ongoing relationship between the provider and their designated Care Manager. This review and technical assistance impacts the quality of the services provided by the agency and ensures that service resources are effectively allocated. \n6. Hospital System Quality Management - In addition to the quality management activities described in this document, the DBHDD Hospital System has implemented a number of initiatives in the areas of policy development, training, information system development, performance measurement, clinical supervision. A substantial auditing system continues to be developed utilizing the Plato Data Analyzer system and staff are being trained and given inter-rater reliability testing to assure consistency and data integrity. That system allows for data entry and analysis at the local hospital level as well as the Hospital System level. Results of these activities are reviewed in Quality Council meetings. \n Additionally, the Department Medical Executive Committee, under the direction of the Department Medical Director, maintains oversight of peer review and medical staff credentialing for the state hospitals, the description of which is detailed in the medical staff bylaws. 
That body is also responsible for assuring that consulting and agency physicians, nurses and other professional staff are properly credentialed. \n Each hospital is also responsible for performing utilization review activities for their respective hospitals. Those activities involve the review of appropriateness for admission and continued stay. Utilization review staff coordinate and communicate with the Hospital \n29 \n \n Clinical Director, as appropriate, when data show patterns of inappropriate utilization and with Hospital social workers and Regional Office staff when individuals no longer meet continued stay criteria. \n An example of a performance improvement project related to hospital service utilization is focused on reducing the rate of readmission of consumers within 30 days of discharge. Each of the hospitals has, on staff, a Readmission Review Coordinator (RARC) whose job it is to evaluate the factors that contributed to the rapid readmission and to work with treatment teams to develop treatment and discharge plans that will result in longer, more supportive community stays. Along with the RARC activities staff responsible for the hospitals' utilization review processes, under the direction of each facility's clinical director, evaluate the appropriateness of admissions to each facility. Those activities, along with a major commitment to community based services has succeeded, over the last several years in virtually eliminating the over utilization of those inpatient psychiatric services. Also, they have succeeded in maintaining, during the past year, a 30-day readmission rate of 8.5%. That rate is a substantial improvement over the 13% rate that the Hospital System maintained several years ago. Efforts continue to reduce that rate even further. \n7. Division of DD QM Reviews of Individuals Served is performed through Support Coordination Monitoring and the Person Centered Review Process. 
The Person Centered Review (PCR) process is designed to assess the overall quality of the supports and services a particular person receives through interviews with the individual and his or her provider(s), record reviews, and observations. The process explores the extent to which the system enhances the person's ability to achieve self-described goals and outcomes, as well as individuals' satisfaction with the service delivery system. Each PCR includes a face to face interview with a randomly selected individual using the National Core Indicator (NCI) individual survey tool and additional interview questions using an EQIO Individual Interview Instrument (III). \nIn addition to the interview, records of the most recent twelve (12) months of services received by the person are reviewed and used to help determine the person's achievement of goals that matter most. Onsite observations are conducted for individuals who receive day supports or residential services to observe the person in these environments, the individual's reaction to supports, and how well supports interact with the person. Interviews with the individual's support coordinator and provider/staff further assist the consultant in gathering information to help determine how the person is being supported and the person's knowledge of the supports and services being provided. A review of the person's central record is also part of this process and includes a review of how well the person's Individual Support Plan (ISP) reflects the person, including goals, talents, strengths and needs. A total of 480 PCRs are completed annually. Individual participation in any interview as part of the QA process is voluntary. Individuals may refuse to participate for any reason and may also have anyone present at the interview they choose to have present. 
\nSpecific findings and recommendations based on the Person Centered Reviews, and actions being taken by the DDD can be found in Section 10 (below) and in Attachment 1 GQMS FY12 Annual Report. \n8. Division of DD QM Reviews of Providers: Quality Enhancement Provider Reviews (QEPR) are a significant part of the Georgia Quality Management System for Developmental Disabilities. \n30 \n \n The QEPR is used to evaluate the effectiveness of the provider's supports and services, organizational systems, records, and compliance with Division of DD standards for policy and procedures, as well as staff training and qualifications. The intent of the GQMS contract is for the EQIO to complete a QEPR with all providers at least one time over the course of five years. During each contract year, 39 providers and one support coordinator agency will participate in a QEPR. For each provider, a representative sample of individuals is chosen to participate in an interview using the III, which begins the QEPR process and helps determine what individuals receiving services perceive as strengths and/or areas needing improvement within the provider's service delivery system. Other resources used during the QEPR to gather information regarding the provider's supports and services are individual record reviews, onsite observations for individuals receiving day supports and/or residential services, and administrative review of the organization's policies and procedures, as well as staff training and qualifications, and provider/staff interviews. Information from the PCR interviews will be used to enhance the QEPR findings, as appropriate, to help support the provider in identifying trends, strengths, and areas needing improvement. The QEPR was implemented in January 2009. \nSpecific findings and recommendations based on the Person Centered Reviews, and actions being taken by the DDD can be found in Section 10 (below) and in Attachment 1 GQMS FY12 Annual Report. 
The Division also has four Support Coordination agencies which monitor providers and advocates for individuals. Support Coordinators assure the completion of the written Individual Service Plan (ISP) document and any revisions. Support Coordinators are also responsible for monitoring the implementation of the ISP and the health and welfare of participants. Monitoring includes direct observation, review of documents, and follow up to ensure that service plans have the intended effect and that approaches to address challenging behaviors, medical and health needs and skill acquisition are coordinated in their approach and anticipated outcome. Monitoring includes reviewing the quality and outcome of services. Support Coordinators are also responsible for the ongoing evaluation of the satisfaction of waiver participants and their families with the ISP. \nSupport Coordination Agencies use a summary rating system to report findings from their monitoring efforts. The summary rating system is designed to reflect a point-in-time status of an individual's services related to health, safety and service issues. The primary focus is on health and safety issues but the support coordinator must also evaluate the appropriateness and adequacy of services. A description can be found in Attachment 2 Summary Rating Guidelines for Primary Services. \n9. Division of Developmental Disabilities (DDD) Quality Management Program As stated above, the Division of Developmental Disabilities has contracted with a QIO since 2008. The Georgia Quality Management System for Developmental Disabilities (GQMS) contract mandates that each provider rendering services through the Medicaid waivers to individuals with developmental disabilities has one annual review over the course of five years. Therefore, 40 providers are reviewed each year through the Quality Enhancement Provider Review (QEPR) process (39 service providers and one Support Coordination Agency). 
Providers who receive the QEPR are randomly selected each year and 480 individuals for the Person Center Reviews (PCR) are randomly selected from the caseloads of the 39 service providers. The PCR sample is stratified by region and providers, meaning providers are first randomly selected proportionately from each region, and then individuals are randomly selected from those providers, excluding individuals who have had a PCR. \n31 \n \n For the QEPR process, a sample of individuals, excluding individuals who have had a PCR, are randomly selected from the 39 service providers, with at least one and a maximum of 34 individuals per provider. The sample is stratified by service to ensure all services are represented. In addition to the sample of individuals for the QEPR, staff personnel records are reviewed for each service offered by the provider. A random sample of staff rendering supports and services, including sub-contractors, are selected from a list of all staff working with the provider. A minimum of two staff per service are selected, or 25 percent, whichever is greater. A maximum of 30 records are selected for review. For Support Coordination, up to 30 records are randomly sampled from the support coordinators rendering services. \nIndividuals from both the PCR and QEPR samples participate in the Individual Interview Instrument (III) activity and Individual Support Plan Quality Assurance Checklist (ISP QA). Both processes also include a Provider Record Review (PRR), Staff/Provider Interview (SPI), and onsite observations of day and/or residential programs. \nIn addition to the PCRs completed for the sample of individuals, as described above, the Division's (EQIO) has implemented processes to complete PCRs for Individuals Recently Transitioned to the Community (IRTC) from an institutional setting. 
Many of these transitions are the result of an agreement between the State of Georgia and the United States Department of Justice to accommodate individuals with developmental disabilities to live in the community and to provide services necessary for them to do so. Individuals from this transition process participate in all aspects of the PCR with the exception of the NCI interview. IRTC findings are analyzed and presented separately from the findings for individuals already established in the community. \nA DDD Quality Management report (Attachment 1 GQMS FY12 Annual Report) is generated annually that includes aggregate data from the Person-Centered Reviews, Quality Enhancement Provider Reviews, and the Follow Up with Technical Assistance Consultations. The report also contains recommendations based on the data. The Division uses this report and its recommendations in its quality improvement efforts for the next year. \n10. Discussion from the FY 12 DDD ANNUAL QUALITY MANAGEMENT REPORT (Attachment 1): Attachment 1, the GQMS FY12 Annual Report was submitted to the DDD on August 31st, 2012. The Division is currently reviewing the data and the recommendations. The Division has already taken steps to address certain recommendations, but all quality improvement steps will be discussed in the next report. \nThe QIO completed 480 Person Centered Reviews (PCR) and 40 Quality Enhancement Provider Reviews. As part of these reviews, the EQIO consultants completed 961 interviews with individuals that included a random sample of 480 individuals who participated in the National Core Interview using the NCI Consumer Survey. Consultants also completed 514 Support Coordinator Record Reviews, 1,414 Provider Record Reviews, 927 Staff/Provider Interviews, 775 onsite observations of residential and day program facilities, and 40 Administrative Reviews. \nAn additional 203 individuals who were recently transitioned to the community (IRTC) from an institution participated in a PCR. 
Compared to individuals already established in the community, IRTC results indicate recently transitioned individuals were much more likely to have a profound \n32 \n \n intellectual disability, much more likely to live in a group home, and more likely to have an ISP written to support a Service Life. They were much less likely to be developing desired social roles, have choice of services and supports or be involved in the design of their service plan. Support Coordinator and provider records were much less likely to show they are included in the larger community or given choice of community services. In addition, IRTC results indicate goals on the ISP are less likely to be person centered and the HRST information is less likely to be updated as required. See Attachment 3 IRTC Report for CY 2012. \nThe Division's External Quality Review Organization made recommendations to the Division based on the findings of the report which are listed below: \nRecommendation 1: The Division of DD should explore how the transition planning process is implemented for individuals transitioning from an institution. The planning process should ensure the person has input and is being connected to the community as desired even prior to the transition. Comment: The Division will be conducting an evaluation of the current transition planning process to determine where quality improvement steps can be taken. \nRecommendation 2: Support Coordinators should review the ISP for each person transitioned from an institution and update the plan as necessary to ensure goals are person centered and ensure the HRST is adequately and appropriately completed as required or necessary. 
Comment: The Division is evaluating the provision of services and supports provided by Support Coordination Agencies (SCA) in an effort to better define the responsibilities of the SCAs \nRecommendation 3: Because outcome scores for people living in host homes tend to be higher, the Division should help ensure a variety of residential settings, specifically host homes, are available and presented as an option for newly transitioned individuals. This will help support the person in making an informed choice related to supports and services available. \nFace to face interview results across various demographics were similar to previous years, and results are fairly positive on average (90.2 %), an increase since Year 1 of the contract (83.2%). Year 4 results reflect a higher percent of outcomes met than the combined average for the previous three years, particularly in key areas of choice, having input into the design of the service plan and life's decisions, achieving outcomes and satisfaction with supports and services, health and safety, education about exercising rights, and community participation. In addition, although the previous two years ISP QA checklist results indicated a decline in the proportion of ISP written to support a Community Life, data for Year 4 indicate a shift up. \nProvider documentation has shown improvement since Year 3 in some critical areas: a person centered focus in provider documentation; medication oversight and management; offering individuals a choice of services and supports and allowing them to direct their services and supports; and identifying health and safety needs of individuals served. Support coordinator documentation has also improved in key areas such as showing a person centered focus in the documentation and ensuring human and civil rights for the person are maintained. \nExtensive statistical analysis has not been completed to determine all the factors that may be positively impacting outcomes for individuals. 
However, a recently completed QI study suggests that adequately implementing policies and procedures (measured through the Provider Record) improves outcomes.
The workgroup should also develop a training curriculum for medication administration that providers can use for staff who monitor the self administration of medications for individuals and/ or develop best practice guidelines providers can use to develop internal quality assurance checks to ensure accuracy of the implementation of these procedures. \nFindings continue to show that individuals who receive supported employment have better outcomes than individuals who receive any other service. Community integration and development of social roles are improved when individuals are employed in integrated settings. \nRecommendation 7: The state should continue to emphasize supported employment initiatives (becoming an Employment First state, the Alliance for Full Participation) and access to community resources. Develop a stakeholder workgroup to identify barriers to this with the outcome being a plan and recommendations to the State to overcome the barriers. Comment: The Division is implementing steps to meet this recommendation. Georgia will become an Employment First state. Georgia will convene an Alliance for Full Participation (AFP) State Team. The AFP State Team will assist the division with drafting and implementing policy and procedures to increase the number of individual in supported employment services. This state team will be comprised of developmental disabilities stakeholders who will also engage the provider and business community on the benefits of integrated employment. It is a goal of the Department to increase the number and percentage of individuals in supported employment from 1345 to 2700 by July 1, 2015. \nRecommendation 8: Support the Statewide QI Council's initiative to try and educate individuals and families regarding the employment supports and services available. 
This could include an \n34 \n \n initiative requiring support coordination to educate individuals and family members not already involved with employment services using the supported employment brochure and guide. Comment: Steps are being taken to educate individuals and families on supported employment. A full description of the work done by the Statewide QI Council in this area can be found in Attachment 1 GQMS FY12 Annual Report. \nOther findings are similar to results reported in previous years. Results continue to reflect possible issues surrounding health and/or safety, Community Access/Integration, and Person Centered Practices. \nHealth and Safety:  HRST is not updated in the ISP as needed (48.6% present in ISP QA Checklist).  Annual informed consent for psychotropic medications is present (24.7% present in ISP QA Checklist).  Behavior support plan, crisis plan, and safety plan are signed (54.3% present in ISP QA Checklist).  Medical support section of the ISP is fully completed including plans for an emergency (50.6% present in ISP QA Checklist).  Although higher than in Year 3, only 31 percent of provider records reviewed documented a means to identify health status and safety needs.  Approximately 37 percent of providers scored not met on the Qualification and Training element: indicating employees are educated on medication administration and proper laws and regulations related to medication oversight were followed, or best practices were used.  Health and Safety represented over 50 percent of the FUTAC Focused Outcome Areas addressed during the consultation. \nCommunity Access:  19 percent of individuals interviewed were not developing or being supported to maintain desired social roles.  The proportion of ISPs written to support a Community Life has increased since Year 3 but remains low, at 7.5 percent.  Only 26 percent of provider records indicated the person had choice of community services and supports.  
Approximately 52 percent of support coordinator records documented how individuals are included in the larger community.  QEPR recommendations for half of the 40 providers reviewed to date this year indicated a need to identify ways to expose individuals to new opportunities in the community. \nPerson Centered Practices:  Over 190 individuals (20 percent) were not involved in the routine review of their supports and services.  Approximately 24 percent of ISPs did not contain goals that were all person centered and 32 percent of the service plans had two or fewer expectations met in the checklist section indication goals are person centered.  Provider Record Reviews often do not use a person centered focus in documentation (33.9% present). \n35 \n \n  Less than half (47%) of the Support Coordinator Record Reviews showed person centered documentation. \n Several recommendations provided during the QEPR address person centered practices such as regularly reviewing progress with the person, documenting that information is reviewed by the person, and document how individuals are being included in the planning process for outings. \nRecommendation 9: The training developed on social roles and community connections should be a mandatory training for all staff, and should be competency based. Comment: The Division will examine the feasibility of implementing this recommendation, and other quality improvement steps needed to address issues in Health and Safety, Community Integration, and Person-Centered Practices \nRecommendation 10: With the development of the new ISP process and template submitted to the Division of DD, it is recommended the State begin developing strategies to implement this new system which by design ensures the person's goals and needs change as the person desires and/or as necessary. Comment: The Division will be developing new case management information systems in FY13. 
The new ISP will be incorporated into the new system \nRecommendation 11: The EQIO nurse provided training across the state specific to medications, possible reactions to medications, and medication administration. These standards should be tracked through the next reporting period and a new and possibly revised training session offered if necessary. Comment: The data appears to reflect some differences in outcomes and results for individuals receiving services through the NOW versus the COMP waivers. The COMP waiver is designed for people who need residential services and these individuals showed better health and safety outcomes than NOW recipients. However, they were less likely to be involved in the review of their supports and services, less likely to be educated on and exercise their rights, and less likely to have community access and involvement. In addition, they were more likely to have an ISP written to support a Service Life and provider documentation was less likely to have a person centered focus or to show the individuals was offered a choice of supports and services. An assumption might be made that because COMP services include Community Residential Alternative services which include more restrictive group home residential settings may be impacting the scores. \nRecommendation 12: It is not clear why differences exist between NOW and COMP waiver results. Perhaps the Division should revise the standards for the COMP waiver and ensure they more explicitly define how areas of choice and rights should be addressed. Comment: The Division will review the COMP waiver policies and make any needed changes. 
\n36 \n \n Appendix A \nThe Executive and Program Quality Council Membership \nas of December 31, 2012 \n \nThe DBHDD Executive Quality Council (EQC): \n \nCommissioner \nMedical Director Deputy Commissioner/COO \nDeputy Commissioner/Programs \nDirector of Hospital Operations/Assistant Commissioner for DD \nDeputy Assistant Commissioner/ADA Settlement Coordinator Addictive Disease Services Executive Director Director of Forensic Services \nDirector of Community Mental Health Services Director of Quality Management \n \nco-chair of the EQC, sets policy for the DBHDD, and provides oversight and guidance to QM activities. co-chair of the EQC and providers oversight and guidance to QM activities. responsible for the Departments fiscal management strategy and provides guidance to QM activities related to fiscal planning, budgeting, cash flow and other policy matters. oversees the provision of the Departments programs and activities related to behavioral health services/DD and provides guidance related to infrastructure and service delivery. is responsible for coordinating the needs of the hospitals and provides oversight for the DD population. Provides input to the EQC regarding operational successes, challenges and improvement projects. provides oversight for the ADA settlement agreement for Community Mental Health \u0026 DD as well as the quality management component. directs and manages DBHDDs addictive disease program and provides QM guidance related to Addictive Disease (AD) services. directs and manages DBHDD's forensic programs and services and provides QM guidance related to forensic services. directs and manages DBHDDs adult and child community mental health programs and provides QM guidance related to community based services. oversees DBHDDs quality management system . 
\n \n37 \n \n Appendix A \n \nProgram Quality Council Membership \nas of December 31, 2012 \n \nThe Hospital System Program Quality Council: \n \nDirector of Hospital Operations Regional Hospital Administrators Hospital Quality Managers \nDirector of Forensic Services \nDirector of Hospital System Quality Management Director of Quality Management \n \nchair of the Hospital System PQC, sets policy and provides oversight to the hospital and DD programs. chair their respective hospital's QCs and represents them on the Hospital System Program Council.. support the quality management activities of their respective hospitals and the system-wide committees and teams. directs and manages DBHDD's forensic programs and services and provides QM guidance related to forensic services. coordinates and supports the Hospital System Quality management activities. oversees DBHDDs quality management system. \n \n38 \n \n Appendix A \n \nProgram Quality Council Membership \nas of December 31, 2012 \n \nThe Behavioral Health Program Quality Council: \n \nAssistant Commissioner for Behavioral Health Deputy Assistant Commissioner/ADA Settlement Coordinator Addictive Disease Services Executive Director Addictive Disease Services Assistant Executive Director Director Community Mental Health Services Director Adult Mental Health Services Suicide Prevention Manager \nTransitions Director \nRegional Coordinator Representative Federally Funded Program Manger Director Quality Management \n \nis chair of the Behavioral Health PQC and provides direction and guidance related to community behavioral health services. provides oversight for the ADA settlement agreement for Community Mental Health \u0026 DD as well as the quality management component. directs and manages DBHDDs addictive disease program and provides QM guidance related to AD services. assists the addictive disease services executive director with managing the DBHDD addictive disease program. 
directs and manages DBHDDs adult and child community mental health programs and provides QM guidance related to community based services. provides administrative and clinical oversight for adult mental health services directs and manages DBHDDs suicide prevention program and provides guidance related to suicidality issues. provides input related to consumers transitioning from inpatient to community settings and acts as an information resource for the Regional Offices and hospitals. provides a regional perspective on behavioral health and DD issues. provides oversight for Federally funded programs such as jail diversion. oversees DBHDDs quality management system \n \n39 \n \n Appendix A \n \nProgram Quality Council Membership \nas of December 31, 2012 \n \nThe Developmental Disabilities Program Quality Council: \n \nAssistant Commissioner for Developmental Disabilities \nDD Director of Quality Assurance \nState Level DD Staff \nSelf-Advocates \nParents of Individual's receiving DD supports and services \nRepresentatives from DD Service Providers \nRepresentation from DD Support Coordination Agencies \nAdvisory Members (ERO) and DD Advocates such as the DD Director for the Georgia Advocacy Office and the State Director for Georgia ARC \n \noversees the provision of the Division of Developmental Disabilities programs and activities and provides guidance related to infrastructure and service delivery. oversees the Division of Developmental Disabilities quality management system and crisis response system assists with various duties related to areas such as training, provider compliance, supported employment, and others. individuals who are currently receiving DD services and supports and are able to participate in DD Council and bring their perspective to the council. parents of individuals who are currently receiving DD services and supports but are not able to participate in DD Council. During 2012 a parent of a consumer acted as co-chair for the DD council. 
oversees the operations of a DD Service Provider and brings the provider perspective/input to the DD Council. During 2012 a provider representative acted as co-chair for the DD council. oversees the operations of a DD Support Coordination Agency Provider and brings the support coordination perspective/input to the DD Council advisory/advocate members do not have voting privileges If there is more than the identified number of voting representatives from any of the stakeholder groups, those individuals will be considered a part of the Advisory Group. Involvement of such individuals will be on an as needed basis. \n \n40 \n \n Appendix B \n \nDBHDD Quality Structure \n \nDBHDD Executive Quality Council \n \nHospital System QC \n \nJoint \nInitiatives BH Community Services QC \n \nHospital System \n \nDevelopmental Disabilities QC \nCommunity System \n14 \n \n41 \n \n Appendix C \nQuality Management System Structure \n \nDBHDD Executive Quality Council \n \nHospital System QC \nDD QC \n \nBH QC \n \nState Hospitals (MH \u0026 DD) \nKey Performance Measures \u0026 Quality Improvement Activity \nReports \n \nDD Program Key Performance Measures \u0026 Quality Improvement Activity \nReports \n \nBehavioral Health Program \nKey Performance Measures \u0026 Quality Improvement Activity \nReports \n \n42 \n \n Appendix D \n \nHospital System Dashboard \n \nNRI Continuing Care Plan Created--Overall Numerator Denominator \n \nJan-12 \n \n520 \n \n571 \n \nFeb-12 \n \n541 \n \n575 \n \nMarch-12 April-12 May-12 June-12 July-12 Aug-12 Sept-12 Oct-12 Nov-12 Dec-12 \n \n619 \n \n630 \n \n543 \n \n559 \n \n570 \n \n581 \n \n527 \n \n559 \n \n311 \n \n322 \n \n343 \n \n350 \n \n297 \n \n301 \n \n253 \n \n279 \n \n109 \n \n112 \n \n520 \n \n571 \n \n% \n91% \n94% \n98% 97% 98% 94% 97% 98% 99% 91% 97% 91% \n \n43 \n \n Appendix D \n \nHospital System Dashboard \n \nIRP-Quality \nJan-12 \nFeb-12 \nMarch-12 April-12 May-12 June-12 July-12 Aug-12 Sept-12 Oct-12 Nov-12 Dec-12 \n \nNumerator 
Denominator \n \n% \n \n1466 \n \n2833 \n \n52% \n \n1846 \n \n2983 \n \n62% \n \n2181 2538 1751 2175 2113 2234 2428 2065 1669 \nIn Process \n \n3168 3465 2433 2770 2687 2858 2986 2470 1988 \nIn Process \n \n69% 73% 72% 79% 79% 78% 81% 84% 84% \nIn Process \n \n44 \n \n Appendix D \n \nHospital System Dashboard \n \nInpatient Consumer Survey-Outcomes \nJan-12 \nFeb-12 \nMarch-12 April-12 May-12 June-12 July-12 Aug-12 Sept-12 Oct-12 Nov-12 Dec-12 \n \nNumerator Denominator \n \n% \n \nNA \n \nNA \n \nNA \n \n132 \n \n195 \n \n68% \n \n125 116 115 156 139 \n97 61 70 37 In Process \n \n164 152 151 209 178 126 \n87 88 46 In Process \n \n76% 76% 76% 75% 78% 77% 70% 80% 80% In Process \n \n45 \n \n Appendix D \n \nHospital System Dashboard \n \nInpatient Consumer \n \nSurvey-Empowerment Numerator Denominator \n \n% \n \nJan-12 \n \nNA \n \nNA \n \nNA \n \nFeb-12 \n \n95 \n \n135 \n \n70% \n \nMarch-12 April-12 May-12 June-12 July-12 Aug-12 Sept-12 Oct-12 Nov-12 Dec-12 \n \n89 82 117 164 136 99 62 72 36 In Process \n \n113 \n \n79% \n \n102 \n \n80% \n \n149 \n \n79% \n \n208 \n \n79% \n \n177 \n \n77% \n \n129 \n \n77% \n \n87 \n \n71% \n \n88 \n \n82% \n \n45 \n \n80% \n \nIn Process In Process \n \n46 \n \n Appendix E \nCommunity Behavioral Health Dashboard \nAll Community Behavioral Health KPI data is specific to adult consumers. The statistical data contained within Appendix D is accurate as of 1/25/2013. 
\nHousing Stability \nPercent of GHVP individuals in stable housing (greater than 6 months) Target (77%) \n100% \n90% \n80% \n70% \nService Delivery Month \n \nPercent of Individuals Jan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \n% of GHVP individuals in stable housing \n \nConsumers \n \n\u003e 6 months \n \nNumerator Denominator % \n \nJan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \nN/A \n \nN/A \n \nN/A \n \nN/A \n \nN/A \n \nN/A \n \n19 \n \n215 \n \n91% \n \n23 \n \n205 \n \n89% \n \n27 \n \n226 \n \n88% \n \n27 \n \n243 \n \n89% \n \n33 \n \n272 \n \n88% \n \n38 \n \n313 \n \n88% \n \n42 \n \n380 \n \n89% \n \n42 \n \n468 \n \n91% \n \n47 \n \n526 \n \n91% \n \n47 \n \n583 \n \n92% \n \n47 \n \n Appendix E \n \nPercent of Individuals Jan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \nCommunity Behavioral Health Dashboard \n \nHousing Stability Con't \nPercent of GHVP individuals who left stable housing (reengaged/reassigned vouchers as indicated) where possible \nTarget (10%) \n50% 45% 40% 35% 30% 25% 20% 15% 10% 5% \nService Delivery Month \n \n% of GHVP individuals who left stable housing reengaged/reassigned where \n \nConsumers \n \npossible \n \nNumerator Denominator % \n \nJan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \nN/A \n \nN/A \n \nN/A \n \nN/A \n \nN/A \n \nN/A \n \nN/A \n \nN/A \n \n12% \n \nN/A \n \nN/A \n \n12% \n \nN/A \n \nN/A \n \nN/A \n \n7 \n \n31 \n \n23% \n \n8 \n \n38 \n \n21% \n \n14 \n \n47 \n \n30% \n \n14 \n \n51 \n \n27% \n \n16 \n \n55 \n \n27% \n \n16 \n \n63 \n \n25% \n \n15 \n \n90 \n \n17% \n \n48 \n \n Appendix E \n \nPercent of Providers Jan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \nCommunity Behavioral Health Dashboard \n \nSupported Employment \nPercent of adult mental health S.E. 
providers that meet caseload average of staff to consumer (ratio of 1:20) Target (85%) \n100% 90% 80% 70% 60% 50% 40% \n \nService Delivery Month \n \n% of adult mental health S.E. providers that meet caseload average of staff to \nconsumer ratio 1:20 \nJan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \nProvider - Sites \n \nNumerator Denominator \n \n3 \n \n3 \n \n3 \n \n3 \n \n3 \n \n3 \n \n3 \n \n3 \n \n3 \n \n3 \n \n3 \n \n3 \n \n17 \n \n23 \n \n18 \n \n23 \n \n17 \n \n23 \n \n18 \n \n23 \n \n17 \n \n23 \n \n19 \n \n23 \n \n% \n100% 100% 100% 100% 100% 100% 73.9% 78.3% 73.9 % 78.3% 73.9% 82.6 \n \n49 \n \n Appendix E \n \nCommunity Behavioral Health Dashboard \nSupported Employment Con't \nUnduplicated individuals 1st contact with an employer (within 30 days of enrollment) Target (50%) \n100% \n80% \n60% \n40% Oct-Dec 11 Jan-Mar 12 April-June 12 July-Sep 12 \n \nPercent of individuals, 1st contact within 30 days \n \n% of unduplicated individuals 1st contact with an employer within 30 days of \n \nConsumers \n \nenrollment \n \nNumerator Denominator % \n \nOct-Dec 11 Jan-Mar 12 April-June 12 July-Sep 12 \n \n17 \n \n32 \n \n53.1% \n \n55 \n \n85 \n \n64.7% \n \n42 \n \n55 \n \n76.4% \n \n100 \n \n142 \n \n70.4% \n \n50 \n \n Appendix E \n \nCommunity Behavioral Health Dashboard \n \nPercent of Consumers Jan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \nAssertive Community Treatment \nThe percent of ACT consumers who are enrolled within 3 days of referral Target (70%) \n100% 90% 80% 70% 60% 50% 40% 30% 20% \nService Delivery Month \n \n% of ACT consumers enrolled within 3 days of referral \nJan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \nConsumers \n \nNumerator \n27 36 43 47 33 55 69 62 56 141 157 109 \n \nDenominator \n73 93 94 107 114 87 108 104 92 196 223 179 \n \n% \n37.0% 38.7% 45.7% 45.7% 28.9% 63.2% 63.9% 59.6% 60.9% 71.9% 70.4% 60.9% \n 
\n51 \n \n Appendix E \n \nCommunity Behavioral Health Dashboard \n \nAssertive Community Treatment Con't \nPercent of ACT consumers admitted to a Psychiatric Hospital (within the past month) Target (7%) \n10% 8% 6% 4% 2% 0% \n \nPercent of Admissions Jan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \nService Delivery Month \n \n% of ACT consumers admitted to a Psychiatric hospital within the past \n \nConsumers \n \nmonth \n \nNumerator Denominator % \n \nJan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \n92 \n \n926 \n \n9.9% \n \n65 \n \n1024 \n \n6.3% \n \n73 \n \n1116 \n \n6.5% \n \n89 \n \n1175 \n \n7.6% \n \n80 \n \n1238 \n \n6.5% \n \n87 \n \n275 \n \n6.8% \n \n102 \n \n1771 \n \n6.6% \n \n111 \n \n1699 \n \n7.4% \n \n99 \n \n1405 \n \n7.0% \n \n81 \n \n1191 \n \n6.8% \n \n85 \n \n1170 \n \n7.3% \n \n82 \n \n1169 \n \n7.0% \n \n52 \n \n Appendix E \n \nCommunity Behavioral Health Dashboard \n \nAssertive Community Treatment Con't \nAverage # of jail/prison days utilized (per enrolled ACT consumer) Target (1.0 days) \n2 \n \nAverage number of jail/prison days Jan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \n1 \n \n0 Service Delivery Month \n \nAverage # of jail/prison days utilized per enrolled ACT consumer \nJan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \nConsumers \n \nNumerator Denominator % \n \n669 \n \n926 \n \n0.722 \n \n653 \n \n1024 \n \n0.638 \n \n910 \n \n1116 \n \n0.815 \n \n1050 \n \n1175 \n \n0.894 \n \n896 \n \n1238 \n \n0.724 \n \n713 \n \n1275 \n \n0.559 \n \n1786 \n \n1771 \n \n1.008 \n \n1624 \n \n1699 \n \n0.956 \n \n1459 \n \n1651 \n \n0.884 \n \n1605 \n \n1555 \n \n1.032 \n \n1435 \n \n1674 \n \n0.857 \n \n959 \n \n1387 \n \n0.691 \n \n53 \n \n Appendix E \n \nCommunity Behavioral Health Dashboard \n \nIntensive Case Management \nPercent of ICM consumers with a Psychiatric Inpatient 
Admission \n(within the past month) Target (10%) or less \n \nPercent of Consumers Jan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \n15% 10% 5% 0% \n \nService Delivery Month \n \n% of ICM consumers with a Psychiatric inpatient admission within the past \n \nConsumers \n \nmonth \n \nNumerator Denominator % \n \nJan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \nN/A \n \nN/A \n \n2.4% \n \nN/A \n \nN/A \n \n2.6% \n \n5 \n \n192 \n \n2.6% \n \n5 \n \n173 \n \n2.9% \n \n1 \n \n157 \n \n0.6% \n \n3 \n \n163 \n \n1.8% \n \n5 \n \n102 \n \n3.4% \n \n5 \n \n170 \n \n2.9% \n \n10 \n \n217 \n \n4.6% \n \n0 \n \n207 \n \n0.0% \n \n8 \n \n231 \n \n3.5% \n \n6 \n \n215 \n \n2.8% \n \n54 \n \n Appendix E \n \nCommunity Behavioral Health Dashboard \nIntensive Case Management Con't \nPercent of ICM consumers housed (non homeless) \n(within the past month) Target (90%) \n100% \n \nPercent of Consumers Jan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \n90% \n \n80% Service Delivery Month \n \n% of ICM consumers housed (non homeless) within the past month \nJan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \nConsumers \n \nNumerator Denominator % \n \n172 \n \n185 \n \n93.0% \n \n163 \n \n175 \n \n93.1% \n \n165 \n \n172 95.9% \n \n147 \n \n165 \n \n89.1% \n \n144 \n \n155 92.6% \n \n112 \n \n125 \n \n89.6% \n \n147 \n \n155 94.8% \n \n160 \n \n177 90.4% \n \n191 \n \n217 88.0% \n \n190 \n \n207 91.8% \n \n214 \n \n231 92.6% \n \n206 \n \n215 95.8% \n \n55 \n \n Appendix E \n \nCommunity Behavioral Health Dashboard \n \nIntensive Case Management Con't \nAverage # of jail/prison days utilized \n(per enrolled ICM consumer) Target (0.50 days) \n1 \n \nAverage number of jail/prison days Jan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \n0 Service Delivery Month \n \nAverage # of jail/prison days 
utilized per enrolled ICM consumer \nJan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \nConsumers \n \nNumerator \n46 44 6 28 6 23 1 10 43 63 11 56 \n \nDenominator % \n \n185 \n \n0.249 \n \n175 \n \n0.251 \n \n192 \n \n0.031 \n \n173 \n \n0.162 \n \n157 \n \n0.038 \n \n163 \n \n0.067 \n \n199 \n \n0.043 \n \n232 \n \n0.043 \n \n242 \n \n0.178 \n \n278 \n \n0.227 \n \n267 \n \n0.041 \n \n281 \n \n0.199 \n \n56 \n \n Appendix E \n \nCommunity Behavioral Health Dashboard \n \nCommunity Support Teams \nPercent of CST consumers with a Psychiatric Inpatient Admission (within the past month) Target (10%) or less \n30% \n \nPercent of Consumers Jan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \n20% 10% \n \n0% \n \nService Delivery Month \n \n% of CST consumers with a Psychiatric inpatient admission \nwithin the past month \nJan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \nNumerator \nN/A N/A \n3 3 1 4 1 1 5 3 5 7 \n \nConsumers \nDenominator \nN/A N/A 62 63 45 26 11 11 18 38 42 67 \n \n% \n4.8% 4.8% 4.8% 4.8% 2.2% 15.4% 9.1% 9.1% 27.8% 7.9% 11.9% 10.4% \n \n57 \n \n Appendix E \n \nCommunity Behavioral Health Dashboard \n \nCommunity Support Teams Con't \nPercent of CST consumers housed (non homeless) (within the past month) Target (90%) \n100% \n \nPercent of Consumers Jan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \n90% \n \n80% Service Delivery Month \n \n% of CST consumers housed (non homeless) within the past month \nJan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \nConsumers \n \nNumerator \n61 61 61 44 26 24 11 18 33 47 65 75 \n \nDenominator \n62 62 61 44 26 24 11 18 35 48 65 75 \n \n% \n98.4% 98.4% 100.0 % 100.0% 100.0% 100.0% 100.0% 100.0% 94.3% 97.9% 100% 100% \n \n58 \n \n Appendix E \n \nCommunity Behavioral Health Dashboard \n \nAverage number of Jail/Prison days 
utilized Jan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \nCommunity Support Teams Con't \nAverage # of jail/prison days utilized \n(per enrolled CST consumer) Target (0.75 days) \n2.25 \n1.5 \n0.75 \n0 \nService Delivery Month \n \nAverage # of jail/prison days utilized \n \nConsumers \n \nper enrolled CST consumer \n \nNumerator Denominator \n \n% \n \nJan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \n124 \n \n62 \n \n2.000 \n \n116 \n \n62 \n \n1.871 \n \n46 \n \n62 \n \n0.742 \n \n27 \n \n63 \n \n0.429 \n \n0 \n \n45 \n \n0.000 \n \n0 \n \n26 \n \n0.000 \n \n21 \n \n11 \n \n1.909 \n \n0 \n \n18 \n \n0.000 \n \n0 \n \n46 \n \n0.000 \n \n0 \n \n58 \n \n0.000 \n \n0 \n \n80 \n \n0.000 \n \n18 \n \n99 \n \n0.182 \n \n59 \n \n Appendix E \n \nPercent of Consumers \n \nCommunity Behavioral Health Dashboard \nCase Management \nPercent of CM consumers with a Psychiatric Inpatient Admission (within the past month) Target (10%) or less \n20% \n \n10% \n \nJan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 \nJul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \n0% \n \nService Delivery Month \n \n% of CM consumers with a Psychiatric inpatient admission \nwithin the past month \nJan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \nConsumers \n \nNumerator Denominator % \n \nN/A \n \nN/A \n \n2.5% \n \nN/A \n \nN/A \n \n1.7% \n \n1 \n \n129 \n \n0.8% \n \n3 \n \n90 \n \n3.3% \n \n3 \n \n141 \n \n2.1% \n \n0 \n \n148 \n \n0.0% \n \n5 \n \n132 \n \n3.8% \n \n4 \n \n177 \n \n2.3% \n \n8 \n \n238 \n \n3.4% \n \n12 \n \n303 \n \n4.0% \n \n15 \n \n347 \n \n4.3% \n \n9 \n \n406 \n \n2.2% \n \n60 \n \n Appendix E \n \nCommunity Behavioral Health Dashboard \n \nMonthly Program Reports Jan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \nCase Management Con't \nPercent of CM consumers housed (non homeless) (within the past month) Target (90%) \n100% \n 
\n90% \n \n80% \n \nService Delivery Month \n \n% of CM consumers housed (non homeless) within the past month \nJan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \nConsumers \n \nNumerator \n84 109 128 70 130 132 183 219 263 268 404 426 \n \nDenominator \n84 109 128 70 130 133 188 223 269 273 416 433 \n \n% \n100.0% 100.0% 100.0% 100.0% 100.0% 99.2% 97.3% 98.2% 97.8% 98.2% 97.1% 98.4% \n \n61 \n \n Appendix E \n \nCommunity Behavioral Health Dashboard \n \nAverage number of jail/prison days utilized Jan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \nCase Management Con't \nAverage # of jail/prison days utilized (per enrolled CM consumer) Target (0.25 days) \n0.75 \n \n0.5 \n \n0.25 \n \n0 \n \nService Delivery Month \n \nAverage # of jail/prison days utilized per enrolled CM consumer \nJan-12 Feb-12 Mar-12 Apr-12 May-12 Jun-12 Jul-12 Aug-12 Sep-12 Oct-12 Nov-12 Dec-12 \n \nConsumers \n \nNumerator \nN/A N/A N/A N/A N/A N/A \n1 43 99 126 133 215 \n \nDenominator % \n \nN/A \n \nN/A \n \nN/A \n \nN/A \n \nN/A \n \nN/A \n \nN/A \n \nN/A \n \nN/A \n \nN/A \n \nN/A \n \nN/A \n \n223 \n \n0.004 \n \n261 \n \n0.165 \n \n326 \n \n0.304 \n \n402 \n \n0.313 \n \n469 \n \n0.284 \n \n525 \n \n0.410 \n \n62 \n \n Appendix E \n \nPercent of Consumers \n \nCommunity Behavioral Health Dashboard \n \nAddictive Disease \nPercent of Adult consumers who abstain from use or experience reduction in use (while in treatment) Target (40%) \n \n100% \n \n90% \n \n80% \n \n70% \n \n60% \n \n50% \n \n40% \n \n30% \n \n20% \n \n10% \n \n0% \n \nFY09 \n \nFY10 \n \nFY11 \n \nFY12 \n \nFiscal Year \n \n% of adult consumers who abstain from use or experience reduction in \nuse while in treatment \n \nConsumers Numerator Denominator % \n \nFY 09 \n \nN/A \n \nN/A \n \n38.0% \n \nFY 10 \n \n11,017 \n \n28,853 38.0% \n \nFY 11 \n \n9,782 \n \n24,656 39.7% \n \nFY 12 \n \n10,457 \n \n23,455 45.0% \n \n63 \n \n Appendix E \n 
\nCommunity Behavioral Health Dashboard \n \nAddictive Disease Con't \nPercent of Youth consumers who abstain from use or experience reduction in use (while in treatment) Target (56%) \n \nPercent of Consumers \n \n100% \n \n90% 80% \n \n70% 60% \n \n50% \n \n40% 30% \n \n20% 10% \n \n0% \n \nFY09 \n \nFY10 \n \nFY11 \n \nFY12 \n \nFiscal Year \n \n% of youth consumers who abstain from use or experience reduction in \n \nConsumers \n \nuse while in treatment \n \nNumerator Denominator % \n \nFY 09 \n \nN/A \n \nN/A \n \n45.0% \n \nFY 10 \n \n716 \n \n1,334 \n \n54.0% \n \nFY 11 \n \n595 \n \n1,067 \n \n56.0% \n \nFY 12 \n \n329 \n \n571 \n \n58.0% \n \n64 \n \n Appendix E \n \nCommunity Behavioral Health Dashboard \nCustomer Satisfaction Survey \nPercent of individuals receiving ADA services and that are satisfied with the services they are receiving Target (90%) \n100% 90% 80% 70% 60% 50% 40% 30% 20% 10% 0% \nApril-Sep 2012 \n \nPercent of Individuals \n \n% of individuals receiving ADA services and that are satisfied with \nthe services they are receiving \n \nConsumers Numerator Denominator % \n \nApril-Sep 2012 \n \n40 \n \n51 80.4% \n \nOctober-March 2012/2013 \n \nN/A \n \nN/A \n \nN/A \n \n65 \n \n Appendix E \n \nCommunity Behavioral Health Dashboard \n \nCustomer Satisfaction Survey Con't \nPercent of individuals receiving ADA services which feel their quality of life has improved as a result of services Target (90%) \n100% 90% 80% 70% 60% 50% 40% 30% 20% 10% 0% \nApril-Sep 2012 \n \nPercent of Individuals \n \n% of individuals receiving ADA \n \nservices which feel their quality of \n \nConsumers \n \nlife has improved as a result of \n \nservices \n \nNumerator Denominator % \n \nApril-Sep 2012 \n \n40 \n \n50 \n \n80.0% \n \nOctober-March 2012/2013 \n \nN/A \n \nN/A \n \nN/A \n \n66 \n \n Appendix E \n \nCommunity Behavioral Health Dashboard \n \nCustomer Satisfaction Survey Con't \nPercent of individuals receiving ADA services which feel the location 
of service is convenient for them Target (90%) \n100% 90% 80% 70% 60% 50% 40% 30% 20% 10% 0% \nApril-Sep 2012 \n \nPercent of Individuals \n \n% of individuals receiving ADA services which feel the location of \n \nConsumers \n \nservice is convenient for them \n \nNumerator Denominator % \n \nApril-Sep 2012 \n \n44 \n \n52 \n \n84.6% \n \nOctober-March 2012/2013 \n \nN/A \n \nN/A \n \nN/A \n \n67 \n \n Appendix E \n \nCommunity Behavioral Health Dashboard \n \nCustomer Satisfaction Survey Con't \nPercent of individuals receiving ADA services which feel staff treats them with respect Target (90%) \n100% 90% 80% 70% 60% 50% 40% 30% 20% 10% 0% \nApril-Sep 2012 \n \nPercent of Individuals \n \n% of individuals receiving ADA services which feel staff treats \nthem with respect \n \nConsumers Numerator Denominator % \n \nApril-Sep 2012 \n \n45 \n \n52 \n \n86.5% \n \nOctober-March 2012/2013 \n \nN/A \n \nN/A \n \nN/A \n \n68 \n \n Appendix E \n \nPercent of Individuals \n \nCommunity Behavioral Health Dashboard \nCustomer Satisfaction Survey Con't \nPercent of individuals receiving ADA services which state they regularly discuss goals with staff Target (90%) \n100% 90% 80% 70% 60% 50% 40% 30% 20% 10% 0% \nApril-Sep 2012 \n \n% of individuals receiving ADA services which state they regularly \n \nConsumers \n \ndiscuss goals with staff \n \nNominator Denominator % \n \nApril-Sep 2012 \n \n39 \n \n50 \n \n78.0% \n \nOctober-March 2012/2013 \n \nN/A \n \nN/A \n \nN/A \n \n69 \n \n Appendix F \nCommunity Developmental Disabilities Dashboard \nData includes community consumers as well as the \"target population\". 
Data is reported for State Fiscal Year 2012 (July 1, 2011 through June 30, 2012) \nNumber of Individuals Transitioned from a State Hospital to the Community \n \n% of DD Transition Goal that was Met FY12 (Cumulative) \nJ July 1, 2011 through September 30, 2011 July 1, 2011 through December 31, 2011 July 1, 2011 through March 31, 2012 J July1, 2011 through June 30, 2012 \n \nConsumers \n \nNominator \n \nDenominator \n \n% \n \n29 \n \n150 \n \n19.4% \n \n53 \n \n150 \n \n35.4% \n \n102 \n \n150 \n \n68% \n \n165 \n \n150 \n \n100.1% \n \n70 \n \n Appendix F \n \nCommunity Developmental Disabilities Dashboard \n \nNumber of Individuals already residing in the community newly enrolled in Waiver Services \n \n% of DD Community Waiver Goal that was \n \nConsumers \n \nMet FY12 (Cumulative) \n \nNominator Denominator % \n \nJuly 1, 2011 through September 30, 2011 100 \n \n100 \n \n100% \n \nJuly 1, 2011 through December 31, 2011 200 \n \n100 \n \n200% \n \nJuly 1, 2011 through March 31, 2012 \n \n490 \n \n100 \n \n490% \n \nJuly1, 2011 through June 30, 2012 \n \n625 \n \n100 \n \n625% \n \n71 \n \n Appendix F \n \nCommunity Developmental Disabilities Dashboard \n \nNumber of Families/Individuals Receiving Family Support Services \n \nDD Family Support Goal that was Met FY12 \n(Cumulative) July 1, 2011 through September 30, 2011 \nJuly 1, 2011 through December 31, 2011 \nJuly 1, 2011 through March 31, 2012 \nJuly1, 2011 through June 30, 2012 \n \nConsumers \n \nNominator \n \nDenominator \n \n2136 \n \n450 \n \n2617 \n \n450 \n \n2935 \n \n450 \n \n3287 \n \n450 \n \n72 \n \n Appendix F \n \nCommunity Developmental Disabilities Dashboard \n \nNumber of DD Mobile Crisis Team Dispatches \n \n% of DD Mobile Crisis Team Dispatch Goal \n \nthat was Met FY12 \n \n(Cumulative) \n \nNominator \n \nConsumers Denominator \n \nJuly 1, 2011 through September 30, 2011 \n \n192 \n \n300 \n \nJuly 1, 2011 through December 31, 2011 \n \n388 \n \n300 \n \nJuly 1, 2011 through March 31, 
2012 \n \n598 \n \n300 \n \nJuly1, 2011 through June 30, 2012 \n \n825 \n \n300 \n \n% 64% 100.0% 199.3% 275% \n \n73 \n \n Appendix F \n \nIndividual Support Plans \n \nThe Individual Support Plan (ISP) Quality Assurance (QA) Checklist is used in both Person Center Reviews (PCR) and Quality Enhancement Provider Reviews (QEPR). The ISP QA Checklist was developed by the Division of Developmental Disabilities to assess support plans. When completing the checklist, Delmarva Quality Improvement Consultants (QICs) determine the extent to which support plans are written to help individuals maintain a life in their communities, as they indicate. An overall rating is given to each support plan reviewed by Delmarva Quality Improvement Consultants, based upon how well the support plan is written to provide a meaningful life for the individual receiving services. An ISP can be written to support a Service Life, a Good but Paid Life, or a Community Life. Criteria used on this rating are based on definitions from the Good-to-Great (G2G)/Person-Centered Organizations. \n1. Service Life: The ISP supports a life with basic paid services and paid supports. The person's needs that are \"important for\" the person are addressed, such as health and safety. However, there is not an organized effort to support a person in obtaining other expressed desires that are \"important to\" the person, such as getting a driver's license, having a home, or acting in a play. The individual is not connected to the community and has not developed social roles, but expresses a desire to do so. \n2. Good but Paid Life: The ISP supports a life with connections to various supports and services (paid and non-paid). Expressed goals that are \"important to\" the person are present, indicating the person is obtaining goals and desires beyond basic health and safety needs. The person may go out into the community but with only limited integration into community activities. 
For example, the person may go to church or participate in Special Olympics. However, real community connections are lacking and the person indicates he or she wants to achieve more. \n3. Community Life: The ISP supports a life with the desired level of integration in the community and in various settings preferred by the person. The person has friends and support beyond providers and family members. The person has developed social roles that are meaningful to that person, such as belonging to a Red Hat club or a book club or having employment in a competitive rather than segregated environment. Rather than just going to church the person may be an usher at the church or sing in the choir. Relationships developed in the community are reciprocal. The ISP is written with goals that help support people in moving toward a Community Life, as the person chooses. \n \n74 \n \n The following tables and graphs show the distribution of the overall ratings by contract years. 75 \n \n "}],"pages":{"current_page":1,"next_page":null,"prev_page":null,"total_pages":1,"limit_value":10,"offset_value":0,"total_count":4,"first_page?":true,"last_page?":true},"facets":[{"name":"type_facet","items":[{"value":"Text","hits":4}],"options":{"sort":"count","limit":16,"offset":0,"prefix":null}},{"name":"creator_facet","items":[{"value":"Georgia Collaborative ASO","hits":2},{"value":"Georgia. Department of Behavioral Health and Developmental Disabilities. Office of Quality Management","hits":1},{"value":"Georgia. 
Department of Behavorial Health and Developmental Disabilities","hits":1}],"options":{"sort":"count","limit":11,"offset":0,"prefix":null}},{"name":"subject_facet","items":[{"value":"Annual reports","hits":2},{"value":"Developmentally disabled--Services for--Evaluation","hits":2},{"value":"Developmentally disabled--Services for--Georgia--Evaluation--Periodicals","hits":2},{"value":"Evaluation","hits":2},{"value":"Georgia","hits":2},{"value":"Georgia Collaborative ASO--Evaluation--Periodicals","hits":2},{"value":"Mental health services--Evaluation","hits":2},{"value":"Mental health services--Georgia--Evaluation--Periodicals","hits":2},{"value":"Periodicals","hits":2},{"value":"Developmental disabilities--United States--Georgia","hits":1},{"value":"Developmentally disabled Services for--Georgia--Evaluation","hits":1}],"options":{"sort":"count","limit":11,"offset":0,"prefix":null}},{"name":"location_facet","items":[{"value":"United States, Georgia, 32.75042, -83.50018","hits":4}],"options":{"sort":"count","limit":11,"offset":0,"prefix":null}},{"name":"year_facet","items":[{"value":"2012","hits":1},{"value":"2016","hits":1},{"value":"2017","hits":1},{"value":"2018","hits":1}],"options":{"sort":"count","limit":100,"offset":0,"prefix":null},"min":"2012","max":"2018","count":4,"missing":0},{"name":"medium_facet","items":[{"value":"annual reports","hits":2},{"value":"reports","hits":2},{"value":"state government records","hits":1}],"options":{"sort":"count","limit":11,"offset":0,"prefix":null}},{"name":"fulltext_present_b","items":[{"value":"true","hits":4}],"options":{"sort":"count","limit":100,"offset":0,"prefix":null}},{"name":"rights_facet","items":[{"value":"http://rightsstatements.org/vocab/InC/1.0/","hits":4}],"options":{"sort":"count","limit":11,"offset":0,"prefix":null}},{"name":"collection_titles_sms","items":[{"value":"Georgia Government 
Publications","hits":4}],"options":{"sort":"count","limit":11,"offset":0,"prefix":null}},{"name":"serial_titles_sms","items":[{"value":"Georgia Collaborative ASO ... quality management annual report","hits":4},{"value":"Georgia Collaborative Administrative Services Organization ... quality management annual report","hits":4},{"value":"Quality management annual report.","hits":4}],"options":{"sort":"count","limit":11,"offset":0,"prefix":null}},{"name":"provenance_facet","items":[{"value":"University of Georgia. Map and Government Information Library","hits":4}],"options":{"sort":"count","limit":11,"offset":0,"prefix":null}},{"name":"call_numbers_sms","items":[{"value":"B400 .A15","hits":4},{"value":"B400 .A15 2012","hits":1},{"value":"B400 .A15 2016","hits":1},{"value":"B400 .A15 2017","hits":1},{"value":"B400 .A15 2018","hits":1}],"options":{"sort":"count","limit":100,"offset":0,"prefix":null}},{"name":"class_name","items":[{"value":"Item","hits":4}],"options":{"sort":"count","limit":100,"offset":0,"prefix":null}},{"name":"geojson","items":[{"value":"{\"type\":\"Feature\",\"geometry\":{\"type\":\"Point\",\"coordinates\":[-83.50018, 32.75042]},\"properties\":{\"placename\":\"United States, Georgia\"}}","hits":4}],"options":{"sort":"index","limit":-2,"offset":0,"prefix":null}},{"name":"placename","items":[{"value":"United States, Georgia","hits":4}],"options":{"sort":"count","limit":100,"offset":0,"prefix":null}}]}}