@comment{ Duplicate citation keys disambiguated with a/b suffixes:
  methods_lab_report_2015  -> methods_lab_report_2015a / methods_lab_report_2015b
  methods_lab_sample_2015  -> methods_lab_sample_2015a / methods_lab_sample_2015b
  Update any documents citing the old keys accordingly. }

@misc{methods_lab_sample_2018,
  author   = {{Methods Lab}},
  title    = {Sample criteria to select case studies for evaluation},
  url      = {https://www.odi.org/sites/odi.org.uk/files/odi-assets/publications-opinion-files/10043.pdf},
  abstract = {Time and budget constraints often mean that organisations are unable to evaluate all of their programmes, and large programmes, operating in multiple locations, are unable to evaluate all project sites. This tool introduces two sets of criteria to support evaluators and programme managers to select case studies or programmes for evaluation: i) information about how relevant or feasible evaluation is for individual programmes, and ii) across the overall portfolio, strategic thinking around what types of cases are most important to understand. This tool was developed by Anne Buffardi, Irene Guijt, Simon Hearn and Tiina Pasanen for use in The Methods Lab projects.},
  urldate  = {2018-11-10},
  month    = nov,
  year     = {2018},
}

@misc{prieto_martin_interview_2017,
  author    = {Prieto Martin, Pedro and Faith, Becky},
  title     = {Interview protocol: {Most} significant turning points},
  url       = {https://www.academia.edu/34704208},
  abstract  = {This interview protocol was used for a research project on adaptiveness in technology for governance initiatives in Kenya. For more information, please read the research report at: Prieto Martin, P.; Hernandez, K.; Faith, B. and Ramalingam, B. (2017) Doing Digital Development Differently: Lessons in adaptive management from technology for governance initiatives in Kenya, MAVC Research Report, Brighton: Institute of Development Studies, ids.ac.uk/project/making-all-voices-count},
  publisher = {Institute of Development Studies},
  urldate   = {2017-09-28},
  month     = oct,
  year      = {2017},
  keywords  = {IMPORTANT},
}

@misc{usaid_cla_2016,
  author  = {{USAID}},
  title   = {{CLA} {Maturity} {Tool}: {Example} {Spectrum} {Cards}},
  url     = {https://usaidlearninglab.org/sites/default/files/resource/files/subcomponent_card_examples_11x17_20171212.pdf},
  urldate = {2018-03-09},
  month   = oct,
  year    = {2016},
}

@misc{mercy_corps_adaptive_2016,
  author    = {{Mercy Corps}},
  title     = {Adaptive {Management} {Self}-assessment tool},
  url       = {https://www.mercycorps.org/sites/default/files/2020-05/ADAPT_Self_Assessment.pdf},
  abstract  = {The Adaptive Management self-assessment tool has been designed to help teams assess the extent to which they have a supportive environment for adaptive management within their country program. The self-assessment tool helps you think about five different areas that have been identified as important for supporting adaptive management: 1. Culture \& leadership 2. Dynamic teams 3. Appropriate analysis 4. Responsive implementation \& operations 5. Enabling environment (for example donor funding and relationships)},
  publisher = {Mercy Corps},
  year      = {2016},
}

@misc{office_of_the_prime_minister_minister_2015,
  author   = {{Office of the Prime Minister}},
  title    = {Minister of {International} {Development} and {La} {Francophonie} {Mandate} {Letter} ({November} 12, 2015)},
  url      = {https://pm.gc.ca/eng/minister-international-development-and-la-francophonie-mandate-letter},
  abstract = {Dear Minister:I am honoured that you have agreed to serve Canadians as Minister of International Development and La Francophonie. You will be part of a strong team of ministers led by the Minister of Foreign Affairs.},
  language = {en},
  urldate  = {2019-05-30},
  month    = nov,
  year     = {2015},
}

@misc{methods_lab_guidance_2015,
  author   = {{Methods Lab}},
  title    = {Guidance on tasks and deliverables for different evaluation phases},
  url      = {https://www.odi.org/sites/odi.org.uk/files/resource-documents/10646.pdf},
  abstract = {This tool describes the five key phases of evaluation, from planning and design, to implementation and communication of results. It provides a list of the main tasks and deliverables for each phase, intended for use by anyone managing an impact evaluation. This tool was developed by Irene Guijt, Simon Hearn, Tiina Pasanen and Patricia Rogers for use in Methods Lab projects. It follows to some extent the BetterEvaluation Rainbow Framework.},
  urldate  = {2018-11-10},
  month    = nov,
  year     = {2015},
}

@misc{methods_lab_guiding_2015,
  author   = {{Methods Lab}},
  title    = {Guiding questions to help narrow the scope of an evaluation},
  url      = {https://www.odi.org/sites/odi.org.uk/files/odi-assets/publications-opinion-files/10038.pdf},
  abstract = {Time and budget constraints can mean that programmes are not able to assess all possible evaluation questions; this is especially true for multi-component or multi-site programmes operating in challenging environments. This tool identifies areas of enquiry to help programmes prioritise the number of questions and measurement indicators used. This tool was developed by Anne Buffardi for use in in Methods Lab projects.},
  urldate  = {2018-11-10},
  month    = nov,
  year     = {2015},
}

@misc{methods_lab_report_2015a,
  author   = {{Methods Lab}},
  title    = {Report template on integrating impact into an existing monitoring and evaluation system},
  url      = {https://www.odi.org/sites/odi.org.uk/files/odi-assets/publications-opinion-files/10037.pdf},
  abstract = {Many development programme staff will commission an impact evaluation towards the end of a project or programme, only to find that the monitoring system did not provide adequate data about implementation, context, baselines or interim results. This tool provides a template outline for a report making recommendations on how to integrate a focus on impact into a programme’s existing monitoring and evaluation system, as the programme moves into a new phase. This template was developed by Anne Buffardi and Tiina Pasanen for use in Methods Lab projects.},
  urldate  = {2018-11-10},
  month    = nov,
  year     = {2015},
}

@misc{methods_lab_report_2015b,
  author   = {{Methods Lab}},
  title    = {Report templates for evaluability assessment},
  url      = {https://www.odi.org/sites/odi.org.uk/files/odi-assets/publications-opinion-files/10036.pdf},
  abstract = {An evaluability assessment aims to assess the extent to which, and how best, a project can be evaluated in a reliable and credible fashion. These templates are intended to help anyone conducting an evaluability assessment to structure the final report. This tool was developed by Anne Buffardi and Bronwen McDonald for use in Methods Lab projects. It accompanies The Methods Lab publication ‘Evaluability assessment for impact evaluation: guidance, checklists and decision support’.},
  urldate  = {2018-11-10},
  month    = nov,
  year     = {2015},
}

@misc{methods_lab_sample_2015a,
  author   = {{Methods Lab}},
  title    = {Sample agendas for an evaluability assessment stakeholder workshop},
  url      = {https://www.odi.org/sites/odi.org.uk/files/odi-assets/publications-opinion-files/10042.pdf},
  abstract = {An evaluability assessment aims to assess the extent to which, and how best, an intervention can be evaluated in a reliable and credible fashion. These sample agendas are intended for people convening key stakeholders (such as project implementation staff and managers, donors and government officials) to discuss the purpose and scope of an impact evaluation and to identify key evaluation questions. This tool was developed by Bronwen McDonald, Anne Buffardi and Irene Guijt for use in Methods Lab projects. It accompanies the Methods Lab publication ‘Evaluability assessment for impact evaluation: guidance, checklists and decision support’.},
  urldate  = {2018-10-11},
  month    = nov,
  year     = {2015},
}

@misc{methods_lab_sample_2015b,
  author  = {{Methods Lab}},
  title   = {Sample interview questions for evaluability assessment},
  url     = {https://www.odi.org/sites/odi.org.uk/files/odi-assets/publications-opinion-files/10032.pdf},
  urldate = {2018-11-10},
  month   = nov,
  year    = {2015},
}

@misc{methods_lab_template_2015,
  author  = {{Methods Lab}},
  title   = {Template concept note for an impact evaluation},
  url     = {https://www.odi.org/sites/odi.org.uk/files/odi-assets/publications-opinion-files/10040.pdf},
  urldate = {2018-11-10},
  month   = nov,
  year    = {2015},
}

@misc{buffardi_10_2015,
  author    = {Buffardi, Anne and Hearn, Simon and Pasanen, Tiina and Price, Clare and Ball, Louise},
  title     = {10 {Things} to {Know} {About} {Evaluation}},
  url       = {https://www.odi.org/sites/odi.org.uk/files/odi-assets/publications-opinion-files/9685.pdf},
  abstract  = {Evaluation is essential to good development. But there are still many myths and misconceptions about what it is - and how it should be used. ODI's Research and Policy in Development Programme (RAPID) has many years' experience supporting evaluation in complex development contexts. In support of the International Year of Evaluation 2015, we've put together our essential 'things to know' about evaluation in 10 infographics. Available in English and French.},
  publisher = {RAPID},
  urldate   = {2018-11-10},
  month     = jun,
  year      = {2015},
}

@misc{halloran_navigating_2015,
  author   = {Halloran, Brendan},
  title    = {Navigating the {Evidence} on {Transparency}, {Participation} and {Accountability}: {What} {Insights} {Have} {Emerged}? {What} {Gaps} {Remain}? - {Terms} of reference for the {Consultant} {Author}(s)},
  abstract = {Example of Terms of Reference for a Report on TAP.},
  year     = {2015},
}

@misc{lim_reflections_2010,
  author    = {Lim, Yvonne and Mizumoto, Ann},
  title     = {Reflections on the {Utilization}-{Focused} {Evaluation} ({UFE}) {Process}},
  url       = {https://evaluationinpractice.files.wordpress.com/2010/12/sirca_conclave-2010-presentation-3_yl.pdf},
  abstract  = {This presentation from the Strengthening ICTD Research Capacity in Asia (SIRCA) provides an overview of how UFE was used in their SIRCA programme. It was presented at the Evaluation Conclave 2010, New Delhi, India The key objectives of the program are to: Enhance research capacity in Asia through rigorous academic research Create a space for dialogue on ICT4D social science research issues in Asia Create linkages through a mentorship program Disseminate findings in publications and conferences Contents SIRCA Programme SIRCA Key Objectives SIRCA Evaluation UFE Learnings UFE Challenges Evaluation is over…but there’s a lasting outcome...},
  publisher = {Strengthening ICTD Research Capacity in Asia (SIRCA) Programme},
  urldate   = {2018-10-22},
  month     = oct,
  year      = {2010},
}