@techreport{aston_process_2023,
  address = {Brighton},
  title = {Process {Tracing} {Innovations} in {Practice}: {Finding} the {Middle} {Path}},
  copyright = {http://creativecommons.org/licenses/by-nc/4.0/},
  shorttitle = {Process {Tracing} {Innovations} in {Practice}},
  url = {https://opendocs.ids.ac.uk/opendocs/handle/20.500.12413/17913},
  abstract = {Evaluation practitioners in the international development sector have given considerable attention in recent years to process tracing as a method for evaluating impact, including discussion of how to assess the relative importance of causal factors. Despite the increasing interest, there is a relative dearth of examples of practical learning and evidence of applying process tracing in practice. This CDI Practice Paper draws on comparative learning from applying three different types of process tracing in international development initiatives. It argues in favour of a ‘middle path’ of applying evidence tests and rubrics to structure evaluative judgements rather than formal Bayesian updating or looser forms of process tracing. It also calls attention to the potential added value of taking a participatory approach, offering practical recommendations for how to do this effectively.},
  language = {en},
  number = {25},
  urldate = {2023-03-28},
  institution = {Institute of Development Studies},
  author = {Aston, Thomas and Wadeson, Alix},
  month = mar,
  year = {2023},
  note = {Publisher: Institute of Development Studies},
}

@techreport{gokhale_adaptive_2023,
  address = {Boston},
  title = {Adaptive {Evaluation}: {A} {Complexity}-based approach to {Systematic} {Learning} for {Innovation} and {Scaling} in {Development}},
  abstract = {Nearly all challenges in international development tend to be complex because they depend on constantly evolving human behaviour, systems, and contexts, involving multiple actors, entities, and processes. As a result, both the discovery and scaling of innovations to address challenges in development often involve changes in system behaviour or even system-level transformation. This is rarely a linear process over time and can result in unexpected outcomes. Existing evaluation techniques commonly used in international development, including Randomized Control Trials (RCT) and quasi-experimental methods, are good at assessing specific effects of interventions but are not designed for the change processes inherent to innovation and scaling within a system. There is a need to reconstruct how we use existing measurement tools, techniques, and methodologies so that they capture the complexity of the environment in which an intervention or change occurs. We introduce Adaptive Evaluation, designed to learn at various levels of complexity while supporting the transformation needed to foster sustainable change. An Adaptive Evaluation uses three main approaches to work with complex questions—systems diagnosis, theory-based assessment of change processes, and iterative designs. An Adaptive Evaluation typically builds hypotheses from field-based interactions, emphasizes learning over testing, advocates open-mindedness with techniques, and appreciates the value of dialogue and participation in navigating complex processes. It can use RCT or similar techniques to analyse specific processes within a system or a development cycle, but these are embedded in a broader approach to assessment and interpretation. It is designed to be flexible and adjust to shifting contexts.
Finally, an Adaptive Evaluation can be applied at any stage in a complex intervention's lifecycle, from the interpretation of the system and change processes to rapid experimentation, prototyping, and testing of select interventions, and then adaptation to different settings for impact at scale. This paper provides the theoretical basis for an Adaptive Evaluation—the main approaches, core ideology, process, and applications.},
  language = {en},
  number = {428},
  institution = {Center for International Development, Harvard University},
  author = {Gokhale, Siddhant and Walton, Michael},
  month = mar,
  year = {2023},
}

@techreport{lynn_how_2022,
  address = {Seattle},
  title = {How to do {Process} {Tracing}: {A} {Method} for {Testing} “{How} {Change} {Happened}” in {Complex} and {Dynamic} {Settings}},
  url = {https://www.orsimpact.com/directory/how-to-do-process-tracing.htm},
  abstract = {Process tracing is a causal methodology that can help people understand how a particular large-scale change actually happened within a complex dynamic environment. Much of the existing literature provides important information about the method; we wrote this brief to help more people operationalize the concepts and learn about practical steps for using this method more easily, with quality, and toward a more equitable world. This piece was written based on our experiences implementing process tracing, when our experience showed that existing materials on the method had a lot more conceptual than practical information. We’ve approached this as people with some successful (and some unsuccessful) experience with the method itself, alongside deep experience in evaluating initiatives and strategies in complex and dynamic settings. We focus not on the Bayesian side of process tracing but rather on how this can be implemented in a way that’s more participatory and lifts up the experiences and wisdom of those closest to the work and the problems being tackled. We hope this contributes to and helps make more approachable the important work of political scientists and methodologists upon which this work sits.},
  urldate = {2024-02-19},
  institution = {ORS Impact},
  author = {Lynn, Jewlya and Stachowiak, Sarah and Beyers, Jennifer},
  month = oct,
  year = {2022},
}

@article{aston_monitoring_2022,
  title = {Monitoring and evaluation for thinking and working politically},
  volume = {28},
  issn = {1356-3890},
  url = {https://doi.org/10.1177/13563890211053028},
  doi = {10.1177/13563890211053028},
  abstract = {This article explores the challenges of monitoring and evaluating politically informed and adaptive programmes in the international development field. We assess the strengths and weaknesses of some specific evaluation methodologies which have been suggested as particularly appropriate for these kinds of programmes based on scholarly literature and the practical experience of the authors in using them. We suggest that those methods which assume generative causality are particularly well suited to the task.
We also conclude that factoring in the politics of uncertainty and evidence generation and use is particularly important in order to recognize and value diverse experiential knowledge, integrate understandings of the local context, accommodate adaptation and realistically grapple with the power relations which are inherent in evaluation processes.},
  language = {en},
  number = {1},
  urldate = {2022-03-21},
  journal = {Evaluation},
  author = {Aston, Thomas and Roche, Chris and Schaaf, Marta and Cant, Sue},
  month = jan,
  year = {2022},
  note = {Publisher: SAGE Publications Ltd},
  pages = {36--57},
}

@article{krueger_theory_2022,
  title = {Theory amidst complexity – using process tracing in ex-post evaluations},
  volume = {2022},
  issn = {1534-875X},
  url = {http://onlinelibrary.wiley.com/doi/abs/10.1002/ev.20524},
  doi = {10.1002/ev.20524},
  abstract = {Evaluators who take a complexity-aware approach must consider tradeoffs related to theoretical parsimony, falsifiability, and measurement validity. These tradeoffs may be particularly pronounced with ex-post evaluation designs in which program theory development and monitoring frameworks are often completed before the evaluator is engaged. In this chapter, we argue that theory-based evaluation (TBE) approaches can address unique ex-post evaluation challenges that complexity-aware evaluation (CAE) alone cannot, and that these two sets of approaches are complementary. We will outline strategies that evaluators may use to conduct rigorous ex-post evaluations of democracy, human rights, and governance (DRG) interventions that merge CAE's inductive approaches with a theory-testing structure. We will illustrate these strategies with two case studies of ex-post evaluation using process tracing (PT).},
  language = {en},
  number = {176},
  urldate = {2023-04-13},
  journal = {New Directions for Evaluation},
  author = {Krueger, Kate and Wright, Molly},
  year = {2022},
  note = {\_eprint: https://onlinelibrary.wiley.com/doi/pdf/10.1002/ev.20524},
  pages = {119--128},
}

@misc{aston_real_2020,
  title = {“{Real}” process tracing: part 1 — context},
  shorttitle = {“{Real}” process tracing},
  url = {https://thomasmtaston.medium.com/real-process-tracing-part-1-context-6a52777a6a98},
  abstract = {When asserting the value of theory-based methods, you often hear words like “black boxes” and “causal mechanisms.” These are commonly…},
  language = {en},
  urldate = {2021-02-18},
  journal = {Medium},
  author = {Aston, Thomas},
  month = dec,
  year = {2020},
}

@unpublished{wadeson_process_2020,
  title = {Process {Tracing} as a {Practical} {Evaluation} {Method}: {Comparative} {Learning} from {Six} {Evaluations}},
  url = {https://mande.co.uk/wp-content/uploads/2020/03/Process-Tracing-as-a-Practical-Evaluation-Method_23March-Final-1.pdf},
  language = {en},
  author = {Wadeson, Alix and Monzani, Bernardo and Aston, Tom},
  month = mar,
  year = {2020},
}

@phdthesis{janus_examining_2020,
  address = {Manchester},
  type = {{PhD} {Thesis}},
  title = {Examining the results and adaptation ideas in foreign aid},
  url = {https://www.research.manchester.ac.uk/portal/en/theses/examining-the-results-and-adaptation-ideas-in-foreign-aid(33eb1913-0918-4147-8080-f36f3f444c18).html},
  abstract = {This thesis applies ideational and institutional theories to analyse how two specific ideas, results and adaptation, have changed the theory and practice of development cooperation. The thesis addresses the question of why the results and adaptation ideas are often treated as binaries and how this debate has evolved historically.
In a first theoretical paper, the evolution of results and adaptation is conceptualised as a combination of institutional layering and diffusion within development organisations. The second theoretical paper applies ideational theory, in particular the coalition magnet framework, to China as a donor country. The empirical papers apply ideational and institutional theories to study aid projects funded by the World Bank and China in the Rwandan agriculture sector. The third paper analyses through which mechanism, results-based principal-agent relationships or problem-driven iterative adaptation, the World Bank’s Program for Results in the agriculture sector in Rwanda has led to increased agricultural productivity. The paper combines causal process tracing and contribution analysis to investigate two underlying theories of change of the Program for Results. The fourth paper applies the same framework and methodology to the Chinese Agricultural Technology Demonstration Center in Rwanda. The fifth paper compares both projects, the World Bank project and the Chinese project. The thesis finds that the ideas of results and adaptation are often presented as mutually exclusive mainly at the general level of public philosophies or paradigms, but show overlap and potential for integration on the level of framing policy problems and policy solutions. The thesis also demonstrates that there is unexplored potential for convergence between China and Development Assistance Committee donors around “coalition magnet” ideas. The empirical part of the thesis reveals how results-based and adaptive causal mechanisms co-exist within given aid interventions by the World Bank and China, how these interact and how they ultimately contribute to achieving development outcomes. The key finding is that the broader political context of the Rwandan agricultural sector is the main factor for determining development outcomes, which neither the World Bank project nor the Chinese project takes into account. The comparison of the World Bank’s and China’s interventions finds that donor organisations need to address how results-based ideas in combination with adaptive development ideas can be better tailored to fit the specific context of the Rwandan agriculture sector.},
  language = {en},
  urldate = {2022-03-30},
  school = {University of Manchester},
  author = {Janus, Heiner},
  year = {2020},
}

@article{raimondo_getting_2020,
  title = {Getting {Practical} {With} {Causal} {Mechanisms}: {The} application of {Process}-{Tracing} {Under} {Real}-{World} {Evaluation} {Constraints}},
  volume = {2020},
  issn = {1534-875X},
  shorttitle = {Getting {Practical} {With} {Causal} {Mechanisms}},
  url = {https://onlinelibrary.wiley.com/doi/abs/10.1002/ev.20430},
  doi = {10.1002/ev.20430},
  abstract = {Over the past decade, the field of development evaluation has seen a renewed interest in methodological approaches that can answer compelling causal questions about what works, for whom, and why. Development evaluators have notably started to experiment with Bayesian Process Tracing to unpack, test, and enhance their comprehension of causal mechanisms triggered by development interventions. This chapter conveys one such experience of applying Bayesian Process Tracing to the study of citizen engagement interventions within a conditional cash transfer program under real-world evaluation conditions.
The chapter builds on this experience to discuss the benefits, challenges, and potential for the applicability of this approach under real-world evaluation conditions of time, money, and political constraints.},
  language = {en},
  number = {167},
  urldate = {2021-02-18},
  journal = {New Directions for Evaluation},
  author = {Raimondo, Estelle},
  year = {2020},
  pages = {45--58},
}

@article{schmidt_special_2020,
  title = {Special {Issue}: {Causal} {Mechanisms} in {Program} {Evaluation}},
  volume = {2020},
  issn = {1534-875X},
  url = {https://onlinelibrary.wiley.com/doi/abs/10.1002/ev.20357},
  doi = {10.1002/ev.20357},
  language = {en},
  number = {167},
  urldate = {2022-01-28},
  journal = {New Directions for Evaluation},
  author = {Schmidt, Johannes},
  year = {2020},
  note = {\_eprint: https://onlinelibrary.wiley.com/doi/pdf/10.1002/ev.20357},
  pages = {1--6},
}

@article{befani_clearing_2016,
  title = {Clearing the fog: new tools for improving the credibility of impact claims},
  shorttitle = {Clearing the fog},
  url = {https://pubs.iied.org/17359IIED/},
  abstract = {Development actors facing pressure to provide more rigorous assessments of their impact on policy and practice need new methods to deliver them. There is now a broad consensus that the traditional counterfactual analysis leading to the assessment of the net effect of an intervention is incapable of capturing the complexity of factors at play in any particular policy change. We suggest that evaluations focus instead on establishing whether a clearly-defined process of change has taken place, and improve the validity and credibility of qualitative impact statements. IIED research in Uganda shows that the methods of process tracing and Bayesian updating facilitate a dialogue between theory and evidence that allows us to assess our degree of confidence in ‘contribution claims’ in a transparent and replicable way.},
  urldate = {2019-06-04},
  journal = {IIED Briefing Papers},
  author = {Befani, Barbara and D'Errico, Stefano and Booker, Francesca and Giuliani, Alessandra},
  month = apr,
  year = {2016},
}

@misc{punton_straws---wind_2015,
  type = {Centre for {Development} {Impact} {Practice} {Paper}},
  title = {Straws-in-the-wind, {Hoops} and {Smoking} {Guns}: {What} can {Process} {Tracing} {Offer} to {Impact} {Evaluation}?},
  shorttitle = {Straws-in-the-wind, {Hoops} and {Smoking} {Guns}},
  url = {https://opendocs.ids.ac.uk/opendocs/handle/20.500.12413/5997},
  abstract = {This CDI Practice Paper by Melanie Punton and Katharina Welle explains the methodological and theoretical foundations of process tracing, and discusses its potential application in international development impact evaluations. It draws on two early applications of process tracing for assessing impact in international development interventions: Oxfam Great Britain (GB)’s contribution to advancing universal health care in Ghana, and the impact of the Hunger and Nutrition Commitment Index (HANCI) on policy change in Tanzania. In a companion to this paper, Practice Paper 10 Annex describes the main steps in applying process tracing and provides some examples of how these steps might be applied in practice.},
  language = {en},
  urldate = {2019-06-21},
  publisher = {Centre for Development Impact},
  author = {Punton, Melanie and Welle, Katharina},
  month = apr,
  year = {2015},
  note = {Pages: 8},
}