@techreport{guijt_alps_2004, title = {{ALPS} in action: a review of the shift in {ActionAid} towards a new {Accountability}, {Learning} and {Planning} {System}}, url = {http://www.participatorymethods.org/resource/alps-action-review-shift-actionaid-towards-new-accountability-learning-and-planning-system}, urldate = {2017-07-17}, author = {Guijt, Irene}, year = {2004}, } @techreport{guijt_participatory_2014, address = {Florence}, title = {Participatory {Approaches}}, url = {https://www.unicef-irc.org/publications/pdf/brief_5_participatoryapproaches_eng.pdf}, abstract = {This guide, written by Irene Guijt for UNICEF, looks at the use of participatory approaches in impact evaluation. Using participatory approaches means involving stakeholders, particularly those affected by the intervention, in the evaluation process. This includes involvement in the design, data collection, analysis, reporting, and management of the study. Excerpt: "By asking the question, ‘Who should be involved, why and how?’ for each step of an impact evaluation, an appropriate and context-specific participatory approach can be developed. Managers of UNICEF evaluations must recognize that being clear about the purpose of participatory approaches in an impact evaluation is an essential first step towards managing expectations and guiding implementation. Is the purpose to ensure that the voices of those whose lives should have been improved by the programme or policy are central to the findings? Is it to ensure a relevant evaluation focus? Is it to hear people’s own versions of change rather than obtain an external evaluator’s set of indicators? Is it to build ownership of the UNICEF programme? These, and other considerations, would lead to different forms of participation by different combinations of stakeholders in the impact evaluation."}, number = {5}, urldate = {2018-10-18}, institution = {UNICEF}, author = {Guijt, Irene}, year = {2014}, pages = {23}, } @techreport{visser_impact_2013, address = {Wageningen}, title = {Impact {Evaluation}: {Taking} stock and looking ahead - {Conference} report}, shorttitle = {Impact {Evaluation}}, url = {https://www.academia.edu/111110248/Impact_Evaluation_Taking_stock_and_looking_ahead}, abstract = {This report summarises the presentations and discussions of the Conference ‘Impact evaluation. Taking stock and looking ahead’, which took place in Wageningen on March 25 and 26, 2013. The Conference was organised and funded by the Centre for Development Innovation.}, language = {en}, urldate = {2023-12-11}, institution = {Centre for Development Innovation}, author = {Visser, Irene and Guijt, Irene and Kusters, Cecile}, month = jan, year = {2013}, } @techreport{guijt_inspiring_2020, address = {London}, title = {Inspiring {Radically} {Better} {Futures} - {Evidence} and {Hope} for {Impact} at {Scale} in a {Time} of {Crisis}}, abstract = {The world faces converging crises of health, climate, gender and racial injustice and extreme economic inequality. The calls are mounting to ‘build back better’ to create more inclusive, caring and environmentally sustainable futures. But what evidence exists that this is possible? The Inspiring Better Futures case study series investigates whether radical change at scale is possible and how it was achieved. This paper synthesises 18 cases which show that people are already successfully building better futures, benefitting millions of people, even against the odds in some of the world’s toughest contexts in lower-income countries.
Together they offer hope that transformative change and radically better futures after the pandemic are within reach.}, language = {en}, institution = {Oxfam}, author = {Guijt, Irene and Mayne, Ruth}, month = oct, year = {2020}, pages = {53}, } @techreport{green_adaptive_2019, address = {Brighton}, title = {Adaptive {Programming} in {Fragile}, {Conflict} and {Violence}-{Affected} {Settings}: {What} {Works} and {Under} {What} {Conditions}? {The} {Case} of {Institutions} for {Inclusive} {Development}, {Tanzania}}, url = {https://opendocs.ids.ac.uk/opendocs/handle/123456789/14562}, abstract = {Adaptive Management involves a dynamic interaction between three elements: delivery, programming and governance. This case study focuses on a large DfID governance project, the Institutions for Inclusive Development (I4ID), a five-year initiative in Tanzania. The study forms part of a research project to examine whether and how adaptive approaches can strengthen aid projects promoting empowerment and accountability in fragile, conflict and violence-affected settings (FCVAS). The research examines some of the assertions around the adaptive management approach and explores if and how adaptive approaches, including rapid learning and planning responses (fast feedback loops and agile programming), are particularly relevant and useful for citizen empowerment and government accountability (E\&A) in FCVAS.}, language = {en}, urldate = {2018-08-02}, institution = {Itad, Oxfam and IDS}, author = {Green, Duncan and Guijt, Irene}, month = jul, year = {2019}, keywords = {A4EA, Adaptive Development, Economy, Fishery}, } @book{pretty_trainers_2002, address = {London}, edition = {Reprint}, series = {{IIED} {Participatory} methodology series}, title = {A trainer's guide for participatory learning and action}, isbn = {978-1-899825-00-4}, language = {en}, editor = {Pretty, Jules and Guijt, Irene}, year = {2002}, } @article{chambers_pra_1995, title = {{PRA} five years later - where are we now? / {DRP} - {Cinco} años después, ¿dónde nos encontramos?}, shorttitle = {{PRA} five years later}, url = {https://www.academia.edu/111110280/PRA_five_years_later_where_are_we_now}, abstract = {PRA five years later: where are we now?}, language = {en}, number = {26/27}, urldate = {2023-12-11}, journal = {Forest, Trees and People Newsletter}, author = {Chambers, Robert and Guijt, Irene}, year = {1995}, } @techreport{guijt_participatory_1998, title = {Participatory {Monitoring} \& {Evaluation}: {Learning} from change}, url = {http://www.ids.ac.uk/files/dmfile/PB12.pdf}, abstract = {Development organisations need to know how effective their efforts have been. But who should make these judgements, and on what basis? Usually it is outside experts who take charge. Participatory monitoring and evaluation (PM\&E) is a different approach which involves local people, development agencies, and policy makers deciding together how progress should be measured, and results acted upon. It can reveal valuable lessons and improve accountability.
However, it is a challenging process for all concerned since it encourages people to examine their assumptions about what constitutes progress, and to face up to the contradictions and conflicts that can emerge.}, number = {12}, urldate = {2018-10-19}, institution = {IDS}, author = {Guijt, Irene and Gaventa, John}, year = {1998}, pages = {6}, } @techreport{green_summer_2018, address = {Bologna}, title = {Summer {School} {Course} - {Adaptive} {Management} - {Working} {Effectively} in the {Complexity} of {International} {Development} (weekplan)}, url = {http://www.cid-bo.org/2018/Summer%20school/Adaptive-management_2018.html}, institution = {Oxfam}, author = {Green, Duncan and Guijt, Irene}, year = {2018}, } @techreport{van_hemelrijck_balancing_2016, address = {Brighton}, title = {Balancing {Inclusiveness}, {Rigour} and {Feasibility}: {Insights} from {Participatory} {Impact} {Evaluations} in {Ghana} and {Vietnam}}, url = {https://opendocs.ids.ac.uk/opendocs/bitstream/handle/123456789/8888/CDI_PracticePaper_14.pdf?sequence=1}, abstract = {This paper by Adinda Van Hemelrijck and Irene Guijt explores how impact evaluation can live up to standards broader than statistical rigour in ways that address challenges of complexity and enable stakeholders to engage meaningfully. A Participatory Impact Assessment and Learning Approach (PIALA) was piloted to assess and debate the impacts on rural poverty of two government programmes in Vietnam and Ghana funded by the International Fund for Agricultural Development (IFAD). We discuss the trade-offs between rigour, inclusiveness and feasibility encountered in these two pilots. Trade-offs occur in every impact evaluation aiming for more than reductionist rigour, but the pilots suggest that they can be reduced by building sufficient research and learning capacity.}, urldate = {2019-03-12}, institution = {CDI}, author = {van Hemelrijck, Adinda and Guijt, Irene}, month = feb, year = {2016}, } @incollection{guijt_accountability_2010, address = {London}, title = {Accountability and {Learning}: {Exploding} the {Myth} of {Incompatibility} between {Accountability} and {Learning}}, isbn = {978-1-84977-542-7}, url = {https://www.taylorfrancis.com/chapters/edit/10.4324/9781849775427-36/accountability-learning-exploding-myth-incompatibility-accountability-learning-irene-guijt}, abstract = {When accountability is understood as reporting on pre-defined deliverables, it is often considered to be irreconcilable with learning. This conventional wisdom inhibits an appreciation of their connection. In this chapter, Irene Guijt exposes the flaws and traps in reasoning that keep accountability and learning apart. She provides practitioners with principles and basic good ideas that open up prospects for accountability and learning to complement each other.}, language = {en}, urldate = {2023-01-24}, booktitle = {{NGO} {Management} - {The} {Earthscan} {Compendium}}, publisher = {Routledge}, author = {Guijt, Irene}, editor = {Fowler, Alan and Malunga, Chiku}, year = {2010}, doi = {10.4324/9781849775427-36}, note = {Publication Title: NGO Management}, pages = {339--352}, } @incollection{kenton_17_2004, series = {{PLA} notes}, title = {17. {Shifting} perceptions, changing practices in {PRA}: from infinite innovation to the quest for quality}, isbn = {978-1-84369-526-4}, url = {https://books.google.co.uk/books?id=-Im4wDpECt0C}, abstract = {In the beginning, there were methods.
For many of us in the circle of enthusiasts of participatory approaches in the early 1990s, maps and models, calendars and Venn diagrams, matrices and rankings and the interactions and insights they produced defined what we did and what we had in common. It was this, too, that made participatory rural appraisal (PRA) – and rapid rural appraisal (RRA) before it – something that was very different from anything we’d known before. PRA bridged barriers that might otherwise have kept a social anthropologist and an irrigation engineer like us apart. And it brought us together with dozens of others, from a constellation of disciplines and professions, who shared our excitement about an approach that seemed to offer much for ‘doing development’ differently. In 1995, we co-edited PLA Notes 24 on Critical reflections on practice, in which we sought to engage practitioners and advocates in debate about the looming crises of quality that were to become so much a feature of PRA practice in the later 1990s. In this paper, we look back over more than a decade of engagement with PRA as ‘critical insiders’. Participatory Learning and Action has, naturally enough, served more as a vehicle for practitioners to share their successes and innovations than their critical reflections. Accordingly, we draw here on sources that go beyond it, including reflections from the Pathways to Participation project (see Cornwall and Pratt, 2003a, in PLA Notes 47, and contributions to Cornwall and Pratt 2003b), from work with gender and participatory development (Welbourn, 1992; Guijt and Kaul Shah, 1998; Cornwall 2000), and from the lively debates that we have had for more than a decade with colleagues the world over. These thoughts are our personal reflections, from standpoints associated with the two institutions – IIED and IDS – that were so much part of early efforts to promote and institutionalise PRA in international development practice. Our account is, therefore, very much a partial one. We offer it here as a means of locating some of the threads that have run through debates about PRA since the first issues of Participatory Learning and Action, and some of the challenges that practitioners of participatory learning and action methodologies continue to face. In it, we reflect on distinct phases in the development of PRA (see Figure 1), during which a series of issues emerged as themes for critical reflection. The phases indicated in the diagram relate generally to the prevailing sentiment and practice. Clearly there are exceptions – there have been critical voices and some were using PRA to address issues of power from day one, just as there is still innovation and excitement in some quarters today.}, booktitle = {Participatory {Learning} and {Action} 50: {Critical} reflections, future directions}, publisher = {IIED}, author = {Cornwall, Andrea and Guijt, Irene}, editor = {Kenton, N.}, year = {2004}, } @techreport{peersman_evaluability_2015, title = {Evaluability assessment for {Impact} {Evaluation}: {Guidance}, checklists and decision support}, url = {https://www.odi.org/sites/odi.org.uk/files/odi-assets/publications-opinion-files/9802.pdf}, abstract = {This guidance note focuses on the utility of, and guidance for, evaluability assessment before undertaking an impact evaluation. The primary audience for this guidance note is evaluators conducting an evaluability assessment for impact evaluation. 
The secondary audience is people commissioning or managing an evaluability assessment for impact evaluation, as well as funders of an impact evaluation. Sections one and two provide an overview of evaluability assessment and how it can be used for impact evaluation. Section three provides guidance for planning to undertake an evaluability assessment for impact evaluation. This is informative for all intended users of the guidance note. Section four includes checklists and decision support for evaluability assessments. The checklist is geared to those conducting the evaluability assessment and can be adapted to suit a particular context or purpose. The decision support provides those conducting an evaluability assessment with evidence-based recommendations for impact evaluation funders and commissioners, about whether, when and how to proceed with the evaluation. Sections five and six provide guidance on what to do after the assessment is concluded, and offer lessons learned from evaluability assessments in practice.}, urldate = {2018-11-10}, institution = {ODI}, author = {Peersman, Greet and Guijt, Irene and Pasanen, Tiina}, month = aug, year = {2015}, } @techreport{van_es_theory_2015, title = {Theory of {Change} {Thinking} in {Practice}}, copyright = {Creative Commons 3.0}, url = {https://www.hivos.org/theory-change-thinking-practice}, abstract = {Want to understand better how your interventions can contribute to change? A Theory of Change (ToC) approach helps in deepening your understanding - and that of your partners - of how you collectively think change happens and what the effect will be of your intervention. Not only does it show what political, social, economic, and/or cultural factors are in play, it also clarifies your assumptions. Once a ToC has been developed, it can be revisited continually in ways that allow for adaptation and checking of the assumptions of your intervention. This user-friendly guide helps you to use a Theory of Change approach. Theories of change are the ideas and beliefs people have – consciously or not – about why and how the world and people change. How people perceive and understand change and the world around them is infused by their underlying beliefs about life, human nature and society. They are deep drivers of people’s behaviour and of the choices they make. Social change processes are complex and characterised by non-linear feedback loops: our own actions interact with those of others and a myriad of influencing factors. This triggers reactions that cannot be foreseen and makes outcomes of change interventions unpredictable. Given these uncertainties, how can we plan strategically and sensibly? How can social change initiatives move forward in emerging change processes in a flexible way, while remaining focused on the goal? In this context of complexity, Hivos has found a theory of change (ToC) approach useful in guiding its strategic thinking and action, as well as its collaborative efforts with others, as it fosters critical questioning of all aspects of change interventions and supports adaptive planning and management in response to diverse and quickly changing contexts. It contributes to the quality and transparency of strategic thinking, and therefore to personal, organisational and social learning. This guide builds on the experiences of Hivos working with a ToC approach.
It is a practical guide for Hivos staff in applying a Theory of Change approach, but is also very useful for others working on social change such as social entrepreneurs and innovators. This guide builds on the work of a ToC Learning Group initiated by Hivos and comprising staff of the Centre for Development Innovation (CDI) of Wageningen University and Research Centre and of experts Iñigo Retolaza Eguren, Isabel Vogel and Irene Guijt. For current thinking and work on the use of Theory of Change thinking in complex change processes, see http://www.theoryofchange.nl}, language = {en}, urldate = {2016-04-20}, institution = {Hivos}, author = {van Es, Marjan and Guijt, Irene and Vogel, Isabel}, month = nov, year = {2015}, } @book{eyben_politics_2015, title = {The {Politics} of {Evidence} and {Results} in {International} {Development}: {Playing} the {Game} to {Change} the {Rules}?}, isbn = {978-1-85339-886-5}, shorttitle = {The {Politics} of {Evidence} and {Results} in {International} {Development}}, url = {http://www.ids.ac.uk/publication/the-politics-of-evidence-and-results-in-international-development-playing-the-game-to-change-the-rules}, abstract = {Understanding and demonstrating the effectiveness of efforts to improve the lives of those living in poverty is an essential part of international development practice. But who decides what counts as good or credible evidence? Can the drive to measure results do justice to and promote transformational change, change that challenges the power relations that produce and reproduce inequality, injustice and the non-fulfillment of human rights? The Politics of Evidence in International Development provides a critical examination of the results agenda, with practical strategies for rendering it more helpful in supporting transformative development. The book deconstructs the origins and concepts of the results and evidence agendas employed in international development. It describes with concrete examples the current effects and consequences of the agenda, and goes on to outline a range of strategies used by individuals and organizations to resist, adapt or comply with the useful and problematic demands for results-oriented measurement and evidence of value for money.}, language = {en}, publisher = {Practical Action Publishing}, editor = {Eyben, Rosalind and Guijt, Irene and Roche, Chris and Shutt, Cathy}, month = jul, year = {2015}, } @book{guijt_learning_2022, title = {The {Learning} {Power} of {Listening}}, isbn = {978-1-78853-200-6}, url = {https://practicalactionpublishing.com/book/2622/the-learning-power-of-listening}, abstract = {Steff had the pleasure to co-author the first SenseMaker Practitioner Guide with a group of friends and colleagues supported and published by Oxfam and CRS. This practical guide is for those who wish to use SenseMaker to conduct assessments, monitor progress, and undertake evaluations or research. Drawing on more than a decade of experience, the authors share dozens of examples from international development, providing practical tips and ideas for context-specific adaptations. They show how the method can be used for difficult-to-measure outcomes related to poverty reduction, social justice, peacebuilding, resilience, gender norms, behavior change, governance and environmental management. SenseMaker is a unique participatory method of inquiry that encourages and enables novel insights not obtained from conventional quantitative and qualitative methods.
It is action-oriented and, therefore, well-suited for people needing data-informed insights for adaptive management. "Writing this guide together with Irene, Veronica, Anna and Rita was an enormous learning process in itself and has further shaped our thinking and practice. We hope it will support first-time and experienced users to enhance their practice and that it will inspire people to explore and innovate further with the method."}, urldate = {2022-07-26}, publisher = {Practical Action Publishing}, author = {Guijt, Irene and Gottret, Maria Veronica and Hanchar, Anna and Deprez, Steff and Muckenhirn, Rita}, month = jun, year = {2022}, doi = {10.3362/9781788532006}, note = {Pages: 1-188}, } @techreport{peersman_when_2016, title = {When and how to develop an impact-oriented monitoring and evaluation system}, url = {https://www.odi.org/sites/odi.org.uk/files/odi-assets/publications-opinion-files/10327.pdf}, abstract = {This guidance note focuses on: • what an impact-oriented monitoring and evaluation system entails • why an organisation may want to establish such a system • when integrating an impact-orientation into a monitoring and evaluation system is most useful • what should be considered in developing the monitoring and evaluation system, or in tweaking an existing system, to become more impact-focused. The primary audience for this guidance note is internal and external monitoring and evaluation advisors involved in designing and implementing, and/or assessing monitoring and evaluation systems to include a focus on impact. It will also be useful for senior management of organisations who need to know how best to plan for a sustainable monitoring and evaluation system that supports impact assessment or to adapt an existing system to incorporate an impact perspective.}, urldate = {2018-11-10}, institution = {ODI}, author = {Peersman, Greet and Rogers, Patricia and Guijt, Irene and Hearn, Simon and Pasanen, Tiina and Buffardi, Anne}, month = mar, year = {2016}, } @article{mayne_using_2018, title = {Using evidence to influence policy: {Oxfam}’s experience}, volume = {4}, issn = {2055-1045}, shorttitle = {Using evidence to influence policy}, url = {http://www.nature.com/articles/s41599-018-0176-7}, doi = {10.1057/s41599-018-0176-7}, abstract = {Policymaking is rarely ‘evidence-based’. Rather, policy can only be strongly evidence-informed if its advocates act effectively. Policy theories suggest that they can do so by learning the rules of political systems, and by forming relationships and networks with key actors to build up enough knowledge of their environment and trust from their audience. This knowledge allows them to craft effective influencing strategies, such as to tell a persuasive and timely story about an urgent policy problem and its most feasible solution. Empirical case studies help explain when, how, and why such strategies work in context. If analysed carefully, they can provide transferable lessons for researchers and advocates that are seeking to inform or influence policymaking. Oxfam Great Britain has become an experienced and effective advocate of evidence-informed policy change, offering lessons for building effective action.
In this article, we combine insights from policy studies with specific case studies of Oxfam campaigns to describe four ways to promote the uptake of research evidence in policy: (1) learn how policymaking works, (2) design evidence to maximise its influence on specific audiences, (3) design and use additional influencing strategies such as insider persuasion or outsider pressure, and adapt the presentation of evidence and influencing strategies to the changing context, and (4) embrace trial and error. The supply of evidence is one important but insufficient part of this story.}, language = {en}, number = {1}, urldate = {2018-11-28}, journal = {Palgrave Communications}, author = {Mayne, Ruth and Green, Duncan and Guijt, Irene and Walsh, Martin and English, Richard and Cairney, Paul}, month = dec, year = {2018}, }