standards_review.bib

@inproceedings{AnkrumKromholz2005,
  author = {Ankrum, T. S. and Kromholz, A. H.},
  booktitle = {Ninth {IEEE} International Symposium on High-Assurance Systems Engineering ({HASE}'05)},
  title = {Structured Assurance Cases: Three Common Standards},
  year = {2005},
  publisher = {Institute of Electrical and Electronics Engineers ({IEEE})},
  abstract = {For safety-, mission-, or security-critical systems, there are typically regulations or acquisition guidelines requiring a documented body of evidence to provide a compelling justification that the system satisfies specified critical properties. Current frameworks suggest the detailed outline of the final product but leave the truly meaningful and challenging aspects of arguing assurance to the developers and reviewers. We began with two major hypotheses. We selected a software notation suitable for building structured safety cases and applied it to three disparate assurance standards. Each of the three standard mapping efforts is discussed, along with the problems we encountered. In addition to the standards, we used the notation to structure an assurance case for a practical security-critical system, and we describe the lessons learned from that experience. We conclude with practical options for using our mappings of the standards and how well our initial hypotheses are borne out by the project.},
  doi = {10.1109/hase.2005.20},
  keywords = {ASCAD, GSN, assurance cases, modelling, CAE}
}
@misc{CAE,
  author = {Adelard},
  note = {last accessed 12 Feb 17},
  title = {Claims, Arguments and Evidence {(CAE)}},
  url = {https://www.adelard.com/asce/cae/}
}
@inproceedings{CaseleyWhite2009,
  author = {Caseley, P. R. and White, T. A. D.},
  title = {The {MOD} procurement guidance on software safety assurance---assessing and understanding software evidence},
  booktitle = {4th IET International Conference on Systems Safety},
  year = {2009},
  pages = {1--12},
  month = oct,
  abstract = {The UK Ministry of Defence (MOD) has compiled acquisition guidance for the safety of systems containing complex electronic elements (CEEs) to complement Def Stan 00-56 Issue 4 [4]. The term CEE is defined in the Def Stan and refers to both software and custom hardware, this means that terms such as firmware become redundant from a standards perspective. CEE also encompasses the development processes of Field Programmable Gate Arrays (FPGAs) which are treated the same as software. The MOD Guidance is applicable to any acquisition project whose CEE has any effect on the safety of the overall system. This paper outlines the strategy and key points of the Guidance. Throughout the paper the term CEE and software are interchanged as they are viewed, from a safety and standards perspective, as the same problem.},
  doi = {10.1049/cp.2009.1547},
  keywords = {Assurance;Evidence;Safety;Safety Case;Software}
}
@article{GraydonKelly2013,
  author = {Graydon, Patrick J. and Kelly, Tim P.},
  title = {Using argumentation to evaluate software assurance standards},
  journal = {Information and Software Technology},
  year = {2013},
  volume = {55},
  number = {9},
  pages = {1551--1562},
  month = sep,
  abstract = {Context

Many people and organisations rely upon software safety and security standards to provide confidence in software intensive systems. For example, people rely upon the Common Criteria for Information Technology Security Evaluation to establish justified and sufficient confidence that an evaluated information technology product's contributions to security threats and threat management are acceptable. Is this standard suitable for this purpose?

Objective

We propose a method for assessing whether conformance with a software safety or security standard is sufficient to support a conclusion such as adequate safety or security. We hypothesise that our method is feasible and capable of revealing interesting issues with the proposed use of the assessed standard.

Method

The software safety and security standards with which we are concerned require evidence and discuss the objectives of that evidence. Our method is to capture a standard's evidence and objectives as an argument supporting the desired conclusion and to subject this argument to logical criticism. We have evaluated our method by case study application to the Common Criteria standard.

Results

We were able to capture and criticise an argument from the Common Criteria standard. Review revealed 121 issues with the analysed use of the standard. These range from vagueness in its text to failure to require evidence that would substantially increase confidence in the security of evaluated software.

Conclusion

Our method was feasible and revealed interesting issues with using a Common Criteria evaluation to support a conclusion of adequate software security. Considering the structure of similar assurance standards, we see no reason to believe that our method will not prove similarly valuable in other applications.},
  doi = {10.1016/j.infsof.2013.02.008},
  keywords = {argument, CAE},
  publisher = {Elsevier {BV}}
}
@mastersthesis{Groom2002,
  author = {Groom, Sam},
  title = {The Life Cycle and Legal Cycles in Software Engineering},
  school = {University of Oxford Software Engineering Programme},
  year = {2002},
  month = mar,
  note = {last accessed 12 Jan 17},
  abstract = {Software development is often carried out in the wider context of a commercial transaction between the developer and the customer.  This dissertation contrasts the software engineer's view of law with the lawyer's view of software. The rules of the law of contract, which govern commercial transactions, are examined in the context of recent cases concerning software contracts.

The rules of product liability are examined in the context of the legislation, with examples, and lessons drawn from three major software failures.  The legal rules are mapped onto the various stages of the software life cycle.  The relation of the various legal rules to the activities undertaken in the development of software has some consequences for the software engineer, and these consequences are considered.  Some suggestions are made for the structure and supervision of software development projects.},
  url = {https://www.cs.ox.ac.uk/signin/dissertation/lifecycle.pdf}
}
@inproceedings{Hamilton2006,
  author = {Hamilton, Viv},
  booktitle = {Proceedings of the 10th Australian Workshop on Safety Related Programmable Systems},
  title = {A New Concept in Defence Safety Standards: the Revised {UK Defence Standard 00-56}},
  year = {2006},
  address = {Darlinghurst, Australia},
  editor = {Tony Cant},
  month = apr,
  note = {last accessed 13 Feb 17},
  pages = {77--83},
  publisher = {Australian Computer Society, Inc.},
  series = {Conferences in Research and Practice in Information Technology},
  volume = {55},
  abstract = {In January 2005 the UK Ministry of Defence released Issue 3 of Defence Standard 00-56 (MOD 2004). This standard provides a comprehensive structure for safety management and for engineering safety into defence equipment and services. The standard moves away from mandating specific processes and instead takes a goal-based approach that requires suppliers to justify their systems by means of safety cases containing explicit arguments based on compelling evidence. This approach provides greater flexibility, especially for systems using COTS components. It is likely that the introduction of the new standard will pose challenges, as both suppliers and procurers develop new skills in developing and justifying safety claims. In this paper, the background to the revision is explained, including the challenge in producing a goal-based software standard. The structure of the standard is described and contrasting examples of potentially compliant approaches are provided. The consultation that has taken place with industry and the key challenges for both suppliers and procurers are explained.},
  acmid = {1151824},
  isbn = {1-920-68237-6},
  keywords = {defence standards, goal-based},
  location = {Sydney, Australia},
  numpages = {7},
  url = {https://dl.acm.org/citation.cfm?id=1151816.1151824}
}
@misc{Hancock2015,
  author = {Hancock, Matthew},
  title = {Open Standards principles 2015},
  month = sep,
  year = {2015},
  note = {last accessed 17 Nov 18},
  abstract = {Standards for software interoperability, data and document formats in government IT specifications.},
  groups = {NotUsed},
  keywords = {Open Standards},
  url = {https://www.gov.uk/government/publications/open-standards-principles}
}
@inproceedings{McdermidWilliams2014,
  author = {McDermid, J. A. and Williams, P.},
  title = {Defence standard 00-56 issue 5: concepts, principles and pragmatics},
  booktitle = {9th IET International Conference on System Safety and Cyber Security (2014)},
  year = {2014},
  pages = {1--6},
  month = oct,
  abstract = {Defence Standard (DS) 00-56 is the UK MoD's primary contracting safety management standard, used to govern the safety aspects of work undertaken by industry under contract to the MoD. The MoD is going through a period of unprecedented change and this is impacting the way in which MoD and its suppliers manage safety, as well as many other aspects of the defence enterprise. The paper explains some of the drivers for updating DS 00-56 from issue 4 to issue 5 and explains the rationale for the key changes in the standard.},
  keywords = {computational linguistics;defence industry;military standards;safety;DS;Defence Standard 00-56 Issue 5;UK MoD primary contracting safety management standard;defence enterprise;Contracting for Safety;Defence;Safety Management;Safety Standards}
}
@techreport{SEB008,
  author = {{Ministry of Defence}},
  title = {Defence Standards 00-56, 00-55 and 00-27},
  institution = {{Defence Equipment \& Support}},
  year = {2015},
  type = {Safety \& Environmental Bulletin},
  number = {SEB/008},
  address = {Bristol},
  month = apr,
  note = {last accessed 13 Feb 17},
  abstract = {This SEB has been produced to keep staff informed of the changes to S\&E Defence Standards and contains information on the review and publication of:

* Def Stan 00-56 Part 1 Issue 6, Safety Management Requirements for Defence Systems.
* Def Stan 00-55 Part 1 Interim Issue 3, Requirements for Safety of Programmable Elements in Defence Systems.
* Def Stan 00-27 Issue 3, Measurement of Impulse Noise from Military Weapons, Explosives and Pyrotechnics (MWEP) and Selection of Hearing Protection.},
}
@mastersthesis{Tamos2011,
  author = {Tamos, Massimo},
  title = {Applicability of safety critical systems techniques to business domain},
  school = {University of Oxford Software Engineering Programme},
  year = {2011},
  abstract = {The dissertation aims to analyse the possibility to apply safety critical system (SCS) techniques, usually applied in contexts of engineering where human life may come in danger, to business contexts where the business itself could be at risk.

SCS techniques will first be introduced to the reader to offer the fundamental knowledge and context from where the dissertation will develop its key concept of safety business software and associated techniques.  The comparison of qualitative versus quantitative analysis about SCS is also introduced, and the related convenience to apply one or other in the context of business.

A key element of the dissertation is to offer to the reader the background and reasons why companies may benefit from extended risk management techniques like HAZOP or fault tree analysis (FTA), to gain competitive advantage or tune the information system (IS) to a better profitability.

The overall aim of the dissertation is to highlight the advantages that a software architect or an IT manager can derive by adopting SCS techniques when engineering business applications.  The work provides an overview of related work to safety critical systems and analyses how these concepts can fit in a business context.},
}
@article{Tierney1992,
  author = {Tierney, Margaret},
  title = {Software engineering standards: the `formal methods debate' in the {UK}},
  journal = {Technology Analysis {\&} Strategic Management},
  year = {1992},
  volume = {4},
  number = {3},
  pages = {245--278},
  month = jan,
  abstract = {This paper traces the evolution of two standards, Def Stan 00-55 and 00-56, regulating the identification and production of safety-critical software for defence applications, issued by the Ministry of Defence (MoD) as interim standards in 1991. The standards—00-55, in particular—have become an important forum for articulating the interests of those who work in the UK safety-critical software engineering field; a debate which has largely revolved around the integral role 00-55 demands for formal methods of software development for safety-critical functions or components. In recounting the story of their gestation within the MoD during the early 1980s; their controversial release in draft form in 1989; and their subsequent second release as interim standards in 1991, the aim has been to illuminate some of the current `politics' of formal methods of software production, and to consider how the standards are reshaping the discipline of software engineering.},
  doi = {10.1080/09537329208524097},
  publisher = {Informa {UK} Limited}
}
@inproceedings{WallaceKuhnIppolito1992,
  author = {Wallace, D. R. and Kuhn, D. R. and Ippolito, L. M.},
  title = {An analysis of selected software safety standards},
  booktitle = {COMPASS '92 Proceedings of the Seventh Annual Conference on Computer Assurance},
  year = {1992},
  pages = {123--136},
  month = jun,
  abstract = {This study examines standards, draft standards, and guidelines that provide requirements for the assurance of high-integrity software. It focuses on identifying the attributes necessary in such documents for providing reasonable assurance for high-integrity software, and on identifying the relative strengths and weaknesses of the documents. The documents vary widely in their requirements and the precision with which the requirements are expressed. Security documents tend to have a narrow focus and to be more product-oriented, whereas safety documents tend to be broad in scope and center primarily on the software development process. Overall there is little relationship between the degree of risk and the rigor of applicable standards. Recommendations are provided for a base standard for the assurance of high-integrity software.},
  doi = {10.1109/CMPASS.1992.235757},
  keywords = {safety;software reliability;standards;draft standards;guidelines;high-integrity software;safety documents;security documents;software development process;software safety standards;Guidelines;NIST;Procurement;Product safety;Programming;Security;Software safety;Software standards;Software systems;Standards development}
}
@mastersthesis{Watkinson2012,
  author = {Watkinson, Paul},
  title = {Software Engineering---Methodology for Critical Systems},
  school = {University of Oxford Software Engineering Programme},
  year = {2012},
  abstract = {As our use of information technology has evolved, we have reached the point where we are often completely dependent on software systems.  More than ever before, the failure of a software system may have severe consequences for our health, our livelihood, our society.  This dissertation explores the engineering approaches adopted for software systems that may have critical impact upon individuals or organizations, as set out in existing textbooks, papers, and standards documents.  It considers the application of these approaches to the development of a system for mobile heart rate monitoring, diagnosis, and response.  It makes recommendations regarding software engineering methodology for critical systems.},
}
@mastersthesis{Williams2012,
  author = {Williams, Mark Peter},
  title = {Safety-Related Software Engineering and Support Policy in {MOD} Acquisition},
  school = {University of Oxford Software Engineering Programme},
  year = {2012},
  abstract = {Safety-related software systems present key design and support challenges that can severely affect the perceived success or failure of projects.  In the context of MOD acquisition this is intensified due to the longevity of projects and the complex nature of the systems.  This dissertation explores the components of these challenges, in relation to the MOD acquisition policy and the support of safety-related software systems.  Identifying how the reorganization of MOD acquisition has led to additional demands in this sector and potentially adversely affected the availability of safety-related software expertise.  Additionally exploring how these changes to the MOD have weakened organizational confidence and fostered a culture of dependence on external agencies for specialist advice.},
  groups = {NotUsed},
  keywords = {00-55}
}
@inproceedings{WongGidvaniLopezEtAl2014,
  author = {Wong, W. E. and Gidvani, T. and Lopez, A. and Gao, R. and Horn, M.},
  title = {Evaluating Software Safety Standards: A Systematic Review and Comparison},
  booktitle = {2014 IEEE Eighth International Conference on Software Security and Reliability-Companion},
  year = {2014},
  pages = {78--87},
  month = jun,
  abstract = {Software safety standards are commonly used to guide the development of safety-critical software systems. However, given the existence of multiple competing standards, it is critical to select the most appropriate one for a given project. We have developed a set of 15 criteria to evaluate each standard in terms of its usage, strengths, and limitations. Five standards are studied, including a NASA Software Safety Standard, an FAA System Safety Handbook, MIL-STD-882D (US Department of Defense), DEF-STAN 00-56 (UK Ministry of Defense), and DO-178B (Commercial avionics). Results of our evaluation suggest that different standards score differently with respect to each evaluation criterion. No standard performs better than others on all the criteria. The lessons learned from software-related accidents in which the standards were involved provide further insights on the pros and cons of using each standard.},
  doi = {10.1109/SERE-C.2014.25},
  keywords = {safety-critical software;security of data;software standards;DEF-STAN 00-56;DO-178B;FAA system safety handbook;MIL-STD-882D;NASA software safety standard;UK Ministry of Defense;US Department of Defense;commercial avionics;evaluation criterion;safety-critical software system;software safety standards;software-related accidents;FAA;Hazards;NASA;Software safety;Standards;hazards;mishap;safety standard;safety-critical software;software safety;system safety}
}
@incollection{Adam2009,
  author = {Adam, Alison and Spedding, Paul},
  title = {Trusting Computers Through Trusting Humans: Software Verification in a Safety-Critical Information Society},
  booktitle = {Social and Human Elements of Information Security: Emerging trends and countermeasures},
  publisher = {Information Science Reference},
  year = {2009},
  editor = {Gupta, Manish and Sharman, Raj},
  chapter = {V},
  pages = {61--75},
  isbn = {978-1-60566-037-0},
  abstract = {This chapter considers the question of how we may trust automatically generated program code. The code walkthroughs and inspections of software engineering mimic the ways that mathematicians go about assuring themselves that a mathematical proof is true. Mathematicians have difficulty accepting a computer generated proof because they cannot go through the social processes of trusting its construction. Similarly, those involved in accepting a proof of a computer system or computer generated code cannot go through their traditional processes of trust. The process of software verification is bound up in software quality assurance procedures, which are themselves subject to commercial pressures. Quality standards, including military standards, have procedures for human trust designed into them. An action research case study of an avionics system within a military aircraft company illustrates these points, where the software quality assurance (SQA) procedures were incommensurable with the use of automatically generated code.},
}
@book{Patton2005,
  title = {Software Testing},
  publisher = {Sams Publishing},
  year = {2005},
  author = {Patton, Ron},
  address = {Indianapolis IN},
  edition = {2nd},
  month = aug,
  isbn = {067232798-8},
  keywords = {STE, testing}
}
@book{Ould1999,
  title = {Managing software quality and business risk},
  publisher = {John Wiley \& Sons Ltd},
  year = {1999},
  author = {Ould, Martyn A.},
  address = {Chichester},
  isbn = {047199782X},
  abstract = {Software development failures are invariably caused by a combination of circumstances - circumstances that are rarely technical in origin. Increasingly, standard risk management practices used in other industries are being applied to software development projects.},
  keywords = {MRQ, quality assurance, quality control, software quality, QA, risk}
}
@article{Knight1993,
  author = {Knight, John C. and Myers, E. Ann},
  title = {An Improved Inspection Technique},
  journal = {Commun. ACM},
  year = {1993},
  volume = {36},
  number = {11},
  pages = {51--61},
  month = nov,
  issn = {0001-0782},
  acmid = {163366},
  address = {New York, NY, USA},
  doi = {10.1145/163359.163366},
  issue_date = {Nov. 1993},
  keywords = {formal inspections, reviews, walkthroughs},
  numpages = {11},
  publisher = {ACM}
}
@article{Fagan1976,
  author = {Fagan, M. E.},
  title = {Design and code inspections to reduce errors in program development},
  journal = {{IBM} Systems Journal},
  year = {1976},
  volume = {15},
  number = {3},
  pages = {182--211},
  abstract = {We can summarize the discussion of design and code inspections and process control in developing programs as follows:
1. Describe the program development process in terms of operations, and define exit criteria which must be satisfied for completion of each operation.
2. Separate the objectives of the inspection process operations to keep the inspection team focused on one objective at a time: Overview (communications/education), Preparation (education), Inspection (find errors), Rework (fix errors), Follow-up (ensure all fixes are applied correctly).
3. Classify errors by type, and rank frequency of occurrence of types. Identify which types to spend most time looking for in the inspection.
4. Describe how to look for presence of error types.
5. Analyze inspection results and use for constant process improvement (until process averages are reached and then use for process control).},
  doi = {10.1147/sj.153.0182},
  keywords = {inspection, review},
  publisher = {{IBM}}
}
@book{Lankhorst2009,
  title = {Enterprise Architecture at Work},
  publisher = {Springer Berlin Heidelberg},
  year = {2009},
  author = {Lankhorst, Marc},
  edition = {2nd},
  isbn = {978-3-642-01309-6},
  abstract = {An enterprise architecture tries to describe and control an organisation's structure, processes, applications, systems and techniques in an integrated way. The unambiguous specification and description of components and their relationships in such an architecture requires a coherent architecture modelling language.

Lankhorst and his co‑authors present such an enterprise modelling language that captures the complexity of architectural domains and their relations and allows the construction of integrated enterprise architecture models. They provide architects with concrete instruments that improve their architectural practice. As this is not enough, they additionally present techniques and heuristics for communicating with all relevant stakeholders about these architectures. Since an architecture model is useful not only for providing insight into the current or future situation but can also be used to evaluate the transition from `as-is' to `to-be', the authors also describe analysis methods for assessing both the qualitative impact of changes to an architecture and the quantitative aspects of architectures, such as performance and cost issues.
The modelling language presented has been proven in practice in many real‑life case studies and has been adopted by The Open Group as an international standard. So this book is an ideal companion for enterprise IT or business architects in industry as well as for computer or management science students studying the field of enterprise architecture.},
  doi = {10.1007/978-3-642-01310-2},
}
@misc{KID,
  author = {{Ministry of Defence}},
  title = {{Knowledge in Defence} website},
  year = {2018},
  note = {last accessed 2 Dec 18},
  url = {https://www.aof.mod.uk/}
}
@misc{SQM,
  author = {{Ministry of Defence}},
  month = jan,
  note = {last accessed 9 Apr 18},
  title = {Software Quality Management Guidance},
  year = {2009},
  keywords = {quality assurance, quality control, software quality, QA},
  url = {https://web.archive.org/web/20110316104449/http://www.aof.mod.uk/aofcontent/tactical//quality/downloads/sqmg.pdf}
}
@inproceedings{Galloway2005,
  author = {Galloway, A. and Paige, R.F. and Tudor, N.J. and Weaver, R.A. and Toyn, I. and McDermid, J.},
  title = {Proof vs Testing in the Context of Safety Standards},
  booktitle = {24th Digital Avionics Systems Conference},
  year = {2005},
  month = oct,
  publisher = {{IEEE}},
  doi = {10.1109/dasc.2005.1563405},
  keywords = {GSN, argument, DO-178B}
}
@article{Reed2007,
  author = {Reed, Chris and Walton, Douglas and Macagno, Fabrizio},
  title = {Argument diagramming in logic, law and artificial intelligence},
  journal = {The Knowledge Engineering Review},
  year = {2007},
  volume = {22},
  number = {01},
  pages = {87},
  month = mar,
  doi = {10.1017/s0269888907001051},
  keywords = {argument, diagram, argument visualisation},
  publisher = {Cambridge University Press ({CUP})}
}
@techreport{JSP886,
  author = {{Ministry of Defence}},
  title = {Software Support},
  institution = {{Defence Equipment and Support, Engineering Group, Software Supportability Team}},
  year = {2013},
  type = {Joint Service Publication},
  number = {JSP886 Vol 7 Part 4},
  month = feb,
  note = {last accessed 17 Mar 18},
  abstract = {This Part contains subject matter provided by the Defence Equipment and Support (DE\&S), Engineering Group (EG), Software Supportability (SS) Team.  The purpose of this Part is to define authoritative policy and provide guidance to maximise the likelihood of procuring and maintaining supportable software.  It supersedes software policy and information previously contained within:
a. Air Publication (AP) 100D-10. 
b. Joint Air Publication (JAP) 100A-01 Chapter 12.8. 

Software may provide a wide variety of functions at various levels within a system’s physical structure.  As such, all software will require appropriate through-life support in order to sustain operational effectiveness of the host system. 

This publication is applicable to software employed in the Land, Sea, Air and Information System domains and should be considered across all Defence Lines of Development (DLODs).},
  groups = {NotUsed},
  keywords = {supportability},
  url = {https://www.defencegateway.mod.uk/sites/dlf/framework/publisher.kc/Rule%20Knowledge%20Centre/Orphans/20110513-JSP886-V7P4-SoftwareSp-v2-1-U.pdf}
}
@techreport{00-00-2,
  author = {{Ministry of Defence}},
  title = {Standards for Defence---{P}art 2: Management and Production of Defence Standards},
  institution = {UK Defence Standardization},
  year = {2010},
  type = {Defence Standard},
  number = {00-00 Part 2 Issue 5},
  address = {Glasgow},
  month = dec,
  note = {Withdrawn 12 Dec 2012},
}
@article{Ould1990,
  author = {Ould, Martyn A.},
  title = {Software development under {Def Stan} 00-55: a guide},
  journal = {Information and Software Technology},
  year = {1990},
  volume = {32},
  number = {3},
  pages = {170--175},
  month = apr,
  abstract = {In May 1989 the UK Ministry of Defence issued Interim Defence Standard 00-55 ‘Requirements for the procurement of safety critical software in defence equipment’ for comment. The standard sets stiff requirements on the development of safety-critical software in the defence arena. The paper looks at the scope of the new standard and examines its methodological implications, giving commentary on the standard's requirements.},
  keywords = {software development, safety-critical software, standards},
  publisher = {Elsevier {BV}}
}
@techreport{NAO2013,
  author = {Amyas Morse},
  title = {Major Projects Report 2012},
  institution = {{National Audit Office}},
  year = {2013},
  type = {House of Commons report},
  number = {HC 684-I 2012--13},
  month = jan,
  note = {last accessed 29 May 18},
  abstract = {This report examines the Ministry of Defence’s progress in delivering its largest defence equipment projects to agreed cost, time and performance measures.},
  keywords = {MPR, software, delays},
  url = {https://www.nao.org.uk/wp-content/uploads/2013/03/Major-Projects-full-report-Vol-1.pdf}
}
@techreport{NAO2008,
  author = {Tim Burr},
  title = {Chinook {Mk3} Helicopters},
  institution = {{National Audit Office}},
  year = {2008},
  type = {House of Commons report},
  number = {HC 512 2007--2008},
  month = jun,
  note = {last accessed 29 May 18},
  keywords = {glass cockpit, software, delays},
  url = {https://www.nao.org.uk/wp-content/uploads/2008/06/0708512.pdf}
}
@techreport{NAO2004,
  author = {Bourn, Sir John},
  title = {Battlefield Helicopters},
  institution = {{National Audit Office}},
  year = {2004},
  type = {House of Commons report},
  number = {HC 486 2003--2004},
  month = apr,
  note = {last accessed 29 May 18},
  keywords = {software, delays, chinook},
  url = {https://www.nao.org.uk/wp-content/uploads/2004/04/0304486.pdf}
}
@inproceedings{Kelly2014,
  author = {Tim Kelly},
  title = {Software Certification: where is Confidence Won and Lost?},
  booktitle = {Addressing Systems Safety Challenges: Proceedings of the Twenty-second Safety-critical Systems Symposium, Brighton, UK, 4-6th February 2014},
  year = {2014},
  editor = {Tom Anderson},
  volume = {22},
  series = {Safety-critical Systems Symposium},
  pages = {255--267},
  month = feb,
  note = {last accessed 8 Dec 18},
  abstract = {Given that we cannot prove the safety of software (in a system context) we are forced to wrestle with the issue of confidence in software certification.  Some draw confidence from compliance with software assurance standards and believe this is sufficient, yet we do not have consensus in these standards.  Some establish confidence through the process of constructing and presenting a software assurance case, but ignore the experience and ‘body of knowledge’ provided by standards.  Some (sensibly) use a combination of these approaches.  Using a framework of 4+1 principles of software safety assurance, this paper discusses where and how in current safety-critical software development and assessment approaches confidence is typically won and lost.  Based on this assessment, we describe how the activity and structure of an assurance case should best be targeted to explicitly address issues of confidence.},
  keywords = {software safety, standards, 4+1},
  url = {https://scsc.uk/scsc-126}
}
@article{Williams2015,
  author = {Phil Williams and John McDermid},
  title = {Reincarnation of {Def Stan 00-55}},
  journal = {Safety Systems},
  year = {2015},
  volume = {25},
  number = {1},
  month = sep,
  note = {last accessed 3 Jun 18},
  abstract = {Def Stan 00-55 Issue 3 was released as an Interim standard in December 2014. It is currently under review, with the intent of raising it to full extant status. This article presents some of the motivation and rationale behind its resurrection and update.},
  keywords = {00-55, publication},
  url = {https://scsc.uk/scsc-145}
}
@article{Howard2005,
  author = {Chris Howard},
  title = {The {MOD}'s new system safety standard: {Interim Defence Standard} 00-56 Issue 3},
  journal = {Safety Systems},
  year = {2005},
  volume = {15},
  number = {1},
  month = sep,
  note = {last accessed 3 Jun 18},
  url = {https://scsc.uk/scsc-75}
}
@book{Humphrey2005,
  title = {{PSP(SM):} A Self-Improvement Process for Software Engineers},
  publisher = {Addison Wesley},
  year = {2005},
  author = {Humphrey, Watts S.},
  isbn = {0321305493},
  date = {2005-03-11},
  ean = {9780321305497},
  keywords = {PRO, quality, process improvement},
  pagetotal = {368}
}
@mastersthesis{Bates2017,
  author = {Robert Bates},
  school = {University of Oxford Software Engineering Programme},
  title = {Software Safety in the {MOD}---A comprehensive review of the acquisition of Safety Critical Software},
  year = {2017},
  month = oct,
  abstract = {The MOD considered Software to be just an intrinsic element of its capabilities used to deliver functionality, and afforded it no special consideration in the Safety Assurance process used to determine that the capability is justifiably ``safe''. After suffering a catastrophic loss of life and a number of damning reports, the MOD began to realise the prevalence of software in its capabilities that provided a safety-related function, and the implications of a failure of that software in the system safety chain.

This dissertation looks into the background of software in military systems, how it is defined, and its safety, security and mission characteristics that determine how it is utilised in the military operating domain. The study looks into a variety of safety assurance tools, techniques and methodologies, and their applicability to the military safety assurance process. The study then identifies the common trends that affect the safety assurance process and proposes an approach on how the MOD should undertake Software Safety Assurance.

The study presents a number of unique techniques and processes for the MOD to adopt, including: a new standard for Software Safety Requirements, including the pre-tender assessment of the safety functions of the software; the definition of a common taxonomy for MOD software safety assessment; a methodology for justifying software as having no safety function; and the definition of a process that identifies the level of engagement the MOD needs with its software, covering the whole life of the software. This will ensure that the MOD understands the software it has in its systems, any safety function of that software, and the generation of the required evidence for governance and obtaining an agreed level of confidence that the software will not exhibit unintended hazardous behaviours.},
  groups = {NotUsed, Important},
  keywords = {00-56}
}
@misc{Bates2012,
  author = {Robert Bates},
  title = {{Defence Standard} 00-56 {MODAF} Views},
  howpublished = {Unpublished},
  month = nov,
  year = {2012},
  groups = {Don't use},
  keywords = {MODAF, architecture, 00-56}
}
@article{Mattern2008,
  author = {Steven F. Mattern},
  title = {Increasing the Likelihood of Success of a Software Assurance Program},
  journal = {Journal of System Safety},
  year = {2008},
  volume = {44},
  number = {4},
  pages = {19--25},
  issn = {0743-8826},
  issue = {Jul./Aug.},
  keywords = {software assurance, goals}
}
@techreport{SEP082013,
  author = {{DES Tech-QSEP Support}},
  title = {Safety Management Guidance for Software-Only Projects},
  institution = {{Defence Equipment and Support}},
  year = {2014},
  type = {DE\&S Safety \& Environmental Protection Leaflet},
  number = {08/2013 v1.3},
  address = {Bristol, UK},
  month = sep,
  note = {last accessed 31 Aug 18},
  abstract = {Guidelines for achieving confidence that the integrity of software products delivered to military systems, that are not considered to be safety related, is assured.},
  groups = {NotUsed},
  keywords = {software, assurance, benign software, 00-055, 00-056},
  url = {https://www.asems.mod.uk/sites/default/files/documents/SEP%20Leaflets/GMP00/20140903-SEP_Leaflet_Benign_Software_08_2013_Ver_1_3.pdf}
}
@article{Hendrix2010,
  author = {Barry Hendrix},
  title = {New System Safety Standard {ANSI/GEIA-STD-0010} is Available},
  journal = {Journal of System Safety},
  year = {2010},
  volume = {46},
  number = {2},
  pages = {36--37},
  issn = {0743-8826},
  keywords = {standards, GEIA-STD-0010}
}
@article{Hawkins2010,
  author = {Richard D. Hawkins and Tim P. Kelly},
  title = {A Systematic Approach for Developing Software Safety Arguments},
  journal = {Journal of System Safety},
  year = {2010},
  volume = {46},
  number = {4},
  pages = {25--33},
  issn = {0743-8826},
  keywords = {GSN, software safety, software assurance, SSEI SOBP, assurance deficit, HAZOPS}
}
@article{Bowen1995,
  author = {J.P. Bowen and M.G. Hinchey},
  title = {Seven more myths of formal methods},
  journal = {{IEEE} Software},
  year = {1995},
  volume = {12},
  number = {4},
  pages = {34--41},
  month = jul,
  abstract = {New myths about formal methods are gaining tacit acceptance both outside and inside the system-development community. The authors address and dispel these myths based on their observations of industrial projects. The myths include: formal methods delay the development process; they lack tools; they replace traditional engineering design methods; they only apply to software; are unnecessary; not supported; and formal methods people always use formal methods.},
  keywords = {00-55, formal methods, software tools, costs},
  publisher = {Institute of Electrical and Electronics Engineers ({IEEE})}
}
@article{Hendrix2017,
  author = {Barry Hendrix and Saralyn Dwyer and Dave West},
  title = {Model-based Systems Engineering and Software System Workshop},
  journal = {Journal of System Safety},
  year = {2017},
  volume = {53},
  number = {3},
  pages = {24--29},
  issn = {0743-8826},
  issue = {Winter},
  keywords = {MBSE, systems engineering, software engineering, modelling}
}
@article{Wilkinson2013,
  author = {Pamela K. Wilkinson},
  title = {Dealing with Conflicting Contract System Safety Requirements},
  journal = {Journal of System Safety},
  year = {2013},
  volume = {49},
  number = {2},
  pages = {14--17},
  issn = {0743-8826},
  issue = {Mar./Apr.}
}
@techreport{DO-178C,
  author = {{RTCA Special Committee 205}},
  title = {Software considerations in airborne systems and equipment certification},
  institution = {RTCA Inc.},
  year = {2011},
  type = {Recommendation},
  number = {DO-178C},
  address = {Washington},
  month = dec,
  abstract = {This document provides recommendations for the production of software for airborne systems and equipment that performs its intended function with a level of confidence in safety that complies with airworthiness requirements.  Compliance with the objectives of DO-178C is the primary means of obtaining approval of software used in civil aviation products.},
  keywords = {DO-178C}
}
@techreport{DO-178B,
  author = {{RTCA Special Committee 167}},
  title = {Software considerations in airborne systems and equipment certification},
  institution = {RTCA Inc.},
  year = {1992},
  type = {Recommendation},
  number = {DO-178B},
  address = {Washington},
  month = jan,
  abstract = {The rapid increase in the use of software in airborne systems and equipment used on aircraft and engines in the early 1980s resulted in a need for industry-accepted guidance for satisfying airworthiness requirements. DO-178, "Software Considerations in Airborne Systems and Equipment Certification," was written to satisfy this need.

This document, now revised in the light of experience, provides the aviation community with guidance for determining, in a consistent manner and with an acceptable level of confidence, that the software aspects of airborne systems and equipment comply with airworthiness requirements.},
  keywords = {DO-178B},
  pagination = {section}
}
@misc{CC,
  title = {Common Criteria for Information Technology Security Evaluation},
  howpublished = {v3.1 revision 3},
  month = jul,
  year = {2009},
  note = {last accessed 8 Nov 18},
  keywords = {Common Criteria, CC},
  url = {https://www.commoncriteriaportal.org/cc/}
}
@inproceedings{Jacklin2012,
  author = {Stephen Jacklin},
  title = {Certification of Safety-Critical Software Under {DO-178C} and {DO-278A}},
  booktitle = {Infotech@Aerospace 2012},
  year = {2012},
  month = jun,
  organization = {NASA Ames Research Center},
  publisher = {American Institute of Aeronautics and Astronautics},
  abstract = {The RTCA has recently released DO-178C and DO-278A as new certification guidance for the production of airborne and ground-based air traffic management software, respectively. Additionally, RTCA special committee SC-205 has also produced, at the same time, five other companion documents. These documents are RTCA DO-248C, DO-330, DO-331, DO-332, and DO-333. These supplements address frequently asked questions about software certification, provide guidance on tool qualification requirements, and illustrate the modifications recommended to DO-178C when using model-based software design, object oriented programming, and formal methods. The objective of this paper is to first explain the relationship of DO-178C to the former DO-178B in order to give those familiar with DO-178B an indication of what has been changed and what has not been changed. With this background, the relationship of DO-178C and DO-278 to the new DO-278A document for ground-based software development is shown. Last, an overview of the new guidance contained in the tool qualification document and the three new supplements to DO-178C and DO-278A is presented. For those unfamiliar with DO-178B, this paper serves to provide an entry point to this new certification guidance for airborne and ground-based CNS/ATM software certification.},
  doi = {10.2514/6.2012-2473},
  keywords = {AIR TRAFFIC CONTROL, COMPUTER SYSTEMS PROGRAMS, SOFTWARE ENGINEERING, CERTIFICATION, COMPUTER PROGRAMMING, SAFETY, AIR TRAFFIC, DO-178C}
}
@techreport{DO-331,
  author = {{RTCA Special Committee 205}},
  title = {Model-Based Development and Verification Supplement to {DO-178C} and {DO-278A}},
  institution = {RTCA Inc.},
  year = {2011},
  type = {Recommendation},
  number = {DO-331},
  address = {Washington},
  month = dec,
  abstract = {This supplement contains modifications and additions to DO-178C and DO-278A objectives, activities, explanatory text and software life cycle data that should be addressed when model-based development and verification are used as part of the software life cycle. This includes the artifacts that would be expressed using models and the verification evidence that could be derived from them. Therefore, this supplement also applies to the models developed in the system process that define software requirements or software architecture.

A model is an abstract representation of a set of software aspects of a system that is used to support the software development process or the software verification process. This supplement addresses model(s) that have the following characteristics:

 * The model is completely described using an explicitly identified modeling notation. The modeling notation may be graphical and/or textual.
 * The model contains software requirements and/or software architecture definition.
 * The model is of a form and type that is used for direct analysis or behavioral evaluation as supported by the software development process or the software verification process},
  keywords = {DO-178C, DO-331, MBSE}
}
@techreport{00-56/1,
  author = {{Ministry of Defence}},
  title = {Hazard Analysis and Safety Classification of the Computer and Programmable Electronic System Elements of Defence Equipment},
  institution = {Directorate of Standardization},
  year = {1991},
  type = {Interim Defence Standard},
  number = {00-56 Issue 1},
  address = {Glasgow},
  month = apr
}
@techreport{8719.13B,
  author = {{National Aeronautics and Space Administration}},
  institution = {NASA},
  title = {Software Safety Standard},
  year = {2004},
  address = {Washington, DC},
  month = jul,
  note = {last accessed 13 Feb 17},
  number = {NASA-STD-8719.13B w/Change 1},
  type = {Technical Standard},
  abstract = {NASA-STD 8719.13B describes the activities necessary to ensure that safety is designed into the software that is acquired or developed by NASA. All Program/Project Managers, Area Safety Managers, IT managers, and other responsible managers are to assess the inherent safety risk of the software in their individual programs. The magnitude and depth of software safety activities should reflect the risk posed by the software while fulfilling the requirements of this Standard.},
  url = {https://web.archive.org/web/20160506011430/http://system-safety.org/Documents/NASA-STD-8719.13B.pdf}
}
@techreport{882D,
  author = {{Department of Defense}},
  title = {Standard Practice for System Safety},
  institution = {DOD},
  year = {2000},
  type = {Military Standard},
  number = {MIL-STD-882D},
  month = feb,
  abstract = {This document outlines a standard practice for conducting system safety.

The system safety practice as defined herein conforms to the acquisition procedures in DoD Regulation 5000.2-R and provides a consistent means of evaluating identified risks. Mishap risk must be identified, evaluated, and mitigated to a level acceptable (as defined by the system user or customer) to the appropriate authority and compliant with federal (and state where applicable) laws and regulations, Executive Orders, treaties, and agreements. Program trade studies associated with mitigating mishap risk must consider total life cycle cost in any decision. When requiring MIL-STD-882 in a solicitation or contract and no specific paragraphs of this standard are identified, then apply only those requirements presented in section 4.}
}
@techreport{FAAHdbk,
  author = {{Federal Aviation Administration}},
  institution = {FAA},
  title = {System Safety Handbook},
  year = {2000},
  month = dec,
  note = {last accessed 8 Nov 18},
  type = {Handbook},
  abstract = {The System Safety Handbook (SSH) was developed for the use of Federal Aviation Administration (FAA) employees, supporting contractors and any other entities that are involved in applying system safety policies and procedures throughout FAA. As the Federal agency with primary responsibility for civil aviation safety, the FAA develops and applies safety techniques and procedures in a wide range of activities from NAS modernization, to air traffic control, and aircraft certification. On June 28, 1998, the FAA Administrator issued Order 8040.4 to establish FAA safety risk management policy. This policy requires all the Lines of Business (LOB) of the FAA to establish and implement a formal risk management program consistent with the LOB’s role in the FAA. The policy reads in part: ``The FAA shall use a formal, disciplined, and documented decision making process to address safety risks in relation to high-consequence decisions impacting the complete life cycle.''},
  keywords = {software safety},
  url = {https://web.archive.org/web/20180207062904/https://www.faa.gov/regulations_policies/handbooks_manuals/aviation/risk_management/ss_handbook/}
}
@article{Vara2016,
  author = {Jose Luis de la Vara and Alejandra Ruiz and Katrina Attwood and Hu{\'{a}}scar Espinoza and Rajwinder Kaur Panesar-Walawege and {\'{A}}ngel L{\'{o}}pez and Idoya del R{\'{i}}o and Tim Kelly},
  title = {Model-based specification of safety compliance needs for critical systems: A holistic generic metamodel},
  journal = {Information and Software Technology},
  year = {2016},
  volume = {72},
  pages = {16--30},
  month = apr,
  issn = {0950-5849},
  abstract = {Context: Many critical systems must comply with safety standards as a way of providing assurance that they do not pose undue risks to people, property, or the environment. Safety compliance is a very demanding activity, as the standards can consist of hundreds of pages and practitioners typically have to show the fulfilment of thousands of safety-related criteria. Furthermore, the text of the standards can be ambiguous, inconsistent, and hard to understand, making it difficult to determine how to effectively structure and manage safety compliance information. These issues become even more challenging when a system is intended to be reused in another application domain with different applicable standards.
Objective: This paper aims to resolve these issues by providing a metamodel for the specification of safety compliance needs for critical systems.
Method: The metamodel is holistic and generic, and abstracts common concepts for demonstrating safety compliance from different standards and application domains. Its application results in the specification of ``reference assurance frameworks'' for safety-critical systems, which correspond to a model of the safety criteria of a given standard. For validating the metamodel with safety standards, parts of several standards have been modelled by both academic and industry personnel, and other standards have been analysed. We further augment this with feedback from practitioners, including feedback during a workshop.
Results: The results from the validation show that the metamodel can be used to specify safety compliance needs for aerospace, automotive, avionics, defence, healthcare, machinery, maritime, oil and gas, process industry, railway, and robotics. Practitioners consider that the metamodel can meet their needs and find benefits in its use.
Conclusion: The metamodel supports the specification of safety compliance needs for most critical computer-based and software-intensive systems. The resulting models can provide an effective means of structuring and managing safety compliance information.},
  doi = {10.1016/j.infsof.2015.11.008},
  groups = {Important},
  keywords = {Safety-critical system, safety standard, safety compliance, safety assurance, safety certification, reference assurance framework, metamodel},
  publisher = {Elsevier {BV}}
}
@article{Scheuer2010,
  author = {Oliver Scheuer and Frank Loll and Niels Pinkwart and Bruce M. McLaren},
  title = {Computer-supported argumentation: A review of the state of the art},
  journal = {International Journal of Computer-Supported Collaborative Learning},
  year = {2010},
  volume = {5},
  number = {1},
  pages = {43--102},
  month = jan,
  issn = {1556-1615},
  abstract = {Argumentation is an important skill to learn. It is valuable not only in many professional contexts, such as the law, science, politics, and business, but also in everyday life. However, not many people are good arguers. In response to this, researchers and practitioners over the past 15–20 years have developed software tools both to support and teach argumentation. Some of these tools are used in individual fashion, to present students with the ``rules'' of argumentation in a particular domain and give them an opportunity to practice, while other tools are used in collaborative fashion, to facilitate communication and argumentation between multiple, and perhaps distant, participants. In this paper, we review the extensive literature on argumentation systems, both individual and collaborative, and both supportive and educational, with an eye toward particular aspects of the past work. More specifically, we review the types of argument representations that have been used, the various types of interaction design and ontologies that have been employed, and the system architecture issues that have been addressed. In addition, we discuss intelligent and automated features that have been imbued in past systems, such as automatically analyzing the quality of arguments and providing intelligent feedback to support and/or tutor argumentation. We also discuss a variety of empirical studies that have been done with argumentation systems, including, among other aspects, studies that have evaluated the effect of argument diagrams (e.g., textual versus graphical), different representations, and adaptive feedback on learning argumentation. Finally, we conclude by summarizing the ``lessons learned'' from this large and impressive body of work, particularly focusing on lessons for the CSCL research community and its ongoing efforts to develop computer-mediated collaborative argumentation systems.},
  doi = {10.1007/s11412-009-9080-x},
  keywords = {Collaborative argumentation, Argumentation systems, Argument visualisation, Analysis and feedback, Empirical studies of argumentation systems},
  publisher = {Springer Nature}
}
@misc{Sansone2016,
  author = {Sansone, Susanna-Assunta and Rocca-Serra, Philippe},
  title = {Interoperability standards---Digital Objects in their Own Right},
  month = oct,
  year = {2016},
  abstract = {Review commissioned by the Wellcome Trust focusing on interoperability standards for digital research outputs.},
  doi = {10.6084/m9.figshare.4055496},
  keywords = {Science Policy, Data Format, 80403 Data Structures},
  publisher = {Figshare}
}
@techreport{00-55/4,
  author = {{Ministry of Defence}},
  title = {Requirements for Safety of {P}rogrammable {E}lements ({PE}) in Defence Systems---{P}art 1: Requirements and Guidance},
  institution = {UK Defence Standardization},
  year = {2016},
  type = {Defence Standard},
  number = {00-055 Part 1 Issue 4},
  address = {Glasgow},
  month = apr,
  pagination = {section}
}
@techreport{00-55/1,
  author = {{Ministry of Defence}},
  title = {The Procurement of Safety Critical Software in Defence Equipment---{P}art 1: Requirements},
  institution = {Directorate of Standardization},
  year = {1991},
  type = {Interim Defence Standard},
  number = {00-55 Part 1 Issue 1},
  address = {Glasgow},
  month = apr
}
@techreport{00-55/2,
  author = {{Ministry of Defence}},
  title = {Requirements for Safety Related Software in Defence Equipment---{P}art 1: Requirements},
  institution = {Directorate of Standardization},
  year = {1997},
  type = {Defence Standard},
  number = {00-55 Part 1 Issue 2},
  address = {Glasgow},
  month = aug
}
@techreport{00-55/3,
  author = {{Ministry of Defence}},
  title = {Requirements for Safety of {P}rogrammable {E}lements ({PE}) in Defence Systems---{P}art 1: Requirements and Guidance},
  institution = {UK Defence Standardization},
  year = {2014},
  type = {Interim Defence Standard},
  number = {00-55 Part 1 Issue 3},
  address = {Glasgow},
  month = dec
}
@techreport{00-56/3,
  author = {{Ministry of Defence}},
  title = {Safety Management Requirements for Defence Systems---{P}art 1: Requirements},
  institution = {UK Defence Standardization},
  year = {2004},
  type = {Interim Defence Standard},
  number = {00-56 Part 1 Issue 3},
  address = {Glasgow},
  month = dec
}
@techreport{00-56/4,
  author = {{Ministry of Defence}},
  title = {Safety Management Requirements for Defence Systems---{P}art 1: Requirements},
  institution = {UK Defence Standardization},
  year = {2007},
  type = {Defence Standard},
  number = {00-56 Part 1 Issue 4},
  address = {Glasgow},
  month = jun,
  pagination = {section}
}
@misc{DStan2018,
  author = {{Ministry of Defence}},
  title = {Defence Standard Development},
  note = {last accessed 1 Dec 18},
  url = {https://www.dstan.mod.uk/policy/defstan_development.html}
}
@techreport{ISOIECDir2,
  title = {Principles and rules for the structure and drafting of {ISO} and {IEC} documents},
  institution = {{International Organization for Standardization} / {International Electrotechnical Commission}},
  year = {2016},
  type = {ISO/IEC Directives},
  number = {Part 2 Edition 7},
  address = {Geneva},
  month = may,
  note = {last accessed 1 Dec 18},
  url = {https://www.iec.ch/members_experts/refdocs/iec/isoiecdir-2%7Bed7.0%7Den.pdf}
}
@incollection{Sannier2014,
  author = {Nicolas Sannier and Benoit Baudry},
  title = {{INCREMENT}: A Mixed {MDE}-{IR} Approach for Regulatory Requirements Modeling and Analysis},
  booktitle = {Requirements Engineering: Foundation for Software Quality},
  publisher = {Springer International Publishing},
  year = {2014},
  editor = {Camille Salinesi and Inge van de Weerd},
  volume = {8396},
  series = {Lecture Notes in Computer Science},
  pages = {135--151},
  month = apr,
  isbn = {978-3-319-05843-6},
  abstract = {[Context and motivation] Regulatory requirements for Nuclear instrumentation and control (I\&C) systems are first class requirements. They are written by national safety entities and are completed through a large documentation set of national recommendation guides and national/international standards. [Question/Problem] I\&C systems important to safety must comply to all of these requirements. The global knowledge of this domain is scattered through these different documents and not formalized. Its organization and traceability relationships within this domain is mainly implicit. As a consequence, such long lasting nuclear I\&C projects set important challenges in terms of tacit expertise capitalization and domain analysis. [Principal ideas/results] To tackle this domain formalization issue, we propose a dual Model-driven Engineering (MDE) and Information Retrieval (IR) approach to address the nuclear regulatory requirements domain definition, and assisted traceability based on the acquired requirements model. [Contributions] In this paper, we present the Connexion metamodel that provides a canvas for the definition and capitalization of the nuclear regulatory requirements domain. We also present a hybrid MDE/IR-based approach, named INCREMENT, for acquiring, modeling and analyzing these regulatory requirements. This approach is supported by a tool that is developed in the context of the CONNEXION project, which gathers French major nuclear I\&C industrial actors.},
  doi = {10.1007/978-3-319-05843-6_11},
  keywords = {Nuclear Instrumentation and Control Systems, Regulatory Requirements, Standards, Metamodeling, Traceability, Information Retrieval}
}
@misc{GSN2,
  author = {{The Assurance Case Working Group (ACWG)}},
  month = jan,
  note = {last accessed 2 Dec 18},
  title = {{GSN Community Standard} Version 2},
  year = {2018},
  abstract = {This Standard has two intended functions. Firstly, it seeks to provide a comprehensive, authoritative definition of the Goal Structuring Notation (GSN). Secondly, it aims to provide clear guidance on current best practice in the use of the notation for those concerned with the development and evaluation of engineering arguments---argument owners, readers, authors and approvers.},
  keywords = {GSN},
  pagination = {section},
  url = {https://scsc.uk/SCSC-141B}
}
@techreport{SEIG2011,
  author = {{Systems Engineering and Integration Group (SEIG)}},
  title = {{SEIG} Review of {D}efence {S}tandard 00-56: Safety Management Requirements for Defence Systems---{P}art 1},
  institution = {Ministry of Defence},
  year = {2011},
  month = nov,
  note = {Unpublished},
}
@misc{MODAF,
  author = {{Ministry of Defence}},
  title = {{MOD} Architecture Framework},
  month = dec,
  year = {2012},
  note = {last accessed 3 Dec 18},
  abstract = {The MOD Architecture Framework (MODAF) is a set of rules that support defence planning and change management activities.

The Ministry of Defence Architecture Framework (MODAF) is an internationally recognised enterprise architecture framework developed by the Ministry of Defence (MOD) to support defence planning and change management activities. It does this by enabling the capture and presentation of information in a rigorous, coherent and comprehensive way that aids the understanding of complex issues.},
  url = {https://www.gov.uk/guidance/mod-architecture-framework}
}
@misc{SofSPol,
  author = {Gavin Williamson},
  title = {Health, Safety and Environmental Protection in Defence: Policy Statement by the Secretary of State for Defence},
  month = jun,
  year = {2018},
  note = {last accessed 5 Dec 18},
  abstract = {Health and safety and the protection of the environment have always been at the centre of the Ministry of Defence’s (MOD) policy making and its activities. The MOD needs to manage these responsibilities in a consistent manner and the Secretary of State’s policy statement provides the high level framework under which this is done.},
  keywords = {SofS, policy},
  url = {https://www.gov.uk/government/publications/secretary-of-states-policy-statement-on-safety-health-environmental-protection-and-sustainable-development}
}
@inproceedings{Hawkins2013,
  author = {R. D. Hawkins and I. Habli and T. P. Kelly},
  booktitle = {31st International System Safety Conference},
  title = {The Principles of Software Safety Assurance},
  year = {2013},
  address = {Boston, MA},
  month = aug,
  note = {last accessed 8 Dec 18},
  organization = {International System Safety Society},
  abstract = {We present common principles of software safety assurance that can be observed from software safety standards and best practice. These principles are constant across domains and across projects, and can be regarded as the immutable core of any software safety justification. The principles also help maintain understanding of the ‘big picture’ of software safety issues whilst examining and negotiating the detail of individual standards, and provide a reference model for cross-sector certification.},
  keywords = {software safety, standards, 4+1},
  url = {https://www.academia.edu/64591468/The_principles_of_software_safety_assurance}
}
@inproceedings{Jolliffe2014,
  author = {Graham Jolliffe},
  title = {Re-Issuing {Def Stan} 00-55},
  booktitle = {Addressing Systems Safety Challenges: Proceedings of the Twenty-second Safety-critical Systems Symposium, Brighton, UK, 4--6th February 2014},
  year = {2014},
  editor = {Tom Anderson},
  volume = {22},
  series = {Safety-critical Systems Symposium},
  pages = {25--34},
  month = feb,
  note = {last accessed 3 Jun 18},
  abstract = {When Def Stan-00-55 (MOD 1997) was declared obsolete on 29 April 2005, there was an expectation that Def Stan 00-56 would provide guidance on how to develop and procure safety related software. That has proved elusive despite a number of attempts to provide the MOD with material that would assist its suppliers of safety related software. However, Def Stan 00-55 continued to be one of the most frequently downloaded defence standards and coupled with the MOD's need for clarity and consistency when procuring safety related software, it has been decided to re-issue the standard. This paper provides an insight into the rationale and main drivers behind its re-introduction and focuses on the principles and strategic intent of the standard compared with its predecessor.},
  keywords = {00-55},
  url = {https://scsc.uk/scsc-126}
}
@techreport{McDermid2014,
  author = {John McDermid},
  title = {Report of a Working Party on Software Management in Acquisition and Support},
  institution = {Defence Scientific Advisory Council},
  year = {2014},
  type = {Internal Report},
  number = {D/DST/01/14/16/22},
  address = {London},
  month = apr,
}
@misc{BSIcomment,
  author = {{British Standards Institution}},
  title = {National Comment Template},
  howpublished = {On BSI eCommittees System},
  note = {last accessed 11 Dec 18},
  keywords = {comments}
}
@techreport{JSP920,
  author = {{Ministry of Defence}},
  title = {{MOD} Standardization Management Policy---{P}art 1: Directive},
  institution = {Defence Authority for Technical \& Quality Assurance},
  year = {2017},
  type = {Joint Service Publication},
  number = {JSP920 Part 1 V3.0},
  month = aug,
  note = {last accessed 15 Dec 18},
  keywords = {standardization},
  url = {https://www.dstan.mod.uk/policy/JSP920_Part1.pdf}
}
@techreport{JSP430,
  author = {{Ministry of Defence}},
  title = {Management of Ship Safety and Environmental Protection---{P}art 1: Policy},
  institution = {Ship Safety Management Office},
  year = {2011},
  type = {Joint Service Publication},
  number = {JSP430 Part 1 Issue 4},
  address = {Bristol},
  month = jun
}
@techreport{IEC61508,
  author = {{{IEC} Subcommittee 65A: System Aspects}},
  title = {Functional safety of electrical/electronic/programmable electronic safety-related systems---{P}art 0: Functional safety and {IEC 61508}},
  institution = {International Electrotechnical Commission},
  year = {2005},
  type = {Technical Report},
  number = {IEC/TR 61508-0:2005}
}
@misc{hswa1974,
  howpublished = {1974 c. 37},
  month = jul,
  note = {last accessed 18 Dec 18},
  title = {Health and Safety at Work etc. Act},
  year = {1974},
  keywords = {health, safety},
  url = {https://www.legislation.gov.uk/ukpga/1974/37/contents}
}
@techreport{Carlone1992,
  author = {Ralph V. Carlone},
  institution = {US General Accounting Office},
  title = {Patriot Missile Defense---Software Problem Led to System Failure at {D}hahran, {S}audi {A}rabia},
  year = {1992},
  address = {Washington, D.C.},
  month = feb,
  note = {last accessed 25 Dec 18},
  number = {GAO/IMTEC-92-26},
  type = {Report to the Chairman, Subcommittee on Investigations and Oversight, Committee on Science, Space, and Technology, House of Representatives},
  abstract = {The Patriot battery at Dhahran failed to track and intercept the Scud missile because of a software problem in the system’s weapons control computer. This problem led to an inaccurate tracking calculation that became worse the longer the system operated. At the time of the incident, the battery had been operating continuously for over 100 hours. By then, the inaccuracy was serious enough to cause the system to look in the wrong place for the incoming Scud.

The Patriot had never before been used to defend against Scud missiles nor was it expected to operate continuously for long periods of time. Two weeks before the incident, Army officials received Israeli data indicating some loss in accuracy after the system had been running for 8 consecutive hours. Consequently, Army officials modified the software to improve the system’s accuracy. However, the modified software did not reach Dhahran until February 26, 1991---the day after the Scud incident.},
  url = {https://www.gao.gov/products/imtec-92-26}
}
@techreport{Lions1996,
  author = {Jacques-Louis Lions},
  institution = {Ariane 5 Inquiry Board},
  title = {Ariane 5---{Flight 501} Failure},
  year = {1996},
  address = {Paris},
  month = jul,
  note = {last accessed 25 Dec 18},
  type = {Report by the Inquiry Board},
  keywords = {software failure, ariane, rocket, overflow},
  url = {https://www.esa.int/Newsroom/Press_Releases/Ariane_501_-_Presentation_of_Inquiry_Board_report}
}
@techreport{DAIB2016,
  author = {{Defence Accident Investigation Branch}},
  title = {Service Inquiry into the {Watchkeeper (WK006) Unmanned Air Vehicle (UAV)} accident at {Boscombe Down Aerodrome} on 2 {November} 2015},
  institution = {Defence Safety Authority},
  year = {2016},
  type = {Service Inquiry},
  month = aug,
  note = {Redacted, last accessed 25 Dec 18},
  keywords = {accident, daib, watchkeeper, software},
  url = {https://www.gov.uk/government/publications/service-inquiry-into-the-watchkeeper-wk006-unmanned-air-vehicle-uav-accident-at-boscombe-down-aerodrome-on-2-november-2015}
}
@article{Zhang2015,
  author = {Benjamin Zhang},
  title = {A software problem caused a brand-new {A}irbus military plane to crash},
  journal = {Business Insider},
  year = {2015},
  month = jun,
  note = {last accessed 25 Dec 18},
  keywords = {a400m, software, accident},
  url = {https://www.businessinsider.com/a-software-problem-caused-an-airbus-a400m-to-crash-2015-6?r=US&IR=T}
}
@techreport{00-56/7,
  author = {{Ministry of Defence}},
  title = {Safety Management Requirements for Defence Systems---{P}art 1: Requirements},
  institution = {UK Defence Standardization},
  year = {2017},
  type = {Defence Standard},
  number = {00-56 Part 1 Issue 7},
  address = {Glasgow},
  month = feb,
  pagination = {section}
}
@incollection{Ferrari2013,
  author = {Alessio Ferrari and Stefania Gnesi and Gabriele Tolomei},
  title = {Using Clustering to Improve the Structure of Natural Language Requirements Documents},
  booktitle = {Requirements Engineering: Foundation for Software Quality},
  publisher = {Springer Berlin Heidelberg},
  year = {2013},
  editor = {Joerg Doerr and Andreas L. Opdahl},
  volume = {7830},
  series = {Lecture Notes in Computer Science},
  pages = {34--49},
  month = apr,
  isbn = {978-3-642-37422-7},
  abstract = {[Context and motivation] System requirements are normally provided in the form of natural language documents. Such documents need to be properly structured, in order to ease the overall uptake of the requirements by the readers of the document. A structure that allows a proper understanding of a requirements document shall satisfy two main quality attributes: (i) requirements relatedness: each requirement is conceptually connected with the requirements in the same section; (ii) sections independence: each section is conceptually separated from the others.
[Question/Problem] Automatically identifying the parts of the document that lack requirements relatedness and sections independence may help improve the document structure.
[Principal idea/results] To this end, we define a novel clustering algorithm named Sliding Head-Tail Component (S-HTC). The algorithm groups together similar requirements that are contiguous in the requirements document. We claim that such algorithm allows discovering the structure of the document in the way it is perceived by the reader. If the structure originally provided by the document does not match the structure discovered by the algorithm, hints are given to identify the parts of the document that lack requirements relatedness and sections independence.
[Contribution] We evaluate the effectiveness of the algorithm with a pilot test on a requirements standard of the railway domain (583 requirements).},
  doi = {10.1007/978-3-642-37422-7_3},
  keywords = {Requirements analysis, requirements documents structure, requirements quality, similarity-based clustering, lexical clustering}
}
@inproceedings{Uusitalo2014,
  author = {Eero Uusitalo and Mikko Raatikainen and Mikko Ylikangas and Tomi M\"{a}nnist\"{o}},
  title = {Experiences from an industry-wide initiative for setting metadata for regulatory requirements in the nuclear domain},
  booktitle = {2014 {IEEE} 7th International Workshop on Requirements Engineering and Law ({RELAW})},
  year = {2014},
  pages = {2--9},
  month = aug,
  publisher = {{IEEE}},
  abstract = {The industrial organizations involved in developing and maintaining nuclear power plants need to comply with the requirements coming from the legislative regulation. In the newly renewed Finnish guidelines, there are over 6500 such requirements, which are not always easy to interpret. The industrial stakeholders particularly find the situation highly challenging. Therefore, the Finnish nuclear industry and regulators formed a joint task force to clarify the new requirements by attaching metadata to regulatory requirements. We observed the work process and created tooling to support this work. We present the initial results of the ongoing work of the task force at its halfway milestone, the KLAD tool and experiences on its usage. In the process of setting metadata, the industrial stakeholders reported increasing understanding of the content meaning of the requirements, and regulatory requirement authors learned about writing good requirements. The tool was successful, and in addition, it provides good opportunities for further analysis of the requirements and metadata.},
  doi = {10.1109/relaw.2014.6893474},
  keywords = {Nuclear, requirements management, requirements engineering, requirements attributes, requirements metadata, regulatory requirements}
}
@techreport{00-56/5,
  author = {{Ministry of Defence}},
  title = {Safety Management Requirements for Defence Systems---{P}art 2: Requirements},
  institution = {UK Defence Standardization},
  year = {2017},
  type = {Defence Standard},
  number = {00-056 Part 2 Issue 5},
  address = {Glasgow},
  month = feb,
  pagination = {section}
}
@misc{TOGAF2018,
  author = {{The Open Group Architecture Forum}},
  month = apr,
  note = {last accessed 10 Jan 19},
  title = {{TOGAF} Version 9.2, Enterprise Edition},
  year = {2018},
  keywords = {EAR, EA, TOGAF},
  url = {https://pubs.opengroup.org/architecture/togaf9-doc/arch/index.html}
}
@techreport{Kincaid1975,
  author = {Kincaid, J. Peter and Fishburne, Jr, Robert P. and Rogers, Richard L. and Chissom, Brad S.},
  title = {Derivation Of New Readability Formulas (Automated Readability Index, Fog Count And {F}lesch Reading Ease Formula) For {N}avy Enlisted Personnel},
  institution = {Naval Technical Training Command},
  year = {1975},
  type = {Research Branch Report},
  number = {8-75},
  address = {Millington, TN},
  month = feb,
  note = {last accessed 12 Jan 19},
  abstract = {Three readability formulas were recalculated to be more suitable for Navy use. The three formulas are the Automated Readability Index (ARI), Fog Count, and Flesch Reading Ease Formula. They were derived from test results of 531 Navy enlisted personnel enrolled in four technical training schools at two Navy bases: Naval Air Station at Memphis, and Great Lakes Navy Training Center. Personnel were tested for their reading comprehension level according to the comprehension section of the Gates-MacGinitie reading test. At the same time, they were tested for their comprehension of 18 passages taken from Rate Training Manuals. Scores on the reading test and training material passages allowed the calculation of the grade level of the passages. This scaled reading grade level is based on Navy personnel reading Navy training material and comprehending it. Thus, the three recalculated formulas (derived using multiple regression techniques) are specifically for Navy use. Furthermore, the formulas are directly interchangeable because they were all calculated using the same data base. That is, the ARI can be used when new material is being written, as the new material is usually typed anyway. The Flesch Formula is preferable when the Automated Flesch Count machine is available and existing material is being graded for reading difficulty. The Fog Count can be used when no equipment is available to aid the count. The use of these readability formulas should help the understanding of Navy training material when this material must be read by enlisted personnel.},
  keywords = {ARI (Automated Readability Index); Cloze test; FJP (Farr-Jenkins- Paterson); Flesch Formula; Fog Count; FORCAST Formula; Readability; Reading comprehension; Reading level; Technical publications},
  url = {https://stars.library.ucf.edu/istlibrary/56/}
}
@techreport{IEC61882,
  author = {{{IEC} Subcommittee 56: Dependability}},
  title = {Hazard and operability studies ({HAZOP} studies). {A}pplication guide},
  institution = {International Electrotechnical Commission},
  year = {2016},
  type = {International Standard},
  number = {IEC61882},
  doi = {10.3403/30309555u},
  publisher = {{BSI} British Standards}
}
@misc{CCBY4.0,
  author = {{Creative Commons}},
  title = {{C}reative {C}ommons Attribution 4.0 International Public License {CC-BY 4.0}},
  note = {last accessed 15 Jan 19},
  url = {https://creativecommons.org/licenses/by/4.0/}
}
@techreport{IEC60812,
  author = {{{IEC} Subcommittee 56: Dependability}},
  title = {{Failure Modes and Effects Analysis (FMEA and FMECA)}},
  institution = {International Electrotechnical Commission},
  year = {2018},
  type = {International Standard},
  number = {IEC60812},
  abstract = {IEC 60812:2018 explains how failure modes and effects analysis (FMEA), including the failure modes, effects and criticality analysis (FMECA) variant, is planned, performed, documented and maintained. The purpose of failure modes and effects analysis (FMEA) is to establish how items or processes might fail to perform their function so that any required treatments could be identified. An FMEA provides a systematic method for identifying modes of failure together with their effects on the item or process, both locally and globally. It may also include identifying the causes of failure modes. Failure modes can be prioritized to support decisions about treatment. Where the ranking of criticality involves at least the severity of consequences, and often other measures of importance, the analysis is known as failure modes, effects and criticality analysis (FMECA). This document is applicable to hardware, software, processes including human action, and their interfaces, in any combination. An FMEA can be used in a safety analysis, for regulatory and other purposes, but this being a generic standard, does not give specific guidance for safety applications. This third edition cancels and replaces the second edition published in 2006. This edition constitutes a technical revision. This edition includes the following significant technical changes with respect to the previous edition:
a) the normative text is generic and covers all applications;
b) examples of applications for safety, automotive, software and (service) processes have been added as informative annexes;
c) tailoring the FMEA for different applications is described;
d) different reporting formats are described, including a database information system;
e) alternative means of calculating risk priority numbers (RPN) have been added;
f) a criticality matrix based method has been added;
g) the relationship to other dependability analysis methods have been described.
Keywords: failure modes and effects analysis (FMEA), failure modes effects and criticality analysis (FMECA)},
  keywords = {FMEA, FMECA, failure modes}
}
@techreport{IEC61025,
  author = {{{IEC} Subcommittee 56: Dependability}},
  title = {{Fault Tree Analysis (FTA)}},
  institution = {International Electrotechnical Commission},
  year = {2006},
  type = {International Standard},
  number = {IEC61025},
  keywords = {FTA, fault tree}
}
@misc{BPMN2011,
  author = {{Object Management Group}},
  month = jan,
  note = {last accessed 10 Jan 19},
  title = {Business Process Model and Notation},
  year = {2011},
  keywords = {BPMN, business modelling},
  url = {https://www.bpmn.org/}
}
@misc{Cohn,
  author = {Mike Cohn},
  note = {last accessed 10 Jan 19},
  title = {User Stories},
  abstract = {User stories are part of an agile approach that helps shift the focus from writing about requirements to talking about them. All agile user stories include a written sentence or two and, more importantly, a series of conversations about the desired functionality.},
  keywords = {agile, user stories, AGM, REN},
  url = {https://www.mountaingoatsoftware.com/agile/user-stories}
}
@article{MaalejKurtanovicNabilEtAl2016,
  author = {Maalej, Walid and Kurtanović, Zijad and Nabil, Hadeer and Stanik, Christoph},
  title = {On the automatic classification of app reviews},
  journal = {Requirements Engineering},
  year = {2016},
  volume = {21},
  number = {3},
  pages = {311--331},
  month = sep,
  issn = {1432-010X},
  abstract = {App stores like Google Play and Apple AppStore have over 3 million apps covering nearly every kind of software and service. Billions of users regularly download, use, and review these apps. Recent studies have shown that reviews written by the users represent a rich source of information for the app vendors and the developers, as they include information about bugs, ideas for new features, or documentation of released features. The majority of the reviews, however, is rather non-informative just praising the app and repeating to the star ratings in words. This paper introduces several probabilistic techniques to classify app reviews into four types: bug reports, feature requests, user experiences, and text ratings. For this, we use review metadata such as the star rating and the tense, as well as, text classification, natural language processing, and sentiment analysis techniques. We conducted a series of experiments to compare the accuracy of the techniques and compared them with simple string matching. We found that metadata alone results in a poor classification accuracy. When combined with simple text classification and natural language preprocessing of the text---particularly with bigrams and lemmatization---the classification precision for all review types got up to 88--92 p.c. and the recall up to 90--99 p.c. Multiple binary classifiers outperformed single multiclass classifiers. Our results inspired the design of a review analytics tool, which should help app vendors and developers deal with the large amount of reviews, filter critical reviews, and assign them to the appropriate stakeholders. We describe the tool main features and summarize nine interviews with practitioners on how review analytics tools including ours could be used in practice.},
  doi = {10.1007/s00766-016-0251-9},
  keywords = {REN},
  publisher = {Springer}
}
@inproceedings{Habli2014,
  author = {Ibrahim Habli and Andrew Rae},
  booktitle = {Proceedings of Planning the Unplanned Experiment: Assessing the Efficacy of Standards for Safety Critical Software (AESSCS)},
  title = {Formalism of Requirements for Safety-Critical Software: Where Does the Benefit Come From?},
  year = {2014},
  month = may,
  note = {last accessed 22 Feb 19},
  abstract = {Safety and assurance standards often rely on the principle that requirements errors can be minimised by expressing the requirements more formally. Although numerous case studies have shown that the act of formalising previously informal requirements finds requirements errors, this principle is really just a hypothesis. An industrially persuasive causal relationship between formalisation and better requirements has yet to be established. We describe multiple competing explanations for this hypothesis, in terms of the levels of precision, re-formulation, expertise, effort and automation that are typically associated with formalising requirements. We then propose an experiment to distinguish between these explanations, without necessarily excluding the possibility that none of them are correct.},
  eprint = {1404.6802v1},
  eprinttype = {arXiv},
  keywords = {cs.SE, formal methods}
}
@misc{OpenCert,
  author = {PolarSys},
  title = {OpenCert Website},
  month = dec,
  year = {2018},
  note = {last accessed 10 Jan 19},
  keywords = {OpenCert, AMASS},
  url = {https://www.polarsys.org/opencert/}
}
@misc{ASCE,
  author = {Adelard},
  note = {last accessed 18 Jun 18},
  title = {ASCE Software},
  year = {2018},
  url = {https://www.adelard.com/asce/}
}
@article{Ould1991,
  author = {Martyn A. Ould},
  journal = {Software Engineering Journal},
  title = {Testing---a challenge to method and tool developers},
  year = {1991},
  month = mar,
  number = {2},
  pages = {59},
  volume = {6},
  abstract = {The paper looks at the requirements for testing in Defence Standard 00-55, compares those requirements with what is possible today, and sets a challenge for methods and tools developers if the situation is to be improved.},
  doi = {10.1049/sej.1991.0008},
  keywords = {Conformance testing, Military computing, Safety, Software reliability, tools developers, conformance testing, safety critical software, tool developers, Defence standard 00-55},
  publisher = {Institution of Engineering and Technology ({IET})}
}
@book{Hull2005,
  author = {Hull, Elizabeth and Jackson, Kenneth and Dick, Jeremy},
  publisher = {Springer-Verlag},
  title = {Requirements Engineering},
  year = {2005},
  edition = {2nd},
  isbn = {978-1-85233-879-4},
  abstract = {This second edition follows quickly on the first edition and is an indication of how fast the subject is changing and developing. In the past two years there have been significant advances and these are reflected in this new edition. Essentially, this is an update that places more emphasis on modelling by describing a greater range of approaches to system modelling. It introduces the UML2, which is the recent standard approved by the OMG. There is also an enhanced discussion on the relationship between requirements management and modelling, which relates well to the concept of rich traceability. The chapter on the requirements management tool DOORS has been revised to use Version 7 of the tool and this is complemented with examples taken from the DOORS/Analyst tool which demonstrates how the concepts of modelling can be captured and created within DOORS. The text is still aimed at students and practitioners of systems engineering who are keen to gain knowledge of using requirements engineering for system development.},
  doi = {10.1007/b138335},
  keywords = {REN, DOORS, HCI, Requirements Engineering, Unified Modeling Language (UML), human-computer interaction (HCI), management, modeling, requirements management, software, software engineering, systems engineering}
}
@techreport{Holloway2018,
  author = {Holloway, C. Michael and Graydon, Patrick J.},
  institution = {NASA Langley Research Center},
  title = {Explicate '78: Assurance Case Applicability to Digital Systems},
  year = {2018},
  month = jan,
  note = {last accessed 4 Nov 18},
  number = {DOT/FAA/TC-17/67},
  type = {Final report to the FAA},
  abstract = {This report documents the results of the Explicate '78 project. The project was conducted by NASA Langley Research Center in support of an annex (Assurance Case Applicability to Digital Systems) to the Reimbursable Interagency Agreement IA1-1073 (Design, Verification, and Validation of Advanced Digital Airborne Systems Technology). In particular, the report describes an assurance case developed to express the arguments contained in, or implied by, DO-178C (Software Considerations in Airborne Systems and Equipment Certification), which implicitly justifies the assumption that the document meets its stated purpose of providing "guidelines for the production of software for airborne systems and equipment that performs its intended function with a level of confidence in safety that complies with airworthiness requirements." An appendix to the report provides an assurance case for DO-330 (Software Tool Qualification Considerations).},
  keywords = {assurance argument, assurance case, DO-178C, explicate78, GSN},
  url = {https://www.faa.gov/aircraft/air_cert/design_approvals/air_software/media/TC-17-67.pdf}
}
@techreport{Graydon2015,
  author = {Patrick J. Graydon and C. Michael Holloway},
  institution = {NASA Langley Research Center},
  title = {Planning the Unplanned Experiment: Assessing the Efficacy of Standards for Safety-Critical Software},
  year = {2015},
  month = sep,
  note = {last accessed 8 Nov 18},
  number = {NASA/TM-2015-218804},
  type = {Technical Memorandum},
  abstract = {We need well-founded means of determining whether software is fit for use in safety-critical applications. While software in industries such as aviation has an excellent safety record, the fact that software flaws have contributed to deaths illustrates the need for justifiably high confidence in software. It is often argued that software is fit for safety-critical use because it conforms to a standard for software in safety-critical systems. But little is known about whether such standards ‘work.’ Reliance upon a standard without knowing whether it works is an experiment; without collecting data to assess the standard, this experiment is unplanned. This paper reports on a workshop intended to explore how standards could practicably be assessed. Planning the Unplanned Experiment: Assessing the Efficacy of Standards for Safety Critical Software (AESSCS) was held on 13 May 2014 in conjunction with the European Dependable Computing Conference (EDCC). We summarize and elaborate on the workshop’s discussion of the topic, including both the presented positions and the dialogue that ensued.},
  keywords = {SOFTWARE ENGINEERING, SAFETY, STANDARDS, ASSESSMENTS, SOFTWARE RELIABILITY, CONFERENCES, RESEARCH},
  url = {https://ntrs.nasa.gov/search.jsp?R=20150018918}
}
@inproceedings{Steele2014,
  author = {P. Steele and J. Knight},
  booktitle = {2014 IEEE 15th International Symposium on High-Assurance Systems Engineering},
  title = {Analysis of Critical Systems Certification},
  year = {2014},
  month = jan,
  pages = {129--136},
  abstract = {We introduce the filter model of regulatory certification of safety-critical systems. This model characterizes the certification process itself as a safety-critical system in which incorrectly certifying a system that should be rejected is an accident. The role of certification is modeled as the identification of faults in the system being certified, i.e., a filter for system faults. Application of safety-engineering techniques to the certification process allows assessment of certification and offers the possibility of correction of weaknesses.},
  doi = {10.1109/HASE.2014.26},
  issn = {1530-2059},
  keywords = {safety-critical software;critical systems certification analysis;regulatory certification;safety-critical systems;fault identification;safety engineering techniques;Standards;Software;Hazards;Accidents;Fault trees;Unified modeling language;Modeling;safety-critical system;certification;approval;standards}
}
@book{Leveson1995,
  author = {Nancy G. Leveson},
  publisher = {Addison-Wesley Professional},
  title = {Safeware: System Safety and Computers},
  year = {1995},
  isbn = {0201119722},
  abstract = {We are building systems today---and using computers to control them---that have the potential for large-scale destruction of life and environment. More than ever, software engineers and system developers, as well as their managers, must understand the issues and develop the skills needed to anticipate and prevent accidents before they occur. Professionals should not require a catastrophe to happen before taking action.

Addressing this need in her long-awaited book, Nancy Leveson examines what is currently known about building safe electromechanical systems and looks at past accidents to see what practical lessons can be applied to new computer-controlled systems.

Safeware

* Demonstrates the importance of integrating software safety efforts with system safety engineering
* Describes models of accidents and human error that underlie particular approaches to safety problems
* Presents the elements of a software program, including management, hazard analysis, requirements analysis, design for safety, design of the human-machine interface, and verification},
  chapter = {12 \& 18}
}
@book{Storey1996,
  author = {Storey, Neil},
  publisher = {Pearson Education (US)},
  title = {Safety Critical Computer Systems},
  year = {1996},
  isbn = {0201427877},
  month = jul,
  abstract = {This book is an introduction to the field of safety-critical computer systems written for any engineer who uses microcomputers within real-time embedded systems. It assumes no prior knowledge of safety, or of any specific computer hardware or programming language. This text is intended for both engineering and computer science students, and for practising engineers within computer related industries. The approach taken is equally suited to engineers who consider computers from a hardware, software or systems viewpoint.},
  pagetotal = {472}
}
@misc{UML2017,
  author = {{Object Management Group}},
  month = dec,
  note = {last accessed 10 Jan 19},
  title = {About the Unified Modeling Language Specification Version 2.5.1},
  year = {2017},
  keywords = {UML, Abstraction, Action Sequence, Action State, Activity Graph, Architecture, Association, Class Diagram, Collaboration Diagram, Component Diagram, Control Flow, Data Flow, Deployment Diagram, Execution, Implementation, Pins, Procedure},
  url = {https://www.omg.org/spec/UML/}
}
@article{Hoare1981,
  author = {Charles Antony Richard Hoare},
  journal = {Communications of the ACM},
  title = {The emperor's old clothes},
  year = {1981},
  month = feb,
  number = {2},
  pages = {75--83},
  volume = {24},
  abstract = {The author recounts his experiences in the implementation, design, and standardization of computer programming languages, and issues a warning for the future.},
  doi = {10.1145/358549.358561},
  keywords = {programming languages, history of programming languages, lessons for the future},
  publisher = {Association for Computing Machinery ({ACM})}
}