| 401 | Bernhard J. M. Grün, David Schuler and Andreas Zeller. The Impact of Equivalent Mutants. Second International Conference on Software Testing Verification and Validation, ICST 2009, Denver, Colorado, USA, April 1-4, 2009, Workshops Proceedings, 2009. |
|
| | Abstract: Available soon... |
| | @INPROCEEDINGS{GrunSZ09,
author = {Bernhard J. M. Gr{\"{u}}n and David Schuler and Andreas Zeller},
title = {The Impact of Equivalent Mutants},
booktitle = {Second International Conference on Software Testing Verification and Validation, {ICST} 2009, Denver, Colorado, USA, April 1-4, 2009, Workshops Proceedings},
year = {2009},
pages = {192--199}
} |
| 402 | David Schuler, Valentin Dallmeier and Andreas Zeller. Efficient mutation testing by checking invariant violations. Proceedings of the Eighteenth International Symposium on Software Testing and Analysis, ISSTA 2009, Chicago, IL, USA, July 19-23, 2009. |
|
| | Abstract: Available soon... |
| | @INPROCEEDINGS{SchulerDZ09,
author = {David Schuler and Valentin Dallmeier and Andreas Zeller},
title = {Efficient mutation testing by checking invariant violations},
booktitle = {Proceedings of the Eighteenth International Symposium on Software Testing and Analysis, {ISSTA} 2009, Chicago, IL, USA, July 19-23, 2009},
year = {2009},
pages = {69--80}
} |
| 403 | Adenilso Simao, Jose Carlos Maldonado and Roberto da Silva Bigonha. A Transformational Language for Mutant Description. Computer Languages, Systems & Structures, 35(3), October 2009. |
|
| | Abstract: Mutation testing has been used to assess the quality of test case suites by analyzing their ability to distinguish the artifact under testing from a set of alternative artifacts, the so-called mutants. The mutants are generated from the artifact under testing by applying a set of mutant operators, which produce artifacts with simple syntactical differences. The mutant operators are usually based on typical errors that occur during software development and can be related to a fault model. In this paper, we propose a language, named MuDeL (MUtant DEfinition Language), for the definition of mutant operators, aiming not only at automating mutant generation, but also at providing precision and formality to the operator definition. The proposed language is based on concepts from the transformational and logical programming paradigms, as well as from context-free grammar theory. A denotational semantics formal framework is employed to define the semantics of the MuDeL language. We also describe a system, named mudelgen, developed to support the use of this language. An executable representation of the denotational semantics of the language is used to check the correctness of the implementation of mudelgen. At the very end, a mutant generator module is produced, which can be incorporated into a specific mutant tool/environment. |
| | @ARTICLE{SimaoMB09,
author = {Adenilso Simao and Jose Carlos Maldonado and Roberto da Silva Bigonha},
title = {A Transformational Language for Mutant Description},
journal = {Computer Languages, Systems \& Structures},
year = {2009},
month = {October},
volume = {35},
number = {3},
pages = {322--339}
} |
| 404 | M. Prasanna and K.R. Chandran. Automatic Test Case Generation for UML Object diagrams using Genetic Algorithm. International Journal of Soft Computing Applications, 1(1), July 2009. |
|
| | Abstract: A new model-based approach for automated generation of test cases in object-oriented systems has been presented. The test cases are derived by analyzing the dynamic behavior of the objects due to internal and external stimuli. The scope of the paper has been limited to the object diagrams taken from the Unified Modeling Language model of the system. Genetic Algorithm’s tree crossover has been proposed to bring out all possible test cases of a given object diagram. An illustrative case study has been presented to establish the effectiveness of our methodology coupled with mutation analysis. |
| | @ARTICLE{PrasannaC09,
author = {M. Prasanna and K.R. Chandran},
title = {Automatic Test Case Generation for UML Object diagrams using Genetic Algorithm},
journal = {International Journal of Soft Computing Applications},
year = {2009},
month = {July},
volume = {1},
number = {1},
pages = {19--32}
} |
| 405 | Chanchal K. Roy and James R. Cordy. A Mutation / Injection-based Automatic Framework for Evaluating Code Clone Detection Tools. Proceedings of the 4th International Workshop on Mutation Analysis (MUTATION'09), Denver, Colorado, 1-4 April 2009. |
|
| | Abstract: In recent years many methods and tools for software clone detection have been proposed. While some work has been done on assessing and comparing performance of these tools, very little empirical evaluation has been done. In particular, accuracy measures such as precision and recall have only been roughly estimated, due both to problems in creating a validated clone benchmark against which tools can be compared, and to the manual effort required to hand check large numbers of candidate clones. In this paper we propose an automated method for empirically evaluating clone detection tools that leverages mutation-based techniques to overcome these limitations by automatically synthesizing large numbers of known clones based on an editing theory of clone creation. Our framework is effective in measuring recall and precision of clone detection tools for various types of fine-grained clones in real systems without manual intervention. |
| | @INPROCEEDINGS{RoyC09,
author = {Chanchal K. Roy and James R. Cordy},
title = {A Mutation / Injection-based Automatic Framework for Evaluating Code Clone Detection Tools},
booktitle = {Proceedings of the 4th International Workshop on Mutation Analysis (MUTATION'09)},
year = {2009},
address = {Denver, Colorado},
month = {1-4 April},
pages = {157--166}
} |
| 406 | Suraj Sharma, S. K. Jena and K. Satyababu. New Approach for Testing the Correctness of Access Control Policies. Proceedings of the International Advance Computing Conference (IACC'09), Patiala, Punjab, India, 06-07 March 2009. |
|
| | Abstract: To increase confidence in the correctness of specified policies, policy developers can conduct policy testing by supplying typical test inputs (requests) and subsequently checking test outputs (responses) against expected ones. Testing access control policies along with the application program is not a worthwhile practice. Unlike software testing, we have the tools and techniques for access control policy testing. Unfortunately, manual testing is a tedious and time-consuming job. We designed a model called ACPC (Access Control Policy Checker), which includes mutation operators for comparing the response of the original policy with the response of a mutant policy to check the correctness of the original policy. The ACPC comprises two sections: in the first section we generate the request set automatically, which was previously not available, and in the second section we perform testing. This model uses policies written in XACML (eXtensible Access Control Markup Language) [1], which is the standard language for writing access control policies. We have used a tool called Margrave [8] for change impact analysis and other programming languages like Java and C++ for building the different modules. |
| | @INPROCEEDINGS{SharmaJS09,
author = {Suraj Sharma and S. K. Jena and K. Satyababu},
title = {New Approach for Testing the Correctness of Access Control Policies},
booktitle = {Proceedings of the International Advance Computing Conference (IACC'09)},
year = {2009},
address = {Patiala, Punjab, India},
month = {06-07 March},
} |
| 407 | David Schuler, Valentin Dallmeier and Andreas Zeller. Efficient Mutation Testing by Checking Invariant Violations. Proceedings of the International Symposium on Software Testing and Analysis (ISSTA'09), Chicago, Illinois, 19-23 July 2009. |
|
| | Abstract: Mutation testing measures the adequacy of a test suite by seeding artificial defects (mutations) into a program. If a mutation is not detected by the test suite, this usually means that the test suite is not adequate. However, it may also be that the mutant keeps the program's semantics unchanged, and thus cannot be detected by any test. Such equivalent mutants have to be eliminated manually, which is tedious.
We assess the impact of mutations by checking dynamic invariants. In an evaluation of our Javalanche framework on seven industrial-size programs, we found that mutations that violate invariants are significantly more likely to be detectable by a test suite. As a consequence, mutations with impact on invariants should be focused upon when improving test suites. With less than 3% of equivalent mutants, our approach provides an efficient, precise, and fully automatic measure of the adequacy of a test suite. |
| | @INPROCEEDINGS{SchulerDZ09,
author = {David Schuler and Valentin Dallmeier and Andreas Zeller},
title = {Efficient Mutation Testing by Checking Invariant Violations},
booktitle = {Proceedings of the International Symposium on Software Testing and Analysis (ISSTA'09)},
year = {2009},
address = {Chicago, Illinois},
month = {19-23 July},
pages = {69--80}
} |
| 408 | David Schuler, Valentin Dallmeier and Andreas Zeller. Efficient Mutation Testing by Checking Invariant Violations. Technical report, Saarland University, Saarbrücken, Germany, 2009. |
|
| | Abstract: Mutation testing measures the adequacy of a test suite by seeding artificial defects (mutations) into a program. If a mutation is not detected by the test suite, this usually means that the test suite is not adequate. However, it may also be that the mutant keeps the program's semantics unchanged, and thus cannot be detected by any test. Such equivalent mutants have to be eliminated manually, which is tedious.
We assess the impact of mutations by checking dynamic invariants. In an evaluation of our Javalanche framework on seven industrial-size programs, we found that mutations that violate invariants are significantly more likely to be detectable by a test suite. As a consequence, mutations with impact on invariants should be focused upon when improving test suites. With less than 3% of equivalent mutants, our approach provides an efficient, precise, and fully automatic measure of the adequacy of a test suite. |
| | @TECHREPORT{SchulerDZ09a,
author = {David Schuler and Valentin Dallmeier and Andreas Zeller},
title = {Efficient Mutation Testing by Checking Invariant Violations},
institution = {Saarland University},
year = {2009},
type = {Technical Report},
address = {Saarbr{\"{u}}cken, Germany},
} |
| 409 | David Schuler and Andreas Zeller. Javalanche: Efficient Mutation Testing for Java. Proceedings of the 7th joint meeting of the European Software Engineering Conference and the International Symposium on Foundations of Software Engineering, Amsterdam, Netherlands, 24-28 August 2009. |
|
| | Abstract: To assess the quality of a test suite, one can use mutation testing - seeding artificial defects (mutations) into the program and checking whether the test suite finds them. Javalanche is an open source framework for mutation testing Java programs with a special focus on automation, efficiency, and effectiveness. In particular, Javalanche assesses the impact of individual mutations to effectively weed out equivalent mutants; it has been demonstrated to work on programs with up to 100,000 lines of code. |
| | @INPROCEEDINGS{SchulerZ09,
author = {David Schuler and Andreas Zeller},
title = {Javalanche: Efficient Mutation Testing for Java},
booktitle = {Proceedings of the 7th joint meeting of the European Software Engineering Conference and the International Symposium on Foundations of Software Engineering},
year = {2009},
address = {Amsterdam, Netherlands},
month = {24-28 August},
pages = {297--298}
} |
| 410 | Hossain Shahriar and Mohammad Zulkernine. MUTEC: Mutation-based Testing of Cross Site Scripting. Proceedings of the 5th International Workshop on Software Engineering for Secure Systems (SESS'09), Vancouver, Canada, 19 May 2009. |
|
| | Abstract: Vulnerabilities in applications and their widespread exploitation through successful attacks are common these days. Testing applications to prevent vulnerabilities is an important step in addressing this issue. In recent years, a number of security testing approaches have been proposed. However, there is no comparative study of these works that might help security practitioners select an appropriate approach for their needs. Moreover, there is no comparison with respect to the automation capabilities of these approaches. In this work, we identify seven criteria to analyze program security testing work. These are vulnerability coverage, source of test cases, test generation method, level of testing, granularity of test cases, testing automation, and target applications. We compare and contrast prominent security testing approaches available in the literature based on these criteria. In particular, we focus on work that addresses the four most common but dangerous vulnerabilities, namely buffer overflow, SQL injection, format string bug, and cross site scripting. Moreover, we investigate the automation features available in these works across a security testing process. We believe that our findings will provide practical information for security practitioners in choosing the most appropriate tools. |
| | @INPROCEEDINGS{ShahriarZ09,
author = {Hossain Shahriar and Mohammad Zulkernine},
title = {MUTEC: Mutation-based Testing of Cross Site Scripting},
booktitle = {Proceedings of the 5th International Workshop on Software Engineering for Secure Systems (SESS'09)},
year = {2009},
address = {Vancouver, Canada},
month = {19 May},
pages = {47--53}
} |