@mastersthesis{Klockmann2020, title = {MWMA-SLAM: Manhattan-World-Multi-Agent-SLAM}, author = {Maximilian Klockmann}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2020/10/ma_klockmann-1.pdf}, year = {2020}, date = {2020-10-20}, school = {Otto-von-Guericke-University Magdeburg}, abstract = {Einsatzkr\"{a}fte m\"{u}ssen sich regelm\"{a}{\ss}ig zur Rettung von Opfern in unbekannten Umgebungen zurechtfinden. Dabei liegen nur selten Karten vor. Die Einsatzleitung muss die einzelnen Teams mittels Funk koordinieren und kann die Positionen nur absch\"{a}tzen. Um die Einsatzkr\"{a}fte zu unterst\"{u}tzen, k\"{o}nnen Multi-Agent-SLAM Algorithmen eingesetzt werden. Diese m\"{u}ssen jedoch datensparsam sein, um mit den Netzwerk-Bedingungen vor Ort zu funktionieren. Oftmals muss das Netzwerk selbst aufgespannt werden. F\"{u}r die Kartographierung vor Ort gibt es eine Vielzahl an verschiedenen Multi-Agent-SLAM-Algorithmen. Diese arbeiten \"{u}blicherweise mit RGBD-Bildern oder Punktwolken. Die berechneten Rotationen erreichen dabei eine hohe Genauigkeit mit weniger als 1,9 Grad Abweichung. In einigen SLAM-Algorithmen werden Ebenen als Eingabe verwendet [35, 8]. Diese Algorithmen arbeiten jedoch nur lokal und wurden nicht als Multi-Agent-SLAM entwickelt. Ebenen haben durch ihre einfache, mathematische Darstellung ein hohes Potenzial, um Daten einzusparen. In der Koordinatenform m\"{u}ssen nur eine Normale und die Distanz zum Ursprung, also vier Dezimalzahlen, \"{u}bertragen werden. In dieser Arbeit wird untersucht, ob sich Ebenen auch f\"{u}r einen Multi-Agent-SLAM eignen. Es wurde der auf Ebenen basierende MWMA-SLAM konzipiert. Die lokalen Karten werden daf\"{u}r in Form von Ebenen \"{u}bertragen und auf dem Server miteinander gematcht. Untersucht wird neben der Tauglichkeit der Ebenen auch die ben\"{o}tigte Datenmenge f\"{u}r die \"{U}bertragung. Der Algorithmus wurde sowohl mit synthetischen als auch mit realen Daten getestet. 
Es wurde in den Tests gezeigt, dass die Verwendung von Ebenen funktioniert. In jeweils \"{u}ber 97% der F\"{a}lle konnte eine gute Rotation mit weniger als 0.2 Grad Abweichung berechnet werden. Eine gute Translation mit weniger als 1 mm Abweichung konnte in durchschnittlich 90% der F\"{a}lle berechnet werden. Der MAE der Rotation liegt bei 0,6 Grad. Insgesamt hat sich gezeigt, dass das Konzept eines Ebenen basierten Multi-Agent-SLAMs funktioniert. Die Ergebnisse zeigen eine \"{a}hnlich gute Genauigkeit bei der Rotation wie andere Multi-Agent-SLAM-Algorithmen. F\"{u}r die Translation m\"{u}ssen jedoch noch Anpassungen in der Berechnung vorgenommen werden, um die Ergebnisse zu verbessern. Mit einer Gr\"{o}{\ss}e von maximal 576 Byte f\"{u}r die \"{U}bertragung der einzelnen Testumgebungen, konnte.}, keywords = {EVOK, SLAM}, pubstate = {published}, tppubtype = {mastersthesis} } @article{sap2020, title = {Publish or Perish, but do not Forget your Software Artifacts}, author = {Robert Heum\"{u}ller and Sebastian Nielebock and Jacob Kr\"{u}ger and Frank Ortmeier}, editor = {Springer}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2020/07/2020-emse-paper-publish-or-perish.pdf}, doi = {10.1007/s10664-020-09851-6}, year = {2020}, date = {2020-10-08}, journal = {Empirical Software Engineering}, abstract = {Open-science initiatives have gained substantial momentum in computer science, and particularly in software-engineering research. A critical aspect of open-science is the public availability of artifacts (e.g., tools), which facilitate the replication, reproduction, extension, and verification of results. While we experienced that many artifacts are not publicly available, we are not aware of empirical evidence supporting this subjective claim. 
In this article, we report an empirical study on software artifact papers (SAPs) published at the International Conference on Software Engineering (ICSE), in which we investigated whether and how researchers have published their software artifacts, and whether this had scientific impact. Our dataset comprises 789 ICSE research track papers, including 604 SAPs (76.6%), from the years 2007 to 2017. While showing a positive trend towards artifact availability, our results are still sobering. Even in 2017, only 58.5% of the papers that stated to have developed a software artifact made that artifact publicly available. As we did find a small, but statistically significant, positive correlation between linking to artifacts in a paper and its scientific impact in terms of citations, we hope to motivate the research community to share more artifacts. With our insights, we aim to support the advancement of open science by discussing our results in the context of existing initiatives and guidelines. 
In particular, our findings advocate the need for clearly communicating artifacts and the use of non-commercial, persistent archives to provide replication packages.}, keywords = {Artifacts, Open Science, Open Source, Publishing, Software}, pubstate = {published}, tppubtype = {article} } @inproceedings{matschek2020, title = {Learning References with Gaussian Processes in Model Predictive Control Applied to Robot Assisted Surgery}, author = {Janine Matschek and Tim Gonschorek and Magnus Hanses and Norbert Elkmann and Frank Ortmeier and Rolf Findeisen}, editor = {IFAC}, year = {2020}, date = {2020-05-13}, keywords = {robotics}, pubstate = {forthcoming}, tppubtype = {inproceedings} } @inproceedings{Nielebock2020, title = {Cooperative API Misuse Detection Using Correction Rules}, author = {Sebastian Nielebock and Robert Heum\"{u}ller and Jacob Kr\"{u}ger and Frank Ortmeier}, editor = {ACM}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2020/02/cooperative-api-misuse-detection-1.pdf https://bitbucket.org/SNielebock/icse-2020-nier-cooperative-api-misuse/src/master/}, doi = {10.1145/3377816.3381735}, year = {2020}, date = {2020-05-01}, booktitle = {Proceedings of the 42nd IEEE/ACM International Conference on Software Engineering - New Ideas and Emerging Results Track, ICSE-NIER}, publisher = {ACM}, keywords = {API Misuse, API Misuse Detection, Bug Fix, Misuse Detection, testing}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Nielebock2020A, title = {Using API-Embedding for API-Misuse Repair}, author = {Sebastian Nielebock and Robert Heum\"{u}ller and Jacob Kr\"{u}ger and Frank Ortmeier}, editor = {ACM}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2020/04/api-embeddings-for-repair-Nielebock-et-al-APR2020.pdf}, doi = {10.1145/3387940.3392171}, year = {2020}, date = {2020-05-01}, booktitle = {Proceedings of the 1st International Workshop on Automated Program Repair (APR 2020) in conjunction with 42nd International 
Conference on Software Engineering (ICSE 2020), Seoul, South Korea}, keywords = {API Embedding, API Misuse, automatic program repair, Program Repair}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Krueger2020, title = {How Can I Contribute? A Qualitative Analysis of Community Websites of 25 Unix-Like Distributions}, author = {Jacob Kr\"{u}ger and Sebastian Nielebock and Robert Heum\"{u}ller}, editor = {ACM}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2020/02/docsAnalysis.pdf https://doi.org/10.5281/zenodo.3665429}, doi = {10.1145/3383219.3383256}, isbn = {9781450377317}, year = {2020}, date = {2020-04-17}, booktitle = {Proceedings of the 24th International Conference on Evaluation and Assessment in Software Engineering, (EASE) - Short Papers Track}, journal = {Proceedings of the 24th International Conference on Evaluation and Assessment in Software Engineering, (EASE)}, pages = {324--329}, address = {Trondheim, Norway}, keywords = {Bug fixing, Community, Contribution, Development, Information provisioning, Linux, Qualitative Study, Unix}, pubstate = {published}, tppubtype = {inproceedings} } @article{fritzsche2020innovative, title = {Innovative Hospital Management: Tracking of Radiological Protection Equipment}, author = {Holger Fritzsche and Elmer Jeto Gomes Ataide and Afshan Bi and Rohit Kalva and Sandeep Tripathi and Axel Boese and Michael Friebe and Tim Gonschorek}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2020/04/Innovative-Hospital-Management_-Tracking-of-Radiological-Protection-Equipment.pdf}, doi = {10.4018/IJBCE.2020010103}, year = {2020}, date = {2020-01-01}, journal = {International Journal of Biomedical and Clinical Engineering (IJBCE)}, volume = {9}, number = {1}, pages = {33--47}, publisher = {IGI Global}, abstract = {The healthcare industry is consistently developing a constant supply of medical equipment, e.g. radiation protection wear. 
These must be inspected regularly to ensure safety and quality. As this equipment keeps on moving from department to department, it has to be located in one place for annual inspection and must be properly documented after quality check. Conventionally, barcodes, QR codes, and manual entry of the required data are used as a tracking method which requires tedious human efforts without delivering the expected results for registration, tracking, and maintenance. A fully or semi-automated computerized system would be desirable in this case. Radio frequency identification systems which consist of tag, reader, and database can be used for tracking. This article presents a new innovative RFID based system which is dedicated to quality assurance of radiological protection wear specifically lead aprons. This process facilitates the service management of hospitals.}, keywords = {embedded software engineering, medical engineering}, pubstate = {published}, tppubtype = {article} } @incollection{Ataide20, title = {ENT Endoscopic Surgery and Mixed Reality: Application Development and Integration}, author = {Elmer Ataide and Holger Fritzsche and Marco Filax and Dinesh Chittamuri and Lakshmi Potluri and Michael Friebe}, url = {https://www.igi-global.com/gateway/chapter/239074}, year = {2020}, date = {2020-01-01}, booktitle = {Biomedical and Clinical Engineering for Healthcare Advancement}, pages = {17--29}, publisher = {IGI Global}, keywords = {Augmented Reality, virtual reality}, pubstate = {published}, tppubtype = {incollection} } @article{fuentealba2019independent, title = {Independent Analysis of Decelerations and Resting Periods through CEEMDAN and Spectral-Based Feature Extraction Improves Cardiotocographic Assessment}, author = {Patricio Fuentealba and Alfredo Illanes and Frank Ortmeier}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2019/12/fuentealba2019independent.pdf}, doi = {10.3390/app9245421}, issn = {2076-3417}, year = {2019}, date = 
{2019-12-11}, journal = {Applied Sciences}, volume = {9}, number = {24}, pages = {5421}, publisher = {Multidisciplinary Digital Publishing Institute}, abstract = {Fetal monitoring is commonly based on the joint recording of the fetal heart rate (FHR) and uterine contraction signals obtained with a cardiotocograph (CTG). Unfortunately, CTG analysis is difficult, and the interpretation problems are mainly associated with the analysis of FHR decelerations. From that perspective, several approaches have been proposed to improve its analysis; however, the results obtained are not satisfactory enough for their implementation in clinical practice. Current clinical research indicates that a correct CTG assessment requires a good understanding of the fetal compensatory mechanisms. In previous works, we have shown that the complete ensemble empirical mode decomposition with adaptive noise, in combination with time-varying autoregressive modeling, may be useful for the analysis of those characteristics. In this work, based on this methodology, we propose to analyze the FHR deceleration episodes separately. The main hypothesis is that the proposed feature extraction strategy applied separately to the complete signal, deceleration episodes, and resting periods (between contractions), improves the CTG classification performance compared with the analysis of only the complete signal. Results reveal that by considering the complete signal, the classification performance achieved 81.7% quality. 
Then, including information extracted from resting periods, it improved to 83.2%.}, keywords = {biomedical signal processing, cardiotocograph, empirical mode decomposition, fetal heart rate, spectral analysis, time-varying autoregressive modeling}, pubstate = {published}, tppubtype = {article} } @mastersthesis{Kirchheim2019, title = {Self-Assessment of Visual Recognition Systems based on Attribution}, author = {Konstantin Kirchheim}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2020/01/MA_2019_KonstantinKirchheim.pdf}, year = {2019}, date = {2019-12-09}, school = {Otto-von-Guericke-University Magdeburg}, abstract = {Convolutional Neural Networks achieve state of the art results in various visual recognition tasks like object classification and object detection. While CNNs perform surprisingly well, it is difficult to retrace why they arrive at a certain prediction. Additionally, they have been shown to be prone to certain errors. As CNN are increasingly deployed into physical systems - for example in self driving vehicles - undetected errors could result in catastrophic consequences. Approaches to prevent this include the usage of attribution based explanation methods to facilitate an understanding in the systems decision in hindsight, as well as the detection of recognition errors at runtime, called self-assessment. Some state-of-the-art self-assessment approaches aim to detect anomalies in the activation patterns of neurons in a CNN. This work explores the usage of attribution based explanations for self-assessment of CNNs. We build multiple self-assessment models and evaluate their performance in various settings. In our experiments, we find that, while self-assessment based on attribution does not outperform self-assessment based on neural activity on its own, it always surpasses random guessing. 
Furthermore, we find that self-assessment models using neural activation patterns as well as neural attribution can in some cases outperform models which do not consider attribution patterns. Thus, we conclude that it might be possible to improve self-assessment models by including the explanation of the model into the assessment process.}, keywords = {Novelty Detection, Open Set Recognition, Out-of-Distribution Detection, Outlier Detection, Recognition, Self-Assessment}, pubstate = {published}, tppubtype = {mastersthesis} } @inproceedings{fuentealba2019study, title = {A Study on the Classification Performance of Cardiotocographic Data vs Class Formation Criteria}, author = {Patricio Fuentealba and Alfredo Illanes and Frank Ortmeier}, year = {2019}, date = {2019-11-24}, abstract = {Fetal monitoring during labor is commonly based on the joint recording of the fetal heart rate (FHR) and uterine contraction data obtained by a Cardiotocograph (CTG). Currently, the interpretation of such data is difficult because it involves a visual analysis of highly complex signals. For this reason, several approaches based on signal processing and classification have been proposed. Most of the CTG classification approaches use class formation criteria based on the pH value, which is considered as a gold standard measure for postpartum evaluation. However, at birth, the association of a precise value of pH with the neonatal outcome is still inconclusive, which makes the classification training a difficult task. This work focuses on studying the CTG classification performance in relation to the used class formation criterion. For this purpose, first, the FHR signal is decomposed by using the complete ensemble empirical mode decomposition with adaptive noise (CEEMDAN) method. Second, we extract a set of signal features based on CEEMDAN and conventional time-domain features proposed in the literature, which are computed in different FHR signal lengths just before delivery. 
Then, the features classification performance is evaluated according to a set of class formation criteria based on different pH values used as thresholds. Results reveal that the classification performance significantly depends on the selected pH value for the class formation, whose best performance is achieved by considering a class formation based on a pH=7.05.}, keywords = {biomedical signal processing, cardiotocograph, empirical mode decomposition, fetal heart rate}, pubstate = {forthcoming}, tppubtype = {inproceedings} } @article{fuentealba2019thyroid, title = {Cardiotocographic Signal Feature Extraction through CEEMDAN and Time-Varying Autoregressive Spectral-Based Analysis for Fetal Welfare Assessment}, author = {Patricio Fuentealba and Alfredo Illanes and Frank Ortmeier}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2019/11/fuentealba2019cardiotocographic.pdf}, doi = {10.1109/ACCESS.2019.2950798}, year = {2019}, date = {2019-10-31}, journal = {IEEE Access}, volume = {7}, number = {1}, pages = {159754 - 159772}, publisher = {IEEE}, abstract = {Cardiotocograph (CTG) is a widely used tool for fetal surveillance during labor, which provides the joint recording of fetal heart rate (FHR) and uterine contraction data. Unfortunately, the CTG interpretation is difficult because it involves a visual analysis of highly complex signals. Recent clinical research indicates that a correct CTG assessment requires a good understanding of the fetal compensatory mechanisms modulated by the autonomic nervous system. Certainly, this modulation reflects variations in the FHR, whose characteristics can involve significant information about the fetal condition. The main contribution of this work is to investigate these characteristics by a new approach combining two signal processing methods: the complete ensemble empirical mode decomposition with adaptive noise (CEEMDAN) and time-varying autoregressive (TV-AR) modeling. 
The idea is to study the CEEMDAN intrinsic mode functions (IMFs) in both the time-domain and the spectral-domain in order to extract information that can help to assess the fetal condition. For this purpose, first, the FHR signal is decomposed, and then for each IMF, the TV-AR spectrum is computed in order to study their spectral dynamics over time. In this paper, we first explain the foundations of our proposed features. Then, we evaluate their performance in CTG classification by using three machine learning classifiers. The proposed approach has been evaluated on real CTG data extracted from the CTU-UHB database. Results show that by using only conventional FHR features, the classification performance achieved 78,0%. Then, by including the proposed CEEMDAN spectral-based features, it increased to 81,7%.}, keywords = {biomedical signal processing, cardiotocograph, empirical mode decomposition, fetal heart rate, spectral analysis, time-varying autoregressive modeling}, pubstate = {published}, tppubtype = {article} } @inproceedings{fuentealba2019cardiotocograph, title = {Cardiotocograph Data Classification Improvement by Using Empirical Mode Decomposition}, author = {Patricio Fuentealba and Alfredo Illanes and Frank Ortmeier}, url = {https://ieeexplore.ieee.org/document/8856673}, doi = {10.1109/EMBC.2019.8856673}, isbn = {978-1-5386-1311-5 }, year = {2019}, date = {2019-10-07}, booktitle = {2019 41st Annual International Conference of the IEEE Engineering in Medicine and Biology Society (EMBC)}, pages = {5646--5649}, organization = {IEEE}, abstract = {This work proposes to study the fetal heart rate (FHR) signal based on information about its dynamics as a signal resulting from the modulation by the autonomic nervous system. The analysis is performed using the complete ensemble empirical mode decomposition with adaptive noise (CEEMDAN) technique. 
The main idea is to extract a set of signal features based on that technique and also conventional time-domain features proposed in the literature in order to study their performance by using a support vector machine (SVM) as a classifier. As a hypothesis, we postulate that by including CEEMDAN based features, the classification performance should improve compared with the performance achieved by conventional features. The proposed method has been evaluated using real FHR data extracted from the open access CTU-UHB database. Results show that the classification performance improved from 67.6% using only conventional features, to 71.7% by incorporating CEEMDAN based features.}, keywords = {databases, feature extraction, fetal heart rate, support vector machines, testing, time-domain analysis, training}, pubstate = {published}, tppubtype = {inproceedings} } @article{fuentealba2019foetal, title = {Foetal heart rate assessment by empirical mode decomposition and spectral analysis}, author = {Patricio Fuentealba and Alfredo Illanes and Frank Ortmeier}, url = {https://www.degruyter.com/downloadpdf/j/cdbme.2019.5.issue-1/cdbme-2019-0096/cdbme-2019-0096.pdf}, doi = {10.1515/cdbme-2019-0096}, year = {2019}, date = {2019-09-18}, journal = {Current Directions in Biomedical Engineering}, volume = {5}, number = {1}, pages = {381--383}, publisher = {De Gruyter}, abstract = {This paper focuses on studying the time-variant dynamics involved in the foetal heart rate (FHR) response resulting from the autonomic nervous system modulation. It provides a comprehensive analysis of such dynamics by relating the spectral information involved in the FHR signal with foetal physiological characteristics. This approach is based on two signal processing methods: the complete ensemble empirical mode decomposition with adaptive noise (CEEMDAN) and time-varying autoregressive (TV-AR) modelling. 
First, the CEEMDAN allows to decompose the signal into intrinsic mode functions (IMFs). Then, the TV-AR modelling allows to analyse their spectral dynamics. Results reveal that the IMFs can involve significant spectral information (p-value < 0.05) that can help to assess the foetal condition.}, keywords = {cardiotocograph, empirical mode decomposition, foetal monitoring, time-varying autoregressive spectrum}, pubstate = {published}, tppubtype = {article} } @inproceedings{10.1007/978-3-030-26601-1_19, title = {SafeDeML: On Integrating the Safety Design into the System Model}, author = {Tim Gonschorek and Philipp Bergt and Marco Filax and Frank Ortmeier and Jan von Hoyningen-H\"{u}ne and Thorsten Piper}, editor = {Alexander Romanovsky and Elena Troubitsyna and Friedemann Bitsch}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2020/04/GonschorekEtAl_SafeDeML.pdf https://link.springer.com/chapter/10.1007/978-3-030-26601-1_19}, doi = {10.1007/978-3-030-26601-1_19}, isbn = {978-3-030-26601-1}, year = {2019}, date = {2019-09-18}, booktitle = {Computer Safety, Reliability, and Security}, pages = {271--285}, publisher = {Springer International Publishing}, address = {Cham}, abstract = {The safety design definition of a safety critical system is a complex task. On the one hand, the system designer must ensure that he addressed all potentially hazardous hardware faults. This is often defined not(!) in the model but within extra documents (e.g., Excel sheets). On the other hand, all defined safety mechanisms must be transformed back into the system model. 
We think an improvement for the designer would be given by a modeling extension integrating relevant safety design artifacts into the normal design work-flow and supporting the safety design development directly from within the model.}, keywords = {model-based safety assessment, safety design integration}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{10.1007/978-3-030-32872-6_7, title = {Integrating Safety Design Artifacts into System Development Models Using SafeDeML}, author = {Tim Gonschorek and Philipp Bergt and Marco Filax and Frank Ortmeier}, editor = {Yiannis Papadopoulos and Koorosh Aslansefat and Panagiotis Katsaros and Marco Bozzano}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2020/04/SafeDeML_gonschorekEtAl.pdf https://link.springer.com/chapter/10.1007/978-3-030-32872-6_7}, doi = {10.1007/978-3-030-32872-6_7}, isbn = {978-3-030-32872-6}, year = {2019}, date = {2019-09-18}, booktitle = {Model-Based Safety and Assessment}, pages = {93--106}, publisher = {Springer International Publishing}, address = {Cham}, abstract = {Applying a safety artifact language as Safety Design Modeling Language SafeDeML integrates the generation of the safety design into the system modeling stage -- directly within the system architecture. In this paper, we present a modeling process and a prototype for the CASE tool Enterprise Architect for SafeDeML. The goal is to support the system designer in developing a standard (in this paper ISO 26262) conform system and safety design containing all relevant safety artifact within one model. Such integration offers several modeling guarantees like consistency checks or computation of coverage and fault metrics. Since all relevant information and artifacts are contained within the model, SafeDeML and the prototype can help to decrease the effect of structural faults during the safety design and further supports the safety assessment. 
To give an idea to the reader of the complexity of the approach's application, we present an exemplary implementation of the safety design for a brake light system, a real case-study from the ISO 26262 context.}, keywords = {model-based, model-based safety assessment, safety design}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Heumueller2019, title = {SpecTackle - A Specification Mining Experimentation Platform}, author = {Robert Heum\"{u}ller and Sebastian Nielebock and Frank Ortmeier}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2020/08/paper-spectackle.pdf}, year = {2019}, date = {2019-08-30}, booktitle = {Proceedings of the 45th Euromicro Conference on Software Engineering and Advanced Applications (SEAA), Kallithea, Chalkidiki, Greece}, organization = {Euromicro}, abstract = {Nowadays, API Specification Mining is an important cornerstone of automated software engineering. In this paper, we introduce SpecTackle, an IDE-based experimentation platform aiming to facilitate experimentation and validation of specification mining algorithms and tools. SpecTackle strives toward (1) providing easy access to various specification mining tools, (2) simplifying configuration and usage through a shared interface, and (3) in-code visualization of pattern occurrences. The first version supports two heterogeneous mining tools, a third-party graph-based miner as well as a custom sequence mining tool. 
In the long term, SpecTackle envisions to also provide ground-truth benchmark projects, a unified pattern meta-model and parameter optimization for mining tools.}, keywords = {API, specification mining, SpecTackle, Tool}, pubstate = {published}, tppubtype = {inproceedings} } @misc{datenschutzkonzept_nielebock, title = {Leitfaden "Ihre ersten Schritte auf dem Weg zu einem Datenschutzkonzept f\"{u}r Ihr Unternehmen - Das k\"{o}nnen Sie selbst tun!"}, author = {Sebastian Nielebock and Mykhaylo Nykolaichuk and Frank Ortmeier}, editor = {Mittelstand 4.0-Kompetenzzentrum Magdeburg c/o ZPVP GmbH}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2019/06/Leitfaden_Erste-Schritte_zum_Datenschutzkonzept_final.pdf}, year = {2019}, date = {2019-06-03}, abstract = {Sie informieren sich auf einer Firmen-Website im Internet. Sie m\"{o}chten in einem Onlineshop bestellen oder sind Mitglied in einem Verband? Selbst wenn Sie auf den ersten Blick gar keine pers\"{o}nlichen Daten preisgeben, so hinterlassen Sie doch, bei allem, was Sie tun, Ihre Datenspur. Diese Datenspur gilt es zu sch\"{u}tzen und im Umkehrschluss nat\"{u}rlich auch die Ihrer Kunden. Am 25. Mai 2018 trat die so genannte Datenschutzgrundverordnung (DSGVO) in Kraft. Damit sind die Richtlinien noch strenger geworden. Wir alle wissen, dass uns der Schutz unserer Daten wichtig ist. Aber was genau bedeutet das? Welche Daten fallen zum Beispiel in meinem Unternehmen an? Welche Daten muss ich sch\"{u}tzen und wie mache ich das? Wenn ich schon Schutzma{\ss}nahmen ergriffen habe, sind diese ausreichend? Vor diesen Fragen stehen nicht nur Sie, sondern viele Unternehmer und Unternehmerinnen. W\"{a}hrend Gro{\ss}betriebe \"{u}ber eigene IT- und Rechtsabteilungen verf\"{u}gen, stehen Firmenchefs kleinerer und mittlerer Unternehmen diesen Fragen h\"{a}ufig alleine gegen\"{u}ber. Oftmals fehlt schon das Basiswissen, von Zeit und Mu{\ss}e ganz zu schweigen. Dazu vorab eine gute und eine weniger gute Nachricht. 
Beginnen wir mit der weniger guten: Datenschutz ist komplex und entwickelt sich st\"{a}ndig weiter. Um wirklich auf Nummer sicher zu gehen, brauchen Sie sehr wahrscheinlich Rat und Hilfe eines Datenschutz-Experten. Dabei kann dieser Leitfaden Sie unterst\"{u}tzen, auf Augenh\"{o}he mit einem Datenschutz-Profi zu reden, denn daf\"{u}r statten wir Sie mit dem notwendigen Vokabular aus und vermitteln Ihnen die wichtigsten datenschutzrechtlichen Grundlagen. Am Ende eines Prozesses steht ein f\"{u}r Ihr Unternehmen ma{\ss}geschneidertes Datenschutzkonzept. Das ist nichts anderes als ein Ma{\ss}nahmenkonzept, um den Datenschutz in Ihrer Firma einzuhalten. Wir zeigen Ihnen auch, wer genau Ihnen beim Datenschutzkonzept f\"{u}r Ihr Unternehmen unter die Arme greifen kann. So kommen wir nun zur guten Nachricht: Guter Datenschutz ist wichtig, muss aber nicht teuer sein. Wichtig ist, dass Sie f\"{u}r Ihr Unternehmen systematisch vorgehen und im Thema stecken. So wird es Ihnen leicht fallen, die gr\"{o}{\ss}ten Risiken herauszufinden und zielgerichtet zu minimieren. Ist Ihr Blick einmal f\"{u}r eventuelle Sicherheitsl\"{u}cken geschult, sind Sie auch zuk\"{u}nftig f\"{u}r noch kommende Datenschutzma{\ss}nahmen sensibilisiert. Datenschutz ist komplex und kompliziert. Aber Datenschutz ist auch keine „Raketenwissenschaft“. Wer es geschafft hat, ein Unternehmen aufzubauen und zu leiten, der kann auch die ersten Schritte hin zu einem Datenschutzkonzept selbst\"{a}ndig gehen. Lassen Sie sich dazu von uns an die Hand nehmen. Datenschutz und Datensicherheit entwickeln sich stetig weiter. 
Am besten ist, Sie entwickeln sich einfach mit ...}, keywords = {Datenschutzkonzept, KMU, Leitfaden, Mittelstand 4.0-Kompetenzzentrum Magdeburg, Mittelstand-Digital}, pubstate = {published}, tppubtype = {misc} } @inproceedings{Filax2019, title = {Data for Image Recognition Tasks: An Efficient Tool for Fine-Grained Annotations}, author = {Marco Filax and Tim Gonschorek and Frank Ortmeier}, url = {https://bitbucket.org/cse_admin/md_groceries http://www.scitepress.org/DigitalLibrary/Link.aspx?doi=10.5220/0007688709000907}, year = {2019}, date = {2019-02-19}, booktitle = {Proceedings of the 8th International Conference on Pattern Recognition Applications and Methods}, abstract = {Using large datasets is essential for machine learning. In practice, training a machine learning algorithm requires hundreds of samples. Multiple off-the-shelf datasets from the scientific domain exist to benchmark new approaches. However, when machine learning algorithms transit to industry, e.g., for a particular image classification problem, hundreds of specific purpose images are collected and annotated in laborious manual work. In this paper, we present a novel system to decrease the effort of annotating those large image sets. Therefore, we generate 2D bounding boxes from minimal 3D annotations using the known location and orientation of the camera. We annotate a particular object of interest in 3D once and project these annotations on to every frame of a video stream. The proposed approach is designed to work with off-the-shelf hardware. We demonstrate its applicability with an example from the real world. We generated a more extensive dataset than available in other works for a particular industrial use case: fine-grained recognition of items within grocery stores. Further, we make our dataset available to the interested vision community consisting of over 60,000 images. 
Some images were taken under ideal conditions for training while others were taken with the proposed approach in the wild. }, keywords = {Augmented Reality, Fine-Grained Recognition, VIOL}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{engel2019companion, title = {Companion Specifications for Smart Factories: From Machine to Process View}, author = {Christoph Engel and Steffen Mencke and Robert Heum\"{u}ller and Frank Ortmeier}, year = {2019}, date = {2019-01-01}, booktitle = {Smart SysTech 2019; European Conference on Smart Objects, Systems and Technologies}, pages = {1--8}, organization = {VDE}, abstract = {Currently Companion Specifications are used to create general interfaces to machines within a plant (see: [1, P. 27ff]). This is one step to reach an exchangeability of machines on the way to the future of Industry 4.0. But in the environment of enterprise companies also a process view and a recombinability of production steps is desirable to get the flexibility needed for the Industry 4.0. This paper proposes an Enterprise Domain Companion Specification as well as a mapping to corresponding Machine Companion Specifications as solution to that problem. 
Together with this mapping the Enterprise Domain Companion Specification allows to create an interactive process view for the IT and allows a recombination of process steps without changing the IT perspective to the production.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{fuentealba2018spectral, title = {Spectral-based Analysis of Progressive Dynamical Changes in the Fetal Heart Rate Signal During Labor by Using Empirical Mode Decomposition}, author = {Patricio Fuentealba and Alfredo Illanes and Frank Ortmeier}, url = {http://www.cinc.org/2018/preprints/95_CinCFinalPDF.pdf}, year = {2018}, date = {2018-12-01}, booktitle = {2018 Computing in Cardiology (CinC)}, pages = {1--4}, organization = {IEEE}, abstract = {In this work, we propose to study the progressive fetal response along the fetal heart rate (FHR) signal by using empirical mode decomposition and time-varying spectral-based analysis. The main idea is to investigate if a particular FHR signal episode in the time-domain reflects dynamical changes in the frequency-domain that can help to assess the fetal condition. 
Results show that the spectral components associated with the neural sympathetic fetal reactivity exhibit significant spectral energy differences between normal and acidotic fetuses.}, keywords = {cardiotocograph, empirical mode decomposition, FHR signal, spectral analysis, time-varying}, pubstate = {published}, tppubtype = {inproceedings} } @article{NielebockComments2018, title = {Commenting Source Code: Is It Worth It For Small Programming Tasks?}, author = {Sebastian Nielebock and Dariusz Krolikowski and Jacob Kr\"{u}ger and Thomas Leich and Frank Ortmeier}, editor = {Springer Science+Business Media, LLC, part of Springer Nature}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2018/10/paper-influence-comments_preliminary.pdf}, doi = {10.1007/s10664-018-9664-z}, issn = {1382-3256}, year = {2018}, date = {2018-11-16}, journal = {Springer Empirical Software Engineering (EMSE)}, volume = {24}, number = {3}, pages = {1418--1457}, publisher = {Springer US}, abstract = {Maintaining a program is a time-consuming and expensive task in software engineering. Consequently, several approaches have been proposed to improve the comprehensibility of source code. One of such approaches are comments in the code that enable developers to explain the program with their own words or predefined tags. Some empirical studies indicate benefits of comments in certain situations, while others find no benefits at all. Thus, the real effect of comments on software development remains uncertain. In this article, we describe an experiment in which 277 participants, mainly professional software developers, performed small programming tasks on differently commented code. Based on quantitative and qualitative feedback, we i) partly replicate previous studies, ii) investigate performances of differently experienced participants when confronted with varying types of comments, and iii) discuss the opinions of developers on comments. 
Our results indicate that comments seem to be considered more important in previous studies and by our participants than they are for small programming tasks. While other mechanisms, such as proper identifiers, are considered more helpful by our participants, they also emphasize the necessity of comments in certain situations.}, keywords = {Comments, Documentation and Maintenance, Empirical Study, Program Comprehension}, pubstate = {published}, tppubtype = {article} } @inproceedings{Gonschorek2018c, title = {A very first Glance on the Safety Analysis of Self-learning Algorithms for Autonomous Cars}, author = {Tim Gonschorek and Marco Filax and Frank Ortmeier}, editor = {J\'{e}r\'{e}mie Guiochet}, url = {https://hal.archives-ouvertes.fr/hal-01878562v1 https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2019/01/fastAbstractGonschorekEtAl_AVery-FirstGlanceOnThe-SafetyAnalysisOfSelf-learningAlgorithmsForAutonomousCars.pdf}, year = {2018}, date = {2018-09-26}, booktitle = {37th International Conference on Computer Safety, Reliability, \& Security. SAFECOMP2018.}, publisher = {HAL}, series = {Fast Abstracts}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @article{fuentealba2018foetal, title = {Foetal heart rate signal spectral analysis by using time-varying autoregressive modelling}, author = {Patricio Fuentealba and Alfredo Illanes and Frank Ortmeier}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2019/11/fuentealba2018foetal.pdf}, doi = {10.1515/cdbme-2018-0139}, year = {2018}, date = {2018-09-22}, journal = {Current Directions in Biomedical Engineering}, volume = {4}, number = {1}, pages = {579--582}, publisher = {De Gruyter}, abstract = {During labour, foetal monitoring enables clinicians to prevent potential adverse outcomes, whose surveillance procedure is commonly based on analysis of cardiotocographic (CTG) signals. 
Unfortunately, this procedure is difficult because it involves human interpretation of highly complex signals. In order to improve the CTG assessment, different approaches based on signal processing techniques have been proposed. However, most of them do not consider the progression of the foetal response over time. In this work, we propose to study such progression along the foetal heart rate (FHR) signal by using spectral analysis based on time-varying autoregressive modelling. The main idea is to investigate if a particular FHR signal episode in the time-domain reflects dynamical changes in the frequency-domain that can help to assess the foetal condition. Results show that each FHR deceleration leaves a particular time-varying frequency signature described by the spectral energy components which could help to distinguish between a normal and a pathological foetus.}, keywords = {Autoregressive model, cardiotocograph, Fetal monitoring, FHR, Time-varying spectral analysis}, pubstate = {published}, tppubtype = {article} } @inproceedings{NielebockAPIMisuseCommits2018, title = {Commits as a Basis for API Misuse Detection}, author = {Sebastian Nielebock and Robert Heum\"{u}ller and Frank Ortmeier}, editor = {ACM}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2018/08/nielebock-et-al-Commits_as_a_Basis_for_API_Misuse_Detection.pdf}, doi = {10.1145/3242887.3242890}, year = {2018}, date = {2018-09-03}, booktitle = {Proceedings of the 7th International Workshop on Software Mining (SoftwareMining ’18), September 3, 2018, Montpellier, France.}, journal = {Proceedings of the 7th International Workshop on Software Mining (SoftwareMining ’18), September 3, 2018, Montpellier, France.}, pages = {4}, publisher = {ACM}, address = {New York, NY, USA}, abstract = {Programmers frequently make use of APIs. However, these usages can result in unintended, negative behavior, when developers are not aware of the correct usage or side effects of that API. 
Detecting those API misuses by means of automatic testing is challenging, as many test suites do not cover this unintended behavior. Instead, API usage patterns are used as specifications to verify the correctness of applications. However, to find meaningful patterns, i.e., those capable of fixing the misuse, the context of the misuse must be considered. Since the developer usually does not know which API is misused, a much larger code section has to be verified against many potential patterns. In this paper, we present a new idea to enhance API misuse detection by means of commits. We discuss the potential of using commits (1) to decrease the size of the code to be considered, (2) to identify suspicious commits, and (3) to contain API usages which can be used to shepherd API specification mining. This paper shows first results on the usability of commits for API misuse detection and some insights into what makes a commit suspicious in terms of exhibiting potential API misuses.}, keywords = {Application programming interfaces, Commits, Misuse Detection, Misuses}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{HeumuellerInteractionPatterns2018, title = {Who plays with Whom? ... and How? Mining API Interaction Patterns from Source Code}, author = {Robert Heum\"{u}ller and Sebastian Nielebock and Frank Ortmeier}, editor = {ACM}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2018/08/paper-mining-api-interactions.pdf}, doi = {10.1145/3242887.3242888}, year = {2018}, date = {2018-09-03}, booktitle = {Proceedings of the 7th International Workshop on Software Mining (SoftwareMining ’18)}, pages = {4}, publisher = {ACM}, address = {New York, NY, USA}, abstract = {State-of-science automated software engineering techniques increasingly rely on specification mining to provide API usage patterns for numerous applications, e.g. context sensitive code-completion, bug-detection or bug-fixing techniques. 
While some existing approaches already yield good results with respect to diverse tasks, the focus has always been on the inference of high-quality, reusable specifications for single APIs. However, in contemporary software development it is commonplace to combine a multitude of different libraries in order to increase efficiency by avoiding the reimplementation of the wheel. In contrast to prior research, in this idea paper we propose to explicitly study the patterns of interaction between multiple different APIs. First, we introduce a method for mining API interactions patterns from existing applications. Then, we give an overview of our preliminary investigation, in which we applied the method to a case-study of nearly 500 Android applications. The exemplary results show that there definitely exist valuable interaction patterns which can be helpful for various traditional and automated software engineering tasks.}, keywords = {api interaction, library interaction, specification mining}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Gonschorek2018b, title = {Fault Trees vs. Component Fault Trees: An Empirical Study}, author = {Tim Gonschorek and Marc Zeller and Frank Ortmeier and Kai H\"{o}fig}, editor = {Gallina, B. and Skavhaug, A. and Schoitsch, E. and Bitsch, F.}, doi = {10.1007/978-3-319-99229-7_21}, isbn = {978-3-319-99228-0}, year = {2018}, date = {2018-08-21}, booktitle = { Computer Safety, Reliability, and Security. 
SAFECOMP 2018.}, volume = {11094}, publisher = {Springer, Cham}, series = {Lecture Notes in Computer Science}, keywords = {Component Fault Trees, model-based safety assessment}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Schillreff2018, title = { Learning-based Kinematic Calibration using Adjoint Error Model }, author = {Nadia Schillreff and Frank Ortmeier}, editor = {SciTePress}, doi = {10.5220/0006870403820389}, isbn = {978-989-758-321-6}, year = {2018}, date = {2018-08-01}, booktitle = {Proceedings of the 15th International Conference on Informatics in Control, Automation and Robotics - Volume 2: ICINCO}, abstract = {A learning-based robot kinematic calibration approach based on the product-of-exponentials (POE) formula and Adjoint error model is introduced. To ensure high accuracy this approach combines the geometrical and non-geometrical influences like for e.g. elastic deformations without explicitly defining all physical processes that contribute to them using a polynomial regression method. By using the POE formula for kinematic modeling of the manipulator it is ensured that kinematic parameters vary smoothly and used method is robust and singularity-free. The introduced error parameters are presented in the form of Adjoint transformations on nominal joint twists. The calibration process then becomes finding a set of polynomial functions using regression methods that are able to reflect the actual kinematics of the robot. The proposed method is evaluated on a dataset obtained using a 7-DOF manipulator (KUKA LBR iiwa 7 R800). 
The experimental results show that this approach significantly reduces positional errors of the robotic manipulator after calibration.}, keywords = {Modeling, Parameter Identification}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Gonschorek2018, title = {Automatic Model-based Verification of Railway Interlocking Systems using Model Checking}, author = {Tim Gonschorek and Ludwig Bedau and Frank Ortmeier}, editor = {Stein Haugen}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2018/02/Esrel2018_GonschorekEtAl_ModelCheckingRailMLInterlockings.pdf}, year = {2018}, date = {2018-06-17}, booktitle = {Proceedings of ESREL 2018}, pages = {741--748}, publisher = {CRC Press}, address = {London}, abstract = {The theoretic foundations for formally verifying railway interlocking systems have already been studied extensively. There exist a lot of work covering the application of methodologies like model checking in this context. However, some design faults still remain undetected until final on-track evaluation of the system. This is strongly related to missing automation solutions for real-world models and standards as well as the high theoretical expertise required. There exist many well-developed tools each requiring different modeling formalisms and focusing on a different question/scenario. Without specific experience in formal system modeling, it is extremely complicated to model such complex systems. In this paper, we present a methodology for the automatic model generation and verification of railway interlockings in a tool-independent(!) way. Therefore, we define a generic template set of atomic track elements and safety properties in a formal modeling language applicable with precise semantics. This generic template enables us to verify the structure of any given track layout. The already existing tool support of VECS allows to automatically translate these specifications into various model checkers for verification. 
More important, we present a robust transformation of the upcoming data exchange format for railway interlocking systems railML into the presented specification template. As a consequence, this approach really may help to bridge the gap between formal methods and system design in railway interlockings. We evaluate this approach on a real-world case study, the train station of Braine-l'Alleud. We also show the tool-independent modeling by automatically translating the specification to different verification engines and compare their performance.}, keywords = {model based verification, model-based safety assessment, Railway System Verification}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Koegel18, title = {Predictive Tracking Control of a Camera - Head Mounted Display System subject to Communication Constraints}, author = {Markus K\"{o}gel and Petar Andonov and Marco Filax and Frank Ortmeier and Rolf Findeisen}, year = {2018}, date = {2018-06-01}, booktitle = {16th European Control Conference (ECC)}, pages = {1035--1041}, keywords = {predictive control, virtual reality}, pubstate = {published}, tppubtype = {inproceedings} } @article{NielebockLambda2018, title = {Programmers do not Favor Lambda Expressions for Concurrent Object-Oriented Code}, author = {Sebastian Nielebock and Robert Heum\"{u}ller and Frank Ortmeier}, editor = {Springer}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2018/11/2018-11-06-ESEC-FSE-Lambda-In-Concurrency_prepared_publication.pdf https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2018/10/journal-emse-lambda-1.pdf https://bitbucket.org/SNielebock/lambdainconcurrentdataset}, doi = {10.1007/s10664-018-9622-9}, issn = {1382-3256}, year = {2018}, date = {2018-05-02}, journal = {Springer Empirical Software Engineering (EMSE)}, volume = {24}, number = {1}, pages = {103--138}, abstract = {Lambda expressions have long been state-of-the-art in the functional programming paradigm. 
Especially with regard to the use of higher-order functions, they provide developers with a means of defining predicate or projection functions locally, which greatly increases the comprehensibility of the resulting source code. This benefit has motivated language designers to also incorporate lambda expressions into object-oriented (OO) programming languages. In particular, they are claimed to facilitate concurrent programming. One likely reason for this assumption is their purity: pure lambda expressions are free of side effects, and therefore cannot cause, e.g., race conditions. In this paper, we present the first empirical analysis of whether or not this claim is true for OO projects. For this purpose, we investigated the application of lambda expressions in 2,923 open-source projects, implemented in one of the most common OO programming languages: C#, C++, and Java. We present three major findings. First, the majority of lambda expressions are not applied in concurrent code and most concurrent code does not make use of lambda expressions. Second, for all three of the languages, we observed that developers compromise their code by applying a significantly higher number of impure, capturing lambda expressions, which are capable of causing race conditions. Finally, we explored further use cases of lambda expressions and found out that testing, algorithmic implementation, and UI are far more common use-cases for the application of lambda expressions. 
Our results encourage to investigate in more detail the reasons that hinder programmers to apply lambda expressions in concurrent programming and to support developers, e.g., by providing automatic refactorings.}, keywords = {Concurrency, Empirical Software Engineering, Lambda Expression, Misuses, Object-Orientation, Programming Language, Static Analysis}, pubstate = {published}, tppubtype = {article} } @inproceedings{Heumueller2018, title = {Leveraging project-specificity to find suitable specifications: student research abstract}, author = {Robert Heum\"{u}ller}, editor = {ACM}, doi = {10.1145/3167132.3167455}, isbn = {978-1-4503-5191-1}, year = {2018}, date = {2018-04-09}, booktitle = {Proceedings of the 33rd Annual ACM Symposium on Applied Computing}, abstract = {Automated inference of API specifications is crucial for scaling various automated software engineering tasks such as bug-detection or bug-fixing. Prior research has therefore focused on techniques to improve the quality, diversity and availability of specifications. Consequently, efficient ways of retrieving the appropriate specifications for a particular task from a specification database will be necessary in future. In our research, we analyse projects using information retrieval techniques (tf-idf), to determine which specifications are characteristic for a project. Our hypothesis is that this knowledge can be exploited to significantly optimize the search process, by focussing on projects which are similar in terms of their characteristic API usages. 
Initial results indicate that a project-specificity based selection of specifications from different sources can produce candidate sets featuring a larger variety of interesting specifications than support-based, project-agnostic heuristics.}, keywords = {api usage pattern, information retrieval, source code mining}, pubstate = {published}, tppubtype = {inproceedings} } @mastersthesis{SchottExtractionBA2018, title = {Extraktion relevanter API-spezifischer Informationen zur automatischen Korrektur von Softwarefehlern}, author = {Kevin Michael Schott}, editor = {Sebastian Nielebock and Frank Ortmeier}, year = {2018}, date = {2018-03-13}, school = {Otto-von-Guericke-Universit\"{a}t Magdeburg}, abstract = {Bekanntlich ist Softwareentwicklung nicht fehlerfrei. W\"{a}hrend der Fokus auf der Entwicklung neuer Komponenten liegen sollte, geht viel Zeit in die Behebung existierender Programmfehler verloren. Jene Programmfehler, welche das Softwaresystem in einen regressiven Zustand versetzen - sodass dieses nicht mehr ordnungsgem\"{a}{\ss} funktioniert - sind oftmals m\"{u}hselig zu beheben. Dadurch ist der zust\"{a}ndige Programmierer meist auf unbestimmte Zeit verpflichtet, diesen zu beheben, wodurch er dem Unternehmen nicht f\"{u}r die Weiterentwicklung zur Verf\"{u}gung steht. Dieser zeitliche Engpass kann monet\"{a}re R\"{u}ckschl\"{a}ge bedeuten, insbesondere wenn es sich um eine sicherheitskritische L\"{u}cke im Softwaresystem handelt. Die Dom\"{a}ne der automatischen Fehlerkorrektur will solchen Problemen entgegenwirken, indem versucht wird, ein Ansatz zu finden, der den entstandenen Fehler automatisch beheben kann oder dem Programmierer wenigstens einen L\"{o}sungsvorschlag liefert. Heutzutage ist der Umgang mit Application-Programming-Interfaces (kurz APIs) beinahe allt\"{a}glich f\"{u}r jeden Programmierer, was einen Ansatz zur automatischen Korrektur von Softwarefehlern schwer bis gar nicht generalisieren l\"{a}sst. 
Deren Dokumentation ist oft nicht hinreichend oder unverst\"{a}ndlich. Dadurch wird die API falsch benutzt, sodass unter anderem Methodenaufrufe vergessen oder in falscher Reihenfolge verwendet werden. J\"{u}ngste Studien zeigen zudem, dass bei circa jeder zweiten Fehlerbehebung mindestens eine API-spezifische \"{A}nderung vorgenommen werden muss, um den Fehler ganzheitlich zu beheben. Herk\"{o}mmliche Ans\"{a}tze zur automatischen Korrektur von Softwarefehlern k\"{o}nnen aufgrund ihrer generischen Natur diese Programmfehler oftmals nicht vollst\"{a}ndig beheben und m\"{u}ssen erweitert werden. Im Rahmen dieser Arbeit wird ein Konzept vorgestellt, mit dessen Hilfe diese Programmfehler m\"{o}glicherweise korrigiert werden k\"{o}nnen. Prim\"{a}r wird jedoch erforscht, welche Informationen aus einem API-spezifischen Kontext extrahiert und verarbeitet werden m\"{u}ssen, um eine Korrektur vornehmen zu k\"{o}nnen. Daf\"{u}r wird ein Prototyp entwickelt, der diese Informationsextraktion \"{u}bernimmt und \"{a}hnlichen Quellcode herunterl\"{a}dt, sodass in diesem potenziell eine richtige Benutzung der gesuchten API enthalten ist, welche als Korrektur des Fehlers dienen soll.}, keywords = {API, API Misuse, Correction, Fehler, Korrektur, Misuses}, pubstate = {published}, tppubtype = {mastersthesis} } @proceedings{Bedau2018, title = {Sicherheitsanalyse eines Bahnhofstellwerkes}, author = {Ludwig Bedau and Tim Gonschorek and Frank Ortmeier}, editor = {Andreas Brock and Christina Brock and Rudolf Barth}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2018/02/BedauEtAl_HST17.pdf}, year = {2018}, date = {2018-02-28}, volume = {35}, organization = {Horber Schienen Tage}, keywords = {model based verification, Railway System Verification}, pubstate = {published}, tppubtype = {proceedings} } @inproceedings{visapp18, title = {VIOL: Viewpoint Invariant Object Localizator - Viewpoint Invariant Planar Features in Man-Made Environments}, author = {Marco Filax and Frank 
Ortmeier}, doi = {10.5220/0006624005810588}, isbn = {978-989-758-290-5}, year = {2018}, date = {2018-01-01}, booktitle = {Conference on Computer Vision, Imaging and Computer Graphics Theory and Applications (VISAPP)}, pages = {581--588}, abstract = {Object detection is one of the fundamental issues in computer vision. The established methods rely on different feature descriptors to determine correspondences between significant image points. However, they do not provide reliable results, especially for extreme viewpoint changes. This is because feature descriptors do not adhere to the projective distortion introduced with an extreme viewpoint change. Different approaches have been proposed to lower this hurdle, e.g., by randomly sampling multiple virtual viewpoints. However, these methods are either computationally intensive or impose strong assumptions of the environment. In this paper, we propose an algorithm to detect corresponding quasi-planar objects in man-made environments. We make use of the observation that these environments typically contain rectangular structures. We exploit the information gathered from a depth sensor to detect planar regions. With these, we unwrap the projective distortion, by transforming the planar patch into a fronto-parallel view. We demonstrate the feasibility and capabilities of our approach in a real-world scenario: a supermarket.}, keywords = {Augmented Reality, Fine-Grained Recognition, VIOL}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{klock18, title = {On the Similarities of Fingerprints and Railroad Tracks: Using Minutiae Detection Algorithms to digitize Track Plans.}, author = {Maximilian Klockmann and Marco Filax and Frank Ortmeier and Martin Rei\ss}, year = {2018}, date = {2018-01-01}, booktitle = {13th IAPR Workshop on Document Analysis Systems (DAS)}, abstract = {The complete track system of Germany covers more than 42.000 kilometers - some built before 1970. 
As a consequence, technical drawings are typical of manual origin. Newer plans are generated in a computer-aided way but remain drawings in the sense that semantics are not captured in the electronic files themselves. The engineer decides the meaning of a symbol while viewing the document. For project realization (e.g., engineering of some interlocking system), these plans are digitized manually into some machine interpretable format. In this paper, we propose an approach to digitize track layouts (semi-)automatically. We use fingerprint recognition techniques to digitize manually created track plans efficiently. At first, we detect tracks by detecting line endings and bifurcations. Secondly, we eliminate false candidates and irregularities. Finally, we translate the resulting graph into an interchangeable format RailML. We evaluate our method by comparing our results with different track plans. Our results indicate that the proposed method is a promising candidate, reducing the effort of digitization. }, keywords = {Digitization, Information Extraction, Minutiae Detection, Track Layout Analysis}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{fuentealba2017progressive, title = {Progressive Fetal Distress Estimation by Characterization of Fetal Heart Rate Decelerations Response Based on Signal Variability in Cardiotocographic Recordings}, author = {Patricio Fuentealba and Alfredo Illanes and Frank Ortmeier}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2018/04/fuentealba2017progressive.pdf}, doi = {10.22489/CinC.2017.276-152}, issn = {2325-887X}, year = {2017}, date = {2017-12-01}, booktitle = {2017 Computing in Cardiology (CinC)}, volume = {44}, pages = {1--4}, organization = {IEEE}, abstract = {In the current clinical practice, Cardiotocograph (CTG) is a standard and widely used tool for fetal surveillance. 
However, CTG interpretation is difficult since it involves human diagnosis of different patterns in highly complex signals. Fetal heart rate (FHR) decelerations and variability are known to be the most significant and difficult patterns to assess by the clinical staff. The main goal of this work is to analyze the fetal distress by tracking the evolution of the dynamical changes occurring in the CTG recording. The idea is to consider the direct input/output relationship between uterine contraction (UC) and FHR signals by the characterization of FHR decelerations in terms of their signal variability as a sign of the fetal response corresponding to a UC event. Results show that the progression of the decelerations response over time can help the observer to track fetal distress.}, keywords = {cardiotocograph, FHR decelerations, progressive CTG analysis, signal variability}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{nielebock-towards-asaprepair-ASE-DS-2017, title = {Towards API-Specific Automatic Program Repair}, author = {Sebastian Nielebock}, editor = {IEEE/ACM}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/ase17doc-doc1-p-02d3c79-34274-preprint-1.pdf https://dl.acm.org/citation.cfm?id=3155697}, year = {2017}, date = {2017-10-30}, booktitle = {Proceedings of the IEEE/ACM 32nd International Conference on Automated Software Engineering (ASE 2017) - Doctoral Symposium}, keywords = {API-specific Bugs, automatic program repair, Specification Mining}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Gonschorek2017, title = {On Improving Rare Event Simulation for Probabilistic Safety Analysis}, author = {Tim Gonschorek and Ben Lukas Rabeler and Frank Ortmeier and Dirk Schomburg}, url = 
{https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/GonschorekEtAl_OnImprovingRareEventSimulationForProbabilisticSafetyAnalysis_memocode17.pdf}, doi = {10.1145/3127041.3127057}, isbn = {978-1-4503-5093-8}, year = {2017}, date = {2017-09-29}, booktitle = {Proceedings of the 15th ACM-IEEE International Conference on Formal Methods and Models for System Design}, pages = {15--24}, publisher = {ACM}, address = {New York, NY, USA}, series = {Lecture Notes in Computer Science}, abstract = {This paper presents a new approach for generating probability distributions for Monte Carlo based stochastic model checking. Stochastic approaches are used for quantitative analysis of safety critical systems if numerical model checking tools get overwhelmed by the complexity of the models and the exploding state space. However, sample based stochastic measures get problems when the estimated event is very unlikely (e.g. $10^{-6}$ and below). Therefore, rare event techniques, like importance sampling, increase the likelihood of samples representing model executions which depict the desired rare event, e.g., a path leading into a hazardous state. The presented approach uses qualitative counter examples to derive appropriate distributions for the sampling, i.e., a distribution that increases the likelihood of the examined rare event and decreases the variance. Therefore, we define the sampling distribution such that transitions included in the counter example become more likely. For keeping the variance small, the new distribution is calculated within an optimization problem which minimizes the variance on the counter example paths by calculating new probability distributions for the related transitions. 
We have evaluated this approach in four different real-world case studies and compare it to other leading stochastic and numeric model checking tools. It turned out that in the case of rare event properties, the proposed approach outperforms other techniques in run time and, slightly more important, in the accuracy of the estimation result. }, keywords = {Monte Carlo Simulation, Probabilistic Model Checking}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Filax2017, title = {Building Models we can rely on: Requirements Traceability for Model-based Verification Techniques}, author = {Marco Filax and Tim Gonschorek and Frank Ortmeier}, editor = {Bozzano M., Papadopoulos Y.}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/08/imbsa17_preprint.pdf}, doi = {10.1007/978-3-319-64119-5_1}, isbn = {978-3-319-64118-8 }, year = {2017}, date = {2017-09-11}, booktitle = {Proceedings of the 5th International Symposium on Model-Based Safety and Assessment (IMBSA 2017)}, volume = {10437}, pages = {3-18}, publisher = {Springer, Cham}, series = { Lecture Notes in Computer Science}, abstract = {Proving the safety of a critical system is a complex and complicated task. Model-based formal verification techniques can help to verify a System Requirement Specification (SRS) with respect to normative and safety requirements. Due to an early application of these methods, it is possible to reduce the risk of high costs caused by unexpected, late system adjustments. Nevertheless, they are still rarely used. One reason among others is the lack of an applicable integration method in an existing development process. In this paper, we propose a process to integrate formal model-based verification techniques into the development life-cycle of a safety critical system. The core idea is to systematically refine informal specifications by 1) categorization, 2) structural refinement, 3) expected behavioral refinement, and finally, 4) operational semantics. 
To support modeling, traceability is upheld through all refinement steps and a number of consistency checks are introduced. The proposed process has been jointly developed with the German Railroad Authority (EBA) and an accredited safety assessor. We implemented an Eclipse-based IDE with connections to requirement and systems engineering tools as well as various verification engines. The applicability of our approach is demonstrated via an industrial-sized case study in the context of the European Train Control System with ETCS Level 1 Full Supervision. }, keywords = {Practical Experiences, Traceability, verification}, pubstate = {published}, tppubtype = {inproceedings} } @misc{Gonschorek2017b, title = {A Verification Environment for Critical Systems: Integrating Formal Methods into the Safety Development Life-cycle}, author = {Tim Gonschorek and Marco Filax and Frank Ortmeier}, editor = {Otto-von-Guericke-Universit\"{a}t Magdeburg}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/GonschorekEtAl_-VECS_imbsa17_poster.pdf https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/GonschorekEtAl_-VECS_imbsa17_poster.pdf}, year = {2017}, date = {2017-09-11}, howpublished = {Invited Paper: 5th International Symposium on Model-Based Safety and Assessment (IMBSA 2017)}, keywords = {formal verification for safety critical systems, model checking, model-based safety assessment}, pubstate = {published}, tppubtype = {misc} } @article{fuentealba3analysis, title = {Analysis of the foetal heart rate in cardiotocographic recordings through a progressive characterization of decelerations}, author = {Patricio Fuentealba and Alfredo Illanes and Frank Ortmeier}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2018/04/fuentealba3analysis.pdf}, doi = {10.1515/cdbme-2017-0089}, issn = { 2364-5504}, year = {2017}, date = {2017-09-07}, journal = {Current Directions in Biomedical Engineering}, volume = {3}, number = {2}, pages = {423--427}, 
publisher = {De Gruyter}, abstract = {The main purpose of this work is to propose a new method for characterization and visualization of FHR deceleration episodes in terms of their depth, length and location. This is performed through the estimation of a progressive baseline computed using a median filter allowing to identify and track the evolution of decelerations in cardiotocographic CTG recordings. The proposed method has been analysed using three representative cases of normal and pathological CTG recordings extracted from the CTU-UHB database freely available on the PhysioNet Website. Results show that both the progressive baseline and the parameterized deceleration episodes can describe different time-variant behaviour, whose characteristics and progression can help the observer to discriminate between normal and pathological FHR signal patterns. This opens perspectives for classification of non-reassuring CTG recordings as a sign of foetal acidemia.}, keywords = {baseline, cardiotocograph, CTG decelerations, floating-line, foetal monitoring, progressive and time variant analysis}, pubstate = {published}, tppubtype = {article} } @inproceedings{Schillreff_2017, title = {Towards High Accuracy Robot-Assisted Surgery}, author = {Nadia Schillreff and Mykhaylo Nykolaichuk and Frank Ortmeier}, editor = {IFAC-PapersOnLine}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/08/Schillreff_cameraReady_IFAC2017.pdf}, year = {2017}, date = {2017-08-28}, journal = {Proceedings of the 20th World Congress of the International Federation of Automatic Control (IFAC 2017)}, abstract = {In this article, we propose a new error modeling approach for robot manipulators in order to improve the absolute accuracy of a tools pose by using polynomial regression method. The core idea is based on a well-known fact: accuracy of repeatedly reaching a given position is much higher than the accuracy of absolute positioning (i.e. moving the manipulator to a given position). 
The underlying reason is that positioning errors are dominated by systematic errors - while stochastic errors are significantly smaller. This fact is then exploited to apply some learning algorithm to derive an error-compensation model. Technically, this means that the robot is being calibrated a priori with some external sensor once. Afterwards it can operate with a much better quality. In detail, we propose to first perform a coordinate transformation using a least mean square approach (for registration). Then, to account for deviations of measured position in comparison to nominal (robot) position, the frame transformation model at each robot's joint is extended by translational and rotational error parameters. This is then used to build an error compensation model with regression techniques. We evaluate the method on a data set obtained using a 7DOF robot manipulator and show that this approach brings positioning error to the order of repeatability errors for this manipulator.}, keywords = {error modelling, robotics}, pubstate = {published}, tppubtype = {inproceedings} } @mastersthesis{Veera2017, title = {Multi-Objective Issue Tracking Framework}, author = {Balaji Satya Pradeep Veera}, editor = {Sebastian Nielebock and Frank Ortmeier}, year = {2017}, date = {2017-07-28}, school = {Otto-von-Guericke-Universit\"{a}t Magdeburg}, abstract = {Assigning issue reports to individual developers is typically a manual, time-consuming and tedious task for the triager. Many automated issue tracking systems have been proposed over the time to choose an optimal developer for an issue report. These Single-Objective approaches try to find a developer who has fixed similar issue reports in the past. Thus they find an optimal developer for an issue report. While choosing an optimal developer, the Single-Objective approaches don't consider the severity of the issue report and the workload of the developer. 
The assigned issue report might reassign to another developer if the first assigned developer is busy with other issue reports. To solve this problem, we propose a framework of automated issue tracking system called Multi-Objective Issue Tracking Framework (MOITF). MOITF considers the workload of the developer along with the severity of the issue report while choosing the developer. This approach analyzes the history of previously closed issue reports for making predictions to the new issue report. The problem of selecting a developer who has fixed similar issue reports in the past and had less workload is viewed as a Multi objective problem. The traditional non-dominated sorting algorithm is used to find non-dominated devel- opers who have knowledge about the problem mentioned in the issue report and also have time to fix the issue report.}, keywords = {Framework, Issue Tracking, multi-objective}, pubstate = {published}, tppubtype = {mastersthesis} } @mastersthesis{eiserlohJSearchRepair2017, title = {Semantische Suche f\"{u}r automatische Fehlerkorrekturen im objektorientierten Paradigma}, author = {Matthias Eiserloh}, editor = {Sebastian Nielebock and Frank Ortmeier}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/07/BA_2017_EiserlohMatthias-1.pdf}, year = {2017}, date = {2017-07-03}, school = {Otto-von-Guericke Universit\"{a}t Magdeburg}, type = {Bachelor Thesis}, keywords = {automatic program repair, object orientation}, pubstate = {published}, tppubtype = {mastersthesis} } @article{Bitsch2017, title = {Effiziente Sicherheitsnachweisf\"{u}hrung mithilfe modellbasierter Systemanalyse}, author = {Friedemann Bitsch and Marco Filax and Tim Gonschorek and Frank Ortmeier and Rolf Schumacher}, editor = {DVV Media Group}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/2017_BitschEtAl_EffizienteSicherheitsnachweisf\"{u}hrung.pdf}, year = {2017}, date = {2017-06-01}, journal = {Signal + Draht}, abstract = {This paper 
introduces a model-based computer-aided methodology for safety analyses in development and assessment processes for signalling and train control systems. The objective is to realise the application of model-based formal methods and simultaneously to reduce the outlay for using them to a degree acceptable in industrial practice. Instead of performing safety analyses in parallel with the development process, existing system analyses and design models are used for deriving the necessary safety analyses models as automatically as possible. Therefore, statements on safety and correctness can already be calculated in early development phases with precise results. Those early statements are decisive, because they make it possible to adapt and correct the system design at an early stage and to do it cost-efficiently.}, keywords = {integration of formal methods, model-based safety assessment}, pubstate = {published}, tppubtype = {article} } @inproceedings{spl-in-cps-splc-vision-2017, title = {Beyond Software Product Lines: Variability Modeling in Cyber-Physical Systems}, author = {Jacob Kr\"{u}ger and Sebastian Nielebock and Sebastian Krieter and Christian Diedrich and Thomas Leich and Gunter Saake and Sebastian Zug and Frank Ortmeier}, url = {https://dl.acm.org/citation.cfm?id=3106217}, year = {2017}, date = {2017-05-11}, booktitle = {Proceedings of the 21st International Systems and Software Product Line Conference (SPLC 2017) - Vision Track}, journal = {21st International Systems and Software Product Line Conference (SPLC 2017) - Vision Track}, pages = {237--241}, abstract = {Smart IT has an increasing influence on the control of daily life. For instance, smart grids manage power supply, autonomous automobiles take part in traffic, and assistive robotics support humans in production cells. We denote such systems as Cyber-physical Systems (CPSs), where cyber addresses the controlling software, while physical describes the controlled hardware. 
One key aspect of CPSs is their capability to adapt to new situations autonomously or with minimal human intervention. To achieve this, CPSs reuse, reorganize and reconfigure their components during runtime. Some components may even serve in different CPSs and different situations simultaneously. The hardware of a CPS usually consists of a heterogeneous set of variable components. While each component can be designed as a software product line (SPL), which is a well established approach to describe software and hardware variability, it is not possible to describe CPSs' variability solely on a set of separate, non-interacting product lines. To properly manage variability, a CPS must specify dependencies and interactions of its separate components and cope with variable environments, changing requirements, and differing safety properties. In this paper, we i) propose a classification of variability aspects, ii) point out current challenges in variability modeling, and iii) sketch open research questions. Overall, we aim to initiate new research directions for variable CPSs based on existing product-line techniques.}, keywords = {Cyber-physical system, Modeling, Software product line}, pubstate = {published}, tppubtype = {inproceedings} } @mastersthesis{Bedau2017, title = {Modellbasierte Sicherheitsanalyse eines Bahnhofsstellwerks}, author = {Ludwig Bedau}, editor = {Otto-von-Guericke-Universit\"{a}t Magdeburg}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2018/02/BA_2017_BedauLudwig.pdf}, year = {2017}, date = {2017-03-31}, school = {Otto-von-Guericke-Universit\"{a}t Magdeburg}, abstract = {Stellwerke stellen eine zentrale Komponente fu¨r den sicheren Schienenverkehr dar. Um die korrekte Funktionsweise sicherzustellen und sicherheitskritische Zust¨ande auszuschlie\ssen, sind formale Methoden angebracht. 
In dieser Arbeit soll gezeigt werden, wie in der Modellierungssprache SAML (System Analysis and Modeling Language) ein Baukastensystem erstellt wird, mit dem sich beliebige Stellwerke modellieren lassen. Im Rahmen der Arbeit wird beschrieben, wie das reale System abstrahiert wurde, um das zu untersuchende Verhalten herauszustellen. Die Arbeit beschreibt weiterhin die Sicherheits- und Liveness-Spezifikationen, die zur Verifikation der generierten Bahnhofsstellwerke genutzt werden. Im Anschluss wird an einem Beispiel die Modellierung eines Bahnhofsstellwerkes auf Basis des vorgestellten Baukastensystems gezeigt. Bei der Verifikation wurde eine kleine Auswahl an Model-Checking-Tools genutzt. Ein Vergleich dieser Tools in Bezug auf das hier gestellte Problem sowie eine Auswertung der Erfahrungen, die w\"{a}hrend des Projektes gesammelt werden konnten, sollen den Abschluss bilden.}, keywords = {interlocking systems, Railway System Verification}, pubstate = {published}, tppubtype = {mastersthesis} } @mastersthesis{Wehmeier2017, title = {Konzeption einer Architektur f\"{u}r (Big-)Data-Analysen im Industrie 4.0-Kontext}, author = {Leon Wehmeier}, editor = {Otto-von-Guericke-Universit\"{a}t Magdeburg}, year = {2017}, date = {2017-01-30}, school = {Otto-von-Guericke-Universit\"{a}t Magdeburg}, keywords = {Big Data, Industrie 4.0}, pubstate = {published}, tppubtype = {mastersthesis} } @inproceedings{filax17-wscg, title = {QuadSIFT: Unwrapping Planar Quadrilaterals to Enhance Feature Matching}, author = {Marco Filax and Tim Gonschorek and Frank Ortmeier}, url = {http://wscg.zcu.cz/wscg2017/short/I07-full.PDF https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/wscg2017_FilaxEtAl_QuadSIFT.pdf}, year = {2017}, date = {2017-01-01}, booktitle = {Proceedings of the 25th International Conference in Central Europe on Computer Graphics, Visualization and Computer Vision, WSCG 2017 - Short Papers Proceedings}, volume = {25}, abstract = {Feature matching is one of the 
fundamental issues in computer vision. The established methods, however, do not provide reliable results, especially for extreme viewpoint changes. Different approaches have been proposed to lower this hurdle, e. g., by randomly sampling different viewpoints to obtain better results. However, these methods are computationally intensive. In this paper, we propose an algorithm to enhance image matching under the assumption that an image, taken in man-made environments, typically contains planar, rectangular objects. We use line segments to identify image patches and compute a homography which unwraps the perspective distortion for each patch. The unwrapped image patches are used to detect, describe and match SIFT features. We evaluate our results on a series of slanted views of a magazine and augmented reality markers. Our results demonstrate, that the proposed algorithm performs well for strong perspective distortions.}, keywords = {Feature Detection, Fine-Grained Recognition, Perspective Distortion, Projective Transformation, SIFT}, pubstate = {published}, tppubtype = {inproceedings} } @unpublished{Nielebock-Lambda-2016, title = {Adoption of Lambda-Expressions in Object-Oriented Programs does not necessarily decrease Source Code Size}, author = {Sebastian Nielebock and Frank Ortmeier}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2016/11/paper-lambda-adoption-final.pdf}, year = {2016}, date = {2016-08-31}, abstract = {Lambda-expressions are state-of-the-art in the functional programming paradigm, e.g. to easily handle functional behavior as a parameter or to temporarily implement a functionality, where it is actually needed. These benefits motivate the developers of object oriented programming languages to extend their language by Lambda-expressions. When doing this, one out of several claims is that Lambda-expression are capable to decrease the source code size. Up to now, only little validation of this claim was conducted. 
Thus, within this paper, we analyze, whether this claim is valid or not. For this purpose, we investigated the adoption of Lambda-expression and their effect on the source code size based on 110 open source projects, written in one of the three commonly applied object oriented programming languages C#, C++ and Java. Our results, obtained so far, seem to contradict this common claim. Moreover, the results indicate the opposite: the addition of Lambda-expressions is correlated with a more increased source code size than usual. Thus, we conclude that Lambda-expression does not necessarily decrease source code size. Within this paper, we discuss potential reasons for this result, whereas a valid explanation is still an open research question.}, note = {Presented at the 28th Symposium on Implementation and Application of Functional Languages 2016 in Leuven, Belgium (unreviewed pre-proceedings)}, keywords = {Adoption, Lambda-Expression, Object-Oriented Programming Languages, Source Code Size}, pubstate = {published}, tppubtype = {unpublished} } @mastersthesis{krolikowski2016, title = {Einfluss unterschiedlicher Kommentararten auf die Lesbarkeit des Quellcodes}, author = {Dariusz Krolikowski}, editor = {Sebastian Nielebock and Frank Ortmeier}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2016/11/masterarbeit-krolikowski-1.pdf}, year = {2016}, date = {2016-08-19}, school = {Otto-von-Guericke Universit\"{a}t Magdeburg}, keywords = {Kommentare, Masterarbeit, Quellcodelesbarkeit}, pubstate = {published}, tppubtype = {mastersthesis} } @inproceedings{MF16-RSSR, title = {Correct Formalization of Requirement Specifications: A V-Model for Building Formal Models}, author = {Marco Filax and Tim Gonschorek and Frank Ortmeier }, editor = {Springer International Publishing }, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2016/08/RSSR_CorrectFormalizationOfRequirementSpecifications.pdf}, doi = {10.1007/978-3-319-33951-1_8}, isbn = {978-3-319-33951-1}, 
year = {2016}, date = {2016-06-15}, booktitle = {Reliability, Safety, and Security of Railway Systems. Modelling, Analysis, Verification, and Certification First International Conference, RSSRail 2016, Paris, France, June 28-30, 2016, Proceedings}, pages = {106--122}, abstract = {In recent years, formal methods have become an important approach to ensure the correct function of complex hardware and software systems. Many standards for safety critical systems recommend or even require the use of formal methods. However, building a formal model for a given specification is challenging. This is because verification results must be considered with respect to the validity of the model. This leads to the question: ``Did I build the right model?''. For system development the analogous question is ``Did I build the right system?''. This is often answered with requirements traceability through the whole development cycle. For formal verification this question often remains unanswered. The standard model, which is used in development of safety critical applications, is the V-model. The core idea is to define tests for each phase during system development. In this paper, we propose an approach - analogously to the V-model for development - which ensures correctness of the formal model with respect to requirements. 
We will illustrate the approach on a small example from the railways domain.}, keywords = {Formal Modelling Process, Railway System Verification, Requirements Traceability, System Verification, VIP-MoBaSA}, pubstate = {published}, tppubtype = {inproceedings} } @mastersthesis{Anders2016, title = {Indoor-Positionsbestimmung mit Hilfe von Bluetooth-Low-Energy-Beacons und Pedestrian Dead Reckoning}, author = {Anton Anders}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2016/08/BA_Anton_Anders.pdf}, year = {2016}, date = {2016-03-29}, school = {Otto-von-Guericke-Universit\"{a}t Magdeburg}, type = {Bachelorarbeit}, keywords = {Augmented Reality, Bluetooth}, pubstate = {published}, tppubtype = {mastersthesis} } @mastersthesis{Klockmann2016, title = {Analyse von Crowd-Simulation Algorithmen beim Durchqueren von Engstellen}, author = {Maximilian Klockmann}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2016/08/BA_Max_Klockmann.pdf}, year = {2016}, date = {2016-03-09}, school = {Otto-von-Guericke-Universit\"{a}t Magdeburg}, type = {Bachelorarbeit}, keywords = {virtual reality}, pubstate = {published}, tppubtype = {mastersthesis} } @article{ei2016, title = {Bringing formal methods “on the rail” - Modellbasierte Systemanalyse in der Sicherheitsnachweisf\"{u}hrung}, author = {Marco Filax and Tim Gonschorek and Tanja Hebecker and Michael Lipaczewski and Agnes Madalinski and Frank Ortmeier and Mario Fietze and Rolf Schumacher}, editor = {Verband Deutscher Eisenbahn-Ingenieure E.V.}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2016/08/Ver\"{o}ffentlichung-Eisenbahningeneur.pdf}, year = {2016}, date = {2016-01-01}, journal = {Der Eisenbahn Ingenieur}, pages = {24 -27}, abstract = {Funktionen sicherheitskritischer Systeme im Eisenbahnsektor werden entsprechend ihrer tolerierbaren Gef\"{a}hrdungsraten in sogenannte Sicherheitsintegrit\"{a}tslevel (SIL 1-4) eingestuft. 
In der europ\"{a}ischen Norm EN50129 besteht, f\"{u}r die Stufen drei und vier, die dringende Empfehlung, im Entwicklungs- und Spezifikationsprozess, formale Methoden anzuwenden. Die Otto-von-Guericke-Universit\"{a}t Magdeburg hat dazu, begleitet durch das Eisenbahn-Bundesamt (EBA) und Gutachtern des EBA, einen Satz unterst\"{u}tzender Werkzeuge und Verfahren entwickelt, welche in diesem Artikel, am Beispiel der Punktf\"{o}rmigen Zugbeeinflussung (PZB), vorgestellt werden sollen. Diese Werkzeuge erm\"{o}glichen die Portierung einer nat\"{u}rlichsprachlichen Systemanforderungsspezifikation in ein formales Modell, mit dessen Hilfe die Konsistenz und Vollst\"{a}ndigkeit der Systembeschreibung, sowie die definierten Sicherheitsanforderungen formal \"{u}berpr\"{u}ft werden k\"{o}nnen. Bereits in fr\"{u}hen Entwicklungsphasen k\"{o}nnen automatisiert qualitative und quantitative Absch\"{a}tzungen \"{u}ber die Sicherheit und Zuverl\"{a}ssigkeit mit pr\"{a}zisen und aussagekr\"{a}ftigen Resultaten berechnet werden. 
Gleichzeitig wird der Aufwand zur endg\"{u}ltigen, sicherheitstechnischen Bewertung durch vollst\"{a}ndige Traceability als Teil des Zertifizierungs- und Zulassungsprozesses reduziert.}, keywords = {Formal Modelling Process, Railway System Verification, Requirements Traceability, System Verification, VIP-MoBaSA}, pubstate = {published}, tppubtype = {article} } @mastersthesis{Bahl2015, title = {Konzepte zur Optimierung des Betriebshofmanagements mit pervasiven Assistenzsystemen}, author = {Benedikt Bahl}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2016/08/MA_Benedikt_Bahl.pdf}, year = {2015}, date = {2015-12-15}, school = {Otto-von-Guericke-Universit\"{a}t Magdeburg}, type = {Masterarbeit}, keywords = {Augmented Reality}, pubstate = {published}, tppubtype = {mastersthesis} } @mastersthesis{BAHamann2015, title = {Kopplung formaler Modelle mit realweltlichen Umgebungsmodellen}, author = {Dominik Hamann}, editor = {Otto-von-Guericke-Universit\"{a}t Magdeburg}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2015/12/BAHamann2015.pdf}, year = {2015}, date = {2015-12-14}, address = {Universit\"{a}tsplatz 2, 39106 Magdeburg}, school = {Otto-von-Guericke-Universit\"{a}t Magdeburg}, abstract = {Sicherheitskritische technische Systeme werden insbesondere in der Automobilindustrie und der Luftfahrt immer komplexer. Ein Ansatz, um die Sicherheit solcher Systeme zu \"{u}berpr\"{u}fen, besteht darin, ein Modell des Systems zu erstellen und dieses von einem Model Checker auf gef\"{a}hrdende Situationen zu \"{u}berpr\"{u}fen. Die Umgebung eines softwareintensiven Systems muss oft, aufgrund der Funktionsweise von Model Checkern und aufgrund von Restriktionen der verwendeten Sprache, mit einem hohen Abstraktionsgrad modelliert werden. Das betrifft insbesondere Systeme, die nicht nur rein logische Aufgaben ausf\"{u}hren, sondern mittels Sensorik und Aktorik mit der realen Welt interagieren. 
Herrscht eine zu gro\sse Diskrepanz zwischen der realen Umgebung und dem Modell der Umgebung, tritt in dem Modell Fehlverhalten nicht auf, dass in der realen Welt auftreten w\"{u}rde. Die Ergebnisse eines Model Checkers sind dann f\"{u}r dieses Modell nicht aussagekr\"{a}ftig. In dieser Arbeit wird eine Schnittstelle vorgestellt, mit der formale Modelle mit realweltlichen Umgebungsmodellen gekoppelt werden k\"{o}nnen. Dadurch wird getestet, inwiefern sich das Verhalten des formalen Modells in einer realistischen Umgebung von dem Verhalten einer abstrahierten Umgebung unterscheidet. Dieser Vergleich kann Aufschluss \"{u}ber die Validit\"{a}t des verwendeten Umgebungsmodells bringen.}, howpublished = {Bachelorarbeit}, keywords = {co-simulation of system models, model-based verfication}, pubstate = {published}, tppubtype = {mastersthesis} } @mastersthesis{Heum\"{u}ller2015, title = {Multi-Abstraction Model Based Software Development for Embedded Low-Cost Applications}, author = {Robert Heum\"{u}ller}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/01/thesis-1.pdf}, year = {2015}, date = {2015-11-06}, keywords = {arm, compiler, embedded, llvm, mdd, mde, model based}, pubstate = {published}, tppubtype = {mastersthesis} } @inproceedings{Hebecker2015, title = {Safe Prediction-Based Local Path Planning using Obstacle Probability Sections}, author = {Tanja Hebecker and Frank Ortmeier}, editor = {Christian Laugier and Philippe Martinet and Urbano Nunes and Christoph stiller }, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2016/02/hebecker2015_SafePrediction-basedLocalPathPlanningUsingObstacleProbability.pdf}, year = {2015}, date = {2015-09-28}, booktitle = {Proceedings of the 7th IROS Workshop on Planning, Perception and Navigation for Intelligent Vehicles}, pages = {183 - 188}, abstract = {Autonomous mobile robots gain more and more importance. In the nearest future they will be a part of everyday life. 
Therefore, it is critical to make them as reliable and safe as possible. We present a local path planner that shall ensure safety in an environment cluttered with unexpectedly moving obstacles. In this paper, the motion of obstacles is predicted by generating probability sections, and collision risks of path configurations are checked by determining whether these configurations lead inevitably to a collision or not. The presented approach worked efficiently in scenarios with static and dynamic obstacles.}, keywords = {local path planning, Obstacle probability sections}, pubstate = {published}, tppubtype = {inproceedings} } @mastersthesis{BA-Orth, title = {Trace refinement with reduced interpolant automata for binary programs}, author = {Severin Orth}, url = {/cse/?attachment_id=1937}, year = {2015}, date = {2015-09-14}, school = {Otto-von-Guericke-Universit\"{a}t Magdeburg}, keywords = {binary programs, trace refinement}, pubstate = {published}, tppubtype = {mastersthesis} } @inproceedings{Nykolaichuk2015, title = {Coverage Path Re-Planning for Processing Faults}, author = {Mykhaylo Nykolaichuk and Frank Ortmeier}, editor = {Springer}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2015/07/coveragePathReplanning_final.pdf https://link.springer.com/chapter/10.1007/978-3-319-22876-1_31}, year = {2015}, date = {2015-08-20}, booktitle = {The 8th International Conference on Intelligent Robotics and Applications (ICIRA2015) }, volume = {9245}, pages = {358-368 }, abstract = {Currently, an automated surface treatment or finishing (e.g., abrasive blasting, cleaning or painting) is performed in two consecutive steps: first processing by tool, second quality evaluation by sensor. Often, a finished surface has defects like areas not properly processed. This is caused by path inaccuracy or errors in tool deployment. The defected areas can be detected only during a subsequent quality evaluation. 
As a result, a complete surface reprocessing is required which is costly and time-consuming. We propose a new approach that is a combination of surface treatment and quality evaluation processes in a single deployment. In our approach, an initial coverage path for surface treatment by a tool is given or calculated using a state-of-the-art coverage path planning algorithm. In fact, we extend an off-line generated initial coverage path with an ability to react to sensor-based defect recognitions during surface processing by a tool.}, keywords = {coverage path planning, online path planning}, pubstate = {published}, tppubtype = {inproceedings} } @phdthesis{Alatartsev2015c, title = {Robot Trajectory Optimization for Relaxed Effective Tasks}, author = {Sergey Alatartsev}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2016/01/Alatartsev_PhD_thesis.pdf}, year = {2015}, date = {2015-07-07}, school = {Otto-von-Guericke University of Magdeburg}, abstract = {Industrial robots are flexible machines that are currently involved in multiple production domains. Mainly their workflow consists of two alternating stages. The first stage is effective movements that are required to perform a task, e.g., welding a seam. The second stage is supporting movements that are needed to move from one effective task to another, e.g., movements between welding seams. Many effective tasks allow a certain freedom during their execution, e.g., the robot’s tool might have a certain deviation during welding. This freedom is often ignored and robots are programmed manually based on the programmer’s intuition. Nonetheless, this freedom can be used as an extra degree of freedom for robot trajectory optimization. In this thesis, we propose a formalization of this freedom for effective tasks. We refer to an effective task with a formalized freedom of execution as a relaxed effective task. 
Having an infinite number of ways to execute a task raises several research questions: (i) how to optimize a sequence of entry points for relaxed effective tasks? (ii) how to find starting robot configurations for these tasks? (iii) how to optimize a robot trajectory for a certain relaxed task? We propose a solution concept that decomposes a problem containing all three questions into three components that can be applied in combination with each other or with other state-of-the-art approaches. The first component considers the problem of finding a sequence of effective tasks and their entry points. This problem is modeled as the Traveling Salesman Problem with Neighborhoods (TSPN) where a tour has to be found through a set of areas. We propose a Constricting Insertion Heuristic for constructing a tour and a Constricting 3-Opt for improving the tour. In the second component, the problem of adapting a tour for a robot to execute and searching for starting robot configurations is modeled as a Touring-a-sequence-of-Polygons Problem (TPP) where a tour has to be found through a given sequence of areas. We propose a modification of the Rubber-Band Algorithm (RBA). We refer to this extension as a Nested RBA. Optimization of a robot trajectory in the third component is also represented as a TPP. However, in contrast to the classic RBA where areas are constricted with a polyline, we propose an extension of the RBA called Smoothed RBA where areas are constricted with a smooth curve which leads to a minimal cost robot trajectory. 
}, keywords = {robotics, travelling salesman problem}, pubstate = {published}, tppubtype = {phdthesis} } @mastersthesis{Huehne15, title = {Innenraumlokalisierung von Mobilger\"{a}ten mittels Bluetooth LE}, author = {Patrick H\"{u}hne}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2016/08/BA_Patrick_H%C3%BChne.pdf}, year = {2015}, date = {2015-06-22}, school = {Otto-von-Guericke-Universit\"{a}t Magdeburg}, type = {Bachelorarbeit}, keywords = {Augmented Reality, Bluetooth}, pubstate = {published}, tppubtype = {mastersthesis} } @inproceedings{Gonschorek2015DCDS, title = {Slice or Unfold - Experiments on Checking Synchronous Models with Backwards Slicing}, author = {Tim Gonschorek and Frank Ortmeier}, editor = {International Federation of Automatic Control}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2015/05/paper_bwmc_dcds15.pdf}, year = {2015}, date = {2015-05-31}, booktitle = {5th International Workshop on Dependable Control of Discrete Systems (DCDS 2015)}, abstract = {Nowadays, model checking approaches have one major drawback: They suffer from the state space explosion problem. We think one major problem is that the verification tools always must hold some (symbolic) representation, e.g. a BDD or an adjacency list, in the memory during the verification process. In this paper we propose a verification approach that does not need to hold a complete representation while checking the model. This is done by slicing backward the transition rules, which define the complete state system. Here, we present the basic backward slicing approach for verifying CTL safety properties traversing the state machine by using a given transition model. 
In the following, we give a brief evaluation with special test models and compare the results to the state-of-the-art model checker nuXmv.}, note = {accepted, will be published this year}, keywords = {safety analysis, verification}, pubstate = {published}, tppubtype = {inproceedings} } @mastersthesis{BARabeler2015, title = {Abstraktion kontinuierlichen Verhaltens zur modellbasierten Sicherheitsanalyse}, author = {Ben Rabeler}, editor = {Otto-von-Guericke-Universit\"{a}t Magdeburg}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2015/06/ba_thesis_rabeler.pdf}, year = {2015}, date = {2015-05-18}, abstract = {Die Sicherheit von technischen Systemen ist in vielen Gebieten wie der Luftfahrt oder im Stra{\ss}enverkehr von elementarer Bedeutung f\"{u}r die Hersteller solcher Systeme wie f\"{u}r die Anwender. Ein Ansatz, um die Sicherheit von diskreten Systemen zu zeigen, besteht darin, diese in einem diskreten Modell zu abstrahieren und anschlie{\ss}end mit einem Model Checker auf gef\"{a}hrdende Situationen zu \"{u}berpr\"{u}fen. Viele Szenarien in den Anwendungsbereichen wie der Luftfahrt oder dem Stra{\ss}enverkehr beruhen jedoch auf kontinuierlichem Systemverhalten. In dieser Arbeit wird ein Verfahren vorgestellt, das ein kontinuierliches System in diskrete Automaten abstrahiert, sodass diese Automaten mit einem symbolischen Model Checker auf eine Sicherheitsspezifikation \"{u}berpr\"{u}ft werden k\"{o}nnen. Das Verfahren wurde in einer Umgebung, in der diskrete Modelle erstellt und \"{u}berpr\"{u}ft werden k\"{o}nnen, als Prototyp umgesetzt. 
Dieser dient dazu, anhand von Experimenten Eigenschaften des Verfahrens herauszufinden.}, keywords = {hybrid model checking, model based verification, model checking}, pubstate = {published}, tppubtype = {mastersthesis} } @mastersthesis{Doerfler2015, title = {Protokollautomat f\"{u}r die Kommunikation zwischen Ladestation und Elektrofahrzeug}, author = {Stephan D\"{o}rfler}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2015/05/thesis_doerfler_2015.pdf}, year = {2015}, date = {2015-04-09}, school = {Otto-von-Guericke-Universit\"{a}t Magdeburg}, type = {Bachelorarbeit}, keywords = {}, pubstate = {published}, tppubtype = {mastersthesis} } @article{saml-vs-altarica-ress-elsevier-2015, title = {Comparison of Modeling Formalisms for Safety Analyses: SAML and AltaRica}, author = {Michael Lipaczewski and Tatiana Prosvirnova and Frank Ortmeier and Antoine Rauzy and Simon Struck}, url = {http://www.sciencedirect.com/science/article/pii/S0951832015001040}, year = {2015}, date = {2015-04-08}, journal = {Reliability Engineering \& System Safety}, abstract = {Many states/transitions formalisms have been proposed in the literature to perform Safety Analyses. In this paper we compare two of them: SAML and AltaRica. These formalisms have been developed by different communities. Their “look-and-feel” is thus quite different. Yet, their underlying mathematical foundations are very similar: both of them rely on state automata. 
It is therefore of interest to study their ability to assess the reliability of systems, their respective advantages and drawbacks and to seek for opportunities of a cross fertilization.}, keywords = {Altarica, model-based, safety analysis, SAML}, pubstate = {published}, tppubtype = {article} } @mastersthesis{Gonschorek2015, title = {A Backward Model Checking Approach with Slicing}, author = {Tim Gonschorek}, editor = {Tim Gonschorek}, year = {2015}, date = {2015-02-25}, address = {Universit\"{a}tsplatz 2, 39106 Magdeburg}, school = {Otto-von-Guericke-University Magdeburg}, abstract = {As software intensive safety critical systems are more and more introduced in our daily lives, it must be ensured that those systems are safe. One method for ensuring that a system follows its given specification, i.e., does not have some unwanted behavior, is model checking. Although nowadays larger models can be verified, model checking algorithms do not perform well in combination with complex real world problems. To overcome these difficulties, this thesis proposes a new backward model checking algorithm using SMT programs instead of boolean encodings for representing the system's state. Additionally, it uses model based slicing to reduce the impact of the complexity of a given model. This thesis explains such an algorithm, which can be used for verifying safety properties. Further, this thesis presents a prototypical implementation of this algorithm. This prototype will be used to evaluate the algorithm in comparison to the BDD and the IC3 algorithms implemented in the model checker nuXmv. 
The evaluation shows that there are some cases where the proposed approach performs better than the given BDD algorithm.}, keywords = {formal verification, model checking}, pubstate = {published}, tppubtype = {mastersthesis} } @article{Alatartsev2015, title = {Robotic Task Sequencing Problem: A Survey}, author = { Sergey Alatartsev and Sebastian Stellmacher and Frank Ortmeier}, url = {http://link.springer.com/article/10.1007/s10846-015-0190-6 https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2015/05/Alatartsev_JIRS2015.pdf}, doi = {10.1007/s10846-015-0190-6}, issn = {1573-0409}, year = {2015}, date = {2015-01-01}, booktitle = {Journal of Intelligent and Robotic Systems}, journal = {Journal of Intelligent and Robotic Systems}, publisher = {Springer}, abstract = {Today, robotics is an important cornerstone of modern industrial production. Robots are used for numerous reasons including reliability and continuously high quality of work. The main decision factor is the overall efficiency of the robotic system in the production line. One key issue for this is the optimality of the whole set of robotic movements for industrial applications, which typically consist of multiple atomic tasks such as welding seams, drilling holes, etc. Currently, in many industrial scenarios such movements are optimized manually. This is costly and error-prone. Therefore, researchers have been working on algorithms for automatic computation of optimal trajectories for several years. This problem gets even more complicated due to multiple additional factors like redundant kinematics, collision avoidance, possibilities of ambiguous task performance, etc. This survey article summarizes and categorizes the approaches for optimization of the robotic task sequence. It provides an overview of existing combinatorial problems that are applied for robot task sequencing. 
It also highlights the strengths and the weaknesses of existing approaches as well as the challenges for future research in this domain. The article is meant for both scientists and practitioners. For scientists, it provides an overview on applied algorithmic approaches. For practitioners, it presents existing solutions, which are categorized according to the classes of input and output parameters.}, keywords = {}, pubstate = {published}, tppubtype = {article} } @mastersthesis{Plauschin2014, title = {Klassifikation latenter Fingerspuren mithilfe von Smartphones}, author = {Mathias Plauschin}, url = {/cse/wp-content/uploads/2015/03/thesis_plauschin_2014.pdf}, year = {2014}, date = {2014-11-30}, keywords = {}, pubstate = {published}, tppubtype = {mastersthesis} } @mastersthesis{Fritsch2014, title = {Erh\"{o}hung von Quellcode-Wartbarkeit durch Entwurfsmusterautomatisierung}, author = {Michael Fritsch}, editor = {Sebastian Nielebock and Frank Ortmeier}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2015/04/MA_Fritsch1.pdf}, year = {2014}, date = {2014-11-14}, keywords = {Automation, Design Patterns, Maintainability}, pubstate = {published}, tppubtype = {mastersthesis} } @mastersthesis{Dietze2014, title = {Semantische Verifikation von selbstbeschreibenden Ger\"{a}tekonfigurationen}, author = {Denis Dietze}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2015/03/thesis_dietze_2014.pdf}, year = {2014}, date = {2014-09-30}, keywords = {}, pubstate = {published}, tppubtype = {mastersthesis} } @mastersthesis{Dittmann2014, title = {Visualisierungskomponente f\"{u}r ein Hochspannungs-Gleichstrom-\"{U}bertragungs-Netz \"{U}berwachungssystem}, author = {Denny Dittmann}, url = {/cse/wp-content/uploads/2015/03/thesis_dittmann_2014.pdf}, year = {2014}, date = {2014-09-30}, keywords = {}, pubstate = {published}, tppubtype = {mastersthesis} } @article{hebecker2014JIRS, title = {Model-Based Local Path Planning for UAVs}, author = { Tanja Hebecker and 
Robert Buchholz and Frank Ortmeier}, editor = {Springer Netherlands}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2015/05/10.1007_s10846-014-0097-72.pdf http://link.springer.com/article/10.1007/s10846-014-0097-7}, doi = {10.1007/s10846-014-0097-7}, issn = {1573-0409}, year = {2014}, date = {2014-08-01}, journal = {Journal of Intelligent & Robotic Systems}, volume = {78}, pages = {127-142 }, abstract = {Autonomous aviation continuously becomes more and more important. Algorithms that enable this autonomy have developed quickly in the last years. This paper describes a concept for a reactive path planning algorithm. The aim is to develop a method for static obstacle avoidance of an unmanned aerial vehicle (UAV) by calculating collision-free paths within the field of view of a UAV’s obstacle detection sensor. In contrast to other algorithms, this method considers the properties of the obstacle detection sensors, plans paths that the UAV is able to track, and is applied in three-dimensional space without access to an inner loop controller. In this work we represent the field of view of a UAV as a grid map and apply the wavefront algorithm as the local path planning algorithm. We reduce the configuration space of UAVs within the field of view by calculating an approximated worst-case reachable set based on a linearized reference model. We evaluate the method with approximated specifications for the unmanned helicopters ARTIS and Yamaha RMAX, and with specifications for the obstacle detection sensors LIDAR \textendash and stereo camera. 
Experiments show that this method is able to generate collision-free paths in a region constricted by obstacles.}, keywords = {Grid map, Obstacle avoidance, Reachable set, Wavefront algorithm}, pubstate = {published}, tppubtype = {article} } @mastersthesis{thesis-jagla-2014, title = {Cross-Platform Remote Control of Web-Based Media}, author = {Tim Benedict Jagla}, url = {/cse/wp-content/uploads/2015/03/thesis-jagla-2014.pdf}, year = {2014}, date = {2014-06-19}, abstract = {A multitude of network and media related topics will be discussed in this work. From the basics of networking and different web communication techniques as well as media preparation and provision a concept will be derived to solve the problem of the general lack in web standard compliant browsers for mobile devices within the domain of web applications. A prototypical implementation will be presented and critically examined.}, keywords = {mobile devices, web communication}, pubstate = {published}, tppubtype = {mastersthesis} } @inproceedings{Heumueller2014, title = {Parsing Variant C Code: An Evaluation on Automotive Software}, author = {Robert Heum\"{u}ller and Jochen Quante and Andreas Thums}, editor = {GI}, year = {2014}, date = {2014-04-30}, booktitle = {16. 
Workshop Software-Reengineering \& Evolution}, keywords = {c, parsing, preprocessor, Software product line}, pubstate = {published}, tppubtype = {inproceedings} } @mastersthesis{thesis-heumueller-2014, title = {VECS Dataflow - A SAML Language Extension Allowing Intuitive Creation of Formal Automata Specifications}, author = {Robert Heum\"{u}ller}, url = {/cse/wp-content/uploads/2015/03/thesis.pdf}, year = {2014}, date = {2014-04-10}, type = {Bachelor's Thesis}, keywords = {dataflow, model-based, model-checking, safety}, pubstate = {published}, tppubtype = {mastersthesis} } @inproceedings{FODB:80901605, title = {Bringing VECS to the World - Challenges and Accomplishments in Teaching of Formal Model Analysis}, author = { Michael Lipaczewski and Marco Filax and Frank Ortmeier}, year = {2014}, date = {2014-01-01}, booktitle = {European Conference on Software Engineering Education. - Herzogenrath : Shaker}, pages = {217--228}, keywords = {model-based, VECS}, pubstate = {published}, tppubtype = {inproceedings} } @inbook{FODB:80901373, title = {VECS - Verification Environment for Critical Systems - Tool Supported Formal Modeling and Verification}, author = { Tim Gonschorek and Marco Filax and Michael Lipaczewski and Frank Ortmeier}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2015/07/gonshorek_VECS_2014.pdf}, year = {2014}, date = {2014-01-01}, booktitle = {IMBSA 2014: short \& tutorial proceedings of the 4th international symposium on model based safety assessment. - Magdeburg : Univ.}, pages = {63--64}, keywords = {model-based, VECS}, pubstate = {published}, tppubtype = {inbook} } @inproceedings{FODB:80901349, title = {A Dataflow Notation for SAML - Formal Modeling Without Fearing Timing Constraints}, author = { Robert Heum\"{u}ller and Michael Lipaczewski and Frank Ortmeier}, year = {2014}, date = {2014-01-01}, booktitle = {IMBSA 2014: short \& tutorial proceedings of the 4th international symposium on model based safety assessment. 
- Magdeburg : Univ.}, pages = {43-50}, keywords = {model-based, VECS}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{FODB:80901202, title = {On Traceability of Informal Specifications for Model-Based Verification}, author = { Marco Filax and Tim Gonschorek and Michael Lipaczewski and Frank Ortmeier}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2015/07/filax_IMBSA_2014.pdf}, year = {2014}, date = {2014-01-01}, booktitle = {IMBSA 2014: short & tutorial proceedings of the 4th international symposium on model based safety assessment.}, pages = {11-18}, publisher = {Magdeburg : Univ.}, abstract = {Safety critical systems are often specified by a set of informal requirements. The fulfillment of those requirements has, in general, to be verified through external assessors. In most cases models (i.e. UML diagrams) are used to communicate system architectures and decide whether a given system meets its requirements. Even though, there already exist multiple approaches to verify safety critical systems and assessors would benefit from the usage of model-based system verifications, they are not commonly used in industry. We propose a traceable modeling approach for verifying safety critical systems specified with a set of informal unstructured requirements. 
The proposed process is divided into three phases: the analysis of the given set of informal requirements, semi-formalization and formalization.}, keywords = {model-based, VECS, VIP-MoBaSA}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{FODB:80901022, title = {On Bringing Object-Oriented Software Metrics into the Model-Based World - Verifying ISO 26262 Compliance in Simulink}, author = { Lukas M\"{a}urer and Tanja Hebecker and Torben Stolte and Michael Lipaczewski and Uwe M\"{o}hrst\"{a}dt and Frank Ortmeier}, editor = {Springer International Publishing}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2015/05/maeurer2014SAM.pdf http://link.springer.com/chapter/10.1007%2F978-3-319-11743-0_15}, doi = {10.1007/978-3-319-11743-0_15}, isbn = {978-3-319-11743-0}, year = {2014}, date = {2014-01-01}, booktitle = {System analysis and modeling: models and reusability. - Berlin [u.a.] : Springer}, pages = {207-222}, publisher = {Springer}, series = {Lecture notes in computer science; 8769}, abstract = {For ensuring functional safety of electrical/electronic systems, it is necessary to exclude malfunctions from hardware and software as well as from the interaction of both. In today’s passenger vehicles, more and more safety critical functionalities are implemented in software. Thus, its importance for functional safety increases. The dominating safety standard for the automotive domain (ISO 26262) considers the software part and defines requirements for safety critical software. However, applying and fulfilling the standard is a major problem in industry. In this context, the paper presents a novel metric-based approach to evaluate dataflow-oriented software architectures used in many model-driven processes regarding the fulfillment of requirements defined by ISO 26262 (in particular part 6). 
The core idea is to derive metrics for model-based software from already existing, well-performing metrics elaborated for other programming paradigms. To link metrics to requirements fulfillment of ISO 26262, we briefly sketch the factor-criteriametrics paradigm for this problem. Technically, this paper presents a generic meta-model for dataflow systems, which is used to define the metrics. We implemented this meta-model and the metrics as a prototype for Matlab Simulink. As examples, two models of a 400 kW full Drive-by- Wire experimental vehicle with all-wheel-steering, all-wheel-drive, and electro-mechanical brakes are analyzed using this prototype.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{nykolaychukIMBSA_2014, title = {On Efficiently Specifying Models for Model Checking}, author = { Mykhaylo Nykolaichuk and Michael Lipaczewski and Tino Liebusch and Frank Ortmeier}, url = {http://link.springer.com/chapter/10.1007/978-3-319-12214-4_2 https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/Nykolaychuk_IMBSA_2014.pdf }, year = {2014}, date = {2014-01-01}, booktitle = {Proceedings of 4th International Symposium on Model Based Safety and Assessment (IMBSA 2014)}, abstract = {Using formal methods for quality assurance is recommended in many standards for safety critical applications. In most industrial contexts, model checking is the only viable option for formal verification, as interactive approaches often require very highly specialized experts. However, model checking typically suffers from the well-known state-space explosion problem. Due to this problem, engineers typically have to decide on a trade-off between readability and completeness of the model on one side, and the state space size, and thus, computational feasibility on the other. In this paper, we propose a method for reducing the state space by restructuring models. 
The core idea is to introduce as few additional states as possible by model design making state transitions more complex. To avoid unreadability and infeasible model sizes, we introduce a concept for hierarchical boolean formulas to efficiently specify state transitions. For evaluation purposes, we applied this approach to a case study using the VECS toolkit. In this exemplary case study, we were able to reduce the state space size significantly and make verification time feasible.}, keywords = {model-based, VECS}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{nielebock_et_al_verisure_2014, title = {A Graphical Notation for Probabilistic Specifications}, author = { Sebastian Nielebock and Tim Gonschorek and Frank Ortmeier}, url = {http://www.easychair.org/smart-program/VSL2014/VeriSure-2014-07-23.html#talk:3203 https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2015/05/nielebock_verisure_20142.pdf}, year = {2014}, date = {2014-01-01}, abstract = {Nowadays formal methods represent a powerful but in practice not well supported way for verification. One reason among others for this is that such methods are often conceived of being too theoretical, can likely be misunderstood by non-experts, and notations are therefore misapplied. For example to specify formal specifications, complex logics are state-of-the-art, especially for probabilistic verification properties. It is not only that users are unable to construct correct specifications but they also miss to understand what is meant by the verification results based on the specification. In this paper we address this problem and endorse the usage of a graphical notation to construct such specification properties, especially probabilistic ones. We present how such a notation may look and emphasize that one single notation is sufficient to solve both lack of comprehensibility of the specification as well as of the verification results. 
Moreover, we extract some future research directions.}, note = {Verification and Assurance - Second VeriSure Workshop 2014}, keywords = {graphical specification patterns, specification, specification patterns, verification}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Alatartsev2014_1, title = {Robot Trajectory Optimization for the Relaxed End-Effector Path}, author = { Sergey Alatartsev and Anton Belov and Mykhaylo Nykolaichuk and Frank Ortmeier}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2015/05/Alatartsev_ICINCO2014.pdf}, year = {2014}, date = {2014-01-01}, booktitle = {Proceedings of the 11th International Conference on Informatics in Control, Automation and Robotics (ICINCO)}, abstract = {In this paper we consider the trajectory optimization problem for the effective tasks performed by industrial robots, e.g., welding, cutting or camera inspection. The distinctive feature of such tasks is that a robot has to follow a certain end-effector path with its motion law. For example, welding a line with a certain velocity has an even influence on the surface. The end-effector path and its motion law depend on the industrial process requirements. They are calculated without considering robot kinematics, hence, are often “awkward” for the robot execution, e.g., cause high jerks in the robot’s joints. In this paper we present the trajectory optimization problem where the end-effector path is allowed to have a certain deviation. Such path is referred to as relaxed path. The goal of the paper is to make use of this freedom and construct the minimal-cost robot trajectory. 
To demonstrate the potential of the problem, jerk of the robot joint trajectory was minimized.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{Alatartsev2014, title = {Improving the Sequence of Robotic Tasks with Freedom of Execution}, author = { Sergey Alatartsev and Frank Ortmeier}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2015/05/Alatartsev_IROS2014.pdf}, year = {2014}, date = {2014-01-01}, booktitle = {IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)}, abstract = {An industrial robot’s workflow typically consists of a set of tasks that have to be repeated multiple times. A task could be, for example, welding a seam or cutting a hole. The efficiency with which the robot performs the sequence of tasks is an important factor in most production domains. The more often a set of tasks can be performed by a robot the more advantages it provides to the company. In most practical scenarios, the majority of tasks have a certain freedom of execution. For example, closed-contour welding task can often be started and finished at any point of the curve. Also the exact orientation of the welding torch is not fixed, but may be in a certain range, e.g., between 85◦ and 95◦ . Currently, these degrees of freedom are used to manually generate robot trajectories. However, their quality highly depends on skills and experience of the robot programmer. In this paper we propose a method that is able to automatically improve the given sequence of robotic tasks by adding certain freedom to (i) the position of the starting point along the curve, (ii) the orientation of the end-effector and (iii) the robot configuration. The proposed approach does not depend on the production domain and could be combined with any algorithm for constructing the initial task sequence. 
We evaluate the algorithm on a realistic case study and show that it could significantly improve the production time on the test instances from the cutting-deburring domain.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @mastersthesis{thesis-nielebock-2013, title = {Komposition gew\"{o}hnlicher Differentialgleichungen mit sicherheitsrelevanten Zustandsautomaten}, author = {Sebastian Nielebock}, url = {/cse/wp-content/uploads/2015/03/MA_Nielebock_final_version.pdf}, year = {2013}, date = {2013-09-17}, keywords = {differential equations, safety, state-based}, pubstate = {published}, tppubtype = {mastersthesis} } @mastersthesis{Gonschorek2013, title = {Methodik zur Abstraktion kontinuierlicher Differentialgleichungen}, author = {Tim Gonschorek}, editor = {Tim Gonschorek}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/BA_2013_Gonschorek.pdf}, year = {2013}, date = {2013-06-19}, address = {Universit\"{a}tsplatz 2, 39106 Magdeburg}, school = {Otto-von-Guericke-University Magdeburg}, abstract = {Reale Phanomene aus Natur und Technik konnen oft nur durch kontinuierliche Modelle abgebildet werden. Mithilfe eines Modelchecker konnen Modelle auf Grundlage einer vorgegebenen Spezi kation automatisiert veri ziert werden. Jedoch konnen die meisten Modelchecker nur mit diskreten Modellen umgehen. Deshalb kann ein kontinuierliches Modell nicht direkt mit einem Modelchecker gepruft werden. Diese Arbeit prasentiert eine Methodik, mit der es moglich ist, kontinuierliche Modelle sowohl im Zeitbereich, als auch im Wertebereich zu diskretisieren. Das Modell wird dazu in einen endlichen, diskreten Zustandsautomaten umgewandelt. Es wird ein konkretes Vorgehen erklart, um kontinuierliche Di erentialgleichungen in Di erenzengleichungen umzuwandeln, die in Zustandsubergangen eingebunden werden konnen. Dadurch wird es moglich, die Vorteile eins Modelcheckers fur die Veri kation kontinuierlicher Modelle zu nutzen. 
Um sicherzustellen, dass der erstellte Automat das kontinuierliche Modell hinreichend abstrahiert, um mithilfe eines Modelcheckers Aussagen treffen zu k\"{o}nnen, wird die Methodik anhand eines Beispielmodells evaluiert. Um die praktische Anwendbarkeit zu zeigen, wird eine komplexe Fallstudie von einem Modelica-Modell in einen SAML-Automaten transformiert. Des Weiteren wird die Architektur einer Java-Komponente vorgestellt, mit der eine Modelica-Datei in eine SAML-Datei transformiert werden kann, um sie mithilfe eines Modelcheckers gegen eine vorgegebene Spezifikation zu verifizieren. Diese Komponente soll als erste Implementation eines Importers f\"{u}r Modelica-Dateien in eine SAML-Umgebung dienen.}, keywords = {hybrid model checking, model based verification}, pubstate = {published}, tppubtype = {mastersthesis} } @inproceedings{SE2013-Param, title = {A Pragmatic Approach For Debugging Parameter-Driven Software}, author = { Frank Ortmeier and Simon Struck and Jens Meinicke and Jochen Quante}, editor = {Stefan Kowalewski and Bernhard Rumpe}, url = {https://subs.emis.de/LNI/Proceedings/Proceedings213/199.pdf}, year = {2013}, date = {2013-02-01}, booktitle = {Software Engineering 2013}, volume = {213}, pages = {199--212}, series = {Lecture Notes in Informatics}, abstract = {Debugging a software system is a difficult and time consuming task. This is in particular true for control software in technical systems. Such software typically has a very long life cycle, has been programmed by engineers and not computer scientists, and has been extended numerous times to adapt to a changing environment and new technical products. As a consequence, the software is often not in an ideal condition. Additionally, such software often faces real-time requirements, which often makes it impossible to use dynamic techniques (e.g., single-stepping or tracing). Technically, such software is often realized in C/C++ in a rather imperative programming style. 
Adaptation and extension is often done by adding configuration parameters. As a consequence, checking for correctness as well as debugging requires to consider the source code as well as its configuration parameters. In this paper, we propose a pragmatic approach to debugging such software. The approach was designed such that (a) it does not require any understanding of the software before starting, and (b) that it can be easily used by programmers and not only by experts. We evaluated the approach on an artificial but realistic case study provided by Robert Bosch GmbH.}, keywords = {debugging, embedded software, parameter driven software}, pubstate = {published}, tppubtype = {inproceedings} } @article{2013-safety-opti-jss, title = {Efficient Optimization of Large Probabilistic Models}, author = { Frank Ortmeier and Simon Struck and Matthias G\"{u}demann}, url = {http://hal.inria.fr/docs/00/81/66/36/PDF/Struck-Gudemann-Ortmeier-13.pdf}, year = {2013}, date = {2013-01-01}, journal = {Elsevier Journal of Systems and Software}, volume = {03/78}, abstract = {The development of safety critical systems often requires design decisions which influence not only dependability, but also other properties which are often even antagonistic to dependability, e.g., cost. Finding good compromises considering different goals while at the same time guaranteeing sufficiently high safety of a system is a very difficult task. We propose an integrated approach for modeling, analysis and optimization of safety critical systems. It is fully automated with an implementation based on the Eclipse platform. The approach is tool-independent, different analysis tools can be used and there exists an API for the integration of different optimization and estimation algorithms. For safety critical systems, a very important criterion is the hazard occurrence probability, whose computation can be quite costly. 
Therefore we also provide means to speed up optimization by devising different combinations of stochastic estimators and illustrate how t hey can be integrated into the approach. We illustrate the approach on relevant case-studies and provide ex perimental details to validate its effectiveness and applicability.}, keywords = {model checking, optimization, safety}, pubstate = {published}, tppubtype = {article} } @conference{Trojahn.2013m, title = {Authentication with Keystroke Dynamics on Touchscreen Keypads - Effect of different N-Graph Combinations}, author = { Matthias Trojahn and Florian Arndt and Frank Ortmeier}, year = {2013}, date = {2013-01-01}, booktitle = {MOBILITY 2013, The Third International Conference on Mobile Services, Resources, and Users}, pages = {114-119}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @conference{Trojahn.2013l, title = {KeyGait Framework for Continuously Biometric Authentication during Usage of a Smartphone}, author = { Matthias Trojahn and Frank Ortmeier}, year = {2013}, date = {2013-01-01}, booktitle = {MOBILITY 2013, The Third International Conference on Mobile Services, Resources, and Users}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @inproceedings{MIT2013, title = {Path Planning for Industrial Robots Among Multiple Underspecified Tasks}, author = {Sergey Alatartsev and Frank Ortmeier}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2015/05/Alatartsev_DoctoralDay2013.pdf}, year = {2013}, date = {2013-01-01}, booktitle = {Proceedings of the Magdeburger-Informatik-Tage 2. Doktorandentagung (MIT)}, abstract = {This paper is an overview of the currently going PhD project. The main goal of the project is to optimize a sequence of industrial robotic tasks. We want to make use of the fact that robotic tasks often allow some freedom during the execution. In contrast, this extra freedom is ignored by state-of-the-art approaches. 
This work refers to extra freedom as under-specification. This paper presents a comprehensive overview of the state-of-the-art approaches in task sequencing. We state the thesis goals and make a short introduction to the methods developed within the PhD project. Work packages are formed based on the list of unaccomplished goals. }, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @conference{teaching-saml_SEAA2013, title = {Teaching and Training Formal Methods for Safety Critical Systems}, author = { Frank Ortmeier and Michael Lipaczewski}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/Teaching_and_Training_Formal_Methods_for_Safety_Critical-_ystems.pdf}, year = {2013}, date = {2013-01-01}, booktitle = {Proceedings of the 39th Euromicro Conference on Iv Software Engineering and Advanced Applications (SEAA 2013)}, abstract = {Embedded systems become a major part in many domains. This also involves systems which might create heavy damages and injuries when they fail. However, because of the rising number of software components used within this embedded hardware, safety-related problems are hard to discover, and it is even harder to prove that there are none. One approach to guarantee the correctness of a system is model-based safety analysis. They rely on an abstract representation of the system which can then be analyzed using model checkers. The results of these analysis are in general much more precise and often reveal surprising results of failure combinations, where no one had ever thought about before. Nevertheless model-based safety analysis is not used widely. Mainly because it is not well-known and hard to apply to current safety standards which rely on manual approaches. Another fact might be, that most approaches are scientific and in most cases prototypes that are hard to use. In this paper we present some ideas and first steps towards an easy to learn and easy to use model based safety approach. 
Additionally we present different user-interfaces that are supposed to support the user in his learning.}, keywords = {model-based, VECS}, pubstate = {published}, tppubtype = {conference} } @inproceedings{IROS2013, title = {On Optimizing a Sequence of Robotic Tasks}, author = { Sergey Alatartsev and Vera Mersheeva and Marcus Augustine and Frank Ortmeier}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2015/05/Alatartsev_IROS2013.pdf}, year = {2013}, date = {2013-01-01}, booktitle = {Proceedings of the International Conference on Intelligent Robots and Systems (IROS)}, publisher = {IEEE}, abstract = {Production speed and energy efficiency are crucial factors for any application scenario in industrial robotics. The most important factor for this is planning of an optimized sequence of atomic subtasks. In a welding scenario, an atomic subtask could be understood as a single welding seam/spot while the sequence could be the ordering of these atomic tasks. Optimization of a task sequence is normally modeled as the Traveling Salesman Problem (TSP). This works well for simple scenarios with atomic tasks without execution freedom like spot welding. However, many types of tasks allow a certain freedom of execution. A simple example is seam welding of a closed-contour, where typically the starting-ending point is not specified by the application. This extra degree of freedom allows for much more efficient task sequencing. In this paper, we describe an extension of TSP to model a problem of finding an optimal sequence of tasks with such extra degree of freedom. We propose a new, efficient heuristic to solve such problems and show its applicability. 
Obtained computational results are close to the optimum on small instances and outperform the state-of-the-art approaches on benchmarks available in the literature.
However, the keystroke behavior on touchscreens, as they are nowadays used on smartphones, has not been analyzed before.
{2013}, date = {2013-01-01}, booktitle = {15th International Conference on Enterprise Information Systems}, abstract = {Automatic emotion recognition through computers offers a lot of advantages, as the interaction between human and computers can be improved. For example, it is possible to be responsive to anger or frustration of customers automatically while working with a webpage. Mouse cursor movements and keystroke dynamics were already used and examined for such a recognition on conventional keyboards. The aim of this work is to investigate keystroke dynamics on touchscreen keyboards which gets a cumulative relevance through the increasingly further circulation of smartphones and tablets. Furthermore, it is possible to record additional information like pressure and size of keystrokes. This could increase the recognition rate for emotions. In order to record the keystroke dynamics, an application and keyboard layout for Android OS were developed. In addition, hypotheses were established on the basis of Yerkes-Dodson-Law and Flow theory and besides, a study with 152 test persons for the data collection was implemented. Subsequently, a data evaluation with the SPSS software was accomplished. Most of the hypotheses were confirmed and the results of the study show that emotions can be explained by the keystroke dynamics and recognized in this way. 
}, keywords = {}, pubstate = {published}, tppubtype = {incollection} } @conference{Meyer.2013, title = {Using Crowdsourced Geographic Information from OpenStreetMap for Discrete Event Simulation of Logistic Systems}, author = { Torben Meyer and Matthias Trojahn and Steffen Strassburger}, url = {https://dl.acm.org/citation.cfm?id=2499606}, year = {2013}, date = {2013-01-01}, booktitle = {SpringSim 2013 ANSS}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @conference{Trojahn.2013c, title = {Keystroke Dynamics Authentication on Mobile Devices with a Capacitive Display}, author = { Matthias Trojahn and Frank Ortmeier}, year = {2013}, date = {2013-01-01}, booktitle = {Biometrics: Technologies, Systems and Applications - BTSA 2013}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @inproceedings{ICAPS13, title = {Constricting Insertion Heuristic for Traveling Salesman Problem with Neighborhoods}, author = { Sergey Alatartsev and Marcus Augustine and Frank Ortmeier}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2015/05/Alatartsev_ICAPS2013.pdf https://www.youtube.com/watch?v=oT9ngCfLFq8}, year = {2013}, date = {2013-01-01}, booktitle = {Proceedings of the 23rd International Conference on Automated Planning and Scheduling (ICAPS-2013)}, publisher = {AAAI}, abstract = {Sequence optimization is an important problem in many production automation scenarios involving industrial robots. Mostly, this is done by reducing it to Traveling Salesman Problem (TSP). However, in many industrial scenarios optimization potential is not only hidden in optimizing a sequence of operations but also in optimizing the individual operations themselves. From a formal point of view, this leads to the Traveling Salesman Problem with Neighborhoods (TSPN). TSPN is a generalization of TSP where areas should be visited instead of points. In this paper we propose a new method for solving TSPN efficiently. 
We compare the new method to the related approaches using existing test benchmarks from the literature. According to the evaluation on instances with known optimal values, our method is able to obtain a solution close to the optimum. }, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @conference{Trojahn.2013b, title = {Developing a Cloud Computing Based Approach for Forensic Analysis using OCR}, author = { Matthias Trojahn and Lei Pan and Fabian Schmidt}, url = {http://ieeexplore.ieee.org/abstract/document/6568554/}, year = {2013}, date = {2013-01-01}, booktitle = {7th International Conference on IT Security Incident Management & IT Forensics}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @conference{Trojahn.2013, title = {Toward Mobile Authentication with Keystroke Dynamics on Mobile Phones and Tablets}, author = { Matthias Trojahn and Frank Ortmeier}, url = {http://ieeexplore.ieee.org/abstract/document/6550478/}, year = {2013}, date = {2013-01-01}, booktitle = {The 7th International Symposium on Security and Multimodality in Pervasive Environment (SMPE-2013)}, abstract = {Security and protection of personal data are becoming more and more important. At the same time, we see a steady rise of very powerful mobile devices like smartphones and tablets. These devices offer most capabilities of desktop computers. Even today, they are often used for storing or accessing individual data. In the near future, it is obviously desirable to use their Internet capability to access private data or even business intra-nets from everywhere. Therefore, new security mechanisms must be designed. In this paper, we only focus on the authentication part (and ignore OS and network issues). Traditional methods for authentication (like smartcards or face recognition) may not be used in every context because of hardware and environment limitations (dark environments or camera restrictions in business). 
Therefore, we suggest to develop a mixture of a keystroke-based and a handwriting-based authentication method using capacitive displays. In this paper, we will briefly discuss limitations of existing approaches and why we believe that keystroke and handwriting authentication is a possible way for improving the security on mobile devices. First experiments will demonstrate the effectivity of this new approach.}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @techreport{Trojahn.2013d, title = {Re-Authentication Model for Mobile {D}evices}, author = { Matthias Trojahn and Frank Ortmeier}, url = {http://spring13.seecurity.de/SPRING8_Proceedings.pdf#page=9}, year = {2013}, date = {2013-00-01}, address = {Proceedings of the Eight GI SIG SIDAR Graduate Workshop on Reactive Security (SPRING)}, institution = {GI FG SIDAR}, keywords = {}, pubstate = {published}, tppubtype = {techreport} } @article{Beck.2012, title = {Dynamic validity period calculation of digital certificates based on aggregated security assessment}, author = { Alexander Beck and Frank Ortmeier and Jens Graupmann}, url = {http://wireilla.com/papers/ijcis/V2N4/2412ijcis01.pdf}, year = {2012}, date = {2012-12-01}, journal = {International Journal On Cryptography And Information Security (IJCIS)}, volume = {2}, number = {4}, abstract = {The paper proposes a method based on different security-related factors to dynamically calculate the validity period of digital certificates. Currently validity periods are most often defined statically without scientific justification. This approach is not sufficient to objectively consider the actual need for security. Therefore the approach proposed in this paper considers relevant security criteria in order to calculate a meaningful validity period for digital certificates. This kind of security assessment can be executed periodically in order to dynamically respond to changing conditions. 
Especially in the context of complex systems and infrastructures that have an increased need for security, privacy and availability this issue is highly relevant.}, keywords = {authentication, crypto period, digital certificates, risk assessment, security assessment, security engineering, security metrics and measurement, validity period}, pubstate = {published}, tppubtype = {article} } @inproceedings{se2012-certification, title = {Using Model-Based Analysis in Certification of Critical Software-Intensive Systems}, author = { Frank Ortmeier and Simon Struck and Michael Lipaczewski}, url = {https://subs.emis.de/LNI/Proceedings/Proceedings199/155.pdf}, isbn = {978-3-88579-293-2}, year = {2012}, date = {2012-02-01}, booktitle = {Softwareengineering 2012 Workshopband}, number = {199}, series = {GI-Edition-Lecture Notes in Informatics (LNI)}, abstract = {Software is taking over more and more functionality in most technical systems, which leads to the term software-intensive or cyber-physical systems. Although this offers many exciting new opportunities, it also makes precise analysis of safety and reliability goals much more complicated. Well-known traditional techniques often reach their limits. Model-based approaches on the other hand can be useful for solving some of these problems. However, in industrial practice answering the question alone is often not sufficient. It is also necessary to explain how answers were found. In this paper, we will show some of the capabilities of modern model-based analysis methods and highlight how they possibly could be used in safety engineering resp. 
what obstacles need to be avoided.}, keywords = {model-based, SAML, VECS, VIP-MoBaSA}, pubstate = {published}, tppubtype = {inproceedings} } @mastersthesis{thesis-struck-2011, title = {Approaches to Multi-Objective Optimization of Formal Specifications}, author = {Simon Struck}, url = {/cse/wp-content/uploads/2015/03/masterarbeit-struck.pdf}, year = {2012}, date = {2012-01-17}, keywords = {model-based, multi-objective, optimization}, pubstate = {published}, tppubtype = {mastersthesis} } @inproceedings{IWDE2012_NielebockOSW, title = {From Discrete Event Simulation to Virtual Reality Environments}, author = { Sebastian Nielebock and Frank Ortmeier and Marco Schumann and Andr\'{e} Winge}, editor = {Frank Ortmeier and Peter Daniel}, url = {http://dx.doi.org/10.1007/978-3-642-33675-1_48 https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/11/nielebock-iwde-2012.pdf}, doi = {10.1007/978-3-642-33675-1_48}, isbn = {978-3-642-33674-4}, year = {2012}, date = {2012-01-01}, booktitle = {Computer Safety, Reliability, and Security}, volume = {7613}, pages = {508-516}, publisher = {Springer Berlin Heidelberg}, series = {Lecture Notes in Computer Science}, abstract = {Today’s technical systems are often very complex. System dynamics are often hard to predict for humans. However, understanding system behavior is crucial for evaluating design variants and finding errors. One way to cope with this problem is to build logical or virtual simulations. Logical simulations are often very abstract, but can simulate complex behavioral sequences. Virtual reality (VR) simulation is very good for experiencing the system in a view close to reality. However, it is very often static or has only limited dynamics. Until now both approaches exist in relative isolation. In this paper, we report on our experiences in building a mixed simulation, here a discrete event simulator (DES) is coupled with a virtual reality (VR) environment. 
We will focus on technical and conceptual challenges, but also present possible use cases for user interaction in this strategy to make more detailed investigations possible. Finally a prototype based on the simulation tool ”SLX” and the virtual reality environment ”Virtual Development and Training Platform” is used to evaluate the approach.}, keywords = {coupling, discrete event simulation, model-based, synchronization, virtual reality}, pubstate = {published}, tppubtype = {inproceedings} } @conference{Trojahn.2012b, title = {Towards Coupling User and Device Locations Using Biometrical Authentication on Smart Phones}, author = { Matthias Trojahn and Philipp Marcus}, url = {http://ieeexplore.ieee.org/abstract/document/6470915/}, year = {2012}, date = {2012-01-01}, booktitle = {The 7th International Conference for Internet Technology and Secured Transactions (ICITST-2012)}, abstract = {The mobile access to resources via smartphones has caused the development of location-based access control (LBAC) policies: A permission is granted, if a user is within an authorized area. This needs to be checked for the time the permission is used by consulting the location of the user's device. The assumption that both locations coincide for this timespan can be asserted by continuous and implicit biometrical authentication, as knowledge or ownership factors easily allow a user to move away from his device after passing or dropping it. In this paper, requirements for such biometrical authentication schemes are presented that are necessary for asserting the assumption that device and user location coincide, i.e. are coupled. A carefully conducted literature survey is presented, where existing biometrical authentication schemes and modalities on smartphones are evaluated against these requirements. 
Finally, a discussion shows that in most cases a fusion of modalities is necessary but the chosen modalities should be carefully selected based on the characteristics of geographical areas where permissions are granted by the LBAC policy. The results allow for an improved coupling of user and device locations in LBAC scenarios and thus help to improve the security of such systems.}, keywords = {biometric authentication, lbac, smartphone}, pubstate = {published}, tppubtype = {conference} } @conference{Assistenz-Informatik2012, title = {Handlungsadaptive Produktionsassistenz}, author = { Michael Lipaczewski and Frank Ortmeier}, url = {http://cs.emis.de/LNI/Proceedings/Proceedings208/585.pdf}, year = {2012}, date = {2012-01-01}, booktitle = {Proceedings 208 - 42. Jahrestagung der Gesellschaft f\"{u}r Informatik e.V. (GI) (INFORMATIK 2012)}, abstract = {In fast allen Industrienationen stellt der demographische Wandel und die erh"ohte Lebenserwartung die Gesellschaftssysteme vor gro\sse Herausforderungen. Die Verl"angerung des durchschnittlichen Lebensalters f"uhrt dazu, dass auch die Arbeitsf"ahigkeit l"anger und gesundheitsvertr"aglicher gew"ahrleistet werden muss. Im Besonderen T"atigkeiten in der industriellen Produktion sind hiervon betroffen. Sowohl physisch als auch psychisch sind "uber lange Zeitr"aume ausgef"uhrte, monotone Handlungen, wie sie beispielsweise in der Flie\ssbandproduktion "ublich sind, extrem belastend. In Zukunft werden die steigende Automatisierung sowie die fortschreitende Mensch-Maschine-Kooperation die wenigen Flexibilit"aten und Freir"aume der Werker weiter reduzieren. Dies liegt in erster Linie daran, dass heutige Automatisierungssysteme nur "au\sserst starre Abl"aufe beherrschen. 
In diesem Papier diskutieren wir Ans"atze und Herausforderungen, wie im Kontext industrieller Produktion einerseits Handlungsflexibilit"at erh"oht werden kann und gleichzeitig Produktivit"atssteigerungen durch Automatisierung weiter m"oglich sind.}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @conference{S3E-HASE2012, title = {Using Tool-Supported Model Based Safety Analysis - Progress and Experiences in SAML Development}, author = { Michael Lipaczewski and Simon Struck and Frank Ortmeier}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2015/11/HAZE2012.pdf}, year = {2012}, date = {2012-01-01}, booktitle = {IEEE 14th International Symposium on High-Assurance Systems Engineering (HASE 2012)}, abstract = {Software controls in technical systems are becoming more and more important and complex. Model based safety analysis can give provably correct and complete results, often in a fully automatic way. These methods can answer both logical and probabilistic questions. In common practice, the needed models must be specified in different input languages of different tools depending on the chosen verification tool for the desired aspect. This is time consuming and error-prone. To cope with this problem we developed the safety analysis modeling language (SAML). In this paper, we present a new tool to intuitively create probabilistic, non-deterministic and deterministic specifications for formal analysis. The goal is to give tool-support during modeling and thus make building a formal model less errorprone. The model is then automatically transformed into the input language of state of the art verification engines. 
We illustrate the approach on a case-study from nuclear power plant domain.}, keywords = {model-based, VECS}, pubstate = {published}, tppubtype = {conference} } @conference{Opti-HASE2012, title = {Multi-Objective Optimization of Formal Specifications}, author = { Simon Struck and Matthias G\"{u}demann and Michael Lipaczewski and Frank Ortmeier}, url = {https://hal.inria.fr/hal-00735640/document}, year = {2012}, date = {2012-01-01}, booktitle = {IEEE 14th International Symposium on High-Assurance Systems Engineering (HASE 2012)}, abstract = {Even in the domain of safety critical systems, safety and reliability are not the only goals and a developing engineer is faced with the problem to find good compromises wrt. other antagonistic objectives, in particular economic aspects of a system. Thus there does not exist a single optimal design variant of a system but only compromises each ``best'' in its own rights . With the rising complexity, especially of cyber-physical systems, the process of manually finding best compromises becomes even more difficult. To cope with this problem, we propose a model-based optimization approach which uses quantitative model-based safety analysis. While the general approach is tool-independent, we implement it technically by introducing well defined variation points to a formal system model. These allow enough variability to cover whole families of systems while still being rigorous enough for formal analysis. From the specification of this family of system variants and a set of objective functions, we compute Pareto optimal sets, which represent best compromises. In this paper we present a framework which allows for optimization of arbitrary quantitative goal functions, in particular probabilistic temporal logic properties used for model-based safety analysis. 
Nevertheless, the approach itself is well applicable to other domains.}, keywords = {model-based, VECS}, pubstate = {published}, tppubtype = {conference} } @conference{augustine12, title = {Landmark-Tree Map: a Biologically Inspired Topological Map for Long-Distance Robot Navigation}, author = { Marcus Augustine and Elmar Mair and Annett Stelzer and Frank Ortmeier and Darius Burschka and Michael Suppa}, url = {https://s3.amazonaws.com/academia.edu.documents/30714676/Landmark-Tree-Augustine_copyright.pdf?AWSAccessKeyId=AKIAIWOWYYGZ2Y53UL3A&Expires=1508610710&Signature=snLmNa7mdedYrIYvejsS4nk%2BVe4%3D&response-content-disposition=inline%3B%20filename%3DLandmark-Tree_Map_a_Biologically_Inspire.pdf}, year = {2012}, date = {2012-01-01}, booktitle = {Proceedings of the International Conference on Robotics and Biomimetics (ROBIO) 2012}, pages = {128-135}, publisher = {IEEE}, keywords = {bio-inspired, mobile, navigation, robotics}, pubstate = {published}, tppubtype = {conference} } @conference{saml-sde-ICSE2012, title = {SAML goes Eclipse - Combining Model-Based Safety Analysis and High-Level Editor Support}, author = { Michael Lipaczewski and Simon Struck and Frank Ortmeier}, url = {https://www.researchgate.net/profile/Frank_Ortmeier/publication/261095247_SAML_goes_eclipse_-_Combining_model-based_safety_analysis_and_high-level_editor_support/links/5704fb7408ae13eb88b937cb.pdf}, year = {2012}, date = {2012-01-01}, booktitle = {Proceedings of the 2nd International Workshop on Developing Tools as Plug-Ins (TOPI)}, pages = {67-72}, publisher = {IEEE}, abstract = {Software-intensive systems become more and more important in safety critical applications, mainly because of the rising number and complexity of embedded system. Many traditional safety analysis techniques where developeddecades ago and thus cannot cope with the complexity of modern systems. Model based analysis techniques where developed to deal with the complexity of software-intensive systems. 
However, due to the lack of tool support these techniques are currently limited to highly skilled experts. Thus model-based analysis is rarely used by system engineers. Based on the safety analysis modeling language (SAML) framework we propose the S3E , which integrates a complete safety analysis environment into the eclipse platform. S3E covers the whole safety analysis work flow. This implies a powerful editor for model creation, a seamless integration of model-analysis tools and presentation as well as evaluation of the analysis results into one environment. In this paper we present the current state of S3E and first experiences with the eclipse plug-in development.}, keywords = {model-based, VECS}, pubstate = {published}, tppubtype = {conference} } @conference{Robotik2012, title = {Trajectory Description Conception for Industrial Robots}, author = { Sergey Alatartsev and Matthias G\"{u}demann and Frank Ortmeier}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2015/05/Alatartsev_Robotik2012.pdf}, year = {2012}, date = {2012-01-01}, booktitle = {7th German Conference on Robotics (ROBOTIK 2012)}, pages = {365-370}, address = {Munich, Germany}, abstract = {In this paper we observe the difficulties one can face when using different MPLs (Motion Planning Library) in a single application, and propose a new conception and a language which goal is to solve these problems. The idea is to present an interface between robot programming instruments and MPLs. Our goal is to provide a powerful tool for developers of software approaches for programming industrial robots that would allow an easy combination of different MPLs in one application. In addition the proposed conception hides the inner structure of libraries and eliminates the need to investigate algorithms before applying. 
That would increase the speed and the quality of the newly developed software systems.}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @conference{saml-process-MBEES2012, title = {Unifying Probabilistic and Traditional Formal Model-Based Analysis}, author = { Matthias G\"{u}demann and Michael Lipaczewski and Simon Struck and Frank Ortmeier}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2015/11/MBEES2012.pdf}, year = {2012}, date = {2012-01-01}, booktitle = {Proceedings of 8. Dagstuhl-Workshop on Model-Based Development of Embedded Systems (MBEES)}, abstract = {The increasing complexity of modern software-intensive systems makes their analysis much more difficult. At the same time, more and more of theses systems are used in safety-critical environment. Model based safety analysis can help with this problem by giving provably correct and complete results, very often in a fully automatic way. Today, such methods can cope with logical as well as probabilistic questions. However, very often the models used in many model based approaches must be specified in different input languages depending on the chosen verification tool for the desired aspect, which is time consuming and often error-prone. In this paper, we report on our experiences in designing a tool independent specification language (SAML) for model based safety analysis. This allows to use only one model and analyze it with different methods and different verification engines, while guaranteeing the equivalence of the analyzed models. 
In particular, we discuss challenges and possible solutions to integrate SAML in the development process of real systems.}, keywords = {model-based, SAML, VECS}, pubstate = {published}, tppubtype = {conference} } @inbook{ForMoSA-IGI-2012, title = {The ForMoSA Approach to Qualitative and Quantitative Model-Based Safety Analysis (in Railway Safety, Reliability and Security: Technologies and System Engineering)}, author = { Axel Habermaier and Matthias G\"{u}demann and Frank Ortmeier and Wolfgang Reif and Gerhard Schellhorn}, editor = {Francesco Flammini}, url = {https://www.igi-global.com/viewtitlesample.aspx?id=66668&ptid=61619&t=The%20ForMoSA%20Approach%20to%20Qualitative%20and%20Quantitative%20Model-Based%20Safety%20Analysis}, year = {2012}, date = {2012-01-01}, publisher = {IGI Global}, abstract = {This chapter presents ForMoSA (FORmal MOdels and Safety Analysis), an integrated approach for the safety assessment of safety-critical embedded systems. The approach brings together the best of engineering practice, formal methods, and mathematics: traditional safety analysis, temporal logics and verification, as well as statistics and optimization. These three orthogonal techniques cover three different aspects of safety: fault tolerance, functional correctness, and quantitative analysis. The For-MoSA approach combines these techniques to assess system safety in a structured and formal way. Furthermore, the tight combination of methods from different analysis domains results in mutual bene-fits. The combined approach yields results which cannot be produced by any single technique on its own. The methodology was applied to several case studies from different industrial domains. 
One of them is an autonomous control of level crossings using radio-based communication, which is used in this chapter to describe the individual steps of the ForMoSA methodology.}, note = {to appear}, keywords = {}, pubstate = {published}, tppubtype = {inbook} } @article{Trojahn.2012, title = {Biometric Authentication Through a Virtual Keyboard for Smartphones}, author = { Matthias Trojahn and Frank Ortmeier}, url = {http://www.airccse.org/journal/jcsit/4512ijcsit01.pdf}, issn = {09753826}, year = {2012}, date = {2012-01-01}, journal = {International Journal of Computer Science & Information Technology (IJCSIT)}, volume = {4}, number = {5}, abstract = {Security through biometric keystroke authentication on mobile phones with a capacitive display and aQWERTZ-layout is a new approach. Keystroke on mobile phones with a 12-key layout has already shownthe possibility for authentication on these devices. But with hardware changes, new general requirementshave been arisen.In this paper, we focus on the authentication with keystroke dynamics. Therefore, we are presenting newimplemented keyboard layouts to show differences between a 12-key layout and a QWERTZ-layout. Inaddition, we compare a numerical (PIN) and alphabetic (password) input for mobile phones. For this, weadded new features for a keystroke authentication with a capacitive display. With the knowledge of the faultrates, we discuss the improvement of the security for keystroke dynamics with different virtual keyboardlayouts. 
Our results show, even with new hardware factors, that an authentication via keystroke dynamicsis possible.}, keywords = {authentication, biometrics, keystroke, smartphone, virtual keyboard}, pubstate = {published}, tppubtype = {article} } @mastersthesis{thesis-lipaczewski-2011, title = {Untersuchung von Prefetching-Strategien in Objekt-relationalen Mappern}, author = {Michael Lipaczewski}, url = {/cse/wp-content/uploads/2015/03/thesis-lipaczewski-2011.pdf}, year = {2011}, date = {2011-04-03}, keywords = {databases, mapper, object-oriented, prefetching}, pubstate = {published}, tppubtype = {mastersthesis} } @conference{IWDE2011, title = {Practical Experiences in Model-Based Safety Analysis}, author = { Frank Ortmeier and Michael Lipaczewski and Matthias G\"{u}demann}, url = {https://www.researchgate.net/profile/Frank_Ortmeier/publication/265976291_Practical_Experiences_in_Model-Based_Safety_Analysis/links/55ae256208ae98e661a4d26c.pdf}, year = {2011}, date = {2011-01-01}, booktitle = {proceedings: International Workshop on Digital Engineering}, publisher = {ACM Proceedings}, abstract = {The formal modeling framework Safety Analysis and Modelling Language (SAML) allows for a combined specification of qualitative (non-deterministic) and quantitative (probabilistic) aspects. Using semantically founded model transformations, SAML models can be analyzed using current stateof- the art verification tools while guaranteeing that the analysis results of the different tools are computed on equivalent models. The significance of the results, in particular of computed quantitative occurrence probabilities heavily depends on the accuracy of the modeling. The most important factors are the temporal resolution of the model, the considered mission time for the analysis and if applicable also factors like spatial resolution. 
The overall results always have to be interpreted depending on the character of the system, the properties under consideration and choice of modeling parameters. In general, unfortunately, an increase in system accuracy is strongly correlated to a more complex, in particular resource intensive, analysis. In this paper we discuss qualitatively, how the dependencies between the relevant model parameters of SAML models affect the accuracy of the quantitative analysis results. We conduct some experiments with different parameters and survey the effects on the overall results. This work is triggered primarily with regard to optimization of such systems, where the analysis of many different system variants is necessary. In this use case, in early phases of optimization, a compromise between accuracy and running time can be used to explore many variants, identify the most promising ones and then analyze these with more accuracy.}, keywords = {model-based, safety, safety analysis, SAML}, pubstate = {published}, tppubtype = {conference} } @phdthesis{Diss-Guedemann-2011, title = {Qualitative and Quantitative Formal Model-Based Safety Analysis}, author = { Matthias G\"{u}demann}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2015/11/Guedemann-Dissertation.pdf http://nbn-resolving.de/urn:nbn:de:gbv:ma9:1-385}, year = {2011}, date = {2011-01-01}, keywords = {probabilistic, qualitative, quantitative, safety analysis}, pubstate = {published}, tppubtype = {phdthesis} } @conference{modelBasedAnalysisToolPRDC2011, title = {Tool Supported Model-Based Safety Analysis and Optimization}, author = { Matthias G\"{u}demann and Michael Lipaczewski and Frank Ortmeier}, url = {http://ieeexplore.ieee.org/abstract/document/6133100/}, year = {2011}, date = {2011-01-01}, booktitle = {Proceedings of the 17th IEEE Pacific Rim International Symposium on Dependable Computing (PRDC 2011)}, abstract = {Although model-based approaches can yield very precise safety analyses, they are 
rarely used in practice. The reason is, that most techniques are very difficult to apply and almost always require separate models and tools. In this paper we present an outline for the integration of different model-based safety analysis and safety optimization methods into a single tool framework. We present the envisioned workflow and some of the requirements for the tool integration. Because of its wide acceptance, platform independence and its well-documented API, we chose the Eclipse platform as framework foundation.}, keywords = {model-based, SAML, VECS}, pubstate = {published}, tppubtype = {conference} } @conference{ModelBasedDesignPRDC2011, title = {Towards Making Dependability Visual - Combining Model-Based {D}esign and Virtual Realities}, author = { Frank Ortmeier and Matthias G\"{u}demann and Michael Lipaczewski and Marco Schumann and Robert Eschbach}, url = {https://www.researchgate.net/profile/Frank_Ortmeier/publication/220700027_Towards_Making_Dependability_Visual_-_Combining_Model-Based_Design_and_Virtual_Realities/links/0c96052c949dedbb6b000000.pdf}, year = {2011}, date = {2011-01-01}, booktitle = {Proceedings of the 17th IEEE Pacific Rim International Symposium on Dependable Computing (PRDC 2011)}, abstract = {Dependability is often a very abstract concept. The reason is that dependability implications shall be very rare and are often not even wanted to happen during testing. In particular for software-intensive systems, it is very hard to find correct causal relationships/minimal cut sets. Modern modelbased approaches help here by computing for example minimal cut sets automatically. However, these methods always rely on a correct model of the environment. In addition, the results are often not traceable or understandable for humans. 
Therefore, we suggest combining model-based analysis for deriving safety properties with virtual realities for ensuring model validity and traceability of results.}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @conference{safecomp-saml-opt, title = {Model-Based Multi-Objective Safety Optimization}, author = {Matthias G\"{u}demann and Frank Ortmeier}, editor = {Francesco Flammini and Sandro Bologna and Valeria Vittorini}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/Model-Based_Multi-Objective_Safety_Optimization.pdf http://www.safecomp2011.unina.it/}, isbn = {978-3642242694}, year = {2011}, date = {2011-01-01}, booktitle = {Proceedings of the 30th International Conference on Computer Safety, Reliability and Security (SAFECOMP 2011)}, volume = {6894}, publisher = {Springer}, series = {LNCS}, abstract = {It is well-known that in many safety critical applications safety goals are antagonistic to other design goals or even antagonistic to each other. This is a big challenge for the system designers who have to find the best compromises between different goals. In this paper, we show how model-based safety analysis can be combined with multi-objective optimization to balance a safety critical system wrt. different goals. In general the presented approach may be combined with almost any type of (quantitative) safety analysis technique. For additional goal functions, both analytic and black-box functions are possible, derivative information about the functions is not necessary. As an example, we use our quantitative model-based safety analysis in combination with analytical functions describing different other design goals. The result of the approach is a set of \emph{best} compromises of possible system variants. Technically, the approach relies on genetic algorithms for the optimization. 
To improve efficiency and scalability to complex systems, elaborate estimation models based on artificial neural networks are used which speed up convergence. The whole approach is illustrated and evaluated on a real world case study from the railroad domain.}, keywords = {model-based, optimization, safety}, pubstate = {published}, tppubtype = {conference} } @inproceedings{dcds2011, title = {Towards Model-Driven Safety Analysis}, author = { Matthias G\"{u}demann and Frank Ortmeier}, url = {https://www.researchgate.net/publication/229033693_Towards_model-driven_safety_analysis}, year = {2011}, date = {2011-01-01}, booktitle = {Proceedings of the 3rd International Workshop on Dependable Control of Discrete Systems (DCDS 11)}, publisher = {IEEE}, abstract = {Model-based safety analysis allows very high quality analysis of safety requirements. Both qualitative (i.e. what must go wrong for a system failure) and quantitative aspects (i.e. how probable is a system failure) are of great interest for safety analysis. Traditionally, the analysis of these aspects requires separate, tool-dependent formal models. However, building adequate models for each analysis requires a lot of effort and expertise. Model-driven approaches support this by automating the generation of analysis models. SAML is a tool-independent modeling framework that allows for the construction of models with both non-deterministic and probabilistic behavior. SAML models can automatically be transformed into different state of the art formal analysis tools -- while preserving the semantics -- to analyze different aspects of safety. As a consequence both -- qualitative and quantitative -- model-based safety analysis can be done without any additional generation of models and with transferable results. This approach makes SAML an ideal intermediate language for a model-driven safety analysis approach. 
Every higher-level language that can be transformed into SAML can be analyzed with all targeted formal analysis tools. New analysis tools can be added and the user benefits from every advancement of the analysis tools.}, note = {to appear 15.6.2011}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{FT10, title = {Metadaten-Modell f\"{u}r ein sicheres eingebettetes Datenmanagement}, author = { Jana Fruth and Jana Dittmann and Frank Ortmeier and Janet Feigenspan}, editor = {Patrick Horster}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/metamodell.pdf}, isbn = {978-3-00-031441-4}, year = {2010}, date = {2010-09-01}, booktitle = {Proceedings DACH Security}, abstract = {In diesem Artikel wird das grobe Konzept eines ganzheitlichen Metadatenmodells f\"{u}r die sichere Datenhaltung eingebetteter Systeme vorgestellt, das anhand eines Logistikszenarios, dem Entladen eines Flugzeugs auf einem Flugfeld, veranschaulicht wird. Das Modell ist speziell an die Eigenschaften komplexer, heterogener Systeme angepasst, da die Daten nicht separat, sondern in Zusammenhang mit den spezifischen Komponenteneigenschaften (z.B. Art des Designs) betrachtet werden. So kann das Modell z.B. robust gegen\"{u}ber dynamischen Prozessen sein, die das Sicherheitsniveau komplexer Systeme beeinflussen k\"{o}nnen. Es kann u.a. auch der Absch\"{a}tzung von aktuellen und zuk\"{u}nftigen Gef\"{a}hrdungen (Security und Safety), der Definition von Schutzma{\ss}nahmen dienen und Grundlage f\"{u}r die Sicherheitssensibilisierung sein. Das Metadatenmodell basiert auf einem Komponentenmodell nach BSI-Standard 100-2 (Grundschutz) und wird durch die Abbildung von Personen, die mit der Technik interagieren und potentiellen Umfeldeinfl\"{u}ssen (z.B. Wetter) erweitert. 
Weiterhin werden auch verschiedene Datenklassen, die auf Komponenten gespeichert und zwischen diesen kommuniziert werden, Sicherheitsanforderungen (Safety, Security) an die Daten und umzusetzende Sicherheitsmechanismen zum Schutz der Daten betrachtet. Das Modell kann u.a. als Grundlage dienen, Funktionsnetze und Informationsfl\"{u}ssen zwischen Komponenten abzubilden und (globale) Schutzprofile f\"{u}r bestimmte Anwendungsbereiche, wie z.B. Logistik, zu entwickeln. Es wurde prototypisch mit SysML mit dem Werkzeug Topcased auf Grundlage von [GKO + 10] erstellt, das speziell f\"{u}r die Modellierung komplexer sicherheitskritischer Systeme entwickelt wurde.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @conference{SAML-HASE2010, title = {A Framework for Qualitative and Quantitative Model-Based Safety Analysis}, author = { Matthias G\"{u}demann and Frank Ortmeier}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2015/11/HASE20101.pdf}, year = {2010}, date = {2010-01-01}, booktitle = {Proceedings of the 12th High Assurance System Engineering Symposium (HASE 2010)}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @conference{probabilistic-qapl2010, title = {Probabilistic Model-Based Safety Analysis}, author = { Matthias G\"{u}demann and Frank Ortmeier}, url = {https://arxiv.org/pdf/1006.5101v1.pdf}, year = {2010}, date = {2010-01-01}, booktitle = {Proceedings of the 8th Workshop on Quantitative Aspects of Programming Languages (QAPL 2010)}, abstract = {Model-based safety analysis approaches aim at finding critical failure combinations by analysis of models of the whole system (i.e. software, hardware, failure modes and environment). The advantage of these methods compared to traditional approaches is that the analysis of the whole system gives more precise results. 
Only few model-based approaches have been applied to answer quantitative questions in safety analysis, often limited to analysis of specific failure propagation models, limited types of failure modes or without system dynamics and behavior, as direct quantitative analysis uses large amounts of computing resources. New achievements in the domain of (probabilistic) model-checking now allow for overcoming this problem. This paper shows how functional models based on synchronous parallel semantics, which can be used for system design, implementation and qualitative safety analysis, can be directly re-used for (model-based) quantitative safety analysis. Accurate modeling of different types of probabilistic failure occurrence is shown as well as accurate interpretation of the results of the analysis. This allows for reliable and expressive assessment of the safety of a system in early design stages.}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @conference{OG+10, title = {ProMoSA - Probabilistic Models for Safety Analysis}, author = { Frank Ortmeier and Matthias G\"{u}demann}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/ProMoSa.pdf}, year = {2010}, date = {2010-01-01}, booktitle = {Proceedings of 6th Dagstuhl-Workshop MBEES 2010: Model-Based Development of Embedded Systems}, abstract = {Das Ziel des ProMoSA Vorhabens ist die Entwicklung eines durchgehend modell-basierten Ansatzes zur Sicherheitsanalyse. Dabei sollen sowohl quantitative als auch qualitative Aspekte aus einem gemeinsamen Modell abgeleitet werden. Zusaetzlich soll es moeglich sein aus einer Menge von Modellen optimale Kandidaten automatisch zu bestimmen. 
Die Modellerstellung soll weitgehend durch semantisch-fundierte Modelltransformationen unterstuetzt werden.}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @conference{sicherheit2010, title = {Quantitative Model-Based Safety Analysis: A Case Study}, author = { Matthias G\"{u}demann and Frank Ortmeier}, url = {https://www.researchgate.net/profile/Jan_Goebel2/publication/221307190_Amun_Automatic_Capturing_of_Malicious_Software/links/0f3175397267218325000000.pdf#page=54}, year = {2010}, date = {2010-01-01}, booktitle = {Proceedings of 5th conference for Sicherheit, Schutz und Zuverlaessigkeit (SICHERHEIT 10)}, publisher = {Lecture Notes in Informatics (LNI)}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @conference{GKO+10, title = {SysML in Digital Engineering}, author = { Matthias G\"{u}demann and Stefan Kegel and Frank Ortmeier and Olaf Poenicke and Klaus Richter}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/SysML_in_Digital_Engineering.pdf}, year = {2010}, date = {2010-01-01}, booktitle = {Proceedings of 1st International Workshop on Digital Engineering}, abstract = {Today, most projects are interdisciplinary. They require expertise from various domains like legal regulations, mechanical restrictions and software engineering. Digital engineering is a relatively new discipline, which aims at minimizing friction losses, when different disciplines meet each other. This paper shows how model-based development on the basis of SysML can be used in digital engineering to develop a common system model. Such a common system model helps to track requirements, provides precise specification of (sub-)components together with necessary interfaces, intended interactions and information flow among them. This information supports the overall development process as it is a consistent specification of the whole system. Parts of this model can then be used as input for disciplines dealing with specific subproblems. 
It is also providing means to test the interaction of different components and can even be used for formal proofs of correctness using semantically well-founded models and automatic proof techniques. The paper is centered around a real-world case study: a logistic monitoring system at an airport cargo hub. This case study can be seen as a benchmarking scenario for research questions as well as an illustrative example for using SysML in digital engineering.}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @inproceedings{nafz2009gsf, title = {A Generic Software Framework for Role-Based Organic Computing Systems}, author = { Florian Nafz and Frank Ortmeier and Hella Seebach and Jan-Philipp Stegh\"{o}fer and Wolfgang Reif}, url = {https://pdfs.semanticscholar.org/20af/53cd78dabb3d7d131a8995daf4a843b73753.pdf}, year = {2009}, date = {2009-01-01}, booktitle = {SEAMS 2009: ICSE 2009 Workshop Software Engineering for Adaptive and Self-Managing Systems}, abstract = {An Organic Computing system has the ability to autonomously (re-)organize and adapt itself. Such a system exhibits so called self-x properties (e.g. self-healing) and is therefore more dependable as e.g. some failures can be compensated. Furthermore, it is easier to maintain as it automatically configures itself and more convenient to use because of its automatic adaptation to new situations. Design and construction of Organic Computing systems are, however, challenging tasks. The Organic Design Pattern (ODP) is a design guideline to aid engineers in these tasks. This paper introduces a generic software framework that allows for easy implementation of ODP-based Organic Computing Systems. The communication and service infrastructure of the multi-agent system Jadex is leveraged to provide interaction facilities and services to the application. The concepts of ODP are provided as generic, extensible elements that can be augmented with domain-specific behavior. 
The dynamic behavior of an ODP system is implemented and a generic observer/controller facility is provided. A real-world case study shows the applicability of the proposed approach and the handling of the software.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @conference{nafz2009kodkod, title = {A Universal Self-Organization Mechanism for Role-Based Organic Computing Systems}, author = { Florian Nafz and Frank Ortmeier and Hella Seebach and Jan-Philipp Stegh\"{o}fer and Wolfgang Reif}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/ATC_Paper.pdf}, year = {2009}, date = {2009-01-01}, booktitle = {Proceedings of the 6th International Conference on Autonomic and Trusted Computing (ATC-09)}, abstract = {An Organic Computing system has the ability to autonomously (re-)organize and adapt itself. Such a system exhibits so called self-x properties (e.g. self-healing) and is therefore more dependable as e.g. some failures can be compensated. Furthermore, it is easier to maintain as it automatically configures itself and more convenient to use because of its automatic adaptation to new situations. On the other hand, design and construction of Organic Computing systems is a challenging task. The Organic Design Pattern (ODP) is a design guideline to aid engineers in this task. This paper describes a universal reconfiguration mechanism for role-based Organic Computing systems. If a system is modeled in accordance with the ODP guideline, reconfiguration can be implemented generically on the basis of an of-the-shelf constraint solver. 
The paper shows how Kodkod can be used for this and illustrates the approach on an example from production automation.}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @conference{SSVAbstractSpec09, title = {An Abstract Specification Language for Static Program Analysis}, author = { Michael Vistein and Frank Ortmeier and Wolfgang Reif and Ralf Huuck and Ansgar Fehnker}, url = {http://www.sciencedirect.com/science/journal/15710661}, year = {2009}, date = {2009-01-01}, booktitle = {Proceedings of 4th International Workshop on System Software Verification (SSV'09)}, abstract = {Static program analysers typically come with a set of hard-coded checks, leaving little room for the user to add additional properties. In this work, we present a uniform approach to enable the specification of new static analysis checks in a concise manner. In particular, we present our GPSL/GXSL language to define new control flow checks. The language is closely related to CTL specifications that are combined with XPath queries. Moreover, we provide a number of specifications as implemented in our tool Goanna, and report on our experiences of adding new checks.}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @conference{IROShidingRT09, title = {Hiding Real-Time: A new Approach for the Software Development of Industrial Robots}, author = { Alwin Hoffmann and Andreas Angerer and Frank Ortmeier and Michael Vistein and Wolfgang Reif}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/hiding-realtime.pdf}, year = {2009}, date = {2009-01-01}, booktitle = {Proceedings of IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)}, abstract = {The application of industrial robots is strongly limited by the use of old-style robot programming languages. Due to these languages, the development of robotic software is a complex and expensive task requiring technical expertise and time. 
Hence, the use of industrial robots is often not a question of technical feasibility but of economic efficiency. This paper introduces a new architectural approach making available modern concepts of software engineering for industrial robots. The core idea is to hide the real-time critical robot control from application developers. Instead, common functionality is provided by a generic and extensible application programming interface and can be easily used. Hence, this approach can lead to an industrialization of software development for industrial robotics.}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @conference{ICALobjectCentric09, title = {Object-Centric Programming: A New Modeling Paradigm for Robotic Applications}, author = { Andreas Angerer and Alwin Hoffmann and Frank Ortmeier and Michael Vistein and Wolfgang Reif}, url = {https://www.researchgate.net/profile/Frank_Ortmeier/publication/224595825_Object-Centric_Programming_A_New_Modeling_Paradigm_for_Robotic_Applications/links/0deec51936122da045000000.pdf}, year = {2009}, date = {2009-01-01}, booktitle = {Proceedings of IEEE International Conference on Automation and Logistics}, abstract = {During the last 15 years the way how software products are being developed has changed dramatically. Today software is developed very efficiently in an industrialized manner. One of the cornerstones is that new development processes introduced a domain-centric view rather than a technology-centric view. As a result, big and complex software systems can be built very fast, reliable and according to customers' requirements. Unfortunately, these advances in software engineering have had little to no effect on the software development process for robotic applications. 
This paper explains how domain-centric design can be introduced in the domain of industrial robotics and which possible benefits it might yield.}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @conference{CandARobotikRedmond09, title = {Robotik aus Redmond - eine neue \"{A}ra?}, author = { Andreas Angerer and Alwin Hoffmann and Frank Ortmeier and Michael Vistein}, url = {http://www.computer-automation.de/nachrichten/steuerungsebene/bedienen-beobachten/article/das_potential_des_microsoft_robotics_developer_studio/12184/1639952e-237b-11de-ab89-001ec9efd5b0}, year = {2009}, date = {2009-01-01}, booktitle = {Computer \& Automation 04/2009}, publisher = {WEKA FACHMEDIEN GmbH}, abstract = {Mit dem ``Robotics Developer Studio'' adressiert Microsoft erstmals die Dom\"{a}ne klassischer Maschinenbauer. Serviceorientierung, grafische Programmierung und visuelle Simulation soll ihnen neue M\"{o}glichkeiten f\"{u}r die Erstellung von Robotik-Applikationen er\"{o}ffnen.}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @inproceedings{RIA-SASO08, title = {A Specification and Construction Paradigm for Organic Computing Systems}, author = { Matthias G\"{u}demann and Florian Nafz and Frank Ortmeier and Hella Seebach and Wolfgang Reif}, editor = {Sven Br\"{u}ckner and Paul Robertson and Umesh Bellur}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/A_Specification_and_Construction_Paradigm_for_Organic_Computing_Systems.pdf http://ieeexplore.ieee.org/document/4663427/}, year = {2008}, date = {2008-01-01}, booktitle = {Proceedings of the 2nd IEEE International Conference on Self-Adaptive and Self-Organizing Systems}, pages = {233--242}, publisher = {IEEE Computer Society Press (2008)}, abstract = {Organic Computing systems are systems which have the capability to autonomously (re-)organize and adapt themselves. The benefit of such systems with self-x properties is that they are more dependable, as they can compensate for some failures. 
They are easier to maintain, because they can automatically configure themselves and are more convenient to use because of automatic adaptation to new situations. While Organic Computing systems have a lot of desired properties, there still exists only little knowledge on how they can be designed and built. In this paper an approach for specification and construction of a certain class of Organic Computing systems is presented, called the Restore Invariant Approach (RIA). The core idea is that the behaviour of an Organic Computing system can be split into productive phases and self-x phases where adaptation is necessary. This allows for a generic description of how organic'' aspects can be specified and implemented. The approach will be illustrated by applying it to a design methodology for Organic Computing systems and further refining it to an explicit case study in the domain of production automation.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @conference{ordered-dcca-2008, title = {Computing Ordered Minimal Critical Sets}, author = { Matthias G\"{u}demann and Frank Ortmeier and Wolfgang Reif}, editor = {E. Schnieder and G. Tarnai}, url = {https://www.researchgate.net/profile/Frank_Ortmeier/publication/200505959_Computing_Ordered_Minimal_Critical_Sets/links/02e7e5224add1b851b000000.pdf}, year = {2008}, date = {2008-01-01}, booktitle = {Proceedings of the 7th Symposium on Formal Methods for Automation and Safety in Railway and Automotive Systems (FORMS/FORMAT 08)}, abstract = {Fault Tree Analysis is widely used in industry for safety analysis purposes. Correct manual construction of fault trees is difficult, therefore automatic techniques like DCCA have been developed. Nevertheless the resulting minimal critical sets do not contain any information about possible temporal ordering of the failures. On the other hand, in safety critical application, systems are often equipped with backup facilities. 
This means that a failure in the main system has no longer consequences after the backup system has started to work. In this situation an analysis that does not incorporate temporal ordering of failures would be too pessimistic. In these cases information about temporal ordering of failures like temporal fault tree gates can greatly enhance the accuracy of the results of a safety analysis. We present a method to automatically deduce temporal ordering information for failures directly from system models and critical failure combinations resulting from FTA or DCCA, based on temporal logic and model checking.}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @incollection{hni-safety-in-mechatronical-system08, title = {Developing Safety-critical Mechatronical Systems}, author = {Matthias G\"{u}demann and Frank Ortmeier and Wolfgang Reif}, editor = {J\"{u}rgen Gausemeier and Franz Rammig and Wilhelm Sch\"{a}fer}, url = {https://www.researchgate.net/profile/Frank_Ortmeier/publication/200505965_Developing_Safety-critical_Mechatronical_Systems/links/00b7d5224a7f3677a0000000/Developing-Safety-critical-Mechatronical-Systems.pdf}, year = {2008}, date = {2008-01-01}, booktitle = {Self-optimizing Mechatronic Systems: Design the Future}, volume = {223}, publisher = {HNI-Verlagsschriftenreihe}, abstract = {Developing high-assurance systems is always a challenging task. This is in par-ticular true for safety-critical mechatronical systems. For these systems it is not only necessary to develop efficient software, which must often run on processors with limited resources but also to take carefully into account what environment is to be controlled and how this environment can be monitored. Esterel Technologies SCADE Suite is a state-of-the-art development tool for safety-critical software. It is widely used in avionics and space applications. 
In this paper we show, how a model driven approach for software development can be used for mechatronical systems and what benefits can be achieved compared to traditional development processes. We illustrate the process on a real world case study: the height control system of the Elbe-Tunnel in Hamburg.}, keywords = {}, pubstate = {published}, tppubtype = {incollection} } @techreport{Ortmeier2008EmbeddingCTLin, title = {Embedding CTL* in an Extension to Interval Temporal Logic (ITL)}, author = { Frank Ortmeier and Michael Balser and Andriy Dunets and Simon B\"{a}umler}, year = {2008}, date = {2008-01-01}, number = {2008-16}, institution = {University of Augsburg}, abstract = {In this paper we present an embedding of the most common branching time logics (CTL/CTL*) in an extension of interval temporal logic (ITL+). The significance of this result is threefold: first the theoretical aspect is, that branching time and linear time are not so much different. A more practical aspect is that the intuitive interactive proof method of symbolic execution of ITL+ can be used for branching time logics as well. The opposite direction is interesting as well, for a subset of finite state systems, interactive verification of ITL+ formulas can be translated into a model checking problem. The proof presented in this paper has been done with the interactive theorem prover KIV. 
So this contribution can also be seen as a case study on reasoning about temporal logics in an interactive verification environment.}, keywords = {}, pubstate = {published}, tppubtype = {techreport} } @conference{oc-enase07, title = {Implementing Organic Systems with AgentService}, author = { Florian Nafz and Frank Ortmeier and Hella Seebach and Jan-Philipp Stegh\"{o}fer and Wolfgang Reif}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/Implementing_Organic_Systems_with_AgentService.pdf}, year = {2008}, date = {2008-01-01}, booktitle = {Evaluation of Novel Approaches to Software Engineering ENASE 2008}, publisher = {Springer}, abstract = {Designing and implementing Organic Computing systems is typically a difficult task. To facilitate the construction a design pattern for Organic Computing systems has been developed. This organic design pattern (ODP) helps in modeling and designing a broad class of Organic Computing systems. This paper focuses on the implementation of Organic Computing systems with the help of this pattern. The core idea is to provide a generic implementation by mapping ODP artifacts to artifacts of a multi-agent framework. The used framework “AgentService“ is one of the few C# multi-agent frameworks. 
In this paper a possible implementation as well as benefits and limitations are described.}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @conference{DBLP:conf/biostec/NafzOSR08, title = {Organic Computing for Health Care Systems - Possible Benefits and Challenges}, author = { Florian Nafz and Frank Ortmeier and Hella Seebach and Wolfgang Reif}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/Organic_Computing_for_Health_Care_Systems.pdf}, year = {2008}, date = {2008-01-01}, booktitle = {HEALTHINF (2)}, pages = {286-290}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @conference{SDIRPrototyping08, title = {Prototyping Plant Control Software with Microsoft Robotics Studio}, author = { Alwin Hoffmann and Florian Nafz and Frank Ortmeier and Andreas Schierl and Wolfgang Reif}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/Prototyping_Plant_Control_Software_MRS.pdf}, year = {2008}, date = {2008-01-01}, booktitle = {Proceedings of the 3rd International Workshop on "Software Development and Integration in Robotics" (SDIR-III), IEEE International Conference on Robotics and Automation}, abstract = {The development of industrial robotics applications is a complex and often a very expensive task. One of the core problems is that a lot of implementation and adaptation effort can only be done after the robotic hardware has been installed. This paper shows how Microsoft Robotics Studio (MSRS) can facilitate the fast prototyping of novel industrial applications and thus lower the overall development costs. Microsoft Robotics Studio is a development tool for creating software for robotic applications. It includes an asynchronous, service-oriented runtime and a realistic physics-based simulation environment. This allows for testing and improving software prototypes before any hardware is installed. 
As an example control software for a vision of tomorrow's production automation systems has been implemented and evaluated in the simulation environment of MSRS.}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @conference{ForumMechaSimulation08, title = {Simulations-basierte Programmierung von Industrierobotern}, author = { Frank Ortmeier and Alwin Hoffmann and Wolfgang Reif and Ulrich Huggenberger and Thomas Stumpfegger}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/IFM-2008-Final.doc}, year = {2008}, date = {2008-01-01}, booktitle = {Proceedings of Internationales Forum Mechatronik}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @conference{oc-odp-cec07, title = {Design and Construction of Organic Computing Systems}, author = { Hella Seebach and Frank Ortmeier and Wolfgang Reif}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/Design_and_Construction_of_Organic_Computing_Systems.pdf}, year = {2007}, date = {2007-01-01}, booktitle = {Proceedings of the IEEE Congress on Evolutionary Computation 2007}, publisher = {IEEE Computer Society Press}, abstract = {The next generation of embedded computing systems will have to meet new challenges. The systems are expected to act mainly autonomously, to dynamically adapt to changing environments and to interact with one another if necessary. Such systems are called organic. Organic Computing systems are similar to Autonomic Computing systems. They behave life-like and adapt to changes in their environment. In addition Organic Computing systems are often inspired by nature/biological phenomena. Design and construction of such systems brings new challenges for the software engineering process. In this paper we present a framework for design, construction and analysis of Organic Computing systems. 
It can facilitate design and construction as well as it can be used to (semi-)formally define organic properties like self-configuration or self-adaptation. We illustrate the framework on a real-world case study from production automation.}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @conference{formal-failure-models-dcds07, title = {Formal Failure Models}, author = { Frank Ortmeier and Matthias G\"{u}demann and Wolfgang Reif}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/failure-modeling.pdf}, year = {2007}, date = {2007-01-01}, booktitle = {Proceedings of the 1st IFAC Workshop on Dependable Control of Discrete Systems (DCDS 07)}, publisher = {Elsevier}, abstract = {Formal safety analysis methods have gained a lot of importance during the last years. All these methods have in common, that they rely on a formal model of a system which describes desired, functional behavior as well as unwanted erroneous behavior correctly. Most of the time the formal models are created in an ad hoc manner. This is very error prone and therefore compromises the benefit of the following formal analysis. In this paper we present a systematic approach to formally model failure modes. The approach can be combined with most formal safety analysis. We apply the method to a real world case study: a radio-based railroad crossing. We illustrate the results by applying a formal safety analysis method on the model.}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @inproceedings{oc_scade_iscas07, title = {Modeling of Self-Adaptive Systems with SCADE}, author = { Matthias G\"{u}demann and Andreas Angerer and Frank Ortmeier and Wolfgang Reif}, url = {https://www.researchgate.net/publication/224714868_Modeling_of_self-adaptive_systems_with_SCADE}, year = {2007}, date = {2007-01-01}, abstract = {An important property of embedded systems is dependability. Today this addresses mostly safety and reliability. 
Guaranteeing these properties is normally done by adding redundancy to the system. This approach is expensive and can not cope with changing environments. Therefore new designs are researched, which allow systems to self-adapt and self-heal. For broad acceptance in industry it is important, that organic systems can be modeled and analyzed with standard modeling tools and languages. We present a case study of an adaptive production automation cell modelled in the Lustre language using the SCADE Suite and the verification of functional properties. SCADE is used widely in industry, especially in safety critical applications. Being able to model and verify adaptive systems in SCADE could increase their acceptance for these target areas.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @inproceedings{DCCA-SCADE-SafeComp-07, title = {Using Deductive Cause Consequence Analysis (DCCA) with SCADE}, author = {Matthias G\"{u}demann and Frank Ortmeier and Wolfgang Reif}, url = {https://pdfs.semanticscholar.org/3862/b1fefcfbd287635bfcdf69704dd6667a0480.pdf}, year = {2007}, date = {2007-01-01}, booktitle = {Proceedings of SAFECOMP 2007}, publisher = {Springer LNCS 4680}, abstract = {Esterel Technologies' SCADE Suite is one of the most important development tools for software for safety-critical systems. It is used for designing many critical components of aerospace, automotive and transportation applications. For such systems safety analysis is a key requirement in the development process. In this paper we show how one formal safety analysis method -- Deductive Cause-Consequence Analysis (DCCA) -- can be integrated in the SCADE framework. This method allows for performing safety analysis mainly automatically. It uses SCADE's semantical model and SCADE's built in verification engine \emph{Design Verifier}. So the whole analysis can be done within one tool. 
This is of big importance, as a key feature for the acceptance of formal methods in broad engineering practice is, that they can be applied in an industrial development suite.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @conference{selfxGOR2006full, title = {Formal Modeling and Verification of Systems with Self-x Properties}, author = { Matthias G\"{u}demann and Frank Ortmeier and Wolfgang Reif}, editor = {Laurence T. Yang and Hai Jin and Jianhua Ma and Theo Ungerer}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/Formal_Modeling_and_Verification_of_Systems_with_Self-x_Properties.pdf}, isbn = {978-3-540-38619-3}, year = {2006}, date = {2006-09-01}, booktitle = {Proceedings of the 3rd International Conference on Autonomic and Trusted Computing (ATC-06)}, volume = {4158}, pages = {38--47}, publisher = {Springer}, address = {Berlin/Heidelberg}, series = {Lecture Notes in Computer Science}, abstract = {In this paper we present a case study in formal modeling and verification of systems with self-x properties. The example is a flexible robot production cell reacting to system failures and changing goals. The self-x mechanisms make the system more flexible and robust but endanger its functional correctness or other quality guarantees. 
We show how to verify such adaptive systems with a ``restore-invariant'' approach.}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @conference{GuNaReSe:2006:SafeSecureApplications, title = {Towards Safe and Secure Organic Computing Applications}, author = { Matthias G\"{u}demann and Florian Nafz and Wolfgang Reif and Hella Seebach}, editor = {Christian Hochberger and R\"{u}diger Liskowsky}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/GI-Workshop-OC-paperID_10.pdf}, isbn = {978-3-88579-187-4}, year = {2006}, date = {2006-09-01}, booktitle = {INFORMATIK 2006 -- Informatik f{\"u}r Menschen}, volume = {P-93}, pages = {153--160}, publisher = {K{\"o}llen Verlag}, address = {Bonn, Germany}, series = {GI-Edition -- Lecture Notes in Informatics}, abstract = {In this paper we present our ongoing work on ``organic computing''. We present an illustrative case study from program automation that uses OC-paradigms to be failure tolerant and to produce effectively. We present a way to build and verify a formal model of a self-adaptive system. We also give further ideas for formal modeling and our ideas of safety analysis of such systems. 
Another topic is how to build descriptive models and devising development processes for them.}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @conference{FTAFFB06, title = {Formal Fault Tree Analysis - Practical Experiences}, author = { Frank Ortmeier and Gerhard Schellhorn}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/Formal_Fault_Tree_Analysis-Practical_Experiences.pdf}, year = {2006}, date = {2006-01-01}, booktitle = {Proceedings of 6th International Workshop On Automated Verification of Critical Systems (AVoCS 06)}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @conference{ISoLA06, title = {Safety and Dependability Analysis of Self-Adaptive Systems}, author = { Matthias G\"{u}demann and Frank Ortmeier and Wolfgang Reif}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/Safety_and_Dependability_Analysis_of_Self-Adaptive_Systems.pdf}, year = {2006}, date = {2006-01-01}, booktitle = {Proceedings of the 2nd International Symposium on Leveraging Applications of Formal Methods, Verification and Validation (ISoLA 06)}, publisher = {IEEE CS Press}, abstract = {In this paper we present a technique for safety analysis of self-adaptive systems with formal methods. Self-adaptive systems are characterized by the ability to dynamically (self-)adapt and reorganize. The aim of this approach is to make the systems more dependable. But in general it is unclear how big the benefit is - compared to a traditional design. We propose a dependability analysis based on the results of safety analysis to measure the quality of self-x capabilities of an adaptive system with formal methods. This is important for unbiased and evidence-based decision making in early design phases. 
To illustrate the results we show the application of the method to a case study from the domain of production automation.}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @techreport{oc_workshop06, title = {Applications and Architectures in Organic Computing}, author = { Matthias G\"{u}demann and Florian Nafz and Andreas Pietzowski and Wolfgang Reif and Benjamin Satzger and Hella Seebach and Theo Ungerer}, year = {2006}, date = {2006-01-01}, journal = {DFG SPP 1183 ``Organic Computing''}, number = {21}, abstract = {This technical report documents a summary of the SPP-OC Workshop ``Architectures and Applications'' at the University of Augsburg. Architectures and applications of organic computing systems are two of the profile topics of the priority program ``Organic Computing'' of the German Research Foundation (DFG SPP 1183). The report contains the discussion results to the several questions discussed on the mentioned workshop.}, keywords = {}, pubstate = {published}, tppubtype = {techreport} } @techreport{oc_casestudies06, title = {Applications in Organic Computing}, author = { Florian Nafz and Matthias G\"{u}demann and Wolfgang Reif and Hella Seebach}, year = {2006}, date = {2006-01-01}, journal = {DFG SPP 1183 ``Organic Computing''}, number = {22}, abstract = {This technical report provides a summary of the applications and case studies used in the priority program ``Organic Computing'' of the German Research Foundation (DFG SPP 1183). All projects were asked to fill out a consistent form with several points to the respective application. 
These are presented in this report.}, keywords = {}, pubstate = {published}, tppubtype = {techreport} } @book{ortmeier-diss05, title = {Formale Sicherheitsanalyse}, author = { Frank Ortmeier}, editor = {Frank Ortmeier}, url = {http://www.logos-verlag.de/cgi-bin/engbuchmid?isbn=1277&lng=eng&id=}, year = {2006}, date = {2006-01-01}, publisher = {Logos Verlag Berlin}, key = {ISBN 3-8325-1277-2}, keywords = {}, pubstate = {published}, tppubtype = {book} } @article{entcs06, title = {The User Interface of the KIV Verification System - A System Description}, author = {Dominik Haneberg and Simon B\"{a}umler and Michael Balser and Holger Grandy and Frank Ortmeier and Wolfgang Reif and Gerhard Schellhorn and Jonathan Schmitt and Kurt Stenzel}, url = {https://www.researchgate.net/publication/200505968_The_User_Interface_of_the_KIV_Verification_System_---_A_System_Description}, year = {2006}, date = {2006-01-01}, journal = {Electronic Notes in Theoretical Computer Science UITP special issue}, abstract = {This article describes the sophisticated graphical user interface (GUI) of the KIV verification system. KIV is a verification system that works on structured algebraic specifications. The KIV GUI provides means for developing and editing structured algebraic specifications and for developing proofs of theorems. The complete development process is performed through the GUI. For editing the specification files XEmacs is used, and for the management of the structured algebraic specifications we use daVinci, an extendable graph drawing tool which has been integrated into the KIV user interface. As proving is the most time-consuming part of formal verification, the most important part of the KIV GUI is our user interface for proof development. The proof is represented as a tree and can be manipulated through context menus. The main proof work is done in a proof window where the sequent of the current goal, the applicable rules and the main menu are displayed. 
Which rules are applicable depends on the current goal. KIV also supports the context-sensitive application of proof rules.}, keywords = {}, pubstate = {published}, tppubtype = {article} } @conference{IFAC05, title = {Deductive Cause-Consequence Analysis (DCCA)}, author = { Frank Ortmeier and Wolfgang Reif and Gerhard Schellhorn}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/DCCA.pdf}, isbn = {978-3-902661-75-3}, year = {2005}, date = {2005-01-01}, booktitle = {Proceedings of the 16th IFAC World Congress}, publisher = {Elsevier}, key = {ISBN: 978-0-08-045108-4 and 0-08-045108-X}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @inproceedings{EDCC05, title = {Formal Safety Analysis of a Radio-Based Railroad Crossing using Deductive Cause-Consequence Analysis (DCCA)}, author = { Frank Ortmeier and Wolfgang Reif and Gerhard Schellhorn}, url = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.147.2193&rep=rep1&type=pdf}, year = {2005}, date = {2005-01-01}, booktitle = {Proceedings of 5th European Dependable Computing Conference (EDCC 05)}, volume = {3463}, publisher = {Springer}, series = {LNCS}, abstract = {In this paper we present the formal safety analysis of a radio-based railroad crossing. We use deductive cause-consequence analysis (DCCA) as analysis method. DCCA is a novel technique to analyze safety of embedded systems with formal methods. It substitutes error-prone informal reasoning by mathematical proofs. DCCA allows to rigorously prove whether a failure on component level is the cause for system failure or not. DCCA generalizes the two most common safety analysis techniques: failure modes and effects analysis (FMEA) and fault tree analysis (FTA). We apply the method to a real world case study: a radio-based railroad crossing. 
We illustrate the results of DCCA for this example and compare them to results of other formal safety analysis methods like formal FTA.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @conference{fehlermodellierung05, title = {Formal Safety Analysis of Transportation Control Systems}, author = { Frank Ortmeier and Wolfgang Reif}, url = {http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.147.3208&rep=rep1&type=pdf}, year = {2005}, date = {2005-01-01}, booktitle = {Workshop at SEFM 2005}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @article{elbtunneldeutsch04, title = {Formale Sicherheitsanalyse - Eine Anwendungsfallstudie: Das H\"{o}henkontrollsystem des Elbtunnels in Hamburg}, author = { Frank Ortmeier and Gerhard Schellhorn and Andreas Thums and Wolfgang Reif}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/atp.doc https://www.researchgate.net/publication/200505988_Formale_Sicherheitsanalyse_-_Eine_Anwendungsfallstudie_Das_Hohenkontrollsystem_des_Elbtunnels_in_Hamburg}, year = {2005}, date = {2005-01-01}, journal = {atp - Automatisierungstechnische Praxis}, volume = {2}, pages = {52--59}, keywords = {}, pubstate = {published}, tppubtype = {article} } @conference{UITP05, title = {The User Interface of the KIV Verification System --- A System Description}, author = { Dominik Haneberg and Simon B\"{a}umler and Michael Balser and Holger Grandy and Frank Ortmeier and Wolfgang Reif and Gerhard Schellhorn and Jonathan Schmitt and Kurt Stenzel}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/uitp05.pdf}, year = {2005}, date = {2005-01-01}, booktitle = {Proceedings of UITP 05}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @incollection{formosa04-approach, title = {Combining Formal Methods and Safety Analysis -- The ForMoSA Approach}, author = { Frank Ortmeier and Andreas Thums and Gerhard Schellhorn and Wolfgang Reif}, url = 
{http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.93.9263&rep=rep1&type=pdf}, year = {2004}, date = {2004-01-01}, booktitle = {Integration of Software Specification Techniques for Applications in Engineering (INT 04)}, publisher = {Springer LNCS 3147}, abstract = {In the ForMoSA project [18] an integrated approach for safety analysis of critical, embedded systems has been developed. The approach brings together the best of engineering practice, formal methods and mathematics: traditional safety analysis, temporal logics and verification, and statistics and optimization. These three orthogonal techniques cover three different aspects of safety: fault tolerance, functional correctness and quantitative analysis. The ForMoSA approach combines these techniques to answer the safety relevant questions in a structured and formal way. Furthermore, the tight combination of methods from different analysis domains yields results which can not be produced by any single technique. The methodology was applied in form of case studies to different industrial domains. 
One of them is the height control of the Elbtunnel in Hamburg [17] from the domain of electronic traffic control, which we present as an illustrating example.}, keywords = {}, pubstate = {published}, tppubtype = {incollection} } @techreport{ortmeier04:fs-spec, title = {Failure-Sensitive Specification: A Formal Method for Finding Failure Modes}, author = { Frank Ortmeier and Wolfgang Reif}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/Failure-Sensitive_Specification_A_Formal_Method_for_Finding_Failure_Modes.pdf}, year = {2004}, date = {2004-01-01}, number = {3}, institution = {Institut f}, keywords = {}, pubstate = {published}, tppubtype = {techreport} } @conference{wcc04-toulouse-long, title = {Integrated Formal Methods for safety analysis of train systems}, author = { Frank Ortmeier and Wolfgang Reif and Gerhard Schellhorn and Andreas Thums}, editor = {Ren\'{e} Jacquart}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/Integrated_Formal_Methods_for_safety_analysis_of_train_systems.pdf}, isbn = {1-4020-8156-1}, year = {2004}, date = {2004-01-01}, publisher = {Kluwer Academic Press}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @inproceedings{formosa04-chartverif, title = {Interactive Verification of Statecharts}, author = { Andreas Thums and Frank Ortmeier and Wolfgang Reif and Gerhard Schellhorn}, editor = {Hartmut Ehrig}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/Interactive_Verification_of_Statecharts.pdf https://link.springer.com/chapter/10.1007/978-3-540-27863-4_20}, year = {2004}, date = {2004-01-01}, booktitle = {Integration of Software Specification Techniques for Applications in Engineering}, pages = {355 -- 373}, publisher = {Springer LNCS 3147}, abstract = {In this paper, we present an approach to the interactive verification of statecharts. 
We use STATEMATE statecharts for the formal specification of safety critical systems and Interval Temporal Logic to formalize the proof conditions. To handle infinite data, complex functions and predicates, we use algebraic specifications. Our verification approach is a basis for the aim of the project ForMoSA to put safety analysis techniques on formal grounds. As part of this approach, fault tree analysis (FTA) has been formalized yielding conditions that can be verified using the calculus defined in this paper. Verification conditions resulting from the formal FTA of the radio-based level crossing control have been successfully verified.}, keywords = {}, pubstate = {published}, tppubtype = {inproceedings} } @incollection{formosa04-intro, title = {Introduction to Subject Area "Verification"}, author = { Frank Ortmeier and Wolfgang Reif and Gerhard Schellhorn}, url = {https://www.isse.uni-augsburg.de/publications/reif/2004-verification_intro/2004-verification-intro-pdf.pdf}, year = {2004}, date = {2004-01-01}, booktitle = {Integration of Software Specification Techniques for Applications in Engineering}, publisher = {Springer LNCS 3147}, abstract = {Over the last two decades the use of software in technical applications has dramatically increased. Almost all real-world systems are now embedded systems consisting of hardware and software components. Just think of modern automobiles; every new car comes equipped with computers that have many tasks in almost all parts of the car: fuel injection rates of the engine, airbags, anti-blocking systems (ABS) for brakes or the anti-theft device are some examples.}, keywords = {}, pubstate = {published}, tppubtype = {incollection} } @conference{safety-opti-ffb04, title = {Safety Optimization of a radio-based railroad crossing}, author = { Frank Ortmeier and Gerhard Schellhorn and Wolfgang Reif}, editor = {E. Schnieder and G. 
Tarnai}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/Safety_Optimization_forms.pdf}, isbn = {3-9803363-8-7}, year = {2004}, date = {2004-01-01}, booktitle = {Proceedings of Formal Methods for Automation and Safety in Railway and Automotive Systems (FORMS/FORMAT 2004)}, key = {ISBN 3-9803363-8-7}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @conference{safety-opt04, title = {Safety Optimization: A Combination of Fault Tree Analysis and Optimization Techniques}, author = { Frank Ortmeier and Wolfgang Reif}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/safety-opti.pdf}, isbn = {0-7695-2052-9}, year = {2004}, date = {2004-01-01}, booktitle = {Proceedings of the Conference on Dependable Systems and Networks (DSN 04)}, publisher = {IEEE Computer Society}, address = {Florence}, key = {ISBN 0-7695-2052-9}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @conference{TO03, title = {Formal Safety Analysis in {T}ransportation Control}, author = { Andreas Thums and Frank Ortmeier}, editor = {E. Schnieder}, year = {2003}, date = {2003-01-01}, booktitle = {International Workshop on Software Specification of Safety Relevant Transportation Control Tasks}, volume = {12 (no. 535)}, publisher = {VDI Verlag GmbH}, series = {VDI Fortschritt-Berichte}, keywords = {}, pubstate = {published}, tppubtype = {conference} } @article{OSTRress03, title = {Safety Analysis of the {H}eight Control System for the Elbtunnel}, author = { Frank Ortmeier and Wolfgang Reif and Gerhard Schellhorn and Andreas Thums and Bernhard Hering and Helmut Trappschuh}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/Safety_Analysis_of_the_Height_Control_System_for_the_Elbtunnel.pdf}, year = {2003}, date = {2003-01-01}, journal = {Reliability Engineering and System Safety}, volume = {81}, number = {3}, pages = {259--268}, abstract = {A new tunnel tube crossing the river Elbe is currently built in Hamburg. 
Therefore, a new height control system is required. A computer examines the signals from light barriers and overhead sensors to detect vehicles, which try to drive into a tube with insufficient height. If necessary, it raises an alarm that blocks the road. This paper describes the application of two safety analysis techniques on this embedded system: model checking has been used to prove functional correctness with respect to a formal model. Fault tree analysis has validated the model and considered technical defects. Their combination has uncovered a safety flaw, led to a precise requirement specification for the software, and showed various ways to improve system safety.}, keywords = {}, pubstate = {published}, tppubtype = {article} } @techreport{OT02, title = {Formale Methoden und Sicherheitsanalyse}, author = { Frank Ortmeier and Andreas Thums}, url = {https://cse.cs.ovgu.de/cse-wordpress/wp-content/uploads/2017/10/Formale_Methoden_und_Sicherheitsanalyse.pdf}, year = {2002}, date = {2002-01-01}, number = {15}, institution = {Universit{\"a}t Augsburg}, note = {(in German)}, keywords = {}, pubstate = {published}, tppubtype = {techreport} } @conference{OSTRSafecomp2002, title = {Safety Analysis of the Height Control System for the Elbtunnel}, author = { Frank Ortmeier and Wolfgang Reif and Gerhard Schellhorn and Andreas Thums and Bernhard Hering and Helmut Trappschuh}, url = {http://www.springerlink.com/content/50lvk1grndxja1kd/?p=9599f6e547e44be691c61207e379d3d2&pi=0}, year = {2002}, date = {2002-01-01}, booktitle = {SafeComp 2002}, pages = {296--308}, publisher = {Springer LNCS 2434}, address = {Catania, Italy}, keywords = {}, pubstate = {published}, tppubtype = {conference} }