BibTeX export

 

@inproceedings{Ruschemeier2023,
  author    = {Ruschemeier, Hannah},
  title     = {The Problem of the Automation Bias in the Public Sector: A Legal Perspective},
  booktitle = {Proceedings of the Weizenbaum Conference 2023: {AI}, Big Data, Social Media, and People on the Move},
  year      = {2023},
  pages     = {1--11},
  address   = {Berlin},
  publisher = {Weizenbaum Institute for the Networked Society -- The German Internet Institute},
  doi       = {10.34669/wi.cp/5.6},
  abstract  = {The automation bias describes the phenomenon, proven in behavioural psychology, that people place excessive trust in the decision suggestions of machines. The law currently sees a dichotomy - and covers only fully automated decisions, and not those involving human decision makers at any stage of the process. However, the widespread use of such systems, for example to inform decisions in education or benefits administration, creates a leverage effect and increases the number of people affected. Particularly in environments where people routinely have to make a large number of similar decisions, the risk of automation bias increases. As an example, automated decisions providing suggestions for job placements illustrate the particular challenges of decision support systems in the public sector. So far, the risks have not been sufficiently addressed in legislation, as the analysis of the GDPR and the draft Artificial Intelligence Act show. I argue for the need for regulation and present initial approaches.},
  keywords  = {Diskriminierung; discrimination; Arbeitsmarkt; labor market; Datenschutz; data protection; k{\"u}nstliche Intelligenz; artificial intelligence; Algorithmus; algorithm; Entscheidungsfindung; decision making},
}