{"id":5,"date":"2018-10-30T13:27:26","date_gmt":"2018-10-30T13:27:26","guid":{"rendered":"https:\/\/the7.io\/g-business\/?page_id=5"},"modified":"2026-03-31T21:44:33","modified_gmt":"2026-03-31T19:44:33","slug":"home-3","status":"publish","type":"page","link":"https:\/\/klaeslab.de\/de\/","title":{"rendered":"Home"},"content":{"rendered":"<div class=\"wpb-content-wrapper\"><div class=\"vc_row wpb_row vc_row-fluid\"><div class=\"wpb_column vc_column_container vc_col-sm-12\"><div class=\"vc_column-inner\"><div class=\"wpb_wrapper\">\n\t<div class=\"wpb_raw_code wpb_raw_html wpb_content_element\" >\n\t\t<div class=\"wpb_wrapper\">\n\t\t\t<!DOCTYPE html>\r\n<html lang=\"en\">\r\n<head>\r\n    <meta charset=\"UTF-8\">\r\n    <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">\r\n    <title>KlaesLab - Neurotechnology at Ruhr-University Bochum<\/title>\r\n    <script src=\"https:\/\/cdn.tailwindcss.com\"><\/script>\r\n    <link href=\"https:\/\/cdnjs.cloudflare.com\/ajax\/libs\/font-awesome\/6.0.0\/css\/all.min.css\" rel=\"stylesheet\">\r\n    <link rel=\"preconnect\" href=\"https:\/\/fonts.googleapis.com\">\r\n    <link rel=\"preconnect\" href=\"https:\/\/fonts.gstatic.com\" crossorigin>\r\n    <link href=\"https:\/\/fonts.googleapis.com\/css2?family=Varela+Round&display=swap\" rel=\"stylesheet\">\r\n    \r\n    <script>\r\n        tailwind.config = {\r\n            theme: {\r\n                extend: {\r\n                    colors: {\r\n                        rub: {\r\n                            blue: '#003560',\r\n                            green: '#8dae10', \/\/ RUB Green\r\n                            light: '#f3f6e5'  \/\/ Subtle green tint for background\r\n                        },\r\n                        slate: {\r\n                            850: '#15202e',\r\n                        }\r\n                    },\r\n                    fontFamily: {\r\n                        sans: ['\"Varela Round\"', 'sans-serif'],\r\n             
       }\r\n                }\r\n            }\r\n        }\r\n    <\/script>\r\n    <style>\r\n        .gradient-text {\r\n            background: linear-gradient(to right, #8dae10, #003560);\r\n            -webkit-background-clip: text;\r\n            -webkit-text-fill-color: transparent;\r\n        }\r\n    <\/style>\r\n<\/head>\r\n<body class=\"font-sans text-slate-700 bg-gradient-to-br from-slate-50 via-white to-rub-light antialiased min-h-screen flex items-center\">\r\n\r\n    <!-- Main Content: Welcome & Team -->\r\n    <main class=\"w-full\">\r\n        <div class=\"w-full max-w-[1920px] mx-auto px-4 lg:px-8 py-8 lg:py-12\">\r\n            <div class=\"grid grid-cols-1 lg:grid-cols-2 gap-8 lg:gap-12 items-center\">\r\n                <!-- Text Section -->\r\n                <div class=\"order-2 lg:order-1 flex flex-col justify-center h-full\">\r\n                    <h1 class=\"text-4xl lg:text-6xl font-bold text-slate-900 mb-6 leading-tight\">\r\n                        Welcome to <br\/>\r\n                        <span class=\"text-rub-blue\">Klaes<\/span><span class=\"text-rub-green\">Lab<\/span>\r\n                    <\/h1>\r\n                    <div class=\"w-32 h-2 bg-rub-green mb-6 rounded-full\"><\/div>\r\n                    \r\n                    <div class=\"prose prose-slate prose-lg text-slate-600 leading-relaxed max-w-none font-sans\">\r\n                        <p class=\"mb-4\">\r\n                            Led by <strong>Professor Christian Klaes<\/strong>, the KlaesLab specializes in exploring the intersection between biological and artificial systems. Professor Klaes holds the professorship for neurotechnology at the <strong>Ruhr-University Bochum<\/strong> and is an expert in human and monkey brain-computer interfaces, both invasive and non-invasive. 
Currently, the lab is focused on using state-of-the-art machine learning approaches for brain-machine interfaces to control devices, developing advanced neural analysis methods to control human hand exoskeletons, and conducting virtual reality research using VR, EEG, and invasive neural recordings from humans.\r\n                        <\/p>\r\n                        <p>\r\n                            The lab has a broad network of collaborations within Ruhr-University Bochum, as well as with external partners such as Caltech, Ruhr University of Applied Sciences (Iossifidis Lab), German Primate Center, University of Madeira, Snap GmbH, and many others.\r\n                        <\/p>\r\n                    <\/div>\r\n\r\n                    <div class=\"flex flex-wrap gap-3 mt-8\">\r\n                        <div class=\"flex items-center gap-2 px-5 py-2 bg-white rounded-xl shadow-sm text-slate-700 font-bold text-sm border border-slate-200 hover:border-rub-green\/50 hover:text-rub-blue transition duration-300\">\r\n                            <i class=\"fa-solid fa-microchip text-rub-green text-base\"><\/i> Invasive BCI\r\n                        <\/div>\r\n                        <div class=\"flex items-center gap-2 px-5 py-2 bg-white rounded-xl shadow-sm text-slate-700 font-bold text-sm border border-slate-200 hover:border-rub-green\/50 hover:text-rub-blue transition duration-300\">\r\n                            <i class=\"fa-solid fa-headset text-rub-green text-base\"><\/i> Non-Invasive BCI\r\n                        <\/div>\r\n                        <div class=\"flex items-center gap-2 px-5 py-2 bg-white rounded-xl shadow-sm text-slate-700 font-bold text-sm border border-slate-200 hover:border-rub-green\/50 hover:text-rub-blue transition duration-300\">\r\n                            <i class=\"fa-solid fa-vr-cardboard text-rub-green text-base\"><\/i> Virtual Reality\r\n                        <\/div>\r\n                    <\/div>\r\n                
<\/div>\r\n                \r\n                <!-- Image Section -->\r\n                <div class=\"order-1 lg:order-2 h-full flex items-center\">\r\n                    <div class=\"relative w-full group\">\r\n                        <div class=\"absolute -inset-2 bg-gradient-to-tr from-rub-green via-lime-400 to-rub-blue rounded-3xl blur opacity-30 group-hover:opacity-50 transition duration-1000\"><\/div>\r\n                        <div class=\"relative w-full rounded-2xl overflow-hidden shadow-xl bg-slate-200 ring-4 ring-white aspect-[16\/10]\">\r\n                            <img decoding=\"async\" src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2023\/07\/klaeslab_team-e1688475000148.jpeg\" \r\n                                 alt=\"KlaesLab Team\" \r\n                                 class=\"w-full h-full object-cover transform transition duration-1000 group-hover:scale-105\"\r\n                                 onerror=\"this.src='https:\/\/images.unsplash.com\/photo-1522071820081-009f0129c71c?ixlib=rb-4.0.3&auto=format&fit=crop&w=1600&q=80'\">\r\n                            <div class=\"absolute inset-0 bg-gradient-to-t from-black\/60 via-transparent to-transparent\"><\/div>\r\n                            <div class=\"absolute bottom-0 left-0 right-0 p-6 text-white\">\r\n                                <p class=\"font-bold text-xl tracking-wide\">The KlaesLab Team<\/p>\r\n                                <p class=\"text-slate-200 text-sm mt-1\">Ruhr-University Bochum<\/p>\r\n                            <\/div>\r\n                        <\/div>\r\n                    <\/div>\r\n                <\/div>\r\n            <\/div>\r\n        <\/div>\r\n    <\/main>\r\n\r\n<\/body>\r\n<\/html>\n\t\t<\/div>\n\t<\/div>\n<div class=\"vc_empty_space\"   style=\"height: 32px\"><span class=\"vc_empty_space_inner\"><\/span><\/div><\/div><\/div><\/div><\/div><div class=\"vc_row wpb_row vc_row-fluid\"><div class=\"wpb_column vc_column_container vc_col-sm-12\"><div 
class=\"vc_column-inner\"><div class=\"wpb_wrapper\"><div class=\"vc_empty_space\"   style=\"height: 60px\"><span class=\"vc_empty_space_inner\"><\/span><\/div><\/div><\/div><\/div><\/div><div data-vc-full-width=\"true\" data-vc-full-width-temp=\"true\" data-vc-full-width-init=\"false\" class=\"vc_row wpb_row vc_row-fluid\"><div class=\"wpb_column vc_column_container vc_col-sm-12\"><div class=\"vc_column-inner\"><div class=\"wpb_wrapper\">\n\t<div class=\"wpb_text_column wpb_content_element \" >\n\t\t<div class=\"wpb_wrapper\">\n\t\t\t<h1 style=\"text-align: center;\"><strong>News and Events<\/strong><\/h1>\n\n\t\t<\/div>\n\t<\/div>\n<div class=\"vc_separator wpb_content_element vc_separator_align_center vc_sep_width_100 vc_sep_border_width_3 vc_sep_pos_align_center vc_separator_no_text vc_custom_1701866478414 wpb_content_element  vc_custom_1701866478414 wpb_content_element\" ><span class=\"vc_sep_holder vc_sep_holder_l\"><span style=\"border-color:#17365c;\" class=\"vc_sep_line\"><\/span><\/span><span class=\"vc_sep_holder vc_sep_holder_r\"><span style=\"border-color:#17365c;\" class=\"vc_sep_line\"><\/span><\/span>\n<\/div><div id=\"ult-carousel-374615028569d5ee9367afd\" class=\"ult-carousel-wrapper   ult_horizontal\" data-gutter=\"30\" data-rtl=\"false\" ><div class=\"ult-carousel-268218853969d5ee9367ad1 \" ><div class=\"ult-item-wrap\" data-animation=\"animated no-animation\">\n\t<div class=\"wpb_text_column wpb_content_element \" >\n\t\t<div class=\"wpb_wrapper\">\n\t\t\t<p><strong>13.03.2026<\/strong><\/p>\n<p><b>\ud83d\udee1\ufe0f Announcing the BrainGuard Project on Neuro-Cybersecurity!<\/b><\/p>\n<p><img loading=\"lazy\" decoding=\"async\" class=\"alignnone wp-image-58591 size-medium\" src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2026\/03\/2026_02_11_TK_Klaes_Christian-31-300x200.jpg\" alt=\"\" width=\"300\" height=\"200\" srcset=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2026\/03\/2026_02_11_TK_Klaes_Christian-31-300x200.jpg 300w, 
https:\/\/klaeslab.de\/wp-content\/uploads\/2026\/03\/2026_02_11_TK_Klaes_Christian-31-768x512.jpg 768w, https:\/\/klaeslab.de\/wp-content\/uploads\/2026\/03\/2026_02_11_TK_Klaes_Christian-31-18x12.jpg 18w, https:\/\/klaeslab.de\/wp-content\/uploads\/2026\/03\/2026_02_11_TK_Klaes_Christian-31.jpg 804w\" sizes=\"auto, (max-width: 300px) 100vw, 300px\" \/><\/p>\n<p>We are thrilled to announce the launch of the <b>BrainGuard<\/b> project, an innovative research initiative focused on developing <i>Cybersecurity for Neurotechnologies<\/i>.<\/p>\n<div class=\"ult-item-wrap\" data-animation=\"animated no-animation\"><div class=\"claps-toggle-text\" style=\"--collapse-hf:1px;--collapse-ht:1px;\" data-title=\"Read More \u25bc\" data-swaptitle=\"Read Less \u25b2\" data-icon=\"&#9660;\" data-swapicon=\"&#9650;\" data-ht=\"1px\" data-hf=\"1px\" data-above=\"0\"><div class=\"claps-text-inner claps-text-toggle-collapsed\"><\/p>\n<p>Conducted in collaboration with the research groups of Prof. Dr. Christian Klaes and Prof. Dr. Christian Zenger at Ruhr University Bochum, alongside industry partners PHYSEC and snap Discovery, this project aims to protect highly sensitive neural data and Brain-Computer Interfaces from emerging cyber threats. You can learn more about our mission to safeguard digital medicine in the <a href=\"https:\/\/news.rub.de\/presseinformationen\/wissenschaft\/2026-02-26-brainguard-cybersecurity-fuer-neurotechnologien\">official RUB press release<\/a>. 
\ud83e\udde0\ud83d\udd12\ud83d\ude80<\/p>\n<p><\/div><\/div><\/div>\n\n\t\t<\/div>\n\t<\/div>\n<\/div><div class=\"ult-item-wrap\" data-animation=\"animated no-animation\">\n\t<div class=\"wpb_text_column wpb_content_element \" >\n\t\t<div class=\"wpb_wrapper\">\n\t\t\t<p><strong>28.02.2026<\/strong><\/p>\n<p><b>\ud83c\udf93 Congratulations to Jannis Steinert on his Master&#8217;s Thesis Defense!<\/b><\/p>\n<p>&nbsp;<\/p>\n<p><img loading=\"lazy\" decoding=\"async\" class=\"alignnone wp-image-58575 size-medium\" src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2026\/02\/Opera-Snapshot_2026-02-28_021953_blob-300x156.png\" alt=\"\" width=\"300\" height=\"156\" srcset=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2026\/02\/Opera-Snapshot_2026-02-28_021953_blob-300x156.png 300w, https:\/\/klaeslab.de\/wp-content\/uploads\/2026\/02\/Opera-Snapshot_2026-02-28_021953_blob-1024x533.png 1024w, https:\/\/klaeslab.de\/wp-content\/uploads\/2026\/02\/Opera-Snapshot_2026-02-28_021953_blob-768x399.png 768w, https:\/\/klaeslab.de\/wp-content\/uploads\/2026\/02\/Opera-Snapshot_2026-02-28_021953_blob-1536x799.png 1536w, https:\/\/klaeslab.de\/wp-content\/uploads\/2026\/02\/Opera-Snapshot_2026-02-28_021953_blob-2048x1065.png 2048w, https:\/\/klaeslab.de\/wp-content\/uploads\/2026\/02\/Opera-Snapshot_2026-02-28_021953_blob-18x9.png 18w\" sizes=\"auto, (max-width: 300px) 100vw, 300px\" \/><\/p>\n<p>&nbsp;<\/p>\n<p>We are delighted to congratulate <b>Jannis Steinert<\/b> on the successful completion and defense of his Master&#8217;s thesis on <i>Adaptive Human-Robot Collaboration<\/i>.<\/p>\n<div class=\"ult-item-wrap\" data-animation=\"animated no-animation\"><div class=\"claps-toggle-text\" style=\"--collapse-hf:1px;--collapse-ht:1px;\" data-title=\"Read More \u25bc\" data-swaptitle=\"Read Less \u25b2\" data-icon=\"&#9660;\" data-swapicon=\"&#9650;\" data-ht=\"1px\" data-hf=\"1px\" data-above=\"0\"><div class=\"claps-text-inner claps-text-toggle-collapsed\"><\/p>\n<p>Conducted in 
collaboration with the <a href=\"https:\/\/www.lps.ruhr-uni-bochum.de\">Lehrstuhl f\u00fcr Produktionssysteme (LPS)<\/a> at Ruhr University Bochum. We wish Jannis all the best for his next steps and are proud to have supported this exciting research at the intersection of neuroscience and production engineering! \ud83e\udd16\ud83d\udc41\ufe0f\ud83c\udf89<\/p>\n<p><\/div><\/div><\/div>\n\n\t\t<\/div>\n\t<\/div>\n<\/div><div class=\"ult-item-wrap\" data-animation=\"animated no-animation\">\n\t<div class=\"wpb_text_column wpb_content_element \" >\n\t\t<div class=\"wpb_wrapper\">\n\t\t\t<p><strong>22.01.2026<\/strong><\/p>\n<p><b>Call for Papers: Virtual Bodies for Real Humans<\/b><\/p>\n<p>&nbsp;<\/p>\n<p><img loading=\"lazy\" decoding=\"async\" class=\"alignnone wp-image-58551 size-medium\" src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2026\/01\/RT-promotion-Square-300x300.jpg\" alt=\"\" width=\"300\" height=\"300\" srcset=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2026\/01\/RT-promotion-Square-300x300.jpg 300w, https:\/\/klaeslab.de\/wp-content\/uploads\/2026\/01\/RT-promotion-Square-1024x1024.jpg 1024w, https:\/\/klaeslab.de\/wp-content\/uploads\/2026\/01\/RT-promotion-Square-150x150.jpg 150w, https:\/\/klaeslab.de\/wp-content\/uploads\/2026\/01\/RT-promotion-Square-768x768.jpg 768w, https:\/\/klaeslab.de\/wp-content\/uploads\/2026\/01\/RT-promotion-Square-12x12.jpg 12w, https:\/\/klaeslab.de\/wp-content\/uploads\/2026\/01\/RT-promotion-Square.jpg 1200w\" sizes=\"auto, (max-width: 300px) 100vw, 300px\" \/><\/p>\n<p>&nbsp;<\/p>\n<p>We are excited to announce a new Research Topic in <i>Frontiers in Virtual Reality<\/i>, hosted by Prof. Christian Klaes, Dr. Michael Wiesing, and Dr. 
Artur Pilacinski.<\/p>\n<div class=\"ult-item-wrap\" data-animation=\"animated no-animation\"><div class=\"claps-toggle-text\" style=\"--collapse-hf:1px;--collapse-ht:1px;\" data-title=\"Read More \u25bc\" data-swaptitle=\"Read Less \u25b2\" data-icon=\"&#9660;\" data-swapicon=\"&#9650;\" data-ht=\"1px\" data-hf=\"1px\" data-above=\"0\"><div class=\"claps-text-inner claps-text-toggle-collapsed\"><\/p>\n<p>Titled &#8220;Virtual Bodies for Real Humans &#8211; Avatars, Sensations and Emotions,&#8221; this collection investigates the bridge between technology and human experience, from phantom touch to emotional embodiment. We invite you to <a href=\"https:\/\/www.frontiersin.org\/research-topics\/75531\/virtual-bodies-for-real-humans---avatars-sensations-and-emotions\">submit your work<\/a>! \ud83e\udd7d\ud83d\udcdd<\/p>\n<p><\/div><\/div><\/div>\n\n\t\t<\/div>\n\t<\/div>\n<\/div><div class=\"ult-item-wrap\" data-animation=\"animated no-animation\">\n\t<div class=\"wpb_text_column wpb_content_element \" >\n\t\t<div class=\"wpb_wrapper\">\n\t\t\t<p><b>03.12.2025<\/b><\/p>\n<p><b>PhD Success: Dr. 
Susanne Dyck \u2013 Motor Sequence Learning<\/b><\/p>\n<p><img loading=\"lazy\" decoding=\"async\" class=\"alignnone wp-image-58501 size-medium\" src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/12\/A77B8D78-B43A-431C-BECA-8375243D7BD8-300x225.jpeg\" alt=\"\" width=\"300\" height=\"225\" srcset=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/12\/A77B8D78-B43A-431C-BECA-8375243D7BD8-300x225.jpeg 300w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/12\/A77B8D78-B43A-431C-BECA-8375243D7BD8-1024x768.jpeg 1024w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/12\/A77B8D78-B43A-431C-BECA-8375243D7BD8-768x576.jpeg 768w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/12\/A77B8D78-B43A-431C-BECA-8375243D7BD8-1536x1152.jpeg 1536w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/12\/A77B8D78-B43A-431C-BECA-8375243D7BD8-2048x1536.jpeg 2048w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/12\/A77B8D78-B43A-431C-BECA-8375243D7BD8-16x12.jpeg 16w\" sizes=\"auto, (max-width: 300px) 100vw, 300px\" \/><\/p>\n<p>We are thrilled to announce that Susanne Dyck has successfully defended her PhD thesis at the Ruhr University Bochum.<\/p>\n<div class=\"ult-item-wrap\" data-animation=\"animated no-animation\"><div class=\"claps-toggle-text\" style=\"--collapse-hf:1px;--collapse-ht:1px;\" data-title=\"Read More \u25bc\" data-swaptitle=\"Read Less \u25b2\" data-icon=\"&#9660;\" data-swapicon=\"&#9650;\" data-ht=\"1px\" data-hf=\"1px\" data-above=\"0\"><div class=\"claps-text-inner claps-text-toggle-collapsed\"><\/p>\n<p>Her thesis, titled &#8220;Implicit and Explicit Motor Sequence Learning: Neural Oscillations and Behavioral Dynamics,&#8221; investigates the complex neural mechanisms and behavioral patterns underlying how the human brain learns and adapts to movement sequences. We congratulate her on this outstanding achievement! 
\ud83e\udde0\ud83c\udf93\ud83d\udcc9\ud83c\udf89<\/p>\n<p><\/div><\/div><\/div>\n\n\t\t<\/div>\n\t<\/div>\n<\/div><div class=\"ult-item-wrap\" data-animation=\"animated no-animation\">\n\t<div class=\"wpb_text_column wpb_content_element \" >\n\t\t<div class=\"wpb_wrapper\">\n\t\t\t<p><b>10.11.2025<\/b><\/p>\n<p><b>PhD Success: Dr. Aline Xavier Fid\u00eancio \u2013 Machine Learning in BCIs<\/b><\/p>\n<p><img loading=\"lazy\" decoding=\"async\" class=\"alignnone wp-image-58497 size-medium\" src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/12\/WhatsApp-Image-2025-12-04-at-9.06.19-AM-300x200.jpeg\" alt=\"\" width=\"300\" height=\"200\" srcset=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/12\/WhatsApp-Image-2025-12-04-at-9.06.19-AM-300x200.jpeg 300w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/12\/WhatsApp-Image-2025-12-04-at-9.06.19-AM-1024x683.jpeg 1024w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/12\/WhatsApp-Image-2025-12-04-at-9.06.19-AM-768x512.jpeg 768w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/12\/WhatsApp-Image-2025-12-04-at-9.06.19-AM-1536x1024.jpeg 1536w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/12\/WhatsApp-Image-2025-12-04-at-9.06.19-AM-18x12.jpeg 18w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/12\/WhatsApp-Image-2025-12-04-at-9.06.19-AM.jpeg 2048w\" sizes=\"auto, (max-width: 300px) 100vw, 300px\" \/><\/p>\n<p>We are proud to announce that Aline Xavier Fid\u00eancio has successfully defended her PhD thesis, titled &#8220;Machine learning-based strategies for detection and application of neurobiological error-related reinforcement signals in brain-computer interfaces.&#8221;<\/p>\n<div class=\"ult-item-wrap\" data-animation=\"animated no-animation\"><div class=\"claps-toggle-text\" style=\"--collapse-hf:1px;--collapse-ht:1px;\" data-title=\"Read More \u25bc\" data-swaptitle=\"Read Less \u25b2\" data-icon=\"&#9660;\" data-swapicon=\"&#9650;\" data-ht=\"1px\" data-hf=\"1px\" data-above=\"0\"><div 
class=\"claps-text-inner claps-text-toggle-collapsed\"><\/p>\n<p>This interdisciplinary work represents a strong collaboration between the Iossifidis Lab and KlaesLab. Dr. Fid\u00eancio&#8217;s research advances the field of neurotechnology by exploring how human error signals can be used to improve machine learning strategies in Brain-Computer Interfaces. \ud83c\udf93\ud83e\udde0\ud83d\udcbb\ud83c\udf89<\/p>\n<p><\/div><\/div><\/div>\n\n\t\t<\/div>\n\t<\/div>\n<\/div><div class=\"ult-item-wrap\" data-animation=\"animated no-animation\">\n\t<div class=\"wpb_text_column wpb_content_element \" >\n\t\t<div class=\"wpb_wrapper\">\n\t\t\t<p><strong>22.10.2025<\/strong><\/p>\n<p><strong>Seminar Organization: Dagstuhl Seminar 26262 \u2013 &#8220;Virtual Bodies for Real Humans&#8221;<\/strong><\/p>\n<p><img loading=\"lazy\" decoding=\"async\" class=\"alignnone wp-image-58488 size-medium\" src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/10\/Blue-Modern-Dance-Channel-Youtube-Banner7-300x169.png\" alt=\"\" width=\"300\" height=\"169\" srcset=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/10\/Blue-Modern-Dance-Channel-Youtube-Banner7-300x169.png 300w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/10\/Blue-Modern-Dance-Channel-Youtube-Banner7-1024x576.png 1024w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/10\/Blue-Modern-Dance-Channel-Youtube-Banner7-768x432.png 768w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/10\/Blue-Modern-Dance-Channel-Youtube-Banner7-1536x864.png 1536w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/10\/Blue-Modern-Dance-Channel-Youtube-Banner7-2048x1152.png 2048w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/10\/Blue-Modern-Dance-Channel-Youtube-Banner7-18x10.png 18w\" sizes=\"auto, (max-width: 300px) 100vw, 300px\" \/><\/p>\n<p>Dr. Artur Pilacinski and Prof. 
Christian Klaes from our lab are leading the organization of the prestigious Dagstuhl Seminar 26262: &#8220;Virtual Bodies for Real Humans \u2013 Avatars, Sensations, Emotions&#8221; in Germany.<\/p>\n<div class=\"ult-item-wrap\" data-animation=\"animated no-animation\"><div class=\"claps-toggle-text\" style=\"--collapse-hf:1px;--collapse-ht:1px;\" data-title=\"Read More \u25bc\" data-swaptitle=\"Read Less \u25b2\" data-icon=\"&#9660;\" data-swapicon=\"&#9650;\" data-ht=\"1px\" data-hf=\"1px\" data-above=\"0\"><div class=\"claps-text-inner claps-text-toggle-collapsed\"><\/p>\n<p>The week-long seminar (June 21-26, 2026) will bring together international experts in neuroscience and human-computer interaction to explore how humans embody and interact with virtual avatars in extended reality. Topics will include agency, sensory feedback, phantom senses, and avatar biomechanics. \ud83e\udde0\ud83e\udd16\ud83c\udde9\ud83c\uddea<\/p>\n<p><\/div><\/div><\/div>\n\n\t\t<\/div>\n\t<\/div>\n<\/div><div class=\"ult-item-wrap\" data-animation=\"animated no-animation\">\n\t<div class=\"wpb_text_column wpb_content_element \" >\n\t\t<div class=\"wpb_wrapper\">\n\t\t\t<p><strong>09.09.2025<\/strong><\/p>\n<p><strong>Conference Participation: Brain Day 2025<\/strong><\/p>\n<p><a href=\"http:\/\/www.brain-news.rub.de\"><img loading=\"lazy\" decoding=\"async\" class=\"alignnone wp-image-58478 size-medium\" src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/09\/Screenshot_20250909_170543-273x300.png\" alt=\"BrainDay2025\" width=\"273\" height=\"300\" srcset=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/09\/Screenshot_20250909_170543-273x300.png 273w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/09\/Screenshot_20250909_170543-11x12.png 11w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/09\/Screenshot_20250909_170543.png 457w\" sizes=\"auto, (max-width: 273px) 100vw, 273px\" \/><\/a><\/p>\n<p>The IGSN Brain Day returns on the 17th of September, an interactive 
conference and event intended to connect the research and researchers with the broader public.<\/p>\n<div class=\"ult-item-wrap\" data-animation=\"animated no-animation\"><div class=\"claps-toggle-text\" style=\"--collapse-hf:1px;--collapse-ht:1px;\" data-title=\"Read More \u25bc\" data-swaptitle=\"Read Less \u25b2\" data-icon=\"&#9660;\" data-swapicon=\"&#9650;\" data-ht=\"1px\" data-hf=\"1px\" data-above=\"0\"><div class=\"claps-text-inner claps-text-toggle-collapsed\"><\/p>\n<p>Expect presentations, talks and participate in real neuroscientific experiments such as ours! Participation is free, come over, meet the team and participate in scientific research!<\/p>\n<p><\/div><\/div><\/div>\n\n\t\t<\/div>\n\t<\/div>\n<\/div><div class=\"ult-item-wrap\" data-animation=\"animated no-animation\">\n\t<div class=\"wpb_text_column wpb_content_element \" >\n\t\t<div class=\"wpb_wrapper\">\n\t\t\t<p><strong>26.06.2025<\/strong><\/p>\n<p><strong>Conference Participation: Shonan Meeting 216<\/strong><\/p>\n<p><img loading=\"lazy\" decoding=\"async\" class=\"alignnone wp-image-58459 size-medium\" src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/06\/bafkreiheqprrhzfihsjsbsicswf6cmstbx5gktb6bekwy5biib3fr2vnby-300x225.jpg\" alt=\"\" width=\"300\" height=\"225\" srcset=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/06\/bafkreiheqprrhzfihsjsbsicswf6cmstbx5gktb6bekwy5biib3fr2vnby-300x225.jpg 300w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/06\/bafkreiheqprrhzfihsjsbsicswf6cmstbx5gktb6bekwy5biib3fr2vnby-1024x767.jpg 1024w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/06\/bafkreiheqprrhzfihsjsbsicswf6cmstbx5gktb6bekwy5biib3fr2vnby-768x576.jpg 768w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/06\/bafkreiheqprrhzfihsjsbsicswf6cmstbx5gktb6bekwy5biib3fr2vnby-1536x1151.jpg 1536w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/06\/bafkreiheqprrhzfihsjsbsicswf6cmstbx5gktb6bekwy5biib3fr2vnby-16x12.jpg 16w, 
https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/06\/bafkreiheqprrhzfihsjsbsicswf6cmstbx5gktb6bekwy5biib3fr2vnby.jpg 1600w\" sizes=\"auto, (max-width: 300px) 100vw, 300px\" \/><\/p>\n<p>Dr. Artur Pilacinski from our lab participated in <em>Shonan Meeting 216: &#8220;HCI \u00d7 Neuroscience&#8221;<\/em> in Japan.<\/p>\n<div class=\"ult-item-wrap\" data-animation=\"animated no-animation\"><div class=\"claps-toggle-text\" style=\"--collapse-hf:1px;--collapse-ht:1px;\" data-title=\"Read More \u25bc\" data-swaptitle=\"Read Less \u25b2\" data-icon=\"&#9660;\" data-swapicon=\"&#9650;\" data-ht=\"1px\" data-hf=\"1px\" data-above=\"0\"><div class=\"claps-text-inner claps-text-toggle-collapsed\"><\/p>\n<p>The meeting brought together researchers exploring the interface between human-computer interaction and neuroscience for a week of interdisciplinary exchange and collaboration. \ud83e\udde0\ud83d\udcbb\ud83c\uddef\ud83c\uddf5<\/p>\n<p><\/div><\/div><\/div>\n\n\t\t<\/div>\n\t<\/div>\n<\/div><div class=\"ult-item-wrap\" data-animation=\"animated no-animation\">\n\t<div class=\"wpb_text_column wpb_content_element \" >\n\t\t<div class=\"wpb_wrapper\">\n\t\t\t<p data-pm-slice=\"1 1 &#091;&#093;\">20.05.2025<\/p>\n<p dir=\"ltr\"><strong>Open Call for Masters and PhD Candidates<\/strong><\/p>\n<p dir=\"ltr\">KlaesLab at Ruhr University Bochum seeks motivated Masters and PhD candidates passionate about virtual reality, brain-computer interfaces, and AI in medicine. Join our cutting-edge neurotechnology projects! 
Contact us at <a href=\"mailto:neurotechnologie@rub.de\">neurotechnologie@rub.de<\/a><\/p>\n<p><img loading=\"lazy\" decoding=\"async\" class=\"alignnone wp-image-57547 \" src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2022\/12\/FinalLogo_120.png\" alt=\"\" width=\"268\" height=\"163\" \/><\/p>\n\n\t\t<\/div>\n\t<\/div>\n<\/div><div class=\"ult-item-wrap\" data-animation=\"animated no-animation\">\n\t<div class=\"wpb_text_column wpb_content_element \" >\n\t\t<div class=\"wpb_wrapper\">\n\t\t\t<p>17.03.2025<\/p>\n<p data-start=\"18\" data-end=\"271\"><strong>Prof. Dr. Christian Klaes has been awarded the IOP Outstanding Reviewer award.<\/strong><\/p>\n<p data-start=\"18\" data-end=\"271\">The <em data-start=\"159\" data-end=\"185\">IOP Outstanding Reviewer<\/em> award recognizes exceptional dedication to peer review, highlighting a significant contribution to maintaining the quality and integrity of scientific literature.<\/p>\n<p><img loading=\"lazy\" decoding=\"async\" class=\"alignnone wp-image-58399 size-medium\" src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/03\/Screenshot-2025-03-18-094355-300x271.png\" alt=\"\" width=\"300\" height=\"271\" srcset=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/03\/Screenshot-2025-03-18-094355-300x271.png 300w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/03\/Screenshot-2025-03-18-094355-13x12.png 13w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/03\/Screenshot-2025-03-18-094355.png 395w\" sizes=\"auto, (max-width: 300px) 100vw, 300px\" \/><\/p>\n\n\t\t<\/div>\n\t<\/div>\n<\/div><div class=\"ult-item-wrap\" data-animation=\"animated no-animation\">\n\t<div class=\"wpb_text_column wpb_content_element \" >\n\t\t<div class=\"wpb_wrapper\">\n\t\t\t<p><strong>11.02.2025<\/strong><\/p>\n<p><strong>KlaesLab Paper Trails \u2013 Our Podcast-Style Series<\/strong><\/p>\n<p><a href=\"https:\/\/www.youtube.com\/playlist?list=PL_dZzRKMjeiaMePHDgoTsVFwDkPYB4OhE\"><img loading=\"lazy\" decoding=\"async\" 
class=\"alignnone wp-image-58309 size-medium\" src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/02\/VR-Summit-2024-565-x-565-px2-300x300.png\" alt=\"\" width=\"300\" height=\"300\" srcset=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/02\/VR-Summit-2024-565-x-565-px2-300x300.png 300w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/02\/VR-Summit-2024-565-x-565-px2-150x150.png 150w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/02\/VR-Summit-2024-565-x-565-px2-12x12.png 12w, https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/02\/VR-Summit-2024-565-x-565-px2.png 565w\" sizes=\"auto, (max-width: 300px) 100vw, 300px\" \/><\/a><\/p>\n<p>Check out <strong>KlaesLab Paper Trails<\/strong>, our podcast-style series where we dive into the latest research from our lab! \ud83c\udf99\ufe0f<\/p>\n<div class=\"ult-item-wrap\" data-animation=\"animated no-animation\"><div class=\"claps-toggle-text\" style=\"--collapse-hf:1px;--collapse-ht:1px;\" data-title=\"Read More \u25bc\" data-swaptitle=\"Read Less \u25b2\" data-icon=\"&#9660;\" data-swapicon=\"&#9650;\" data-ht=\"1px\" data-hf=\"1px\" data-above=\"0\"><div class=\"claps-text-inner claps-text-toggle-collapsed\"><\/p>\n<p>Explore groundbreaking neuroscience, brain-computer interfaces, and machine learning innovations in an accessible, engaging format.<\/p>\n<p>\ud83c\udfa7 <strong>Listen now:<\/strong> <a href=\"https:\/\/www.youtube.com\/playlist?list=PL_dZzRKMjeiaMePHDgoTsVFwDkPYB4OhE\">YouTube Playlist<\/a><\/p>\n<p><\/div><\/div><\/div>\n\n\t\t<\/div>\n\t<\/div>\n<\/div><div class=\"ult-item-wrap\" data-animation=\"animated no-animation\">\n\t<div class=\"wpb_text_column wpb_content_element \" >\n\t\t<div class=\"wpb_wrapper\">\n\t\t\t<p>17.12.2024<\/p>\n<p>Following up on the 2024 VRS Hackathon&#8217;s challenge, we are hosting a Special Research Topic at Frontiers in Virtual Reality, focused on interdisciplinary communication in virtual spaces. 
Contribute now!<\/p>\n<p><a href=\"https:\/\/www.frontiersin.org\/research-topics\/67789\/enhancing-interdisciplinary-and-cross-cultural-communication-in-virtual-collaborative-environments\">https:\/\/www.frontiersin.org\/research-topics\/67789\/enhancing-interdisciplinary-and-cross-cultural-communication-in-virtual-collaborative-environments<\/a><\/p>\n<p>&nbsp;<\/p>\n<p><a href=\"https:\/\/www.frontiersin.org\/research-topics\/67789\/enhancing-interdisciplinary-and-cross-cultural-communication-in-virtual-collaborative-environments\"><img loading=\"lazy\" decoding=\"async\" class=\"alignnone size-medium wp-image-58289\" src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2024\/12\/frontiers-300x107.png\" alt=\"\" width=\"300\" height=\"107\" srcset=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2024\/12\/frontiers-300x107.png 300w, https:\/\/klaeslab.de\/wp-content\/uploads\/2024\/12\/frontiers-18x6.png 18w, https:\/\/klaeslab.de\/wp-content\/uploads\/2024\/12\/frontiers.png 736w\" sizes=\"auto, (max-width: 300px) 100vw, 300px\" \/><\/a><\/p>\n\n\t\t<\/div>\n\t<\/div>\n<\/div><div class=\"ult-item-wrap\" data-animation=\"animated no-animation\">\n\t<div class=\"wpb_text_column wpb_content_element \" >\n\t\t<div class=\"wpb_wrapper\">\n\t\t\t<p>27.11.24<\/p>\n<p>&nbsp;<\/p>\n<p>Prof. 
Klaes participated in an interview at Oceanbuilders, the entire interview is up on both <a href=\"https:\/\/www.instagram.com\/reel\/DCzxF3ZNpkN\/\">Instagram<\/a> and <a href=\"https:\/\/youtu.be\/zoiLmwYrHH0?si=faKBpvIaFejuWbwk\">Youtube<\/a>, so take your pick if you are interested!<\/p>\n<p>&nbsp;<\/p>\n<p><a href=\"https:\/\/www.instagram.com\/reel\/DCzxF3ZNpkN\/\"><img loading=\"lazy\" decoding=\"async\" class=\"alignnone wp-image-58283\" src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2024\/11\/Screenshot-2024-11-27-172841-168x300.png\" alt=\"oceanbuilders \" width=\"228\" height=\"408\" srcset=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2024\/11\/Screenshot-2024-11-27-172841-168x300.png 168w, https:\/\/klaeslab.de\/wp-content\/uploads\/2024\/11\/Screenshot-2024-11-27-172841-7x12.png 7w, https:\/\/klaeslab.de\/wp-content\/uploads\/2024\/11\/Screenshot-2024-11-27-172841.png 333w\" sizes=\"auto, (max-width: 228px) 100vw, 228px\" \/><\/a><\/p>\n\n\t\t<\/div>\n\t<\/div>\n<\/div><div class=\"ult-item-wrap\" data-animation=\"animated no-animation\">\n\t<div class=\"wpb_text_column wpb_content_element \" >\n\t\t<div class=\"wpb_wrapper\">\n\t\t\t<p>18.11.24<\/p>\n<p>&nbsp;<\/p>\n<p>Interested in the terahertz.NRW project?<a href=\"https:\/\/www.youtube.com\/watch?v=OtkCS77GjAI&amp;ab_channel=FraunhoferFHR\"> take a look at our newest video,<\/a> posted by our partners at the Frauenhofer Institute FHR!<\/p>\n<p><img loading=\"lazy\" decoding=\"async\" class=\"alignnone size-medium wp-image-57873\" src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2023\/06\/terahertz_nrw_logo_768x512-300x200.png\" alt=\"terahertz.nrw\" width=\"300\" height=\"200\" srcset=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2023\/06\/terahertz_nrw_logo_768x512-300x200.png 300w, https:\/\/klaeslab.de\/wp-content\/uploads\/2023\/06\/terahertz_nrw_logo_768x512.png 768w\" sizes=\"auto, (max-width: 300px) 100vw, 300px\" \/><\/p>\n\n\t\t<\/div>\n\t<\/div>\n<\/div><div class=\"ult-item-wrap\" 
data-animation=\"animated no-animation\">\n\t<div class=\"wpb_text_column wpb_content_element \" >\n\t\t<div class=\"wpb_wrapper\">\n\t\t\t<p>06.11.2024<\/p>\n<p>&nbsp;<\/p>\n<p>Your body is probably more flexible than you think.<\/p>\n<p><a href=\"https:\/\/www.nature.com\/articles\/s44271-024-00153-x\">A new paper from our lab at Communications: Psychology!<\/a><\/p>\n<p><img loading=\"lazy\" decoding=\"async\" class=\"alignnone size-medium wp-image-58280\" src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2024\/11\/stellvertreter_beweglichkeit_handgelenk-TK-300x200.jpg\" alt=\"\" width=\"300\" height=\"200\" srcset=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2024\/11\/stellvertreter_beweglichkeit_handgelenk-TK-300x200.jpg 300w, https:\/\/klaeslab.de\/wp-content\/uploads\/2024\/11\/stellvertreter_beweglichkeit_handgelenk-TK-768x512.jpg 768w, https:\/\/klaeslab.de\/wp-content\/uploads\/2024\/11\/stellvertreter_beweglichkeit_handgelenk-TK-18x12.jpg 18w, https:\/\/klaeslab.de\/wp-content\/uploads\/2024\/11\/stellvertreter_beweglichkeit_handgelenk-TK.jpg 804w\" sizes=\"auto, (max-width: 300px) 100vw, 300px\" \/><\/p>\n\n\t\t<\/div>\n\t<\/div>\n<\/div><div class=\"ult-item-wrap\" data-animation=\"animated no-animation\">\n\t<div class=\"wpb_text_column wpb_content_element \" >\n\t\t<div class=\"wpb_wrapper\">\n\t\t\t<p><strong>10.05.2024<\/strong><\/p>\n<p><strong>New Paper: Beta Oscillations as a Proxy for Implicit Learning<\/strong><\/p>\n<p>Check out <a href=\"https:\/\/www.nature.com\/articles\/s41598-024-57285-7\">the new paper<\/a> from our lab by Susanne Dyck, who showed beta oscillations being a good proxy for implicit learning processes.<\/p>\n<div class=\"ult-item-wrap\" data-animation=\"animated no-animation\"><div class=\"claps-toggle-text\" style=\"--collapse-hf:1px;--collapse-ht:1px;\" data-title=\"Show Figure \u25bc\" data-swaptitle=\"Hide Figure \u25b2\" data-icon=\"&#9660;\" data-swapicon=\"&#9650;\" data-ht=\"1px\" data-hf=\"1px\" 
data-above=\"0\"><div class=\"claps-text-inner claps-text-toggle-collapsed\"><\/p>\n<p><img loading=\"lazy\" decoding=\"async\" class=\"alignnone wp-image-58227 size-large\" src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2024\/04\/modSRTT_EEG_figure_time-frequency_plot_png-1024x544.png\" alt=\"\" width=\"1024\" height=\"544\" srcset=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2024\/04\/modSRTT_EEG_figure_time-frequency_plot_png-1024x544.png 1024w, https:\/\/klaeslab.de\/wp-content\/uploads\/2024\/04\/modSRTT_EEG_figure_time-frequency_plot_png-300x159.png 300w, https:\/\/klaeslab.de\/wp-content\/uploads\/2024\/04\/modSRTT_EEG_figure_time-frequency_plot_png-768x408.png 768w, https:\/\/klaeslab.de\/wp-content\/uploads\/2024\/04\/modSRTT_EEG_figure_time-frequency_plot_png-1536x816.png 1536w, https:\/\/klaeslab.de\/wp-content\/uploads\/2024\/04\/modSRTT_EEG_figure_time-frequency_plot_png-2048x1088.png 2048w, https:\/\/klaeslab.de\/wp-content\/uploads\/2024\/04\/modSRTT_EEG_figure_time-frequency_plot_png-18x10.png 18w\" sizes=\"auto, (max-width: 1024px) 100vw, 1024px\" \/><\/p>\n<p><\/div><\/div><\/div>\n\n\t\t<\/div>\n\t<\/div>\n<\/div><div class=\"ult-item-wrap\" data-animation=\"animated no-animation\">\n\t<div class=\"wpb_text_column wpb_content_element \" >\n\t\t<div class=\"wpb_wrapper\">\n\t\t\t<p style=\"text-align: center;\"><strong>17.04.2024<\/strong><\/p>\n<p style=\"text-align: center;\"><strong>IOP Trusted Reviewer Status Awarded to Prof. Dr. 
Christian Klaes<\/strong><\/p>\n<div class=\"ult-item-wrap\" data-animation=\"animated no-animation\"><div id=\"attachment_58233\" style=\"width: 310px\" class=\"wp-caption alignnone\"><img loading=\"lazy\" decoding=\"async\" aria-describedby=\"caption-attachment-58233\" class=\"size-medium wp-image-58233\" src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2024\/05\/TOP-300x300.png\" alt=\"IOP\" width=\"300\" height=\"300\" srcset=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2024\/05\/TOP-300x300.png 300w, https:\/\/klaeslab.de\/wp-content\/uploads\/2024\/05\/TOP-150x150.png 150w, https:\/\/klaeslab.de\/wp-content\/uploads\/2024\/05\/TOP-12x12.png 12w, https:\/\/klaeslab.de\/wp-content\/uploads\/2024\/05\/TOP.png 400w\" sizes=\"auto, (max-width: 300px) 100vw, 300px\" \/><p id=\"caption-attachment-58233\" class=\"wp-caption-text\">Awarded by IOP Publishing on April 17th, 2024<\/p><\/div><\/div>\n<p>Prof. Dr. Christian Klaes has been awarded the IOP Trusted Reviewer status.<\/p>\n<div class=\"ult-item-wrap\" data-animation=\"animated no-animation\"><div class=\"claps-toggle-text\" style=\"--collapse-hf:1px;--collapse-ht:1px;\" data-title=\"Read More \u25bc\" data-swaptitle=\"Read Less \u25b2\" data-icon=\"&#9660;\" data-swapicon=\"&#9650;\" data-ht=\"1px\" data-hf=\"1px\" data-above=\"0\"><div class=\"claps-text-inner claps-text-toggle-collapsed\"><\/p>\n<p>The &#8216;IOP Trusted Reviewer&#8217; status indicates a high level of peer review competence and the ability to constructively critique scientific literature to an exceptional standard.<\/p>\n<p><\/div><\/div><\/div>\n\n\t\t<\/div>\n\t<\/div>\n<\/div><\/div><\/div>\t\t\t<script type=\"text\/javascript\">\n\t\t\t\tjQuery(document).ready(function ($) {\n\t\t\t\t\tif( typeof jQuery('.ult-carousel-268218853969d5ee9367ad1').slick == \"function\"){\n\t\t\t\t\t\t$('.ult-carousel-268218853969d5ee9367ad1').slick({dots: true,autoplay: true,autoplaySpeed: \"5000\",speed: \"300\",infinite: true,arrows: true,nextArrow: 
'<button type=\"button\" role=\"button\" aria-label=\"Next\" style=\"color:#333333; font-size:20px;\" class=\"slick-next default\"><i class=\"ultsl-arrow-right4\"><\/i><\/button>',prevArrow: '<button type=\"button\" role=\"button\" aria-label=\"Previous\" style=\"color:#333333; font-size:20px;\" class=\"slick-prev default\"><i class=\"ultsl-arrow-left4\"><\/i><\/button>',slidesToScroll:5,slidesToShow:5,swipe: true,draggable: true,touchMove: true,pauseOnHover: true,pauseOnFocus: false,responsive: [\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t  breakpoint: 1026,\n\t\t\t\t\t\t\t  settings: {\n\t\t\t\t\t\t\t\tslidesToShow: 5,\n\t\t\t\t\t\t\t\tslidesToScroll: 5,  \n\t\t\t\t\t\t\t  }\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t  breakpoint: 1025,\n\t\t\t\t\t\t\t  settings: {\n\t\t\t\t\t\t\t\tslidesToShow: 3,\n\t\t\t\t\t\t\t\tslidesToScroll: 3\n\t\t\t\t\t\t\t  }\n\t\t\t\t\t\t\t},\n\t\t\t\t\t\t\t{\n\t\t\t\t\t\t\t  breakpoint: 760,\n\t\t\t\t\t\t\t  settings: {\n\t\t\t\t\t\t\t\tslidesToShow: 2,\n\t\t\t\t\t\t\t\tslidesToScroll: 2\n\t\t\t\t\t\t\t  }\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t],pauseOnDotsHover: true,customPaging: function(slider, i) {\n                   return '<i type=\"button\" style= \"color:#333333;\" class=\"ultsl-record\" data-role=\"none\"><\/i>';\n                },});\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t<\/script>\n\t\t\t<\/div><\/div><\/div><\/div><div class=\"vc_row-full-width vc_clearfix\"><\/div><div class=\"vc_row wpb_row vc_row-fluid\"><div class=\"wpb_column vc_column_container vc_col-sm-12\"><div class=\"vc_column-inner\"><div class=\"wpb_wrapper\">\n\t<div class=\"wpb_raw_code wpb_raw_html wpb_content_element\" >\n\t\t<div class=\"wpb_wrapper\">\n\t\t\t<!DOCTYPE html>\r\n<html lang=\"en\">\r\n<head>\r\n    <meta charset=\"UTF-8\">\r\n    <meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">\r\n    <title>KlaesLab - Key Research Areas<\/title>\r\n    <link rel=\"preconnect\" href=\"https:\/\/klaeslab.de\">\r\n    <link rel=\"dns-prefetch\" 
href=\"https:\/\/klaeslab.de\">\r\n    \r\n    <script src=\"https:\/\/cdn.tailwindcss.com\"><\/script>\r\n    <link href=\"https:\/\/fonts.googleapis.com\/css2?family=Varela+Round&display=swap\" rel=\"stylesheet\">\r\n    <style>\r\n        body {\r\n            font-family: 'Varela Round', sans-serif;\r\n        }\r\n        .fade-in-img {\r\n            animation: fadeIn 0.5s ease-in-out;\r\n        }\r\n        @keyframes fadeIn {\r\n            from { opacity: 0; }\r\n            to { opacity: 1; }\r\n        }\r\n        .hide-scrollbar::-webkit-scrollbar {\r\n            display: none;\r\n        }\r\n        .hide-scrollbar {\r\n            -ms-overflow-style: none;\r\n            scrollbar-width: none;\r\n        }\r\n    <\/style>\r\n<\/head>\r\n<body class=\"bg-slate-50 text-slate-800\">\r\n\r\n    <section class=\"py-16 px-4 sm:px-6 lg:px-8 max-w-7xl mx-auto\">\r\n        \r\n        <div class=\"text-center mb-16 max-w-4xl mx-auto\">\r\n            <h2 class=\"text-base text-teal-600 font-semibold tracking-wide uppercase mb-2\">Our Focus<\/h2>\r\n            <h1 class=\"text-4xl font-bold text-slate-900 mb-6\">Key Research Areas<\/h1>\r\n            <p class=\"text-lg text-slate-600 leading-relaxed\">\r\n                We currently focus on the use of virtual reality in simulating human interactions with environment, brain-machine interfaces in control of devices, collaborative robotics, novel methods for assessment of brain activity and designing advanced neural analysis methods for controlling human hand exoskeleton.\r\n            <\/p>\r\n        <\/div>\r\n\r\n        <div class=\"flex flex-wrap justify-center gap-2 mb-16 hide-scrollbar overflow-x-auto px-4 pb-4\">\r\n            <button onclick=\"filterSelection('all')\" class=\"filter-btn active bg-slate-800 text-white hover:bg-slate-700 px-6 py-2 rounded-full text-sm font-medium transition-colors shadow-sm\">All<\/button>\r\n            <button onclick=\"filterSelection('bci')\" 
class=\"filter-btn bg-white text-slate-600 hover:bg-teal-50 hover:text-teal-600 px-6 py-2 rounded-full text-sm font-medium transition-colors shadow-sm border border-slate-200\">BCI & Neuro<\/button>\r\n            <button onclick=\"filterSelection('robotics')\" class=\"filter-btn bg-white text-slate-600 hover:bg-rose-50 hover:text-rose-600 px-6 py-2 rounded-full text-sm font-medium transition-colors shadow-sm border border-slate-200\">Robotics<\/button>\r\n            <button onclick=\"filterSelection('vr')\" class=\"filter-btn bg-white text-slate-600 hover:bg-purple-50 hover:text-purple-600 px-6 py-2 rounded-full text-sm font-medium transition-colors shadow-sm border border-slate-200\">VR & Sensory<\/button>\r\n            <button onclick=\"filterSelection('ai')\" class=\"filter-btn bg-white text-slate-600 hover:bg-blue-50 hover:text-blue-600 px-6 py-2 rounded-full text-sm font-medium transition-colors shadow-sm border border-slate-200\">AI & Embedded<\/button>\r\n            <button onclick=\"filterSelection('security')\" class=\"filter-btn bg-white text-slate-600 hover:bg-indigo-50 hover:text-indigo-600 px-6 py-2 rounded-full text-sm font-medium transition-colors shadow-sm border border-slate-200\">Security<\/button>\r\n        <\/div>\r\n\r\n        <div class=\"flex flex-col gap-10 max-w-5xl mx-auto\">\r\n\r\n            <!-- \u2705 MindMove \u2014 BMFTR sponsor logo bottom-right of text panel -->\r\n            <article class=\"filter-item bci robotics bg-white rounded-2xl shadow-sm border border-slate-100 overflow-hidden hover:shadow-xl transition-all duration-300 flex flex-col md:flex-row min-h-[320px] group relative cursor-pointer\">\r\n                <div class=\"block w-full h-64 md:w-80 md:h-auto shrink-0 bg-white border-b md:border-b-0 md:border-r border-slate-100 relative overflow-hidden\">\r\n                    <img src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2026\/03\/Logo-MindMove.png\" \r\n                         alt=\"MindMove System\" 
\r\n                         fetchpriority=\"high\"\r\n                         decoding=\"async\"\r\n                         class=\"w-full h-full object-contain p-6 transition-transform duration-500 group-hover:scale-105 fade-in-img\"\r\n                         onerror=\"this.src='https:\/\/placehold.co\/600x400\/e2e8f0\/475569?text=Image+Unavailable'\">\r\n                <\/div>\r\n                \r\n                <div class=\"p-6 md:p-8 flex-1 flex flex-col justify-start\">\r\n                    <h3 class=\"text-2xl font-bold text-slate-900 mb-4 group-hover:text-teal-600 transition-colors\">\r\n                        <a href=\"https:\/\/klaeslab.de\/mindmove\/\" class=\"focus:outline-none\">\r\n                            <span class=\"absolute inset-0\" aria-hidden=\"true\"><\/span>\r\n                            MindMove: Automated Neurorehabilitation\r\n                        <\/a>\r\n                    <\/h3>\r\n                    <p class=\"text-slate-600 text-base leading-relaxed mb-6\">\r\n                        Together with medica Medizintechnik GmbH, Ambulanticum GmbH, SNAP GmbH, and the RUB Chair for Production Systems, we are developing a novel, automated rehabilitation system for grasping therapy that combines modern neurotechnology with robotics. We utilize Brain-Computer Interfaces (BCIs) to detect movement intentions directly from brain activity and synchronize them with robot-assisted movement execution. 
Supported by machine learning and automated assessments, the project aims to create personalized, high-frequency training scenarios to improve independence for patients with neurological conditions.\r\n                    <\/p>\r\n                    <!-- Bottom row: tags left, BMFTR sponsor logo right -->\r\n                    <div class=\"flex items-end justify-between mt-auto relative z-10 gap-4\">\r\n                        <div class=\"flex flex-wrap gap-2\">\r\n                            <span class=\"text-xs font-medium text-teal-600 bg-teal-50 px-3 py-1.5 rounded-full border border-teal-100\">Neurorehab<\/span>\r\n                            <span class=\"text-xs font-medium text-teal-600 bg-teal-50 px-3 py-1.5 rounded-full border border-teal-100\">BCI<\/span>\r\n                        <\/div>\r\n                        <img decoding=\"async\" src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2026\/03\/BMFTR_de_Web_RGB_gef_durch.png\"\r\n                             alt=\"Funded by BMFTR\"\r\n                             class=\"h-32 w-auto object-contain shrink-0\"\r\n                             onerror=\"this.style.display='none'\">\r\n                    <\/div>\r\n                <\/div>\r\n            <\/article>\r\n\r\n            <!-- \u2705 BrainGuard \u2014 new logo + EFRE sponsor logo bottom-right of text panel -->\r\n            <article class=\"filter-item security bci bg-white rounded-2xl shadow-sm border border-slate-100 overflow-hidden hover:shadow-xl transition-all duration-300 flex flex-col md:flex-row min-h-[320px] group relative cursor-pointer\">\r\n                <div class=\"block w-full h-64 md:w-80 md:h-auto shrink-0 bg-white border-b md:border-b-0 md:border-r border-slate-100 relative overflow-hidden\">\r\n                    <!-- \u2705 Updated BrainGuard logo src -->\r\n                    <img src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2026\/01\/8p4EkhboOx0Co74NMrYT-0-3hqei.jpg\" \r\n                         
alt=\"BrainGuard Framework\" \r\n                         decoding=\"async\"\r\n                         class=\"w-full h-full object-contain p-6 transition-transform duration-500 group-hover:scale-105 fade-in-img\"\r\n                         onerror=\"this.src='https:\/\/placehold.co\/600x400\/e2e8f0\/475569?text=Image+Unavailable'\">\r\n                <\/div>\r\n                <div class=\"p-6 md:p-8 flex-1 flex flex-col justify-start\">\r\n                    <h3 class=\"text-2xl font-bold text-slate-900 mb-4 group-hover:text-indigo-600 transition-colors\">\r\n                        <a href=\"https:\/\/klaeslab.de\/brainguard\" class=\"focus:outline-none\">\r\n                            <span class=\"absolute inset-0\" aria-hidden=\"true\"><\/span>\r\n                            BrainGuard: Neuro-Cybersecurity\r\n                        <\/a>\r\n                    <\/h3>\r\n                    <p class=\"text-slate-600 text-base leading-relaxed mb-6\">\r\n                        Together with the RUB Chair for Secure Mobile Networking, Physec GmbH, and SNAP Discovery AG, we are developing a systematic neuro-cybersecurity framework for Brain-Computer Interfaces (BCI) and neuromodulatory implants. 
We aim to create novel security concepts\u2014ranging from neural identification to hardware protection\u2014that safeguard sensitive neuronal data, prevent manipulation, and ensure user autonomy against emerging cyber threats.\r\n                    <\/p>\r\n                    <!-- Bottom row: tags left, EFRE sponsor logo right -->\r\n                    <div class=\"flex items-end justify-between mt-auto relative z-10 gap-4\">\r\n                        <div class=\"flex flex-wrap gap-2\">\r\n                            <span class=\"text-xs font-medium text-indigo-600 bg-indigo-50 px-3 py-1.5 rounded-full border border-indigo-100\">Cybersecurity<\/span>\r\n                            <span class=\"text-xs font-medium text-indigo-600 bg-indigo-50 px-3 py-1.5 rounded-full border border-indigo-100\">Implants<\/span>\r\n                        <\/div>\r\n                        <img decoding=\"async\" src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2026\/03\/EFRE_JTF_Logokombination_NRW_RGB.png\"\r\n                             alt=\"Funded by EFRE JTF NRW\"\r\n                             class=\"h-12 w-auto object-contain shrink-0\"\r\n                             onerror=\"this.style.display='none'\">\r\n                    <\/div>\r\n                <\/div>\r\n            <\/article>\r\n\r\n            <article class=\"filter-item vr ai bg-white rounded-2xl shadow-sm border border-slate-100 overflow-hidden hover:shadow-xl transition-all duration-300 flex flex-col md:flex-row min-h-[320px] group\">\r\n                <a href=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2023\/02\/graphic-1-300x256.jpg\" target=\"_blank\" class=\"block w-full h-64 md:w-80 md:h-auto shrink-0 bg-white border-b md:border-b-0 md:border-r border-slate-100 relative overflow-hidden cursor-zoom-in\">\r\n                    <img src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2023\/02\/graphic-1-300x256.jpg\" \r\n                         alt=\"VR Diagnostics\" \r\n                     
    loading=\"lazy\"\r\n                         decoding=\"async\"\r\n                         class=\"w-full h-full object-contain p-6 transition-transform duration-500 group-hover:scale-105 fade-in-img\"\r\n                         onerror=\"this.src='https:\/\/placehold.co\/600x400\/e2e8f0\/475569?text=Image+Unavailable'\">\r\n                    <div class=\"absolute inset-0 bg-black bg-opacity-0 group-hover:bg-opacity-5 transition-all duration-300 flex items-center justify-center\">\r\n                        <span class=\"opacity-0 group-hover:opacity-100 bg-white\/90 text-slate-800 text-xs font-semibold px-3 py-1 rounded-full shadow-lg transform translate-y-2 group-hover:translate-y-0 transition-all duration-300\">View Image<\/span>\r\n                    <\/div>\r\n                <\/a>\r\n                <div class=\"p-6 md:p-8 flex-1 flex flex-col justify-start\">\r\n                    <h3 class=\"text-2xl font-bold text-slate-900 mb-4 group-hover:text-purple-600 transition-colors\">WAVES: The use of virtual reality for diagnostics of neurological disorders<\/h3>\r\n                    <p class=\"text-slate-600 text-base leading-relaxed mb-6\">\r\n                        We develop a standardized test for diagnosing motor function disorders in Virtual Reality (VR). 
We use modern Machine Learning (ML) algorithms, such as deep learning, in the context of diagnosis and therapy of neurological diseases with hand and arm dysfunctions.\r\n                    <\/p>\r\n                    <div class=\"flex flex-wrap gap-2 mt-auto\">\r\n                        <span class=\"text-xs font-medium text-purple-600 bg-purple-50 px-3 py-1.5 rounded-full border border-purple-100\">VR<\/span>\r\n                        <span class=\"text-xs font-medium text-purple-600 bg-purple-50 px-3 py-1.5 rounded-full border border-purple-100\">Deep Learning<\/span>\r\n                    <\/div>\r\n                <\/div>\r\n            <\/article>\r\n\r\n            <article class=\"filter-item ai bci bg-white rounded-2xl shadow-sm border border-slate-100 overflow-hidden hover:shadow-xl transition-all duration-300 flex flex-col md:flex-row min-h-[320px] group\">\r\n                <a href=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2023\/03\/graphic-7-300x296.jpg\" target=\"_blank\" class=\"block w-full h-64 md:w-80 md:h-auto shrink-0 bg-white border-b md:border-b-0 md:border-r border-slate-100 relative overflow-hidden cursor-zoom-in\">\r\n                    <img src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2023\/03\/graphic-7-300x296.jpg\" \r\n                         alt=\"Neural Signal Processing\" \r\n                         loading=\"lazy\"\r\n                         decoding=\"async\"\r\n                         class=\"w-full h-full object-contain p-6 transition-transform duration-500 group-hover:scale-105 fade-in-img\"\r\n                         onerror=\"this.src='https:\/\/placehold.co\/600x400\/e2e8f0\/475569?text=Image+Unavailable'\">\r\n                    <div class=\"absolute inset-0 bg-black bg-opacity-0 group-hover:bg-opacity-5 transition-all duration-300 flex items-center justify-center\">\r\n                        <span class=\"opacity-0 group-hover:opacity-100 bg-white\/90 text-slate-800 text-xs font-semibold px-3 py-1 
rounded-full shadow-lg transform translate-y-2 group-hover:translate-y-0 transition-all duration-300\">View Image<\/span>\r\n                    <\/div>\r\n                <\/a>\r\n                <div class=\"p-6 md:p-8 flex-1 flex flex-col justify-start\">\r\n                    <h3 class=\"text-2xl font-bold text-slate-900 mb-4 group-hover:text-blue-600 transition-colors\">Neural signal processing using artificial intelligence on an embedded platform<\/h3>\r\n                    <p class=\"text-slate-600 text-base leading-relaxed mb-6\">\r\n                        The aim of this project is to develop an end-to-end neural processing pipeline that implements online algorithms for spike sorting and neural decoding using analogue and digital embedded hardware. This pipeline is a fundamental building block for the development of the next generation of neural implants. The system relies on state-of-the-art machine learning algorithms to decode movement intentions from brain activity. In parallel, we will optimise a neural decoder based on deep learning methods.\r\n                    <\/p>\r\n                    <div class=\"flex flex-wrap gap-2 mt-auto\">\r\n                        <span class=\"text-xs font-medium text-blue-600 bg-blue-50 px-3 py-1.5 rounded-full border border-blue-100\">Embedded<\/span>\r\n                        <span class=\"text-xs font-medium text-blue-600 bg-blue-50 px-3 py-1.5 rounded-full border border-blue-100\">AI<\/span>\r\n                    <\/div>\r\n                <\/div>\r\n            <\/article>\r\n\r\n            <article class=\"filter-item robotics bci vr bg-white rounded-2xl shadow-sm border border-slate-100 overflow-hidden hover:shadow-xl transition-all duration-300 flex flex-col md:flex-row min-h-[320px] group\">\r\n                <a href=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2023\/02\/graphic-6-294x300.jpg\" target=\"_blank\" class=\"block w-full h-64 md:w-80 md:h-auto shrink-0 bg-white border-b md:border-b-0 
md:border-r border-slate-100 relative overflow-hidden cursor-zoom-in\">\r\n                    <img src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2023\/02\/graphic-6-294x300.jpg\" \r\n                         alt=\"Smart Exoskeleton\" \r\n                         loading=\"lazy\"\r\n                         decoding=\"async\"\r\n                         class=\"w-full h-full object-contain p-6 transition-transform duration-500 group-hover:scale-105 fade-in-img\"\r\n                         onerror=\"this.src='https:\/\/placehold.co\/600x400\/e2e8f0\/475569?text=Image+Unavailable'\">\r\n                    <div class=\"absolute inset-0 bg-black bg-opacity-0 group-hover:bg-opacity-5 transition-all duration-300 flex items-center justify-center\">\r\n                        <span class=\"opacity-0 group-hover:opacity-100 bg-white\/90 text-slate-800 text-xs font-semibold px-3 py-1 rounded-full shadow-lg transform translate-y-2 group-hover:translate-y-0 transition-all duration-300\">View Image<\/span>\r\n                    <\/div>\r\n                <\/a>\r\n                <div class=\"p-6 md:p-8 flex-1 flex flex-col justify-start\">\r\n                    <h3 class=\"text-2xl font-bold text-slate-900 mb-4 group-hover:text-rose-600 transition-colors\">Smart upper-limb exoskeleton<\/h3>\r\n                    <p class=\"text-slate-600 text-base leading-relaxed mb-6\">\r\n                        We are interested in developing a key component \u2013a bio-mechanically designed, adaptive exoskeleton for the upper extremity \u2014 to explore and enable patients with arm and grip function impairment due to various neurological diseases in their journey of rehabilitation. 
The exoskeleton is a holistic rehabilitation system that includes the design and implementation of movement tasks in VR, a feedback system based on biosignals and a generic decoder for invasive and non-invasive brain-computer interfaces.\r\n                    <\/p>\r\n                    <div class=\"flex flex-wrap gap-2 mt-auto\">\r\n                        <span class=\"text-xs font-medium text-rose-600 bg-rose-50 px-3 py-1.5 rounded-full border border-rose-100\">Robotics<\/span>\r\n                        <span class=\"text-xs font-medium text-rose-600 bg-rose-50 px-3 py-1.5 rounded-full border border-rose-100\">Rehab<\/span>\r\n                    <\/div>\r\n                <\/div>\r\n            <\/article>\r\n\r\n            <article class=\"filter-item robotics bg-white rounded-2xl shadow-sm border border-slate-100 overflow-hidden hover:shadow-xl transition-all duration-300 flex flex-col md:flex-row min-h-[320px] group\">\r\n                <a href=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2023\/03\/graphic-8-300x297.jpg\" target=\"_blank\" class=\"block w-full h-64 md:w-80 md:h-auto shrink-0 bg-white border-b md:border-b-0 md:border-r border-slate-100 relative overflow-hidden cursor-zoom-in\">\r\n                    <img src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2023\/03\/graphic-8-300x297.jpg\" \r\n                         alt=\"Collaborative Robotics\" \r\n                         loading=\"lazy\"\r\n                         decoding=\"async\"\r\n                         class=\"w-full h-full object-contain p-6 transition-transform duration-500 group-hover:scale-105 fade-in-img\"\r\n                         onerror=\"this.src='https:\/\/placehold.co\/600x400\/e2e8f0\/475569?text=Image+Unavailable'\">\r\n                    <div class=\"absolute inset-0 bg-black bg-opacity-0 group-hover:bg-opacity-5 transition-all duration-300 flex items-center justify-center\">\r\n                        <span class=\"opacity-0 group-hover:opacity-100 
bg-white\/90 text-slate-800 text-xs font-semibold px-3 py-1 rounded-full shadow-lg transform translate-y-2 group-hover:translate-y-0 transition-all duration-300\">View Image<\/span>\r\n                    <\/div>\r\n                <\/a>\r\n                <div class=\"p-6 md:p-8 flex-1 flex flex-col justify-start\">\r\n                    <h3 class=\"text-2xl font-bold text-slate-900 mb-4 group-hover:text-orange-600 transition-colors\">Collaborative robotics<\/h3>\r\n                    <p class=\"text-slate-600 text-base leading-relaxed mb-6\">\r\n                        Together with Ruhr University for Applied Sciences, RUB Chair for Production Systems, and University of Madeira, we develop a framework for trustful and efficient motor collaboration between humans and robotic agents. We use human biomarkers as a proxy of human state in order to adaptively guide actions of the collaborative robot.\r\n                    <\/p>\r\n                    <div class=\"flex flex-wrap gap-2 mt-auto\">\r\n                        <span class=\"text-xs font-medium text-orange-600 bg-orange-50 px-3 py-1.5 rounded-full border border-orange-100\">HRI<\/span>\r\n                        <span class=\"text-xs font-medium text-orange-600 bg-orange-50 px-3 py-1.5 rounded-full border border-orange-100\">Biomarkers<\/span>\r\n                    <\/div>\r\n                <\/div>\r\n            <\/article>\r\n\r\n            <article class=\"filter-item bci bg-white rounded-2xl shadow-sm border border-slate-100 overflow-hidden hover:shadow-xl transition-all duration-300 flex flex-col md:flex-row min-h-[320px] group relative cursor-pointer\">\r\n                <div class=\"block w-full h-64 md:w-80 md:h-auto shrink-0 bg-white border-b md:border-b-0 md:border-r border-slate-100 relative overflow-hidden\">\r\n                    <img src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2023\/02\/graphic-5-292x300.jpg\" \r\n                         alt=\"Terahertz Imaging\" \r\n             
            loading=\"lazy\"\r\n                         decoding=\"async\"\r\n                         class=\"w-full h-full object-contain p-6 transition-transform duration-500 group-hover:scale-105 fade-in-img\"\r\n                         onerror=\"this.src='https:\/\/placehold.co\/600x400\/e2e8f0\/475569?text=Image+Unavailable'\">\r\n                <\/div>\r\n                \r\n                <div class=\"p-6 md:p-8 flex-1 flex flex-col justify-start\">\r\n                    <h3 class=\"text-2xl font-bold text-slate-900 mb-4 group-hover:text-cyan-600 transition-colors\">\r\n                        <a href=\"https:\/\/www.terahertz.nrw\/pis-im-netzwerk\/\" class=\"focus:outline-none\">\r\n                            <span class=\"absolute inset-0\" aria-hidden=\"true\"><\/span>\r\n                            Terahertz\r\n                        <\/a>\r\n                    <\/h3>\r\n                    <p class=\"text-slate-600 text-base leading-relaxed mb-6\">\r\n                        We apply terahertz radiation to discover novel methods for imaging brain activity for future brain-machine interfaces.\r\n                    <\/p>\r\n                    <div class=\"flex flex-wrap gap-2 mt-auto relative z-10\">\r\n                        <span class=\"text-xs font-medium text-cyan-600 bg-cyan-50 px-3 py-1.5 rounded-full border border-cyan-100\">Imaging<\/span>\r\n                        <span class=\"text-xs font-medium text-cyan-600 bg-cyan-50 px-3 py-1.5 rounded-full border border-cyan-100\">Future Tech<\/span>\r\n                    <\/div>\r\n                <\/div>\r\n            <\/article>\r\n\r\n            <article class=\"filter-item vr sensory bci bg-white rounded-2xl shadow-sm border border-slate-100 overflow-hidden hover:shadow-xl transition-all duration-300 flex flex-col md:flex-row min-h-[320px] group\">\r\n                <a href=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/01\/VR-Summit-2024-2-300x300.png\" target=\"_blank\" 
class=\"block w-full h-64 md:w-80 md:h-auto shrink-0 bg-white border-b md:border-b-0 md:border-r border-slate-100 relative overflow-hidden cursor-zoom-in\">\r\n                    <img src=\"https:\/\/klaeslab.de\/wp-content\/uploads\/2025\/01\/VR-Summit-2024-2-300x300.png\" \r\n                         alt=\"Phantom Touch Research\" \r\n                         loading=\"lazy\"\r\n                         decoding=\"async\"\r\n                         class=\"w-full h-full object-contain p-6 transition-transform duration-500 group-hover:scale-105 fade-in-img\"\r\n                         onerror=\"this.src='https:\/\/placehold.co\/600x400\/e2e8f0\/475569?text=Image+Unavailable'\">\r\n                    <div class=\"absolute inset-0 bg-black bg-opacity-0 group-hover:bg-opacity-5 transition-all duration-300 flex items-center justify-center\">\r\n                        <span class=\"opacity-0 group-hover:opacity-100 bg-white\/90 text-slate-800 text-xs font-semibold px-3 py-1 rounded-full shadow-lg transform translate-y-2 group-hover:translate-y-0 transition-all duration-300\">View Image<\/span>\r\n                    <\/div>\r\n                <\/a>\r\n                <div class=\"p-6 md:p-8 flex-1 flex flex-col justify-start\">\r\n                    <h3 class=\"text-2xl font-bold text-slate-900 mb-4 group-hover:text-fuchsia-600 transition-colors\">Phantom Touch<\/h3>\r\n                    <p class=\"text-slate-600 text-base leading-relaxed mb-6\">\r\n                        This line of research explores phantom touch illusion \u2014 the perception of touch without physical contact in virtual reality. This can be useful for neurorehabilitation and sensory augmentation. 
By integrating VR, brain-computer interfaces, and haptic technologies, we aim to enhance motor recovery, rewire neural pathways, and improve sensory experiences for patients with neurological disorders.\r\n                    <\/p>\r\n                    <div class=\"flex flex-wrap gap-2 mt-auto\">\r\n                        <span class=\"text-xs font-medium text-fuchsia-600 bg-fuchsia-50 px-3 py-1.5 rounded-full border border-fuchsia-100\">Sensory<\/span>\r\n                        <span class=\"text-xs font-medium text-fuchsia-600 bg-fuchsia-50 px-3 py-1.5 rounded-full border border-fuchsia-100\">VR<\/span>\r\n                    <\/div>\r\n                <\/div>\r\n            <\/article>\r\n\r\n        <\/div>\r\n    <\/section>\r\n\r\n    <script>\r\n        function filterSelection(c) {\r\n            var x, i;\r\n            x = document.getElementsByClassName(\"filter-item\");\r\n            var btns = document.getElementsByClassName(\"filter-btn\");\r\n\r\n            for (i = 0; i < btns.length; i++) {\r\n                btns[i].classList.remove(\"bg-slate-800\", \"text-white\");\r\n                btns[i].classList.add(\"bg-white\", \"text-slate-600\");\r\n                if (!btns[i].classList.contains(\"border-slate-200\")) {\r\n                     btns[i].classList.add(\"border-slate-200\");\r\n                }\r\n            }\r\n\r\n            var activeBtn = Array.from(btns).find(b => b.getAttribute('onclick').includes(c));\r\n            if(activeBtn) {\r\n                activeBtn.classList.remove(\"bg-white\", \"text-slate-600\", \"border-slate-200\");\r\n                activeBtn.classList.add(\"bg-slate-800\", \"text-white\", \"border-transparent\");\r\n            }\r\n\r\n            if (c == \"all\") c = \"\";\r\n            for (i = 0; i < x.length; i++) {\r\n                if (x[i].className.indexOf(c) > -1) {\r\n                    x[i].style.display = \"flex\";\r\n                    x[i].style.opacity = 0;\r\n                    
setTimeout((el) => { el.style.opacity = 1; }, 10, x[i]);\r\n                } else {\r\n                    x[i].style.display = \"none\";\r\n                }\r\n            }\r\n        }\r\n    <\/script>\r\n<\/body>\r\n<\/html>\r\n\n\t\t<\/div>\n\t<\/div>\n<div class=\"vc_empty_space\"   style=\"height: 32px\"><span class=\"vc_empty_space_inner\"><\/span><\/div><\/div><\/div><\/div><\/div><div class=\"vc_row wpb_row vc_row-fluid\"><div class=\"wpb_column vc_column_container vc_col-sm-12\"><div class=\"vc_column-inner\"><div class=\"wpb_wrapper\">\n\t<div class=\"wpb_text_column wpb_content_element \" >\n\t\t<div class=\"wpb_wrapper\">\n\t\t\t<h5 style=\"text-align: center;\">If you are interested in our projects and\/or working with us,<\/h5>\n<p style=\"text-align: center;\">please <a href=\"mailto:neurotechnologie@rub.de\">reach out to us.<\/a><\/p>\n\n\t\t<\/div>\n\t<\/div>\n<\/div><\/div><\/div><\/div><div class=\"vc_row wpb_row vc_row-fluid\"><div class=\"wpb_column vc_column_container vc_col-sm-12\"><div class=\"vc_column-inner\"><div class=\"wpb_wrapper\"><form class=\"dt-contact-form dt-form privacy-form\" method=\"post\"><input type=\"hidden\" name=\"widget_id\" value=\"presscore-contact-form-widget--1\" \/><input type=\"hidden\" name=\"send_message\" value=\"\" \/><input type=\"hidden\" name=\"security_token\" value=\"4bf689b17f1aa089381a\"\/><div class=\"form-fields\"><span class=\"form-name\"><label class=\"assistive-text\">Name *<\/label><input type=\"text\" class=\"validate[required]\" placeholder=\"Name *\" name=\"name\" value=\"\" aria-required=\"true\">\n<\/span><span class=\"form-mail\"><label class=\"assistive-text\">E-Mail *<\/label><input type=\"text\" class=\"validate[required,custom[email]]\" placeholder=\"E-Mail *\" name=\"email\" value=\"\" aria-required=\"true\">\n<\/span><\/div><span class=\"form-message\"><label class=\"assistive-text\">Nachricht *<\/label><textarea class=\"validate[required]\" placeholder=\"Nachricht *\" 
name=\"message\" rows=\"6\" aria-required=\"true\"><\/textarea>\n<\/span><p class=\"the7-form-terms-wrap\"><input type=\"checkbox\" id=\"the7-form-terms-c70daf247944fe3add32218f914c75a6\" name=\"terms\" class=\"validate[required] the7-form-terms\" aria-required=\"true\" \/>&nbsp;<label for=\"the7-form-terms-c70daf247944fe3add32218f914c75a6\"><span class=\"form-terms-text\">By using this form I agree with the storage and handling of my data by KlaesLab.<\/span><\/label><\/p><p><a href=\"#\" class=\"dt-btn dt-btn-m dt-btn-submit\" rel=\"nofollow\"><span>Submit<\/span><\/a><input class=\"assistive-text\" type=\"submit\" value=\"absenden\"><\/p><\/form>\n<\/div><\/div><\/div><\/div><div class=\"vc_row wpb_row vc_row-fluid\"><div class=\"wpb_column vc_column_container vc_col-sm-12\"><div class=\"vc_column-inner\"><div class=\"wpb_wrapper\"><\/div><\/div><\/div><\/div><div class=\"vc_row wpb_row vc_row-fluid\"><div class=\"wpb_column vc_column_container vc_col-sm-12\"><div class=\"vc_column-inner\"><div class=\"wpb_wrapper\"><\/div><\/div><\/div><\/div>\n<\/div>","protected":false},"excerpt":{"rendered":"<p>KlaesLab &#8211; Neurotechnology at Ruhr-University Bochum Welcome to KlaesLab Led by Professor Christian Klaes, the KlaesLab specializes in exploring the intersection between biological and artificial systems. Professor Klaes holds the professorship for neurotechnology at the Ruhr-University Bochum and is an expert in human and monkey brain-computer interfaces, both invasive and non-invasive. 
Currently, the lab is&hellip;<\/p>","protected":false},"author":2,"featured_media":0,"parent":0,"menu_order":0,"comment_status":"closed","ping_status":"closed","template":"","meta":{"_acf_changed":false,"om_disable_all_campaigns":false,"_monsterinsights_skip_tracking":false,"_monsterinsights_sitenote_active":false,"_monsterinsights_sitenote_note":"","_monsterinsights_sitenote_category":0,"footnotes":""},"class_list":["post-5","page","type-page","status-publish","hentry","description-off"],"acf":[],"aioseo_notices":[],"_links":{"self":[{"href":"https:\/\/klaeslab.de\/de\/wp-json\/wp\/v2\/pages\/5","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/klaeslab.de\/de\/wp-json\/wp\/v2\/pages"}],"about":[{"href":"https:\/\/klaeslab.de\/de\/wp-json\/wp\/v2\/types\/page"}],"author":[{"embeddable":true,"href":"https:\/\/klaeslab.de\/de\/wp-json\/wp\/v2\/users\/2"}],"replies":[{"embeddable":true,"href":"https:\/\/klaeslab.de\/de\/wp-json\/wp\/v2\/comments?post=5"}],"version-history":[{"count":110,"href":"https:\/\/klaeslab.de\/de\/wp-json\/wp\/v2\/pages\/5\/revisions"}],"predecessor-version":[{"id":58618,"href":"https:\/\/klaeslab.de\/de\/wp-json\/wp\/v2\/pages\/5\/revisions\/58618"}],"wp:attachment":[{"href":"https:\/\/klaeslab.de\/de\/wp-json\/wp\/v2\/media?parent=5"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}