diff --git a/_cite/.cache/cache.db b/_cite/.cache/cache.db index 83e1a82b..201e828e 100644 Binary files a/_cite/.cache/cache.db and b/_cite/.cache/cache.db differ diff --git a/_config.yaml b/_config.yaml index f9ed785a..9269f6e6 100644 --- a/_config.yaml +++ b/_config.yaml @@ -41,7 +41,7 @@ defaults: values: layout: post - scope: - type: "blog" + type: "opensource" values: layout: post @@ -57,6 +57,8 @@ collections: output: true blog: output: true + opensource: + output: true # jekyll plugins plugins: diff --git a/_data/citations.yaml b/_data/citations.yaml index 98c74d97..df24bc4a 100644 --- a/_data/citations.yaml +++ b/_data/citations.yaml @@ -1022,7 +1022,7 @@ plugin: sources.py file: sources.yaml - id: doi:10.1109/TIV.2024.3467115 - title: Safety-Quantifiable Planar-Feature-based LiDAR Localization with a Prior + title: Safety-Quantifiable Planar-Feature-Based LiDAR Localization With a Prior Map for Intelligent Vehicles in Urban Scenarios authors: - Jiachen Zhang @@ -1030,7 +1030,7 @@ - Weisong Wen - Li-Ta Hsu publisher: IEEE Transactions on Intelligent Vehicles - date: '2024-01-01' + date: '2025-07-01' link: https://doi.org/g8t5zc type: paper image: https://github.com/PolyU-TASLAB/polyu-taslab.github.io/raw/main/images/papers/2024/Zhang2024SafetyQuantifiable.png @@ -2670,10 +2670,10 @@ title: Online Dynamic Model Calibration for Reliable Control of Quadrotor Based on Factor Graph Optimization authors: - - PEIWEN YANG - - WEISONG WEN - - SHIYU BAI - - JIAHAO HU + - Peiwen Yang + - Weisong Wen + - Shiyu Bai + - Jiahao Hu publisher: IEEE Transactions on Aerospace and Electronic Systems date: '2025-05-01' link: https://doi.org/g9hrnt @@ -2824,3 +2824,191 @@ - urban canyons plugin: sources.py file: sources.yaml +- id: doi:10.1109/TAES.2025.3607718 + title: Unified Sufficient Conditions for Exact Convex Relaxation of Nonconvex Optimal + Control Problems + authors: + - Runqiu Yang + - Weisong Wen + - Peiwen Yang + - Zichen Zhao + - Fengtianyi Huang + publisher: IEEE 
Transactions on Aerospace and Electronic Systems + date: '2025-09-09' + link: https://doi.org/g93v27 + type: paper + tags: + - Optimal control + - Convex relaxation + - Trajectory planning + - Convex optimization + - Mars landing + plugin: sources.py + file: sources.yaml +- id: arXiv:2509.17198 + title: Certifiably Optimal Doppler Positioning using Opportunistic LEO Satellites + authors: + - Baoshan Song + - Weisong Wen + - Qi Zhang + - Bing Xu + - Li-Ta Hsu + publisher: arXiv + date: '2025-09-21' + link: https://arxiv.org/abs/2509.17198 + type: paper + buttons: + - type: manubot + text: paper + link: https://github.com/PolyU-TASLAB/polyu-taslab.github.io/raw/main/research/papers/2509.17198v1.pdf + tags: + - LEO satellite + - Doppler positioning + - signal of opportunity + - convex optimization + - semidefinite programming + plugin: sources.py + file: sources.yaml +- id: arxiv:2509.21496 + title: 'Wall Inspector: Quadrotor Control in Wall-proximity Through Model Compensation' + authors: + - Peiwen Yang + - Weisong Wen + - Runqiu Yang + - Yingming Chen + - Cheuk Chi Tsang + publisher: arXiv + date: '2025-09-25' + link: https://arxiv.org/abs/2509.21496 + type: paper + buttons: + - type: manubot + text: paper + link: https://github.com/PolyU-TASLAB/polyu-taslab.github.io/raw/main/research/papers/2509.21496v1.pdf + tags: + - null + plugin: sources.py + file: sources.yaml +- id: arxiv:2510.00524 + title: Two stage GNSS outlier detection for factor graph optimization based GNSS-RTK/INS/odometer + fusion + authors: + - Baoshan Song + - Penggao Yan + - Xiao Xia + - Yihan Zhong + - Weisong Wen + - Li-Ta Hsu + publisher: arXiv + date: '2025-10-01' + link: https://arxiv.org/abs/2510.00524 + type: paper + buttons: + - type: manubot + text: paper + link: https://github.com/PolyU-TASLAB/polyu-taslab.github.io/raw/main/research/papers/2510.00524v1.pdf + tags: + - null + plugin: sources.py + file: sources.yaml +- id: arxiv:2510.04278 + title: 'Integrated Planning and Control on 
Manifolds: Factor Graph Representation + and Toolkit' + authors: + - Peiwen Yang + - Weisong Wen + - Runqiu Yang + - Yuanyuan Zhang + - Jiahao Hu + - Yingming Chen + - Naigui Xiao + - Jiaqi Zhao + publisher: arXiv + date: '2025-10-05' + link: https://arxiv.org/abs/2510.04278 + type: paper + buttons: + - type: manubot + text: paper + link: https://github.com/PolyU-TASLAB/polyu-taslab.github.io/raw/main/research/papers/2510.04278v1.pdf + tags: + - null + plugin: sources.py + file: sources.yaml +- id: doi:10.1109/TITS.2025.3616580 + title: Learning Safe, Optimal, and Real-Time Flight Interaction With Deep Confidence-Enhanced + Reachability Guarantee + authors: + - Yuanyuan Zhang + - Yingying Wang + - Penggao Yan + - Weisong Wen + publisher: IEEE Transactions on Intelligent Transportation Systems + date: '2025-10-09' + link: https://doi.org/hbbrm6 + type: paper + buttons: + - type: manubot + text: paper + link: https://github.com/PolyU-TASLAB/polyu-taslab.github.io/raw/main/research/papers/Learning_Safe_Optimal_and_Real-Time_Flight_Interaction_With_Deep_Confidence-Enhanced_Reachability_Guarantee.pdf + tags: + - Deep reinforcement learning + - deep confidenceenhanced reachability guarantees + - joint planning and control + - unmanned aerial vehicles + plugin: sources.py + file: sources.yaml +- id: arxiv:2510.08880 + title: Online IMU-odometer Calibration using GNSS Measurements for Autonomous Ground + Vehicle Localization + authors: + - Baoshan Song + - Xiao Xia + - Penggao Yan + - Yihan Zhong + - Weisong Wen + - Li-Ta Hsu + publisher: arXiv + date: '2025-10-10' + link: https://arxiv.org/abs/2510.08880 + type: paper + buttons: + - type: manubot + text: paper + link: https://github.com/PolyU-TASLAB/polyu-taslab.github.io/raw/main/research/papers/2510.08880v1.pdf + plugin: sources.py + file: sources.yaml +- id: arXiv:2512.20224 + title: 'UrbanV2X: A Multisensory Vehicle-Infrastructure Dataset for Cooperative + Navigation in Urban Areas' + authors: + - Qijun Qin + - Ziqi 
Zhang + - Yihan Zhong + - Feng Huang + - Xikun Liu + - Runzhi Hu + - Hang Chen + - Wei Hu + - Dongzhe Su + - Jun Zhang + - Hoi-Fung Ng + - Weisong Wen + publisher: arXiv + date: '2025-12-23' + link: https://arxiv.org/abs/2512.20224 + type: paper + buttons: + - type: manubot + text: paper + link: https://github.com/PolyU-TASLAB/polyu-taslab.github.io/raw/main/research/papers/2512.20224v1.pdf + - type: source + text: code + link: https://polyu-taslab.github.io/UrbanV2X/ + tags: + - Multisensor Fusion + - Roadside Infrastructure + - SLAM + - Autonomous Driving + plugin: sources.py + file: sources.yaml diff --git a/_data/sources.yaml b/_data/sources.yaml index 83c1f39f..a90c86e4 100644 --- a/_data/sources.yaml +++ b/_data/sources.yaml @@ -2119,3 +2119,108 @@ - Doppler measurement model - geometry distribution - urban canyons + +- id: doi:10.1109/TAES.2025.3607718 + type: paper + date: 2025-09-09 + # image: https://github.com/PolyU-TASLAB/polyu-taslab.github.io/raw/main/images/papers/Snipaste_2025-09-07_15-21-25.png + # buttons: + # - type: manubot + # text: paper + # link: https://github.com/PolyU-TASLAB/polyu-taslab.github.io/raw/main/research/papers/2404.14724v2.pdf + tags: + - Optimal control + - Convex relaxation + - Trajectory planning + - Convex optimization + - Mars landing + +- id: arXiv:2509.17198 + type: paper + date: 2025-09-21 + # image: https://github.com/PolyU-TASLAB/polyu-taslab.github.io/raw/main/images/papers/Snipaste_2025-09-07_15-21-25.png + buttons: + - type: manubot + text: paper + link: https://github.com/PolyU-TASLAB/polyu-taslab.github.io/raw/main/research/papers/2509.17198v1.pdf + tags: + - LEO satellite + - Doppler positioning + - signal of opportunity + - convex optimization + - semidefinite programming + +- id: arxiv:2509.21496 + type: paper + date: 2025-09-25 + # image: https://github.com/PolyU-TASLAB/polyu-taslab.github.io/raw/main/images/papers/Snipaste_2025-09-07_15-21-25.png + buttons: + - type: manubot + text: paper + link: 
https://github.com/PolyU-TASLAB/polyu-taslab.github.io/raw/main/research/papers/2509.21496v1.pdf + tags: + - + +- id: arxiv:2510.00524 + type: paper + date: 2025-10-01 + # image: https://github.com/PolyU-TASLAB/polyu-taslab.github.io/raw/main/images/papers/Snipaste_2025-09-07_15-21-25.png + buttons: + - type: manubot + text: paper + link: https://github.com/PolyU-TASLAB/polyu-taslab.github.io/raw/main/research/papers/2510.00524v1.pdf + tags: + - + + +- id: arxiv:2510.04278 + type: paper + date: 2025-10-05 + # image: https://github.com/PolyU-TASLAB/polyu-taslab.github.io/raw/main/images/papers/Snipaste_2025-09-07_15-21-25.png + buttons: + - type: manubot + text: paper + link: https://github.com/PolyU-TASLAB/polyu-taslab.github.io/raw/main/research/papers/2510.04278v1.pdf + tags: + - + +- id: doi:10.1109/TITS.2025.3616580 + type: paper + date: 2025-10-09 + # image: https://github.com/PolyU-TASLAB/polyu-taslab.github.io/raw/main/images/papers/Snipaste_2025-09-07_15-21-25.png + buttons: + - type: manubot + text: paper + link: https://github.com/PolyU-TASLAB/polyu-taslab.github.io/raw/main/research/papers/Learning_Safe_Optimal_and_Real-Time_Flight_Interaction_With_Deep_Confidence-Enhanced_Reachability_Guarantee.pdf + tags: + - Deep reinforcement learning + - deep confidenceenhanced reachability guarantees + - joint planning and control + - unmanned aerial vehicles + + +- id: arxiv:2510.08880 + type: paper + date: 2025-10-10 + # image: https://github.com/PolyU-TASLAB/polyu-taslab.github.io/raw/main/images/papers/Snipaste_2025-09-07_15-21-25.png + buttons: + - type: manubot + text: paper + link: https://github.com/PolyU-TASLAB/polyu-taslab.github.io/raw/main/research/papers/2510.08880v1.pdf + +- id: arXiv:2512.20224 + type: paper + date: 2025-12-23 + # image: https://github.com/PolyU-TASLAB/polyu-taslab.github.io/raw/main/images/papers/Snipaste_2025-09-07_15-21-25.png + buttons: + - type: manubot + text: paper + link: 
https://github.com/PolyU-TASLAB/polyu-taslab.github.io/raw/main/research/papers/2512.20224v1.pdf + - type: source + text: code + link: https://polyu-taslab.github.io/UrbanV2X/ + tags: + - Multisensor Fusion + - Roadside Infrastructure + - SLAM + - Autonomous Driving \ No newline at end of file diff --git a/_events/2025-07-05-IROS_Acceptance.md b/_events/2025-07-05-IROS_Acceptance.md index 71d3a28e..bfd0cfe2 100644 --- a/_events/2025-07-05-IROS_Acceptance.md +++ b/_events/2025-07-05-IROS_Acceptance.md @@ -1,5 +1,5 @@ --- -title: Our paper is accpeted by IEEE IROS 2025 +title: Our paper is accepted by IEEE IROS 2025 subtitle: Example news # author: xxx image: images/news/IROS2025_RSG_GLIO.png diff --git a/_events/2025-09-14-Meituan_hosting_MarsTalk_at_PolyU.md b/_events/2025-09-14-Meituan_hosting_MarsTalk_at_PolyU.md new file mode 100644 index 00000000..8c1b4c40 --- /dev/null +++ b/_events/2025-09-14-Meituan_hosting_MarsTalk_at_PolyU.md @@ -0,0 +1,64 @@ +--- +title: Meituan Marstalk Hosts at PolyU:Industry Leaders Discuss Future of Robotics and Intelligent Systems +subtitle: news +# author: Yingming Chen +image: images/news/0914MarsTalk/marstalk.jpg +tags: news +order: +--- +*Hong Kong, September 14th, 2025* – The Hong Kong Polytechnic University (PolyU) successfully hosted the Meituan Marstalk today, bringing together leading experts in robotics, automation, and artificial intelligence to explore technological breakthroughs in intelligent systems for dynamic environments. +The ceremony featured a keynote address by representatives from HKISA, who emphasized the critical role of the new index in promoting trust and scalability in commercial and civic drone applications. The index will provide a measurable framework to evaluate drone performance, maintenance standards, and operational safety—key factors for integration into urban airspace. + +
+ +
+
+ HKISA presenting the DTORI indexes +
+ +### Distinguished Speakers Share Insights on Cutting-Edge Technologies + +The event featured an impressive lineup of speakers from both academia and industry, highlighting the growing collaboration between universities and technology companies in advancing robotics research. + +Dr. Yinian Mao, Vice President of Meituan and Director of Meituan Academy of Robotics Shenzhen, delivered a keynote presentation on the company's latest innovations in autonomous systems. "The integration of robotics and AI in real-world applications is accelerating at an unprecedented pace, particularly in service delivery and urban logistics," Dr. Mao emphasized during his address. + +Prof. Ning Xi, Chair Professor of Robotics and Automation and Head of Department of Data and Systems Engineering at HKU, as well as Director of the Advanced Technologies Institute, provided insights into the academic research underpinning these technological advances. His presentation focused on the convergence of data science and robotic systems in creating more intelligent and adaptive machines. + +
+ +
+
+ Prof. Wen-Hua Chen presenting. +
+ +### Focus on Low Altitude Economy and Drone Technology +Prof. Wen-Hua Chen, Interim Head of Aerospace Engineering at PolyU, Chair Professor of Robotics and Autonomous Systems, and Director of the Research Centre for Low Altitude Economy, discussed the emerging opportunities in the low altitude economy sector. "Hong Kong and the Greater Bay Area are uniquely positioned to lead in the development of low altitude economy applications, from drone delivery to urban air mobility," Prof. Chen noted. + +Dr. Wu Haotian, Senior Director of Meituan and Head of Hardware Platform of Keeta Drone, presented practical applications of drone technology in Meituan's delivery ecosystem. The presentation showcased how autonomous drones are being deployed to navigate complex urban environments and deliver services more efficiently. + +Dr. Wenbo Ding, Associate Professor and Director of the Office of Research at Tsinghua SIGS, rounded out the speaker panel with insights on the research collaboration opportunities between institutions in the Greater Bay Area. + +### Bridging Academia and Industry Through Talent Development +The event went beyond traditional academic presentations by incorporating practical career development opportunities for students and young professionals. Two special sessions were organized to connect talent with industry opportunities: including Express Resume Submission and Fast-track Interview Pass. +
+ +
+
+ Conversation among leading industry participants and academic innovators. +
+ +### TASLAB Participating in MarsTalk +TASLAB members also helped host this event. + +
+ +
+
+ TASLAB group photo. +
+ + diff --git a/_events/2025-09-16-OHKF_RELEASES_TALKING FLIGHT.md b/_events/2025-09-16-OHKF_RELEASES_TALKING FLIGHT.md new file mode 100644 index 00000000..542f09bc --- /dev/null +++ b/_events/2025-09-16-OHKF_RELEASES_TALKING FLIGHT.md @@ -0,0 +1,43 @@ +--- +title: OHKF Releases "Taking Flight — Forging a Future for Hong Kong’s Low-Altitude Economy" Report +subtitle: news +# author: Li Heng +image: images/news/0916TALK/image1.png +tags: news +order: +--- + +## OHKF Releases "Taking Flight — Forging a Future for Hong Kong’s Low-Altitude Economy" Report + +### In-depth Discussion on the Future Development of Hong Kong's Low-Altitude Economy + +This month, Our Hong Kong Foundation (OHKF) released its latest research report entitled "Taking Flight — Forging a Future for Hong Kong’s Low-Altitude Economy." +image1 + +### Team Support and Key Discussion Points + +As a supporting team for this report, we conducted in-depth discussions focusing on the following key issues: + +1. The government support needed by university research teams, such as the Civil Aviation Department's review and support for research drone test flights; +2. The critical breakthroughs required for transforming low-altitude technology research achievements into industrial applications in Hong Kong; +3. How to strengthen collaboration between academia and industry to bring economic benefits to Hong Kong; +4. How Hong Kong can synergize with Mainland China's low-altitude economy industry during the development process. + +### Academic and Industry Exchange + +At the same time, we invited researchers from Our Hong Kong Foundation (OHKF) to visit The Hong Kong Polytechnic University and participate in several academic and industry sharing and discussion sessions. +
+ Low-Altitude Patent Seminar + Mr. Zhu Xiaojun +
+ + +--- + +### Full Report + +For the full report, please visit: [https://www.ourhkfoundation.org.hk/en/media/reports/taking-flight-forging-a-future-for-hong-kongs-low-altitude-economy](https://www.ourhkfoundation.org.hk/en/media/reports/taking-flight-forging-a-future-for-hong-kongs-low-altitude-economy) + +We look forward to contributing further to the development of the low-altitude economy industry. diff --git a/_events/2025-09-29-PolyU_AAE_Conducts_Drone_Test_Flight_in_Sandbox_Regulatory_Project.md b/_events/2025-09-29-PolyU_AAE_Conducts_Drone_Test_Flight_in_Sandbox_Regulatory_Project.md new file mode 100644 index 00000000..cc6c49f2 --- /dev/null +++ b/_events/2025-09-29-PolyU_AAE_Conducts_Drone_Test_Flight_in_Sandbox_Regulatory_Project.md @@ -0,0 +1,53 @@ +--- +title: "PolyU AAE Conducts Drone Test Flight in Sandbox Regulatory Project" +subtitle: "news" +image: images/news/0929CampusFlight/Drone_Flight_2.jpg +tags: news +order: +--- + +*Hong Kong, September 29th, 2025* – The Aeronautics and Aviation Engineering (AAE) department at The Hong Kong Polytechnic University (PolyU) today conducted a drone performance test flight at the Shaw Sports Complex on campus, as part of the sandbox regulatory project for the low altitude economy. This successful event marks a new phase in PolyU AAE's initiative to advance regulatory frameworks and operational standards for unmanned aerial vehicles (UAVs) in urban environments. + +The test flight, carried out in collaboration with the Civil Aviation Department (CAD), evaluated key performance metrics of drones under controlled conditions, focusing on safety, efficiency, and compliance with emerging low-altitude airspace regulations. The sandbox project aims to create a scalable model for integrating drone technology into Hong Kong's transportation and logistics ecosystems. + +
+ +
+
+ Drone performance evaluation at Shaw Sports Complex, PolyU. +
+ +### Sandbox Project Advances Low Altitude Economy Framework + +The sandbox regulatory project, led by PolyU AAE, provides a controlled environment for testing and validating drone operations, contributing to the development of standardized safety protocols and performance benchmarks. Today's test flight demonstrated the practical application of these standards, assessing factors such as flight stability, navigation accuracy, and payload capacity. + +"Today's test flight is a critical step forward in our low altitude economy initiatives," said a representative from the AAE department. "By working closely with regulatory bodies like CAD, we are paving the way for scalable and safe drone integration in Hong Kong and the Greater Bay Area." + +
+ +
+
+ AAE and CAD teams during the sandbox regulatory assessment. +
+ +### Collaborative Efforts for Future Urban Air Mobility + +The event highlighted the growing collaboration between academic institutions and government agencies in shaping the future of urban air mobility. The sandbox project not only focuses on technical performance but also addresses regulatory challenges, such as airspace management and public safety. + +"PolyU's sandbox project serves as a model for how academia and regulators can work together to foster innovation while ensuring safety and compliance," added a CAD official. "We are excited to see these efforts translate into real-world applications." + +### Next Steps for Low Altitude Economy Development + +With the successful completion of this test flight, PolyU AAE plans to expand the sandbox project to include more complex scenarios, such as multi-drone operations and extended-visual-line-of-sight (EVLOS) flights. The department will also continue to engage industry partners and policymakers to drive the adoption of low-altitude economy solutions across the region. + +
+ +
+
+ Discussing future phases of the sandbox regulatory project. +
+ +The sandbox regulatory project aligns with Hong Kong's broader goals to become a hub for technological innovation, particularly in areas such as smart city development and sustainable transportation. PolyU AAE remains at the forefront of these efforts, leveraging its expertise in aeronautics and aviation engineering to contribute to the region's economic and technological advancement. \ No newline at end of file diff --git a/_events/2025-09-30-SourthernPower.md b/_events/2025-09-30-SourthernPower.md new file mode 100644 index 00000000..0348ef35 --- /dev/null +++ b/_events/2025-09-30-SourthernPower.md @@ -0,0 +1,22 @@ +--- +title: TAS LAB Advances Collaboration on Offshore Wind Turbine Inspection with China Southern Power Grid +subtitle: news +# author: XIAO Naigui +image: images/news/0930SouthernPower/image.png +tags: news +order: +--- + +## TAS LAB Advances Collaboration on Offshore Wind Turbine Inspection with China Southern Power Grid + +**Zhuhai, September 30, 2025** – A team led by Dr. Wen Weisong of the Department of Aeronautics and Civil Aviation at The Hong Kong Polytechnic University held a highly successful meeting today with the China Southern Power Grid Southern Offshore Wind Power Joint Development Co., Ltd. The meeting in Zhuhai marked a significant step forward in discussions for a joint laboratory research project. + +
+ +
+ +The collaboration is centered on the "UAV-based Blade and Tower Inspection" project, operating under the Guangdong-Hong Kong Joint Laboratory for Marine Infrastructure. The project aims to develop efficient, drone-based technologies for inspecting offshore wind turbines. The Hong Kong research efforts are led by Principal Investigator Dr. Wen Weisong, in partnership with his Guangdong counterpart, Lin Jinghua of the China Energy Engineering Group Guangdong Electric Power Design Institute Co., Ltd. This collaboration also aims to jointly publish at least two academic papers, apply for one or more invention patents, and cultivate postgraduate talent. + + + diff --git a/_events/2025-10-10-ZhangyuanyuanTITS.md b/_events/2025-10-10-ZhangyuanyuanTITS.md new file mode 100644 index 00000000..6f409e54 --- /dev/null +++ b/_events/2025-10-10-ZhangyuanyuanTITS.md @@ -0,0 +1,35 @@ +--- +title: Our paper is accepted by IEEE Transactions on Intelligent Transportation Systems +subtitle: news +# author: XIAO Naigui +image: images/news/1010ZYYTITIS/1.png +tags: news +order: +--- + +It is great to share that our paper (“Learning Safe, Optimal, and Real-Time Flight Interaction With Deep Confidence-Enhanced Reachability Guarantee”, by Yuanyuan Zhang, Yingying Wang, Penggao Yan, and Weisong Wen) is accepted by the IEEE Transactions on Intelligent Transportation Systems. Congratulations to Yuanyuan and our colleagues. + +
+ +
+ +**Abstract** + +In the low-altitude economy, ensuring the safe and agile flight of unmanned aerial vehicles (UAVs) in dynamic obstacle environments is essential for expanding interactive applications like parcel delivery. While deep reinforcement learning (DRL) shows promise for UAV motion planning and control, its trial-and-error exploration often struggles to ensure both agility and safety, especially under uncertain observational noise. Therefore, this paper proposes a deep confidence-enhanced reachability policy optimization (DCRPO) framework. By integrating safe DRL with nonlinear model predictive control (NMPC), DCRPO achieves high-level safety decisions, complex real-time joint planning and control for UAVs. Furthermore, we develop a deep confidence-enhanced reachability guarantee that constructs a set of stochastically forward-reachable planned trajectories under uncertainty, enabling robust safety collision probability certifications. This safe reachability mechanism adaptively selects belief space actions from planned actions to interact with the environment, further enhancing safety and reducing training time. In extensive experiments of UAVs traversing a fast-moving rectangular gate, the proposed method outperforms other state-of-the-art baseline methods under varying environments in terms of operational robustness. Furthermore, the proposed method significantly reduces overall collision violations and training time, greatly improving both training safety and efficiency. The demonstration video (https://youtu.be/7xkp9U7FSJg) and the source code (https://github.com/ZyyFLY/DCRPO) are also provided. + +
+ +
+
+ System Framework +
+ +
+ +
+
+ Test Evaluation +
\ No newline at end of file diff --git a/_events/2025-10-17-Inner_Mongolia_Research_and_Industry_Exchange_Unmanned_Systems_and_Photovoltaic_Fieldwork.md b/_events/2025-10-17-Inner_Mongolia_Research_and_Industry_Exchange_Unmanned_Systems_and_Photovoltaic_Fieldwork.md new file mode 100644 index 00000000..602ef672 --- /dev/null +++ b/_events/2025-10-17-Inner_Mongolia_Research_and_Industry_Exchange_Unmanned_Systems_and_Photovoltaic_Fieldwork.md @@ -0,0 +1,49 @@ +--- +title: Inner Mongolia Research and Industry Exchange — Unmanned Systems and Photovoltaic Fieldwork +subtitle: news +image: images/news/1017NeiMengGuVisit/image6.jpg +tags: news +order: +--- + +## Inner Mongolia Research and Industry Exchange — Unmanned Systems and Photovoltaic Fieldwork + +### Team Visit to Key Institutions and Enterprises + +In late October, our team members visited several key organizations in Inner Mongolia, including: + +- Ordos Institute of Applied Technology +- Ordos Modern Industry Institute +- Inner Mongolia Huiju High-Tech Co., Ltd. +- Inner Mongolia Kubuqi Desert Photovoltaic Energy Co., Ltd. + +
+ Inner Mongolia Visit + Inner Mongolia Visit +
+ +
+ Inner Mongolia Visit +
+ +### Academic and Industry Collaboration + +During these visits, we held in-depth discussions with university and enterprise partners focusing on the application of unmanned systems in academic research and local industries. + +### On-site Photovoltaic Cleaning and Data Collection + +Additionally, at one of the power stations of Inner Mongolia Kubuqi Desert Photovoltaic Energy Co., Ltd., we conducted practical drone-based photovoltaic cleaning and data collection work. This effort lays a solid foundation for future research on unmanned systems. + +
+ Inner Mongolia Visit + Inner Mongolia Visit +
+ +--- + +We look forward to further advancing unmanned system technologies and strengthening cooperation between academia and industry in Inner Mongolia. diff --git a/_events/2025-10-21-Attend_IROS.md b/_events/2025-10-21-Attend_IROS.md new file mode 100644 index 00000000..15717ae3 --- /dev/null +++ b/_events/2025-10-21-Attend_IROS.md @@ -0,0 +1,35 @@ +--- +title: Dr. HUANG Feng and PhD student ZHONG Yihan present their work at IEEE IROS 2025. +subtitle: Example news +# author: xxx +image: images/news/IROS2025/poster_present.jpg +tags: news +order: +--- + +Our lab member Dr. HUANG Feng and PhD student ZHONG Yihan are presenting their work at IEEE IROS 2025. After 19 years, IROS returns to China, coinciding with a pivotal moment in the rapid advancement of AI and robotics—making IROS 2025 an outstanding venue for discussion and networking. The data of our work is available at [Github](https://github.com/DarrenWong/RSG-GLIO). + + +### Photos +
+ Team Banner +
+ +
+ Team Banner +
+ +
+ Team Banner +
+ +
+ Team Banner +
+ + + diff --git a/_events/2025-10-30-shougang.md b/_events/2025-10-30-shougang.md new file mode 100644 index 00000000..33e2a7fe --- /dev/null +++ b/_events/2025-10-30-shougang.md @@ -0,0 +1,34 @@ +--- +title: Shougang Jinggang Innovation Center Officials Visit Trustworthy AI and Autonomous Systems Lab at Hong Kong Polytechnic University. +subtitle: Example news +# author: xxx +image: images/news/Shougang/shougang1.png +tags: news +order: +--- + +Representatives from the Shougang Jinggang Innovation Center visited The Hong Kong Polytechnic University (PolyU), where they were introduced to the research activities of the Trustworthy AI and Autonomous Systems Laboratory (TAS Lab). + +The delegation from the Shougang Jinggang Innovation Center expressed profound admiration for the pioneering work and cutting-edge innovations underway at the Trustworthy AI and Autonomous Systems Laboratory. They highly commended our dedication to developing safe, reliable, and ethically sound autonomous systems, noting that this mission perfectly aligns with the future direction of technology development. + +The Center's representatives were particularly impressed by the exhibited robots—specifically the humanoid robot, the cleaning drone, and the sophisticated V2X cooperative autonomous driving platform—which they cited as exceptional demonstrations of our team's technical excellence. To further inspire innovation and foster collaboration, the Center has extended a valued invitation for the TAS Lab to display these groundbreaking robotic systems at a dedicated exhibition space within the Shougang Jinggang Innovation Center, anticipating this partnership will be a tremendous opportunity to highlight our research achievements to a broader audience of industry leaders and potential investors. + +### Photos +
+ Team Banner +
+ +
+ Team Banner +
+
+ Team Banner +
+ +
+ Team Banner +
\ No newline at end of file diff --git a/_events/2025-10-31-nanjing_jiangning.md b/_events/2025-10-31-nanjing_jiangning.md new file mode 100644 index 00000000..7ae3d9af --- /dev/null +++ b/_events/2025-10-31-nanjing_jiangning.md @@ -0,0 +1,27 @@ +--- +title: Nanjing Jiangning Economic Development Zone Officials Visit Trustworthy AI and Autonomous Systems Lab at Hong Kong Polytechnic University. +subtitle: Example news +# author: xxx +image: images/news/nanjingjiangning/2.jpg +tags: news +order: +--- + +Officials from the Nanjing Jiangning Economic Development Zone visited The Hong Kong Polytechnic University, where they were introduced to the research activities of the Trustworthy AI and Autonomous Systems Laboratory. The delegation was given an overview of several innovative projects, including cleaning drones, tunnel inspection drones, humanoid robots, and end-to-end autonomous driving systems. + +During the visit, the officials expressed strong appreciation for the laboratory’s work, highlighting the potential applications and technological advancements demonstrated by the projects. The exchange underscored the importance of collaboration in cutting-edge research and its role in driving industrial and technological development. + + +### Photos +
+ Team Banner +
+ +
+ Team Banner +
+ + + + diff --git a/_events/2025-11-03-ZhengXi_Phd_defense.md b/_events/2025-11-03-ZhengXi_Phd_defense.md new file mode 100644 index 00000000..ba278928 --- /dev/null +++ b/_events/2025-11-03-ZhengXi_Phd_defense.md @@ -0,0 +1,18 @@ +--- +title: Congratulations to the successful PhD oral defense of Dr. ZHENG Xi! +# author: Yixin Gao +image: images/news/20251103_Zhengxi/zhengxi_oral_defense.jpg +tags: news +order: +--- + +Congratulations to the successful PhD oral defense of Dr. ZHENG Xi! +
+ group photos +
+ + + + diff --git a/_events/2025-11-03-hk_fangwuzhanlan.md b/_events/2025-11-03-hk_fangwuzhanlan.md new file mode 100644 index 00000000..5247f5a5 --- /dev/null +++ b/_events/2025-11-03-hk_fangwuzhanlan.md @@ -0,0 +1,30 @@ +--- +title: PolyU's TAS Lab Showcases Advanced Drone Technology at Chartered Institute of Housing Asian Pacific Branch Dinner +subtitle: +# author: xxx +image: images/news/1103fangwu/fangwu1.jpg +tags: news +order: +--- + +HONG KONG – November 3, 2025 – The TAS Lab from The Hong Kong Polytechnic University (PolyU) presented its cutting-edge unmanned aerial vehicle (UAV) technology at a special exhibition during the annual dinner of The Chartered Institute of Housing (CIH) Asian Pacific Branch. + +The event, attended by key figures and professionals from the housing and property management industry, provided a prime opportunity for the PolyU research team to demonstrate the practical applications of their advanced drone systems. + +The TAS Lab's exhibition featured video presentations and our drone models WALL-E, highlighting capabilities specifically relevant to the housing sector. These included high-precision autonomous navigation for façade cleaning, AI-powered defect detection, and 3D mapping for building maintenance. + +The demonstration sparked significant interest among the attendees, fostering discussions on how this technology could be integrated into existing workflows for building surveys, maintenance planning, and safety compliance. + +### Photos +
+ Team Banner +
+ +
+ Team Banner +
+ + + diff --git a/_events/2025-11-06-sz_chuanghuan.md b/_events/2025-11-06-sz_chuanghuan.md new file mode 100644 index 00000000..607ed5cc --- /dev/null +++ b/_events/2025-11-06-sz_chuanghuan.md @@ -0,0 +1,20 @@ +--- +title: Professor Wen Weisong of PolyU Leads Delegation to Shenzhen Chuanghuan to Discuss Drone Pipeline Inspection Technology +subtitle: +# author: xxx +# image: images/news/1103fangwu/fangwu1.jpg +tags: news +order: +--- + +SHENZHEN, China – November 6, 2025 – A research delegation from The Hong Kong Polytechnic University (PolyU), led by Professor Wen Weisong, visited the offices of [Shenzhen Chuanghuan] today to engage in high-level technical discussions and explore future collaboration. + +The primary focus of the meeting was the application of advanced unmanned aerial vehicle (UAV) technology for internal pipeline exploration and inspection. + +The PolyU team presented its latest research findings and technological breakthroughs in autonomous systems. Key discussion points included navigating drones in GPS-denied, confined spaces, 3D mapping of internal structures, and AI-powered defect detection for pipe maintenance. + +Representatives from Shenzhen Chuanghuan shared their industry expertise and the significant market demand for safer, more efficient inspection solutions for complex urban and industrial pipe networks. + +The two parties held a productive dialogue on bridging the gap between cutting-edge academic research and real-world industrial applications. Both sides identified significant synergies and expressed a strong mutual interest in a future partnership. + +The visit concluded with an agreement to draft a formal plan for future cooperation, potentially including joint research projects, technology trials, and the development of specialized drone platforms tailored for pipeline environments. 
diff --git a/_events/2025-11-10-hk_heu_visit.md b/_events/2025-11-10-hk_heu_visit.md new file mode 100644 index 00000000..5ca7d88c --- /dev/null +++ b/_events/2025-11-10-hk_heu_visit.md @@ -0,0 +1,55 @@ +--- +title: Harbin Engineering University Vice President YU Zhiwen Visits PolyU’s TAS Lab to Strengthen Research Ties +subtitle: +# author: xxx +image: images/news/1110_heu/heu4.jpg +tags: news +order: +--- + +HONG KONG – November 10, 2025 – The TAS Lab at The Hong Kong Polytechnic University (PolyU) today welcomed a distinguished delegation from Harbin Engineering University (HEU), led by Vice President YU Zhiwen. The visit was organized to demonstrate the lab's latest advancements in autonomous systems and to deepen the research partnership between the two institutions. + +The visit began with a guided tour of the [FJ005 Indoor Flight Arena], the lab's state-of-the-art research and testing facility. + +Following the tour, the HEU delegation received a comprehensive briefing on the lab's key projects, presented by the TAS Lab's core research team: + +Mr. ZHU Fengchi delivered a presentation on the achievements of the Joint Laboratory and the fruitful collaboration between PolyU and HEU. + +
+ Team Banner +
+ +Mr. LIU Xikun introduced the T2 Drone Project, providing a detailed overview and demonstration of the UAV's advanced capabilities. + +
+ Team Banner +
+ +Mr. XIAO Naigui and Mr. HU Jiahao presented the lab's autonomous cleaning drone and gave a technical demonstration of several other specialized UAV projects. + +
+ Team Banner +
+ +Prof. JIANG Yiping and Prof. GAO Zhen showcased the capabilities of the indoor flight arena and presented the lab's wider research outcomes in autonomous navigation and control. + +
+ Team Banner +
+ +The demonstrations also included an introduction to the lab's Unmanned Ground Vehicle (UGV) platforms, illustrating the breadth of the TAS Lab's expertise in autonomous robotics. + +The session fostered a productive and in-depth discussion, with Vice President Yu and the HEU delegation engaging with the researchers on the technical innovations presented. The visit marks another significant step in the ongoing collaboration between the two universities, paving the way for continued innovation in autonomous systems. + +
+ Team Banner +
+ + + + diff --git a/_events/2025-11-11-UBeat_interviewed_Prof_Wen_Weisong.md b/_events/2025-11-11-UBeat_interviewed_Prof_Wen_Weisong.md new file mode 100644 index 00000000..08e4cc7e --- /dev/null +++ b/_events/2025-11-11-UBeat_interviewed_Prof_Wen_Weisong.md @@ -0,0 +1,41 @@ +--- +title: Prof. Weisong Wen Interviewed by UBeat on "Drone-Based Curtain Wall Cleaning" Technology and Prospects +subtitle: +# author: xxx +image: images/news/Ubeat/image1.png +tags: news +order: +--- + +## Professor Weisong Wen interviewed by UBeat on "Drone-Based Curtain Wall Cleaning" technology and prospects + +### Drones for glass curtain wall cleaning: all parties gearing up. + +UBeat recently ran the feature "Drones for glass curtain wall cleaning: all parties gearing up." In the interview, TAS Lab lead and Assistant Professor Weisong Wen outlined the technology roadmap, regulatory compliance, and application outlook for drone-based curtain wall cleaning, showcasing the team’s progress in autonomous localization, control, and operational safety. + +
+ +
+
+ Prof. Wen receiving the interview. +
+### Industry pain points and opportunities + +* Technical challenges: Perception and localization on highly reflective glass; disturbance-rejection control for close-proximity flight; compensation for water-jet reaction forces; enclosure waterproofing and payload reliability. +* Safety and compliance: Advanced operations permissions, exclusion zones and contingency planning, geo-fencing and wind-field assessment—prioritizing “safety first, standards-led.” +* Application value: Reduced high-altitude work risk, improved efficiency, and strong potential for energy savings and carbon reduction. +* Progress and plans: A staged demonstration path—building mapping → localization and pathing → on-site cleaning—while co-developing standard procedures with property managers and regulators. + +
+ +
+
+ PolyU JCIT Tower. +
+ +### Link + +UBeat feature: [https://ubeat.com.cuhk.edu.hk/180\_%E7%84%A1%E4%BA%BA%E6%A9%9F%E6%B4%97%E7%8E%BB%E7%92%83%E5%B9%95%E7%89%86-%E5%90%84%E6%96%B9%E8%93%84%E5%8B%A2%E5%BE%85%E7%99%BC/](https://ubeat.com.cuhk.edu.hk/180_%E7%84%A1%E4%BA%BA%E6%A9%9F%E6%B4%97%E7%8E%BB%E7%92%83%E5%B9%95%E7%89%86-%E5%90%84%E6%96%B9%E8%93%84%E5%8B%A2%E5%BE%85%E7%99%BC/) + diff --git a/_events/2025-11-12-hk_bj_symposium.md b/_events/2025-11-12-hk_bj_symposium.md new file mode 100644 index 00000000..15d137dd --- /dev/null +++ b/_events/2025-11-12-hk_bj_symposium.md @@ -0,0 +1,29 @@ +--- +title: Quadruped Robot Steals Spotlight at 28th Beijing-Hong Kong Economic Cooperation Symposium +subtitle: +# author: xxx +image: images/news/1112HK_BJ_sym/4.jpg +tags: news +order: +--- + +HONG KONG – November 12, 2025 – The 28th Beijing-Hong Kong Economic Cooperation Symposium witnessed a showcase of technological innovation as Dr. Runqiu Yang and Mr. Zhongqi Wang from our research laboratory presented our work on quadruped robot. The robot is designed for inspection, logistics, and search-and-rescue operations, capable of carrying heavy loads for extended periods. The demonstration attracted substantial attention from government officials, industry leaders, and academic professionals. + +The symposium, themed "Beijing-Hong Kong Joining Hands, Connecting the World," brought together over 800 participants from government agencies, international business associations, leading enterprises, and industrial professionals. The symposium's significance was underscored by the presence of key political leaders, with Hong Kong Special Administrative Region Chief Executive John Lee and Beijing Municipal Mayor Yin Yong both delivering addresses at the opening ceremony. The symposium served as a crucial bridge for strengthening ties between the two regions' technological ecosystems. 
As Hong Kong Special Administrative Region Chief Executive John Lee noted in his opening address, "Beijing possesses profound historical culture and strong technological innovation capabilities, while Hong Kong enjoys the advantages of connecting the mainland with the rest of the world under the 'one country, two systems' framework." + +
+ Team Banner +
+ +
+ Team Banner +
+ +
+ Team Banner +
+ + diff --git a/_events/2025-11-18-ITSC2025-IVmeetsurban.md b/_events/2025-11-18-ITSC2025-IVmeetsurban.md new file mode 100644 index 00000000..3b0a7c8f --- /dev/null +++ b/_events/2025-11-18-ITSC2025-IVmeetsurban.md @@ -0,0 +1,66 @@ +--- +title: 4th IV Meets Urban Workshop a Success at ITSC 2025 +subtitle: Safe And Certifiable Navigation And Control for Intelligent Vehicles In Complex Urban Scenarios +# author: Yixin Gao +image: images/news/ITSC2025/group_photo.jpg +tags: news +order: +--- + +**GOLD COAST, AUSTRALIA – November 18, 2025 –** The **4th Workshop on Intelligent Vehicle Meets Urban: Safe And Certifiable Navigation And Control** was successfully held at the Star Grand, Broadbeach, Gold Coast, Australia, in conjunction with the ITSC 2025 conference. The event convened leading experts and researchers to address the critical challenges of ensuring **safe, robust, and certifiable autonomous navigation** in complex urban environments. + +
+ Workshop main photo with speakers and attendees +
+ +The workshop featured a series of high-impact presentations by renowned experts, whose contributions steered discussions on cutting-edge solutions for urban autonomy: + +* **Prof. Li-Ta Hsu** (The Hong Kong Polytechnic University) +* **Prof. Timothy D Barfoot** (University of Toronto) +* **Prof. Fu Zhang** (The University of Hong Kong) +* **Prof. Yi Zhou** (Hunan University, China) +* **Dr. Mao Shan** (The University of Sydney) +* **Prof. Shreyas Kousik** (Georgia Institute of Technology) + + + + + + + + + + + + + +
+ + + + + +
+ + + + + +
+ +Their talks covered essential topics from high-precision multi-sensor fusion and radar-based navigation to formal safety methods and LiDAR-centric systems for drones. + + +A key highlight of the event was the dynamic poster session. Colleagues from our research group and Zhenxing Ming from University of Sydney presented their latest findings, contributing significantly to the dialogue on next-generation intelligent vehicles. Their exhibited works covered areas such as robust localization, V2X data fusion, and integrity monitoring for autonomous navigation. + + +The 4th Workshop on Intelligent Vehicle Meets Urban was a resounding success, fostering collaboration and setting new directions for research in safe and certifiable autonomous systems. The organizing committee extends its sincere gratitude to all invited speakers, poster presenters, and attendees for their active participation. + + +For the detailed schedule, invited speaker abstracts, and information on accepted posters and videos, please visit the official workshop page: **[4th Workshop on IV meets Urban](https://sites.google.com/view/ivurban2025itsc)** + + + + diff --git a/_events/2025-12-13-jj.md b/_events/2025-12-13-jj.md new file mode 100644 index 00000000..4dffe8da --- /dev/null +++ b/_events/2025-12-13-jj.md @@ -0,0 +1,39 @@ +--- +title: TAS Team Showcases Innovative Robotics at PolyU Technology Transfer Conference +subtitle: +# author: xxx +image: images/news/1213Jinjiang/1.jpg +tags: news +order: +--- + +**JINJIANG, China** – December 14, 2025 – The TAS Research Team from The Hong Kong Polytechnic University (PolyU) made a strong impression at the inaugural **"Hong Kong Polytechnic University Technology Transfer Conference & Inaugural Annual Exchange Meeting of the Institute for Technological Innovation (2025)."** Held in Jinjiang City, Fujian Province, the event drew over 3,500 participants from government, academia, and industry, focusing on advancing industry-academia-research integration 
and fostering an innovation ecosystem. + +
+ Team Banner +
+ +
+ Team Banner +
+ +Led by PolyU's commitment to transforming research into real-world applications, the TAS team showcased cutting-edge outputs in robotics and sensing technologies. Highlights included unmanned aerial vehicles (UAVs) for building window cleaning, quadruped robots designed for inspection and logistics, ultra-wideband (UWB) modules for precise positioning, and LiDAR scanners enabling high-resolution 3D environmental modeling. These demonstrations attracted keen interest from investors and professionals, underscoring PolyU's role in driving national innovation strategies. + +
+ Team Banner +
+ +
+ Team Banner +
+ +As PolyU President Professor Jin-Guang Teng emphasized, the conference marks a milestone in converting laboratory breakthroughs into market-ready products, with TAS contributions exemplifying this vision. + +
+ Team Banner +
diff --git a/_events/2025-12-17-Ruijie_IoTJ.md b/_events/2025-12-17-Ruijie_IoTJ.md new file mode 100644 index 00000000..b809b293 --- /dev/null +++ b/_events/2025-12-17-Ruijie_IoTJ.md @@ -0,0 +1,22 @@ +--- +title: Our paper is accepted by IEEE Internet of Things Journal +subtitle: Example news +# author: xxx +image: images/news/ruijie_rttlio.png +tags: news +order: +--- + +It is great to share that our paper (“RTT-LIO: A Wi-Fi RTT-aided LiDAR-Inertial Odometry via Tightly-Coupled Factor Graph Optimization in Complex Scenes”, by Ruijie Xu, Xikun Liu, Xin Wang, Weisong Wen, and Yulong Huang) is accepted by the IEEE Internet of Things Journal. Congratulations to Ruijie and etc.! + +## Abstract + +The pursuit of reliable and high-precision indoor positioning has become increasingly critical with the widespread deployment of Unmanned Autonomous Systems (UAS) across smart cities. While Wi-Fi Round-Trip-Time (RTT) technology offers promising absolute positioning capabilities, it faces challenges from signal interference and processing delays. Similarly, LiDAR-inertial odometry (LIO) systems provide accurate relative positioning, but suffer from cumulative drift over time. Although existing methods have explored loosely coupled technologies, they process sensor data separately, failing to fully exploit the complementary strengths of different sensors. This research pioneered a tightly-coupled RTT/LIO framework, encompassing novel factor graph formulations that ensure consistency between RTT and LiDAR observations, alongside LiDAR-aided RTT outlier detection and exclusion. Furthermore, we developed an innovative approach to estimate the positions of unknown access points (AP) by using prior trajectory and RTT observations. AP position estimation is based on kernel density estimation (KDE) and geometric diversity constraints (GDC) with the help of an adaptive RANSAC-based fault detection algorithm. 
Compared to RTT-only implementations, state-of-the-art LIO systems, and conventional loosely coupled approaches, our method demonstrated error reductions of 20-80\% in extensive experiments. The [code, Wi-Fi RTT/LiDAR/IMU dataset](https://github.com/RuijieXu0408/RTT-LIO), and [demo video](https://www.bilibili.com/video/BV1Y94MzYE7F) of our proposed methodology have been made publicly available to display our research. + + +## System Framework + +
+ Team Banner +
diff --git a/_events/2025-12-18-AirBlower_UAV_Demo_at_PolyU.md b/_events/2025-12-18-AirBlower_UAV_Demo_at_PolyU.md new file mode 100644 index 00000000..1e3987d9 --- /dev/null +++ b/_events/2025-12-18-AirBlower_UAV_Demo_at_PolyU.md @@ -0,0 +1,39 @@ +--- +title: T2 Airport Airblower UAV Demonstration at PolyU +subtitle: news +image: images/news/1218/silent2.jpg +tags: news +--- + +## Team Demonstrates Airblower UAV Technology with Civil Aviation Department at PolyU + + + +On December 18th, our team conducted a demonstration of airblower UAV technology in collaboration with the Civil Aviation Department at The Hong Kong Polytechnic University (PolyU). +
+ Team Banner +
+ + + +The demonstration showcased our airblower UAV system's capabilities to representatives from Hong Kong's Civil Aviation Department (CAD), highlighting the technology's potential applications in urban environments and its compliance with aviation safety standards in enclosed areas. + +
+ +
+ + +### Advancing UAV Applications in Hong Kong + +The UAV is the product of this collaboration with the Gammon Construction Ltd., represents an important step in advancing the in field application of our airblower UAV system at the newly built T2 airport. The demonstration provided a practical example of how this technology can be used in real-world scenarios and helped to build regulatory understanding and acceptance of specialized UAV applications in Hong Kong. The demonstration provided valuable insights into the operational parameters and safety considerations of airblower UAV technology. + +Our team remains committed to working closely with aviation authorities to ensure that innovative UAV solutions can be safely integrated into Hong Kong's airspace, contributing to the development of the city's low-altitude economy. + + + + + + diff --git a/_events/2025-12-19-cleaning_uav_sahnghai.md b/_events/2025-12-19-cleaning_uav_sahnghai.md new file mode 100644 index 00000000..d769c767 --- /dev/null +++ b/_events/2025-12-19-cleaning_uav_sahnghai.md @@ -0,0 +1,46 @@ +--- +title: Team Successfully Demonstrates Automated Drone Building Cleaning at Shanghai Pudong Software Park +subtitle: news +image: images/news/1219/image1.jpg +tags: news +--- + +## Team Successfully Demonstrates Automated Drone Building Cleaning at Shanghai Pudong Software Park + + + +Today, our team conducted a field demonstration of automated drone building cleaning at the Shanghai Pudong Software Park in China. + +
+ Team Banner +
+ +
+ Team Banner +
+ + +As a key part of the launch ceremony for the Shanghai Pudong Software Park Low-Altitude Economy Service Platform, our demonstration received significant attention and support from the Shanghai Government, the Pudong New Area Government, and the Aircraft Owners and Pilots Association of China (AOPA-China). + +
+ Team Banner +
+ + +### Industry Breakthrough and Market Potential + +Conducting a demonstration in a city like Shanghai—characterized by its dense skyline and immense demand for building cleaning services—marks a significant breakthrough for our team in the field of drone-based cleaning. + +This milestone not only validates the maturity of our technology but also demonstrates the solution's potential for application in the complex environments of mega-cities. + +--- + +Reference Press Release: https://mp.weixin.qq.com/s/HvCxbZXmLE4ve5UVuRo08g + + + + + diff --git a/_events/2026-01-13-Rinoai_MoU.md b/_events/2026-01-13-Rinoai_MoU.md new file mode 100644 index 00000000..691a412e --- /dev/null +++ b/_events/2026-01-13-Rinoai_MoU.md @@ -0,0 +1,26 @@ +--- +title: PolyU AAE and Rino.ai sign MOU to advance autonomous delivery vehicle applications + +subtitle: news +image: images/news/20260113_RinoaiMoU/MoU.png +tags: news +--- + +## PolyU AAE and Rino.ai sign MOU to advance autonomous delivery vehicle applications + +We have signed an MOU with Rino.ai, a leading L4 autonomous driving company, to co-develop and pilot autonomous vehicle applications on campus. Initial focus areas include last‑mile delivery and security patrols, with solutions tailored to dynamic pedestrian and traffic flows. + +Rino.ai has deployed 2,000+ vehicles in 170+ cities, leads the industry in new‑order volume, and has begun large‑scale deliveries of its Robovan autonomous logistics vehicle. PolyU AAE contributes internationally recognized expertise in multi‑sensor fusion, vehicle‑dynamics optimization, and intelligent transportation, supported by PolyU’s broader strengths in EVs and smart mobility. + +The partnership will enhance perception, decision‑making, and planning for dense campus environments and accelerate real‑world pilots at PolyU. More details can be found in this [website](https://www.rino.ai/news/rino-ai-and-the-hong-kong-polytechnic-university-sign-memorandum.html) + + +
+ Team Banner +
+ + + + + diff --git a/_events/2026-01-16-linxai.md b/_events/2026-01-16-linxai.md new file mode 100644 index 00000000..d5385d0d --- /dev/null +++ b/_events/2026-01-16-linxai.md @@ -0,0 +1,27 @@ +--- +title: TAS Team Visits LINXAI Company to Discuss Quadruped Robot Collaboration Projects +subtitle: +# author: xxx +image: images/news/0116_linxai/1.jpg +tags: news +order: +--- + +**SHENZHEN, China** – January 16, 2026 – The TAS Team embarked on a productive visit to LINXAI Company, a leading innovator in robotics technology. The purpose of the visit was to engage in detailed discussions on ongoing collaboration projects centered around the quadruped robot, while also touring the company's advanced laboratory facilities. This exchange highlights PolyU's dedication to fostering industry-academia partnerships and advancing practical applications in robotics. + +
+ Team Banner +
+ +The visit brought together TAS team members with LINXAI's engineering experts. Discussions focused on four innovative projects aimed at enhancing the capabilities of the quadruped robot: +1. Robot Dog Following: Utilizing Ultra-Wideband (UWB) technology for precise human-following, enabling applications in logistics, security, and personal assistance. +2. Robot Dog Vision-Based Motion Control: Integrating LiDAR sensors and Deep Reinforcement Learning (DRL) to improve terrain adaptability and gait optimization on unstructured surfaces. +3. Guide Dog Application: Developing an intelligent system for visually impaired users, combining localization, vision-language navigation, and locomotion modules for safe mobility assistance. +4. UAV-Robot Dog Landing: Creating an air-ground collaborative logistics system for seamless package transfer between UAVs and the quadruped robot, addressing last-mile delivery challenges. +These projects demonstrate the platform's potential in diverse fields, ranging from disaster response and industrial inspection to assistive technologies and smart logistics. + +
+ Team Banner +
diff --git a/_events/2026-01-18-TasFusion.md b/_events/2026-01-18-TasFusion.md new file mode 100644 index 00000000..a4b1a002 --- /dev/null +++ b/_events/2026-01-18-TasFusion.md @@ -0,0 +1,47 @@ +--- +title: PolyU TAS LAB Releases TasFusion - A GNSS/IMU Sliding-Window Optimization Framework +subtitle: news +image: images/opensource/TasFusion/longdata.png +tags: news +--- + +## PolyU TAS LAB Releases TasFusion: A GNSS/IMU Sliding-Window Optimization Framework + +The PolyU Trustworthy AI and Autonomous Systems Laboratory (TAS LAB) has officially released TasFusion, an open-source ROS1 framework for multi-sensor navigation and state estimation. + +TasFusion provides a Ceres-based GNSS/IMU loosely coupled sliding-window optimization framework, designed for research and experimental validation in outdoor navigation scenarios. The system supports IMU pre-integration, online bias estimation, marginalization to preserve historical information, and GNSS position and velocity constraints. All major functions are configurable through ROS launch parameters, enabling flexible deployment and ablation studies. + +The framework is accompanied by a complete toolchain, including GNSS message definitions, NLOS exclusion utilities, NovAtel receiver drivers, and NMEA parsing scripts. TasFusion has been validated on a GNSS-IMU-4G integrated navigation module (dual-IMU, u-blox F9P-04B, and 4G link), demonstrating reliable performance with high-frequency measurements and stable telemetry in real-world environments. + +TasFusion was developed in the context of the AAE4203 course at The Hong Kong Polytechnic University and is further supported by the Research Center for Autonomous System in Smart Transportation, PolyU-Wuxi Technology and Innovation Research Institute, reflecting close integration between education, research, and applied engineering. 
+ +The project is now publicly available on GitHub and is intended to support research in navigation, sensor fusion, autonomous systems, and intelligent transportation applications. + +🔗 GitHub Repository: +https://github.com/PolyU-TASLAB/TasFusion + + + + + + + + +
+ + + + + +
+ +> Reference Hardware Platform ([Introduction Video](https://www.bilibili.com/video/BV1fiaqzNEEm)): +> +> TasFusion has been validated on GNSS-IMU-4G integrated navigation module (dual-IMU + u-blox F9P-04B + 4G uplink), providing high-frequency measurements and reliable telemetry for outdoor deployments. +> +> For inquiries regarding this hardware platform, please contact **hbwu@hkpolyu-wxresearch.cn**. + + + + + diff --git a/_events/2026-01-20-simpleai.md b/_events/2026-01-20-simpleai.md new file mode 100644 index 00000000..679de051 --- /dev/null +++ b/_events/2026-01-20-simpleai.md @@ -0,0 +1,36 @@ +--- +title: PolyU and Simple AI Launch Strategic Collaboration +subtitle: +# author: xxx +image: images/news/0119_simpleai/1.png +tags: news +order: +--- + +**HONG KONG, China** – January 19, 2026 – The Hong Kong Polytechnic University and Beijing Simple AI Technology Co., Ltd. (Simple AI) officially launched their strategic partnership through a Memorandum of Understanding (MOU) signing ceremony. Held at PolyU's Chiang Chen Studio Theatre (AG204), the event brought together key representatives from both institutions to exchange the agreement and discuss future collaborations in embodied intelligent robotics. This partnership underscores PolyU's commitment to advancing industry-academia synergies and driving innovation in AI-driven technologies for real-world applications. + +
+ Team Banner +
+ +The ceremony was hosted by Prof. Weisong WEN from TAS Lab. Attendees from PolyU included Ir Prof. H.C. Man, Dean of the Faculty of Engineering, Prof. Crystal Shi from the School of Hotel and Tourism Management, and Dr. Runqiu Yang from TAS Lab. Representing Simple AI were Founder and CEO Dr. Xiaofei Li, Vice President Yutang Tang, and Marketing Director Hui Wang. + +The MOU outlines a framework for joint efforts in several key areas of embodied intelligent robotics: + +1. **Joint Research and Development**: Focusing on the design and optimization of robot "brains" using AI-driven control systems, including End-to-End Learning, Vision-Language-Navigation (VLN), and Vision-Language-Action (VLA) models for robust perception, real-time reasoning, and precise execution in dynamic environments. +2. **Closed-Loop Systems Development**: Building systems that integrate behavioral AI models with real-world data from simulated and actual scenarios, addressing challenges like data scarcity in unstructured settings such as hotels, nursing homes, and homes. +3. **Integration of TAS Lab Expertise**: Leveraging PolyU's Trustworthy AI and Autonomous Systems Laboratory (TAS Lab) for high-precision positioning algorithms and multi-sensor fusion to ensure safety in autonomous operations, providing fail-safe checks against AI-driven perception in complex indoor environments. +4. **Phased Proof-of-Concept Pilots and Industrialization**: Executing pilots in sectors like hotel services (utilizing Simple AI's partnerships with major hotel groups and PolyU's leadership in Hospitality & Tourism Management), elderly care, and family environments, enhancing robots' generalization, long-horizon task planning, emotional intelligence, adaptive learning, and self-evolutionary capabilities. + +These initiatives aim to propel advancements in assistive robotics, fostering solutions for societal needs in hospitality, elderly care, and beyond. + +
+ Team Banner +
+ +
+ Team Banner +
diff --git a/_events/2026-01-28-Rino-AI-SFExpress-Visit.md b/_events/2026-01-28-Rino-AI-SFExpress-Visit.md new file mode 100644 index 00000000..52fbc2d1 --- /dev/null +++ b/_events/2026-01-28-Rino-AI-SFExpress-Visit.md @@ -0,0 +1,31 @@ +--- +title: TAS Team Meets with SF Express(Hong Kong) and Rino AI to Discuss Smart Logistics and Autonomous Campus Delivery Collaboration +subtitle: +# author: xxx +image: images/news/260128/260128.jpeg +tags: news +order: +--- + +**HONG KONG, China** – January 29, 2026 – The Hong Kong Polytechnic University's Trustworthy AI and Autonomous Systems Laboratory (TAS LAB) held a strategic cooperation meeting with SF Express Hong Kong and Rino AI to explore collaborative opportunities in smart logistics and autonomous campus delivery vehicles . This meeting underscores PolyU's commitment to advancing industry-academia partnerships and developing practical solutions for next-generation logistics systems that address real-world challenges in urban delivery and campus mobility. + +
+ Team Banner +
+ +The meeting brought together key representatives from TAS LAB, SF Express(Hong Kong), and Rino AI to discuss innovative approaches to intelligent logistics and autonomous delivery technologies. The discussions centered on leveraging cutting-edge AI, robotics, sensor fusion, and IoT solutions to address modern logistics challenges, particularly in campus and urban environments where safety, efficiency, and sustainability are paramount concerns. + +Attendees from PolyU included Prof. Weisong WEN from TAS Lab and research team members. Representing SF Express Hong Kong were senior executives from the logistics operations and technology innovation divisions, while Rino AI was represented by their leadership team specializing in artificial intelligence solutions and autonomous systems development. + +The three-party collaboration will focus on several key technological domains and application scenarios: + +1. **Smart Logistics Systems**: Developing comprehensive AI-powered solutions for intelligent logistics management, including dynamic route optimization algorithms, real-time package tracking systems, predictive demand forecasting, and intelligent warehouse management. The collaboration aims to integrate machine learning models with SF Express(Hong Kong)'s extensive operational data to create adaptive logistics systems that can respond to changing conditions in real-time. This includes developing advanced algorithms for fleet management, delivery scheduling optimization, and resource allocation that can significantly enhance operational efficiency while reducing energy consumption and environmental impact. The system will leverage big data analytics and cloud computing infrastructure to process vast amounts of logistics data, enabling predictive maintenance, demand forecasting, and intelligent decision-making across the entire supply chain. + +2. 
**Autonomous Campus Delivery Vehicles**: Designing and implementing intelligent unmanned delivery vehicles specifically tailored for university campus environments. The project will address unique challenges associated with campus delivery, including pedestrian detection and avoidance, navigation in mixed-use environments with students and faculty, compliance with campus safety regulations, and integration with existing campus infrastructure. The autonomous vehicles will leverage advanced perception systems, including LiDAR, cameras, and radar sensors, combined with sophisticated path planning algorithms to ensure safe and efficient delivery operations. Special attention will be given to human-robot interaction design to ensure the vehicles can operate seamlessly in crowded campus settings, with features such as audio-visual alerts, intuitive gesture recognition, and emergency stop mechanisms. The vehicles will be designed to handle various weather conditions, navigate complex terrain including stairs and ramps, and operate efficiently during peak hours when campus foot traffic is highest. + +3. **Technology Integration and System Architecture**: Combining PolyU TAS LAB's cutting-edge research expertise in autonomous systems, localization, and navigation with SF Express(Hong Kong)'s extensive logistics experience and operational insights, alongside Rino AI's artificial intelligence capabilities and machine learning infrastructure. The collaboration will focus on developing a comprehensive technology stack that integrates perception, planning, and control systems with logistics management platforms. This includes creating robust communication protocols between autonomous vehicles and central dispatch systems, implementing edge computing solutions for real-time decision-making, and developing fail-safe mechanisms to ensure system reliability. 
The system architecture will incorporate multi-layer redundancy, cybersecurity measures to protect against potential threats, and scalable cloud infrastructure to support future expansion. Advanced sensor fusion techniques will combine data from multiple sources to create accurate environmental models, while deep learning algorithms will enable the vehicles to learn from experience and continuously improve their performance. + +4. **Pilot Programs and Field Testing**: Exploring opportunities to establish comprehensive testbed environments at PolyU campus for real-world validation and demonstration of autonomous delivery technologies. The pilot programs will serve as living laboratories where researchers can collect operational data, test new algorithms, and refine system performance under actual operating conditions. The testbed will enable iterative development and validation of technologies before broader deployment, allowing the team to address challenges related to weather conditions, varying terrain, obstacle avoidance, and user acceptance. Data collected from these pilot programs will inform future system improvements and provide valuable insights for scaling the technology to other campuses and urban environments. The pilot phase will include controlled experiments to test specific capabilities, followed by gradual expansion to full operational deployment. User feedback mechanisms will be integrated to gather insights from students, faculty, and staff about their experiences with the autonomous delivery system, helping to refine user interfaces and improve overall service quality. + +5. **Safety and Regulatory Compliance**: Developing comprehensive safety protocols and working toward compliance with local regulations governing autonomous vehicle operations in Hong Kong. 
This includes establishing safety standards for autonomous campus delivery, conducting thorough risk assessments, implementing redundant safety systems with multiple fail-safe mechanisms, and collaborating with regulatory bodies to ensure that autonomous delivery vehicles meet all necessary requirements for operation in public spaces. The collaboration will work closely with university safety departments, local transportation authorities, and industry standards organizations to develop best practices for autonomous delivery operations. Safety features will include emergency braking systems, collision avoidance algorithms, remote monitoring and intervention capabilities, and comprehensive logging of all operational data for incident investigation and continuous improvement. diff --git a/_events/2026-01-30-Sandbox_Demonstration_at_Pak_Shak_Kok.md b/_events/2026-01-30-Sandbox_Demonstration_at_Pak_Shak_Kok.md new file mode 100644 index 00000000..20c0912c --- /dev/null +++ b/_events/2026-01-30-Sandbox_Demonstration_at_Pak_Shak_Kok.md @@ -0,0 +1,52 @@ +--- +title: Team Successfully Conducts Our First Cross-Sea Logistics Flight for Hong Kong Low-altitude Economy Regulatory Sandbox +subtitle: news +image: images/news/0130Sandbox/pic2.jpg +tags: + - news + - low-altitude economy + - logistics +--- + +
+ Team Banner +
+ +## Team Successfully Conducts Our First Cross-Sea Logistics Flight for Hong Kong Low-altitude Economy Regulatory Sandbox + +On January 30, 2026, our team marked a significant milestone by successfully completing our first cross-sea logistics test flight for Phase 1 of the **Hong Kong Low-altitude Economy Regulatory Sandbox**. + +
+ Team Banner +
+ + +
+ Team Banner +
+ + +This critical test flight was executed with the strong support and on-site witnessing of the Civil Aviation Department (CAD) of Hong Kong and our collaborative partners. The operation successfully validated the stability and reliability of our logistics solution within the sandbox's framework. + +
+ Team Banner +
+
+### Valuable Experience and Future Outlook
+
+We would like to extend our sincere gratitude to the Hong Kong CAD and our partners for their unwavering support of our research and operational efforts.
+
+This successful flight has allowed us to accumulate valuable flight data and operational experience specific to cross-sea logistics. We look forward to continuing our work and contributing further to the robust development of Hong Kong's low-altitude economy.
+
+### Acknowledgements
+
+We extend our sincere appreciation to our partners for their support of our research (listed in no particular order):
+
+* [Hong Kong Applied Science and Technology Research Institute Company Limited](https://www.astri.org/)
+* [SUPTC Digital Technology (Hong Kong) Limited](http://www.sutpc.com/)
+* [Leisure and Cultural Services Department](https://www.lcsd.gov.hk/)
+* [Hong Kong Broadband Network Enterprise Solutions](https://www.hkbn.net/enterprise/)
diff --git a/_events/2026-02-02-ICRA2026_Acceptance.md b/_events/2026-02-02-ICRA2026_Acceptance.md
new file mode 100644
index 00000000..bee86e9b
--- /dev/null
+++ b/_events/2026-02-02-ICRA2026_Acceptance.md
@@ -0,0 +1,22 @@
+---
+title: Our paper is accepted by IEEE ICRA 2026
+subtitle: news
+# author: xxx
+image: images/news/2026ICRA/system_framework.png
+tags: news
+order:
+---
+
+It is great to share that our paper (“Integrated Planning and Control on Manifolds: Factor Graph Representation and Toolkit” by Peiwen Yang, Weisong Wen, Runqiu Yang, Yuanyuan Zhang, Jiahao Hu, Yingming Chen, Naigui Xiao and Jiaqi Zhao) is accepted by the 2026 IEEE International Conference on Robotics & Automation. Congratulations to Peiwen and his co-authors! 
+
+## Abstract
+
+Model predictive control (MPC) faces significant limitations when applied to systems evolving on nonlinear manifolds, such as robotic attitude dynamics and constrained motion planning, where traditional Euclidean formulations struggle with singularities, over-parameterization, and poor convergence. To overcome these challenges, this paper introduces FactorMPC, a factor-graph-based MPC toolkit that unifies system dynamics, constraints, and objectives into a modular, user-friendly, and efficient optimization structure. Our approach natively supports manifold-valued states with Gaussian uncertainties modeled in tangent spaces. By exploiting the sparsity and probabilistic structure of factor graphs, the toolkit achieves real-time performance even for high-dimensional systems with complex constraints. The velocity-extended on-manifold control barrier function (CBF)-based obstacle avoidance factors are designed for safety-critical applications. By bridging graphical models with safety-critical MPC, our work offers a scalable and geometrically consistent framework for integrated planning and control. The simulations and experimental results on the quadrotor demonstrate superior trajectory tracking and obstacle avoidance performance compared to baseline methods. To foster research reproducibility, we have provided an open-source implementation offering plug-and-play factors. Code and supplementary materials are available at: https://github.com/RoboticsPolyu/FactorMPC.
+
+## System Framework
+
+ Team Banner +
+ diff --git a/_events/2026-02-02-ICRA2026_workshop.md b/_events/2026-02-02-ICRA2026_workshop.md new file mode 100644 index 00000000..c75bb155 --- /dev/null +++ b/_events/2026-02-02-ICRA2026_workshop.md @@ -0,0 +1,25 @@ +--- +title: Our workshop is accepted by IEEE ICRA 2026 +subtitle: Example news +# author: xxx +image: images/news/2026ICRA/Poster_WS_RobotMeetsGNSSRanging.png +tags: news +order: +--- + +We’re organizing the 1st Workshop on Robot Meets GNSS and Ranging for Seamless Autonomy, happening on Friday, June 5, 2026 in Vienna. +If you work on reliable autonomy in the real world—especially where GNSS/UWB/ranging gets messy—this workshop is for you. You can find more details from the [workshop page](https://robotmeetsranging.tech/) + + +
+ +
+
+ +
+
+Recognizing shared challenges in sensor integration, error modeling, integrity monitoring, and certifiable optimization of ranging observations from diverse systems (e.g., GNSS, Ultra-Wideband), this event emphasizes the critical role of ranging technologies in resilient robot navigation. It aims to identify open problems and promising research directions across domains, bringing together researchers from diverse communities to exchange knowledge and foster collaboration.
+
+TAS Lab remains at the forefront of research dedicated to creating safer, more efficient, and intelligent robotics solutions for the future.
+
diff --git a/_events/2026-02-04-CFSO_WuxiRI.md b/_events/2026-02-04-CFSO_WuxiRI.md
new file mode 100644
index 00000000..5e78c6ba
--- /dev/null
+++ b/_events/2026-02-04-CFSO_WuxiRI.md
@@ -0,0 +1,50 @@
+---
+title: PolyU CFSO Delegation Visits Wuxi Research Institute and Observes Smart Drone Cleaning Demo in Shanghai
+subtitle: news
+image: images/news/20260204_CFSO_WuxiRI/1.jpg
+tags: news
+---
+
+## PolyU CFSO Delegation Visits Wuxi Research Institute and Observes Smart Drone Cleaning Demo in Shanghai
+
+Today, a delegation from the **Campus Facilities and Sustainability Office (CFSO)** of The Hong Kong Polytechnic University (PolyU), including **Mr. Wong** and **Mr. Cheung**, accompanied by **Dr. Wang** from the PolyU Wenzhou Research Institute, visited the **PolyU Wuxi Technology and Innovation Research Institute** and the **Shanghai Sanlin Low-altitude Economy Industrial Park** for technical exchange and guidance.
+
+### Insight into Advanced Sensing and Inspection Technology
+
+At the Wuxi Technology and Innovation Research Institute, our team introduced the center's latest developments and core research areas to the delegation.
+
+The visit included a demonstration of our **handheld data collection and mapping devices, as well as indoor positioning devices**, showcasing how rapid environmental digitization supports smart facility management.
+
+ Team Banner +
+ +Subsequently, the delegation proceeded to the **indoor flight testing field** to witness a live demonstration of our autonomous inspection drones. + +These drones are equipped with advanced sensors and navigation systems, allowing them to: +* Fly autonomously in complex environments (such as tunnels or indoor structures). +* Perceive surroundings and avoid obstacles in real-time. +* Utilize **AI algorithms** to automatically analyze data and identify potential structural defects. + + +
+ Team Banner +
+ +### Witnessing the Future of Building Maintenance in Shanghai + +Following the visit to Wuxi, the delegation traveled to the **Shanghai Sanlin Low-altitude Economy Industrial Park**. Here, they observed a field demonstration of our **Smart Building Cleaning Drone**. + +The demonstration highlighted the drone's stability and efficiency in high-rise façade maintenance, presenting a safer and more automated alternative to traditional cleaning methods. The CFSO delegation expressed strong interest in how these low-altitude economy solutions could be applied to smart campus management and sustainability efforts in the future. + + +
+ Team Banner +
+ +Reference Press Release: [https://mp.weixin.qq.com/s/HvCxbZXmLE4ve5UVuRo08g](https://www.bilibili.com/video/BV12kcgztEgQ/?spm_id_from=333.1387.homepage.video_card.click) + +--- diff --git a/_includes/card.html b/_includes/card.html index e944ba89..0f053f55 100644 --- a/_includes/card.html +++ b/_includes/card.html @@ -1,21 +1,23 @@ {{ " " }} -
+
{{ include.title | default: -
+
{% if include.title %} {{ include.title }} - + {% endif %} {% if include.subtitle %} - {{ include.subtitle }} + {{ include.subtitle }} {% endif %} {% if include.description %} -

- {{ include.description | markdownify | remove: "

" | remove: "

" }} +

+ {{ include.description | markdownify | strip_html | truncatewords: 35, '...' }}

{% endif %} diff --git a/_includes/list.html b/_includes/list.html index 5dec46ed..e726227d 100644 --- a/_includes/list.html +++ b/_includes/list.html @@ -15,7 +15,7 @@ {% assign data = year.items %} {% if years.size > 1 %} - {{--}}

{{ year.name }}

+ {{--}}

{{ year.name }} ({{ year.items.size }})

{% assign data = data | sort: "date" | reverse %} {% endif %} diff --git a/_includes/portrait_pi.html b/_includes/portrait_pi.html index 06916b93..a7670eda 100644 --- a/_includes/portrait_pi.html +++ b/_includes/portrait_pi.html @@ -19,14 +19,15 @@ aria-label="{{ member.name | default: "member link" }}" > {% if type %} - {% include icon.html icon=type.icon %} + + {% include icon.html icon=type.icon %} + {% endif %} member portrait @@ -38,20 +39,26 @@ {% if member.display_1 %} - {{member.display_1}} + {{ member.display_1 }} {% endif %} {% if member.display_2 %} - {{member.display_2}} + {{ member.display_2 }} {% endif %} {% if member.display_3 %} - - {{member.display_3}} + + {{ member.display_3 }} {% endif %} + + {% if member.description %} +
+ {{ member.description | markdownify | strip_html }} +
+ {% endif %} -
\ No newline at end of file +
diff --git a/_includes/portrait_students.html b/_includes/portrait_students.html index 06916b93..ebc11960 100644 --- a/_includes/portrait_students.html +++ b/_includes/portrait_students.html @@ -10,48 +10,85 @@ {% assign type = site.data.types[member.role] %}
- - {% if type %} - {% include icon.html icon=type.icon %} - {% endif %} - - member portrait + + member portrait - {% if member.name %} - - {{ member.name }} - - {% endif %} + {% if member.name %} + + {{ member.name }} + + {% endif %} - {% if member.display_1 %} - - {{member.display_1}} - - {% endif %} + {% if member.display_1 %} + + {{ member.display_1 }} + + {% endif %} - {% if member.display_2 %} - - {{member.display_2}} - + {% if member.display_2 %} + + {{ member.display_2 }} + + {% endif %} + + + {% assign has_links = false %} + {% if member.links.home-page and member.links.home-page != "" and member.links.home-page != "/" %} + {% assign has_links = true %} + {% endif %} + {% if member.links.email and member.links.email != "" %} + {% assign has_links = true %} + {% endif %} + {% if member.links.google-scholar and member.links.google-scholar != "" and member.links.google-scholar != "/" %} + {% assign has_links = true %} + {% endif %} + {% if member.links.github and member.links.github != "" and member.links.github != "/" %} + {% assign has_links = true %} {% endif %} - {% if member.display_3 %} - - {{member.display_3}} - + {% if has_links %} + {% endif %} - -
\ No newline at end of file +
+
diff --git a/_members/Ai_Kedai.md b/_members/Ai_Kedai.md new file mode 100644 index 00000000..1e404d9a --- /dev/null +++ b/_members/Ai_Kedai.md @@ -0,0 +1,24 @@ +--- +name: Akida Tursun +image: images/team/Akida.jpg +role: ra # pi / postdoc / phd / ms / under / ra / visiting +affiliation: PolyU-Wuxi Technology and innovation Research Institute +order: 11 + +links: + orcid: + email: Akida@hkpolyu-wxresearch.cn + profile: + +display_1: + - B.Eng.(Jiangnan University) +display_2: + - June 2024 - Present +--- + + + + +Akida received a Bachelor's degree in Management from Jiangnan University, with major courses including Principles of Management, Marketing, Consumer Behavior, Human Resource Management, etc. Currently, she works in the the Hong Kong Polytechnic University-Wuxi Research Institute, handling administrative and sales-related responsibilities. + + diff --git a/_members/Bai_Lu.md b/_members/Bai_Lu.md index b5a0e602..116c3287 100644 --- a/_members/Bai_Lu.md +++ b/_members/Bai_Lu.md @@ -1,7 +1,7 @@ --- name: Bai Lu image: images/team/bai_lu.jpg -role: postdoc # pi / postdoc / phd / ms / under / ra / visiting +role: alumni # pi / postdoc / phd / ms / under / ra / visiting / alumni affiliation: Hong Kong Polytechnic University order: 5 @@ -10,16 +10,14 @@ links: email: lubai@buaa.edu.cn profile: -display_1: - - postdoc, Ph.D.(BUAA), M.Eng.(BUAA), B.Eng.(BUAA) -display_2: - - Feb 2025 - Present +display_1: Ph.D.(BUAA), M.Eng.(BUAA), B.Eng.(BUAA) +display_2: Postdoc (Feb 2025) --- -Lu Bai (Member, IEEE) received the BEng degree and MEng in Electronic and Information Engineering from Beihang University, Beijing, China in 2014 and 2017, respectively. After that, she received the Ph.D. degree in communication and information systems from Beihang University, Beijing, China, in 2024. +Lu Bai (Member, IEEE) received the BEng degree and MEng in Electronic and Information Engineering from Beihang University, Beijing, China in 2014 and 2017, respectively. 
After that, she received the Ph.D. degree in communication and information systems from Beihang University, Beijing, China, in 2024. She was a postdoctoral fellow at the Hong Kong Polytechnic University. Her research focuses on GNSS-5G hybrid positioning methods, GNSS spoofing detection and interference mitigation techniques. **Research Areas** diff --git a/_members/Chen_Hongchang.md b/_members/Chen_Hongchang.md new file mode 100644 index 00000000..c0dd6130 --- /dev/null +++ b/_members/Chen_Hongchang.md @@ -0,0 +1,28 @@ +--- +name: Chen Hongchang +image: images/team/hongchang.jpg +role: phd +affiliation: Hong Kong Polytechnic University +order: 1 + +links: + home-page: + orcid: 0009-0007-8094-5926 + google-scholar: + github: + email: hongchang.chen@connect.polyu.hk + profile: + +display_1: Ph.D.(PolyU), M.Eng.(BIT), B.Eng. (HNU) +display_2: +--- + + + + +Hongchang Chen received his M.S. degree from school of Mechanical Engineering at Beijing Institute of Technology, Beijing, China, in 2025. He is currently pursuing his Ph.D. degree at The Hong Kong Polytechnic University (PolyU). His current research interests include Robotics and Computer Vision. + + + +**Research Areas** +Autonomous Driving; Robotics; Computer Vision diff --git a/_members/FenchiZHU_HEU_visiting.md b/_members/FenchiZHU_HEU_visiting.md new file mode 100644 index 00000000..f6158eaf --- /dev/null +++ b/_members/FenchiZHU_HEU_visiting.md @@ -0,0 +1,27 @@ +--- +name: Fengchi ZHU +image: images/team/fengchiZHU.jpg +role: visiting # pi / postdoc / phd / ms / under / ra / visiting +affiliation: Harbin Engineering University +order: 1 + +links: + home-page: N/A + orcid: 0000-0002-1572-7769 + google-scholar: BWgKaxcAAAAJ&hl=zh-CN + github: N/A + email: zfchiggins@163.com + profile: + +display_1: M.S and B.Eng. (HEU) +display_2: Oct 2025 + +--- + + + + +Fengchi Zhu received the B.S. 
degree in Automation from the College of Intelligent Systems Science and Engineering, Harbin Engineering University, in 2021, where he is currently working toward the Ph.D degree in control science and engineering. From Oct. 2025 to Mar. 2026, he is a visiting graduate researcher at the Department of Aeronautical and Aviation Engineering, Faculty of Engineering, The Hong Kong Polytechnic University. He won the Best Student Paper Award in 2023 IEEE International Conference on Mechatronics and Automation. His current research interests include state estimation, integrated navigation and cooperative navigation. + +**Research Areas** +State estimation, Multi-agent Systems, Adaptive Kalman Filter \ No newline at end of file diff --git a/_members/GuangyanGuo_HEU_visiting.md b/_members/GuangyanGuo_HEU_visiting.md new file mode 100644 index 00000000..da555152 --- /dev/null +++ b/_members/GuangyanGuo_HEU_visiting.md @@ -0,0 +1,27 @@ +--- +name: Guangyan Guo +image: images/team/guangyanGuo.jpg +role: visiting # pi / postdoc / phd / ms / under / ra / visiting +affiliation: Harbin Engineering University +order: 2 + +links: + home-page: N/A + orcid: N/A + google-scholar: N/A + github: N/A + email: guoguangyan@hrbeu.edu.cn + profile: + +display_1: B.Eng. (HEU) +display_2: Dec 2025 + +--- + + + + +Guangyan Guo received the B.S. degree in Automation from the College of Intelligent Systems Science and Engineering, Harbin Engineering University, in 2021. He is currently pursuing the Ph.D. degree in control science and engineering at the same university. From December 2025 to May 2026, he is a visiting graduate researcher at the Department of Aeronautical and Aviation Engineering, Faculty of Engineering, The Hong Kong Polytechnic University, Hong Kong. He won the Best Student Paper Award at the 2025 Chinese Automation Congress. His current research interests include visual SLAM, visual scene reconstruction, and the computer simulation of physical fields. 
+ +**Research Areas** +visual SLAM,Visual scene reconstruction, Computer simulation \ No newline at end of file diff --git a/_members/LixiangShi_Tongji_visiting.md b/_members/LixiangShi_Tongji_visiting.md index 491b88ac..e84c491a 100644 --- a/_members/LixiangShi_Tongji_visiting.md +++ b/_members/LixiangShi_Tongji_visiting.md @@ -1,20 +1,20 @@ --- name: Yuxiang Shi image: images/team/Yuxiang_SHI_tongji.jpg -role: visiting # pi / postdoc / phd / ms / under / ra / visiting +role: alumni # pi / postdoc / phd / ms / under / ra / visiting / alumni affiliation: Tongji University -order: 2 +order: 9 links: - home-page: N/A - orcid: N/A - google-scholar: N/A - github: N/A + home-page: + orcid: + google-scholar: + github: email: 2011200@tongji.edu.cn profile: -display_1: Phd student (Tongji) and B.Eng. (Southwest Jiaotong University) -display_2: To be join in fall 2025 +display_1: Ph.D. Student (Tongji), B.Eng. (SWJTU) +display_2: Visiting (2025) --- diff --git a/_members/Qijun.md b/_members/Qijun.md index a2dc61ac..a6510158 100644 --- a/_members/Qijun.md +++ b/_members/Qijun.md @@ -1,20 +1,16 @@ --- -name: QIN Qijun +name: Qin Qijun image: images/team/qijun.jpg -role: ms # pi / postdoc / phd / ms / under / ra / visiting +role: ms # pi / postdoc / phd / ms / under / ra / visiting affiliation: Hong Kong Polytechnic University order: 2 links: github: https://github.com/QuintinUmi email: qijun.qin@connect.polyu.hk - -display_1: BEng (HONS) Aviation Engineering (2021-2025) -display_2: - - Merit Award, Best URIS Research Project 2024 - - Attended PRSC 2024 - - Dean's Honours List, PolyU (2022-2023) - - To join Mphil in fall 2025 + +display_1: MPhil Student (PolyU), B.Eng. 
(PolyU) +display_2: Fall 2025 --- diff --git a/_members/WANG_Zhongqi.md b/_members/WANG_Zhongqi.md new file mode 100644 index 00000000..b469adc0 --- /dev/null +++ b/_members/WANG_Zhongqi.md @@ -0,0 +1,24 @@ +--- +name: WANG Zhongqi +image: images/team/zhongqi_wang.jpg +role: under +affiliation: Hong Kong Polytechnic University +order: 2 + +links: + home-page: + github: https://github.com/zqwang1105 + email: 20099224d@connect.polyu.hk + profile: + +display_1: B.Eng.(PolyU) +display_2: July 2025 - Present +--- + + + + +Mr. WANG Zhongqi is currently a final-year Undergraduate Student Assistant at Department of Mechanical Engineering, The Hong Kong Polytechnic University (PolyU). + +**Research Areas** +Robotics, Reinforcement learning diff --git a/_members/XiangruWang.md b/_members/XiangruWang.md index 0d283eeb..1280c473 100644 --- a/_members/XiangruWang.md +++ b/_members/XiangruWang.md @@ -1,7 +1,7 @@ --- name: Wang Xiangru image: images/team/wang_xiangru.jpg -role: ra # pi / postdoc / phd / ms / under / ra / visiting +role: phd # pi / postdoc / phd / ms / under / ra / visiting affiliation: Hong Kong Polytechnic University order: 8 @@ -11,7 +11,7 @@ links: profile: display_1: - - RA, M.Sc. (TUM), B.Eng. (WHU) + - PhD student, M.Sc. (TUM), B.Eng. (WHU) display_2: - From Feb. 2025 --- diff --git a/_members/Yang_Mokui.md b/_members/Yang_Mokui.md new file mode 100644 index 00000000..717b0450 --- /dev/null +++ b/_members/Yang_Mokui.md @@ -0,0 +1,24 @@ +--- +name: Yang Mokui +image: images/team/yang_mokui.jpg +role: ms +affiliation: Hong Kong Polytechnic University +order: 1 + +links: + home-page: / + github: https://github.com/Guohao-Fu + email: mokui.yang@connect.polyu.hk + profile: + +display_1: M.Phil. Student, B.Eng.(HDU) +display_2: May 2025 - Present +--- + + + + +Mr. Yang Mokui is currently a MPhil Student at Department of Aeronautical and Aviation Engineering, The Hong Kong Polytechnic University(PolyU), supervised by Dr. Wen Weisong. 
+ +**Research Areas** +FPGA Hardware Acceleration diff --git a/_members/ZHAO_Jiaqi.md b/_members/ZHAO_Jiaqi.md index 2bc582e3..5de186f9 100644 --- a/_members/ZHAO_Jiaqi.md +++ b/_members/ZHAO_Jiaqi.md @@ -1,24 +1,24 @@ --- name: ZHAO Jiaqi image: images/team/zhao_jiaqi.jpg -role: under +role: ms affiliation: Hong Kong Polytechnic University order: 1 links: - home-page: www.linkedin.com/in/jiaqi-zhao-7ab009228 + home-page: https://www.linkedin.com/in/jiaqi-zhao-7ab009228 github: https://github.com/Qiamp email: jiaqi.zhao@connect.polyu.hk profile: -display_1: B.Eng.(PolyU) +display_1: M.Phil. Student, B.Eng.(PolyU), Drone Captain with HKCAD & CAAC License display_2: March 2024 - Present --- -Mr. Zhao Jiaqi is currently a Year-4 Undergraduate Student Assistant at Department of Aeronautical and Aviation Engineering, The Hong Kong Polytechnic University(PolyU), supervised by Dr. Wen Weisong. +Mr. Zhao Jiaqi is currently a MPhil Student at Department of Aeronautical and Aviation Engineering, The Hong Kong Polytechnic University(PolyU), supervised by Dr. Wen Weisong. 
**Research Areas** -UAV Vision-Based Positioning and Navigation +UAV end2end Positioning and Navigation diff --git a/_members/Zhang_Ziqi.md b/_members/Zhang_Ziqi.md index ec012c84..8c48d676 100644 --- a/_members/Zhang_Ziqi.md +++ b/_members/Zhang_Ziqi.md @@ -3,7 +3,7 @@ name: Zhang Ziqi image: images/team/Zhang_Ziqi.JPG role: phd # pi / postdoc / phd / ms / under / ra / visiting affiliation: Hong Kong Polytechnic University -order: 1 +order: 3 links: orcid: 0009-0001-1289-4932 diff --git a/_members/ZihaoWang_WHU_visiting.md b/_members/ZihaoWang_WHU_visiting.md index c374514f..9b741fb0 100644 --- a/_members/ZihaoWang_WHU_visiting.md +++ b/_members/ZihaoWang_WHU_visiting.md @@ -1,20 +1,20 @@ --- name: Zihao Wang image: images/team/Zihao-Wang.png -role: visiting # pi / postdoc / phd / ms / under / ra / visiting +role: alumni # pi / postdoc / phd / ms / under / ra / visiting / alumni affiliation: Wuhan University -order: 1 +order: 8 links: - home-page: N/A + home-page: orcid: 0009-0000-0221-9768 - google-scholar: N/A - github: N/A + google-scholar: + github: email: wzihao@whu.edu.cn profile: display_1: M.S and B.Eng. (WHU) -display_2: To be join in fall 2025 +display_2: Visiting (Sept - Dec 2025) --- diff --git a/_members/gao_yixin.md b/_members/gao_yixin.md index c982bcc7..4130fd5b 100644 --- a/_members/gao_yixin.md +++ b/_members/gao_yixin.md @@ -13,7 +13,7 @@ links: email: yixin.gao@connect.polyu.hk display_1: PhD Student(PolyU), M.Eng.(ZJU), B.Eng. 
(UPC) -display_2: Spring 2024 -- Present +display_2: Fall 2024 -- Present --- diff --git a/_members/hujiahao.md b/_members/hujiahao.md index 108c3bd6..30043028 100644 --- a/_members/hujiahao.md +++ b/_members/hujiahao.md @@ -1,7 +1,7 @@ --- name: Hu Jiahao image: images/team/hu_jiahao.jpg -role: ra # pi / postdoc / phd / ms / under / visiting +role: phd # pi / postdoc / phd / ms / under / visiting affiliation: Hong Kong Polytechnic University order: 1 diff --git a/_members/jiao_jianhao.md b/_members/jiao_jianhao.md new file mode 100644 index 00000000..3d4a303d --- /dev/null +++ b/_members/jiao_jianhao.md @@ -0,0 +1,25 @@ +--- +name: Jianho Jiao +image: images/team/jianhaojiao_pict_2023.jpg +role: postdoc # pi / postdoc / phd / ms / under / ra / visiting +affiliation: Hong Kong Polytechnic University +order: 1 + +links: + home-page: https://gogojjh.github.io/ + orcid: + google-scholar: https://scholar.google.com/citations?user=psqleSQAAAAJ&hl=zh-TW + github: https://github.com/gogojjh + email: jiaojh1994@gmail.com + profile: + +display_1: Ph.D.(HKUST), B.Eng. (ZJU) +--- + + + + +Jianhao Jiao (Member, IEEE) received a Ph.D. in Electronic and Computer Engineering from the Hong Kong University of Science and Technology, in 2021. His research specializes in SLAM, sensor fusion, and robust robotic navigation, exemplified by pioneering works such as M-LOAM, FusionPortable dataset, and the scalable, structure-free visual navigation system, OpenNavMap. He has authored over ten papers in premier robotics venues (e.g., IROS, ICRA, NeurIPS, IJRR, IEEE TRO) and serves as an Associate Editor for RAL, IROS 2024-2025, ICRA 2025. Dr. Jiao’s ultimate research objective is to endow autonomous systems with lifelong, cognitive spatial memory mechanisms capable of dynamic updating, directed towards applications in challenging, unstructured environments such as subterranean mines and forests. 
+ +**Research Areas** +Mobile Robot, Navigation, Embodied Intelligence \ No newline at end of file diff --git a/_members/liheng.md b/_members/liheng.md new file mode 100644 index 00000000..231735da --- /dev/null +++ b/_members/liheng.md @@ -0,0 +1,30 @@ +--- +name: Li Heng +image: images/team/LiHeng.jpg +role: ra +affiliation: Hong Kong Polytechnic University +order: 9 + +links: + home-page: + orcid: + google-scholar: + github: https://github.com/shannonlee2024 + email: shannon-h.li@polyu.edu.hk + profile: + +display_1: Research Assistant,B.Eng. (DLPU), R&D Engineer(Unmanned Systems Field, Shenzhen ,China) +display_2: Spring 2025 -- Present + +--- + + + + +Heng Li received a BEng degree in Automation(Innovation Experimental Class) from Dalian Polytechnic University(DLPU) DaLian, China, in 2017. And he was a research assistant to the Director of R&D Institute of Integrated Measurement&Control,DLPU. + +From 2017 to 2024, he worked as a R&D engineer at a company in the unmanned systems field for seven years. His main research areas are perception and security of unmanned systems. + + +**Research Areas** +Unmanned Systems Perception;Unmanned Systems Security diff --git a/_members/liu_xikun.md b/_members/liu_xikun.md index a178bd75..8063e8c5 100644 --- a/_members/liu_xikun.md +++ b/_members/liu_xikun.md @@ -1,7 +1,7 @@ --- name: Liu Xikun image: images/team/liu_xikun.png -role: phd # pi / postdoc / phd / ms / under / ra / visiting +role: alumni # pi / postdoc / phd / ms / under / ra / visiting / alumni affiliation: Hong Kong Polytechnic University order: 3 @@ -13,14 +13,14 @@ links: email: xi-kun.liu@connect.polyu.hk profile: -display_1: Ph.D.candidate (PolyU), M.Sc.(KIT), B.Eng. (HUST) +display_1: Ph.D.(PolyU), M.Sc.(KIT), B.Eng. 
(HUST) display_2: MION --- -Xikun Liu received his bachelor's degree in Mechanical Design, Manufacturing, and Automation from Huazhong University of Science and Technology, China in 2017, and master’s degree in Mechatronics and Information Technology from Karlsruhe Institute of Technology, Germany in 2021. He is currently a Ph.D. candidate in the Department of Aeronautical and Aviation Engineering, the Hong Kong Polytechnic University. His research interests include GNSS and sensor-aided GNSS positioning, SLAM, and multiple sensor fusion in autonomous driving. +Xikun Liu received his Ph.D. degree from the Department of Aeronautical and Aviation Engineering, the Hong Kong Polytechnic University. He received his bachelor's degree in Mechanical Design, Manufacturing, and Automation from Huazhong University of Science and Technology, China in 2017, and master's degree in Mechatronics and Information Technology from Karlsruhe Institute of Technology, Germany in 2021. His research interests include GNSS and sensor-aided GNSS positioning, SLAM, and multiple sensor fusion in autonomous driving. 
**Research Areas** 3D LiDAR aided GNSS Positioning; Sensor Fusion; Wireless Positioning; GNSS; diff --git a/_members/runqiuyang.md b/_members/runqiuyang.md index 86faa579..ebb22138 100644 --- a/_members/runqiuyang.md +++ b/_members/runqiuyang.md @@ -8,7 +8,7 @@ order: 3 links: home-page: n/a orcid: 0000-0001-6286-8217 - google-scholar: https://scholar.google.com/citations?user=cDycNtAAAAAJ&hl=en + google-scholar: cDycNtAAAAAJ&hl=en github: n/a email: runqiu.yang@polyu.edu.hk profile: diff --git a/_members/runzhi_hu.md b/_members/runzhi_hu.md index 9a379529..9c1d2f97 100644 --- a/_members/runzhi_hu.md +++ b/_members/runzhi_hu.md @@ -1,9 +1,9 @@ --- name: Runzhi Hu image: images/team/runzhi_hu.jpg -role: phd # pi / postdoc / phd / ms / under / ra / visiting +role: alumni # pi / postdoc / phd / ms / under / ra / visiting / alumni affiliation: Hong Kong Polytechnic University -order: 3 +order: 5 links: home-page: https://rztest.cn/ @@ -13,14 +13,14 @@ links: email: run-zhi.hu@connect.polyu.hk profile: -display_1: Ph.D. Candidate(PolyU), M.Eng.(CAU), B.Eng. (CAU) -display_2: 2022 fall - Present +display_1: Ph.D.(PolyU), M.Eng.(CAU), B.Eng. (CAU) +display_2: 2022 - 2025 --- -Runzhi Hu was born in Leshan, Sichuan, China. He received his B.S and master degrees in mechanical engineering and computer science, respectively, from China Agricultural University. He now is a Ph.D candidate at the Hong Kong Polytechnic University. His research interests include HD map, multi-sensor fusion, SLAM, and GNSS positioning in urban canyons. He loves popping and locking dancing so much. +Runzhi Hu was born in Leshan, Sichuan, China. He received his B.S and master degrees in mechanical engineering and computer science, respectively, from China Agricultural University. He received his Ph.D. degree from the Hong Kong Polytechnic University. His research interests include HD map, multi-sensor fusion, SLAM, and GNSS positioning in urban canyons. 
**Research Areas** Deep Learning, Sensor Fusion; GNSS; SLAM; HD Map diff --git a/_members/wang_junzhe.md b/_members/wang_junzhe.md new file mode 100644 index 00000000..4cde88c0 --- /dev/null +++ b/_members/wang_junzhe.md @@ -0,0 +1,17 @@ +--- +name: WANG Junzhe +image: images/team/wang_junzhe.png +role: ms +affiliation: Hong Kong Polytechnic University +order: 114 +links: + email: cooper.wang@connect.polyu.hk + profile: +display_1: M.Phil. Student, B.Eng.(HKUST) +display_2: Fall 2025 +--- + + +WANG Junzhe received a BEng degree from the Hong Kong University of Science and Technology (HKUST). He is currently pursuing his M.Phil. degree at The Hong Kong Polytechnic University (PolyU). +**Research Areas** +UAV, Mapping, and Localization; Sensor Fusion; GNSS diff --git a/_members/wangyun.md b/_members/wangyun.md index d9d4cdfc..dc7c873d 100644 --- a/_members/wangyun.md +++ b/_members/wangyun.md @@ -1,7 +1,7 @@ --- name: Wang Yun image: images/team/wang_yun.jpg -role: ra +role: alumni # pi / postdoc / phd / ms / under / ra / visiting / alumni affiliation: PolyU-Wuxi Technology and innovation Research Institute order: 7 @@ -13,16 +13,16 @@ links: email: yun.wang@hkpolyu-wxresearch.cn profile: -display_1: B.Eng.(SEUCX) -display_2: +display_1: B.Eng.(SEUCX) +display_2: RA --- -WangYun received a B.Eng degree in Automation from Southeast University Cheng Xian College(SEUCX),Nanjing,China,in,2022.After that, he worked as a electrical engineer at the Makita before joining PolyU-Wuxi Technology and innovation Research Institute. +Wang Yun received a B.Eng degree in Automation from Southeast University Cheng Xian College (SEUCX), Nanjing, China, in 2022. After that, he worked as an electrical engineer at the Makita before joining PolyU-Wuxi Technology and innovation Research Institute. 
**Research Areas** -Autonomous Driving; UAV;GNSS +Autonomous Driving; UAV; GNSS diff --git a/_members/xpzhai.md b/_members/xpzhai.md index 87a66ce1..fc0fa412 100644 --- a/_members/xpzhai.md +++ b/_members/xpzhai.md @@ -13,7 +13,7 @@ links: profile: display_1: Ph.D.(PolyU, NPU), B.Eng.(NPU) -display_2: To join in fall 2025 +display_2: Jan 2026 --- diff --git a/_members/yfeng.md b/_members/yfeng.md index dd5bf881..5b76d023 100644 --- a/_members/yfeng.md +++ b/_members/yfeng.md @@ -13,7 +13,7 @@ links: profile: display_1: Ph.D. Student (PolyU-NKU), B.Eng. (NKU) -display_2: To join in fall 2025 +display_2: Sept 2025 --- diff --git a/_members/yihan_zhong.md b/_members/yihan_zhong.md index 2a905108..aa420592 100644 --- a/_members/yihan_zhong.md +++ b/_members/yihan_zhong.md @@ -1,26 +1,26 @@ --- name: Zhong Yihan image: images/team/yihan_zhong.jpg -role: phd # pi / postdoc / phd / ms / under / ra / visiting +role: alumni # pi / postdoc / phd / ms / under / ra / visiting / alumni affiliation: Hong Kong Polytechnic University -order: 2 +order: 4 links: home-page: orcid: 0000-0002-1462-3642 - google-scholar: https://scholar.google.com/citations?user=c1xJ5pIAAAAJ&hl=en&oi=ao + google-scholar: c1xJ5pIAAAAJ&hl=en&oi=ao github: https://github.com/Pirkaklo email: yi-han.zhong@connect.polyu.hk profile: -display_1: Ph.D. Candidate(PolyU), M.Sc.(PolyU), B.Eng. (GXU), MION -display_2: Fall 2022 -- Present +display_1: Ph.D.(PolyU), M.Sc.(PolyU), B.Eng. (GXU), MION +display_2: 2022 - 2025 --- -Yihan Zhong Yihan Zhong obtained his bachelor's degree in process equipment and control engineering from Guangxi University in 2020 and a Master's degree with the Department of Mechanical Engineering from The Hong Kong Polytechnic University (PolyU) in 2022. He is currently a Ph.D. student at the Department of Aeronautical and Aviation Engineering (AAE) of PolyU. 
+Yihan Zhong obtained his bachelor's degree in process equipment and control engineering from Guangxi University in 2020 and a Master's degree with the Department of Mechanical Engineering from The Hong Kong Polytechnic University (PolyU) in 2022. He received his Ph.D. degree from the Department of Aeronautical and Aviation Engineering (AAE) of PolyU. His research interests include factor graph optimization-based collaborative positioning and low-cost localization. diff --git a/_members/yingmign_chen.md b/_members/yingmign_chen.md index 1378099a..10917511 100644 --- a/_members/yingmign_chen.md +++ b/_members/yingmign_chen.md @@ -14,7 +14,7 @@ links: profile: display_1: M.Phil. Student, B.Eng. (Western University of Ontario) -display_2: Spring 2024 +display_2: Spring 2024 -- Present --- diff --git a/_members/yywang.md b/_members/yywang.md index b80ca504..1489db7a 100644 --- a/_members/yywang.md +++ b/_members/yywang.md @@ -6,9 +6,9 @@ affiliation: Hong Kong Polytechnic University order: 2 links: - home-page: http://www.ee.cuhk.edu.hk/~yywang/ + home-page: https://yywang.pages.dev/ orcid: 0000-0003-3293-0790 - google-scholar: https://scholar.google.com/citations?user=bRwHOgwAAAAJ&hl=zh-CN + google-scholar: bRwHOgwAAAAJ email: ying5wang@polyu.edu.hk profile: @@ -22,3 +22,4 @@ Yingying Wang received the B.E. 
degree in Electronic Engineering from Northeaste **Research Areas** Smart sensing; Robotics; Sensor Fusion; Inertial Measurement Unit; Wireless Sensing + diff --git a/_members/zhengxi.md b/_members/zhengxi.md index af732b40..9d634c2f 100644 --- a/_members/zhengxi.md +++ b/_members/zhengxi.md @@ -1,14 +1,14 @@ --- name: Zheng Xi image: images/team/zheng_xi.png -role: phd # pi / postdoc / phd / ms / under / ra / visiting +role: alumni # pi / postdoc / phd / ms / under / ra / visiting / alumni affiliation: Hong Kong Polytechnic University order: 2 links: home-page: orcid: 0000-0001-8399-5127 - google-scholar: https://scholar.google.com/citations?user=cfhVuzMAAAAJ&hl=zh-CN + google-scholar: cfhVuzMAAAAJ github: https://github.com/ZHENGXi-git email: zheng-xi.zheng@connect.polyu.hk profile: diff --git a/_members/zxr.md b/_members/zxr.md index de2515a4..0081ea0a 100644 --- a/_members/zxr.md +++ b/_members/zxr.md @@ -14,7 +14,7 @@ links: profile: display_1: Master of Philosophy, B.Eng. (BUAA) -display_2: To join in fall 2025 +display_2: Sept 2025 --- diff --git a/_opensource/2025-11-11-HDMap.md b/_opensource/2025-11-11-HDMap.md new file mode 100644 index 00000000..b9880bc4 --- /dev/null +++ b/_opensource/2025-11-11-HDMap.md @@ -0,0 +1,37 @@ +--- +title: Semantic-Vector HD Map +subtitle: Multi-Sensor HD Map Construction Pipeline for Autonomous Vehicles +author: Runzhi Hu +image: images/opensource/HDMap/garage_half.gif +tags: +order: +--- + +An open-source HD vector map (HDVM) generation pipeline for autonomous vehicles, integrating GNSS, INS, LiDAR, and camera data. + +**[HDMap](https://github.com/ebhrz/HDMap)** provides a complete pipeline for constructing high-definition semantic and vector maps, designed for autonomous driving in complex urban environments. Unlike traditional methods that rely on planar assumptions, our approach fuses multi-sensor data to produce accurate 3D HD maps. + +**Pipeline Overview:** +1. 
**Semantic extraction** — Extracts semantic information from raw images using Vision Transformer (ViT) and Swin Transformer architectures +2. **3D reconstruction** — Obtains absolute 3D coordinates of semantic objects from LiDAR depth data +3. **Precise localization** — Uses GNSS-RTK and INS for high-precision pose estimation +4. **Vector map generation** — Extracts vector features (e.g., lane markings) to form the HD vector map +5. **Error analysis** — Provides an error propagation scheme analyzing segmentation and LiDAR-camera extrinsic calibration errors + +A **Docker version** of the pipeline is available for easy deployment. + +

+ HD Map Demo +

+ +**Citation:** +```bibtex +@article{hu2024hdmap, + author={Hu, Runzhi and Bai, Shiyu and Wen, Weisong and Xia, Xin and Hsu, Li-Ta}, + title={Towards high-definition vector map construction based on multi-sensor integration for intelligent vehicles: Systems and error quantification}, + journal={IET Intelligent Transport Systems}, + doi={https://doi.org/10.1049/itr2.12524} +} +``` + +**GitHub:** [https://github.com/ebhrz/HDMap](https://github.com/ebhrz/HDMap) diff --git a/_opensource/2025-11-11-kltdataset.md b/_opensource/2025-11-11-kltdataset.md new file mode 100644 index 00000000..f1fa3703 --- /dev/null +++ b/_opensource/2025-11-11-kltdataset.md @@ -0,0 +1,25 @@ +--- +title: KLT Dataset +subtitle: Urban GNSS Dataset with LOS/NLOS Labels +author: Runzhi Hu +image: images/opensource/kltdataset/NLOS_crop.gif +tags: +order: +--- + +An open urban GNSS dataset with LOS/NLOS satellite labels for benchmarking GNSS positioning in challenging environments. + +**[KLT Dataset](https://github.com/ebhrz/KLTDataset)** is a light urban scenario dataset collected for GNSS research, providing labeled satellite signal conditions to support studies in multipath mitigation, NLOS detection, and robust positioning. + +**Dataset Contents:** +- **GNSS raw measurements** — Collected using a u-blox F9P receiver with pseudorange and carrier phase observations +- **Ground truth** — High-precision reference trajectories from a SPAN-CPT system +- **LOS/NLOS labels** — Per-satellite labels for GPS and BeiDou constellations +- **Additional sensors** — IMU, LiDAR, and camera recordings included in the ROS bag file +- **Quick-start scripts** — Configuration files and start scripts provided for immediate use + +

+ KLT Dataset — NLOS Visualization +

+ +**GitHub:** [https://github.com/ebhrz/KLTDataset](https://github.com/ebhrz/KLTDataset) diff --git a/_opensource/2025-11-11-plvins.md b/_opensource/2025-11-11-plvins.md new file mode 100644 index 00000000..b63f88cc --- /dev/null +++ b/_opensource/2025-11-11-plvins.md @@ -0,0 +1,35 @@ +--- +title: SafetyQuantifiable-PLVINS +subtitle: Safety-Quantifiable Visual Localization with 3D Prior Map +author: Xi Zheng +image: images/opensource/zhengxi/framework2.png +tags: +order: +--- + +Safety-quantifiable line feature-based monocular visual localization with 3D prior map and integrity monitoring. + +**[SafetyQuantifiable-PLVINS](https://github.com/ZHENGXi-git/SafetyQuantifiable-PLVINS)** addresses drift and safety quantification challenges in visual localization by proposing a novel map-aided method that delivers both accurate pose estimates and a measurable error bound. + +**Key Contributions:** +- Tightly integrates visual-inertial odometry with a prior 3D line map via geometric constraints between 2D image features and 3D map lines +- Introduces a **GNSS-inspired integrity monitoring framework** to compute a Protection Level (PL) +- Quantifies potential error in both position and orientation, certifying the solution's safety +- First application of integrity monitoring to visual localization systems + +

+ SafetyQuantifiable-PLVINS Framework +

+ +**Citation:** +```bibtex +@article{zheng2025safety, + title={Safety-quantifiable line feature-based monocular visual localization with 3D prior map}, + author={Zheng, Xi and Wen, Weisong and Hsu, Li-Ta}, + journal={IEEE Transactions on Intelligent Transportation Systems}, + year={2025}, + publisher={IEEE} +} +``` + +**GitHub:** [https://github.com/ZHENGXi-git/SafetyQuantifiable-PLVINS](https://github.com/ZHENGXi-git/SafetyQuantifiable-PLVINS) diff --git a/_opensource/2025-11-11-pyrtklib.md b/_opensource/2025-11-11-pyrtklib.md new file mode 100644 index 00000000..74a9bf29 --- /dev/null +++ b/_opensource/2025-11-11-pyrtklib.md @@ -0,0 +1,42 @@ +--- +title: pyrtklib +subtitle: Python Binding for RTKLIB — The Most Popular GNSS Positioning Library +author: Runzhi Hu +tags: +order: +--- + +A complete Python binding for RTKLIB, bringing the full power of the most widely-used GNSS positioning library to the Python ecosystem. + +**[pyrtklib](https://github.com/IPNL-POLYU/pyrtklib)** bridges the gap between RTKLIB's high-performance C implementation and the Python-based research workflows widely used in deep learning and data science. With pyrtklib, you can read RINEX files, process GNSS observations, and perform SPP, RTK, and PPP positioning — all from Python. 
+ +**Key Features:** +- Full Python interface to RTKLIB's core functions +- Supports SPP, DGNSS, RTK, and PPP positioning modes +- Seamless RINEX file reading and GNSS observation processing +- Enables tight integration of deep learning with GNSS positioning +- Available via PyPI for easy installation + +**Quick Install:** +```bash +pip install pyrtklib # Standard RTKLIB version +pip install pyrtklib5 # Based on rtklibexplorer/rtklib_demo5 +``` + +[![PyPI Downloads](https://static.pepy.tech/personalized-badge/pyrtklib?period=total&units=INTERNATIONAL_SYSTEM&left_color=BLACK&right_color=GREEN&left_text=downloads)](https://pepy.tech/projects/pyrtklib) + +**Citation:** +```bibtex +@ARTICLE{10965937, + author={Hu, Runzhi and Xu, Penghui and Zhong, Yihan and Wen, Weisong}, + journal={IEEE Transactions on Intelligent Transportation Systems}, + title={pyrtklib: An Open-Source Package for Tightly Coupled Deep Learning and GNSS Integration for Positioning in Urban Canyons}, + year={2025}, + volume={26}, + number={7}, + pages={10652-10662}, + doi={10.1109/TITS.2025.3552691} +} +``` + +**GitHub:** [https://github.com/IPNL-POLYU/pyrtklib](https://github.com/IPNL-POLYU/pyrtklib)  |  **Demo5 version:** [https://github.com/IPNL-POLYU/pyrtklib_demo5](https://github.com/IPNL-POLYU/pyrtklib_demo5) diff --git a/_opensource/2025-11-11-tasgnss.md b/_opensource/2025-11-11-tasgnss.md new file mode 100644 index 00000000..8c890e3c --- /dev/null +++ b/_opensource/2025-11-11-tasgnss.md @@ -0,0 +1,24 @@ +--- +title: TASGNSS +subtitle: Simple and Modern Python GNSS Interface +author: Runzhi Hu +tags: +order: +--- + +A simple and modern Python interface for GNSS positioning, built on top of pyrtklib. + +**[TASGNSS](https://github.com/PolyU-TASLAB/TASGNSS)** provides a clean, high-level Python API for GNSS data processing and positioning. 
Built on [pyrtklib](https://github.com/IPNL-POLYU/pyrtklib), it abstracts away low-level complexity and offers an intuitive interface for researchers and developers working with GNSS data. + +**Key Features:** +- High-level Pythonic API for GNSS positioning (SPP, RTK, PPP) +- Built on the well-established pyrtklib/RTKLIB engine +- Easy-to-use data reading from RINEX and other standard formats +- Comprehensive [documentation](https://polyu-taslab.github.io/TASGNSS/) with tutorials and examples + +**Quick Install:** +```bash +pip install tasgnss +``` + +**GitHub:** [https://github.com/PolyU-TASLAB/TASGNSS](https://github.com/PolyU-TASLAB/TASGNSS)  |  **Docs:** [https://polyu-taslab.github.io/TASGNSS/](https://polyu-taslab.github.io/TASGNSS/) diff --git a/_opensource/2025-11-11-tc-viml.md b/_opensource/2025-11-11-tc-viml.md new file mode 100644 index 00000000..8a1b5059 --- /dev/null +++ b/_opensource/2025-11-11-tc-viml.md @@ -0,0 +1,43 @@ +--- +title: TC-VIML +subtitle: Tightly-Coupled Visual-Inertial-Map Localization for Intelligent Vehicles +author: Xi Zheng +image: images/opensource/zhengxi/framework.png +tags: +order: +--- + +Tightly-coupled Visual/Inertial/Map integration with observability analysis for reliable localization of intelligent vehicles. + +**[TC-VIML](https://github.com/ZHENGXi-git/TC-VIML)** proposes a tightly-coupled visual-inertial odometry (VIO) system that leverages a 3D prior line map for drift-free localization. Unlike loosely-coupled methods, our approach deeply integrates line features into a factor graph optimization framework, supported by a robust cross-modality matching and outlier rejection strategy. 
+ +**Key Contributions:** +- Tight integration of 2D image line features with a 3D prior line map via factor graph optimization +- Robust cross-modality matching and outlier rejection for line feature association +- First rigorous proof that the system achieves **full observability in global translation** (only yaw unobservable) +- Validated in both simulated and real-world urban driving environments + +

+ TC-VIML Framework +

+ +**Citation:** +```bibtex +@article{zheng2024tightly, + title={Tightly-coupled visual/inertial/map integration with observability analysis for reliable localization of intelligent vehicles}, + author={Zheng, Xi and Wen, Weisong and Hsu, Li-Ta}, + journal={IEEE Transactions on Intelligent Vehicles}, + year={2024}, + publisher={IEEE} +} + +@inproceedings{zheng2023tightly, + title={Tightly-coupled line feature-aided visual inertial localization within lightweight 3D prior map for intelligent vehicles}, + author={Zheng, Xi and Wen, Weisong and Hsu, Li-Ta}, + booktitle={IEEE ITSC}, + pages={6019--6026}, + year={2023} +} +``` + +**GitHub:** [https://github.com/ZHENGXi-git/TC-VIML](https://github.com/ZHENGXi-git/TC-VIML) diff --git a/_opensource/2025-11-11-tdl-gnss.md b/_opensource/2025-11-11-tdl-gnss.md new file mode 100644 index 00000000..2814a59e --- /dev/null +++ b/_opensource/2025-11-11-tdl-gnss.md @@ -0,0 +1,38 @@ +--- +title: TDL-GNSS +subtitle: Tightly Coupled Deep Learning Framework for GNSS Positioning +author: Runzhi Hu +image: images/papers/2024/runzhi2024pyrtklib.png +tags: +order: +--- + +A tightly coupled deep learning framework for GNSS positioning in challenging urban environments. + +**[TDL-GNSS](https://github.com/ebhrz/TDL-GNSS)** is built on top of [pyrtklib](https://github.com/IPNL-POLYU/pyrtklib) and [TASGNSS](https://github.com/PolyU-TASLAB/TASGNSS), designed to seamlessly integrate deep learning models into the GNSS processing workflow. The framework enables researchers to leverage neural networks for tasks such as satellite signal quality assessment, weight optimization, and positioning error mitigation — all within a unified Python pipeline. + +**Key Features:** +- Tightly integrates deep learning with conventional GNSS processing (SPP, RTK, PPP) +- Built on the established pyrtklib and TASGNSS ecosystem +- End-to-end trainable pipeline for GNSS positioning +- Designed for urban canyon scenarios with severe multipath and NLOS effects + +

+ TDL-GNSS Framework +

+ +**Citation:** +```bibtex +@ARTICLE{10965937, + author={Hu, Runzhi and Xu, Penghui and Zhong, Yihan and Wen, Weisong}, + journal={IEEE Transactions on Intelligent Transportation Systems}, + title={pyrtklib: An Open-Source Package for Tightly Coupled Deep Learning and GNSS Integration for Positioning in Urban Canyons}, + year={2025}, + volume={26}, + number={7}, + pages={10652-10662}, + doi={10.1109/TITS.2025.3552691} +} +``` + +**GitHub:** [https://github.com/ebhrz/TDL-GNSS](https://github.com/ebhrz/TDL-GNSS) diff --git a/_opensource/2026-01-17-TasFusion.md b/_opensource/2026-01-17-TasFusion.md new file mode 100644 index 00000000..6e0b78b2 --- /dev/null +++ b/_opensource/2026-01-17-TasFusion.md @@ -0,0 +1,39 @@ +--- +title: TasFusion +subtitle: ROS1 Package for Multi-Sensor GNSS/IMU Fusion Navigation +author: ZHAO Jiaqi +image: images/opensource/TasFusion/demo.gif +tags: +order: +--- + +A ROS1 package for Ceres-based GNSS/IMU loosely coupled sliding-window optimization, designed for robust multi-sensor navigation. + +**[TasFusion](https://github.com/PolyU-TASLAB/TasFusion)** provides a complete multi-sensor navigation framework with the following features: + +- **Ceres-based optimization** — Sliding-window GNSS/IMU loosely coupled fusion with IMU pre-integration and online bias estimation +- **Marginalization** — Preserves historical information for consistent state estimation +- **GPS constraints** — Supports both position and velocity constraints from GNSS +- **NLOS exclusion** — Built-in utilities to reject non-line-of-sight satellite signals +- **Flexible configuration** — All major functions can be enabled/disabled via launch file parameters +- **Supporting tools** — Includes GNSS message definitions, a NovAtel driver, and NMEA ROS parsing scripts + + + + + + + +
+ + + + + +
+ +> **Reference Hardware Platform** ([Introduction Video](https://www.bilibili.com/video/BV1fiaqzNEEm)): +> TasFusion has been validated on a GNSS-IMU-4G integrated navigation module (dual-IMU + u-blox F9P-04B + 4G uplink), providing high-frequency measurements and reliable telemetry for outdoor deployments. +> For hardware inquiries, please contact **hbwu@hkpolyu-wxresearch.cn**. + +**GitHub:** [https://github.com/PolyU-TASLAB/TasFusion](https://github.com/PolyU-TASLAB/TasFusion) diff --git a/_posts/2021-09-07-Huawei_PolyU_High-accuracy_Localization_Project.md b/_posts/2021-09-07-Huawei_PolyU_High-accuracy_Localization_Project.md index 89d1202e..605e9176 100644 --- a/_posts/2021-09-07-Huawei_PolyU_High-accuracy_Localization_Project.md +++ b/_posts/2021-09-07-Huawei_PolyU_High-accuracy_Localization_Project.md @@ -4,6 +4,7 @@ subtitle: Knowledge Transfer to Unmanned Autonomous Systems # author: XNG image: images/project/huawei_mapping.gif tags: Localization, mapping, sensor-fusion, RTK, GNSS, LiDAR, IMU, Virtual-satellites, Cycle-slip-detection +research_direction: gnss order: --- diff --git a/_posts/2022-02-14-Research_on_GNSS_Urban_Positioning_Algorithm_Based_on_3D_LiDAR.md b/_posts/2022-02-14-Research_on_GNSS_Urban_Positioning_Algorithm_Based_on_3D_LiDAR.md index 9504d66a..9cfd34c5 100644 --- a/_posts/2022-02-14-Research_on_GNSS_Urban_Positioning_Algorithm_Based_on_3D_LiDAR.md +++ b/_posts/2022-02-14-Research_on_GNSS_Urban_Positioning_Algorithm_Based_on_3D_LiDAR.md @@ -4,6 +4,7 @@ subtitle: Knowledge Transfer to Unmanned Autonomous Systems # author: XNG image: images/project/GDSTC/fgo.png tags: Localization, mapping, sensor-fusion, RTK, GNSS, LiDAR, IMU +research_direction: gnss order: --- ## Abstract diff --git a/_posts/2023-01-01-Safety-certifiable_UAV_System_for_Terrian_and_Civil_Infrastructure_Inspection.md b/_posts/2023-01-01-Safety-certifiable_UAV_System_for_Terrian_and_Civil_Infrastructure_Inspection.md index 4b13c0be..a82571c0 100644 --- 
a/_posts/2023-01-01-Safety-certifiable_UAV_System_for_Terrian_and_Civil_Infrastructure_Inspection.md +++ b/_posts/2023-01-01-Safety-certifiable_UAV_System_for_Terrian_and_Civil_Infrastructure_Inspection.md @@ -4,6 +4,7 @@ subtitle: Knowledge Transfer to Unmanned Autonomous Systems # author: XNG image: tags: Localization, UAV +research_direction: drones order: --- diff --git a/_posts/2023-04-01-Vision_Aided_GNSS-RTK_Positioning_for_UAV_System_in_Urban_Canyons.md b/_posts/2023-04-01-Vision_Aided_GNSS-RTK_Positioning_for_UAV_System_in_Urban_Canyons.md index fd3b1e36..6eb6d7f3 100644 --- a/_posts/2023-04-01-Vision_Aided_GNSS-RTK_Positioning_for_UAV_System_in_Urban_Canyons.md +++ b/_posts/2023-04-01-Vision_Aided_GNSS-RTK_Positioning_for_UAV_System_in_Urban_Canyons.md @@ -4,6 +4,7 @@ subtitle: Fisheye Camera Aided GNSS NLOS Detection and Learning-based Pseudorang # author: XNG image: images/project/Vision_aided_GNSS_RTK/framework.png tags: Artificial intelligence, Deep learning, GNSS +research_direction: gnss order: --- ## Abstract diff --git a/_posts/2023-05-08-Unmanned_Aerial_Vehicle_Aided_High_Accuracy_Addictive_Manufacturing_for_Carbon_Fiber_Reinforced_Thermoplastic_Composites_Material.md b/_posts/2023-05-08-Unmanned_Aerial_Vehicle_Aided_High_Accuracy_Addictive_Manufacturing_for_Carbon_Fiber_Reinforced_Thermoplastic_Composites_Material.md index 9e854ff3..19ff47f3 100644 --- a/_posts/2023-05-08-Unmanned_Aerial_Vehicle_Aided_High_Accuracy_Addictive_Manufacturing_for_Carbon_Fiber_Reinforced_Thermoplastic_Composites_Material.md +++ b/_posts/2023-05-08-Unmanned_Aerial_Vehicle_Aided_High_Accuracy_Addictive_Manufacturing_for_Carbon_Fiber_Reinforced_Thermoplastic_Composites_Material.md @@ -4,6 +4,7 @@ subtitle: Knowledge Transfer to Unmanned Autonomous Systems # author: XNG image: tags: UAV, Manufacturing +research_direction: drones order: --- diff --git 
a/_posts/2023-09-30-Research_on_high-precision_vehicle-mounted_GNSS-IMU-Camera_fusion_positioning_technology_in_complex_urban_environments_based_on_factor_graph.md b/_posts/2023-09-30-Research_on_high-precision_vehicle-mounted_GNSS-IMU-Camera_fusion_positioning_technology_in_complex_urban_environments_based_on_factor_graph.md index b89fec63..064037ca 100644 --- a/_posts/2023-09-30-Research_on_high-precision_vehicle-mounted_GNSS-IMU-Camera_fusion_positioning_technology_in_complex_urban_environments_based_on_factor_graph.md +++ b/_posts/2023-09-30-Research_on_high-precision_vehicle-mounted_GNSS-IMU-Camera_fusion_positioning_technology_in_complex_urban_environments_based_on_factor_graph.md @@ -5,6 +5,7 @@ subtitle: A Factor Graph Optimization-Based Multiple‑epoch Ambiguity Resolutio # author: XNG image: images/project/Vision_aided_GNSS_RTK/framework.png tags: Global navigation satellite system, Real-time kinematic positioning, Factor graph optimization, Multi‑epoch ambiguity resolution, Urban canyons +research_direction: gnss order: --- ## Abstract diff --git a/_posts/2023-10-15-Vehicle-infrastructure_Collaboration_for_Connected_Unmanned_Ground_and_Aerial_Vehicles_in_Complex_Urban_Canyons.md b/_posts/2023-10-15-Vehicle-infrastructure_Collaboration_for_Connected_Unmanned_Ground_and_Aerial_Vehicles_in_Complex_Urban_Canyons.md index 9d27d287..a8b23d6e 100644 --- a/_posts/2023-10-15-Vehicle-infrastructure_Collaboration_for_Connected_Unmanned_Ground_and_Aerial_Vehicles_in_Complex_Urban_Canyons.md +++ b/_posts/2023-10-15-Vehicle-infrastructure_Collaboration_for_Connected_Unmanned_Ground_and_Aerial_Vehicles_in_Complex_Urban_Canyons.md @@ -4,6 +4,7 @@ subtitle: Knowledge Transfer to Unmanned Autonomous Systems # author: XNG image: tags: Unmanned Ground Vehicle, Unmanned Aerial Vehicle, Cooperation, urban canyons +research_direction: vehicles order: --- diff --git a/_posts/2023-12-03-Multi_robot_Collaborative_Operations_in_Lunar_Areas_for_Regolith_Processing_Project.md 
b/_posts/2023-12-03-Multi_robot_Collaborative_Operations_in_Lunar_Areas_for_Regolith_Processing_Project.md index 7cd45ffc..4624568c 100644 --- a/_posts/2023-12-03-Multi_robot_Collaborative_Operations_in_Lunar_Areas_for_Regolith_Processing_Project.md +++ b/_posts/2023-12-03-Multi_robot_Collaborative_Operations_in_Lunar_Areas_for_Regolith_Processing_Project.md @@ -3,7 +3,8 @@ title: Multi-robot Collaborative Operations in Lunar Areas for Regolith Processi subtitle: High Accuracy Positioning with Multi-sensory Integration for Robotics in Complex Scenarios # author: CYM image: images/project/prototype.png -tags: Multi-robot-collaboration, MPC, mapping, Leader-Follower-Formation-Algorithm, sensor-fusion, LiDAR, IMU +tags: Multi-robot-collaboration, MPC, mapping, Leader-Follower-Formation-Algorithm, sensor-fusion, LiDAR, IMU +research_direction: humanoid order: --- ## Abstract diff --git a/_posts/2024-01-01-Data-driven-assisted_GNSS_RTK-INS_Navigation_for_Autonomous_Systems_in_Urban_Canyons.md b/_posts/2024-01-01-Data-driven-assisted_GNSS_RTK-INS_Navigation_for_Autonomous_Systems_in_Urban_Canyons.md index e9c10698..b94486d2 100644 --- a/_posts/2024-01-01-Data-driven-assisted_GNSS_RTK-INS_Navigation_for_Autonomous_Systems_in_Urban_Canyons.md +++ b/_posts/2024-01-01-Data-driven-assisted_GNSS_RTK-INS_Navigation_for_Autonomous_Systems_in_Urban_Canyons.md @@ -4,6 +4,7 @@ subtitle: AI-aided Navigation # author: XNG image: images/project/Data_driven_structure.png tags: GNSS Positioning, NLOS/Multipath Correction, Autonomous Driving, Positional Encoding, Multimodal Network, Vision Feature +research_direction: gnss order: --- ## Abstract diff --git a/_posts/2024-01-01-Maximum_Consensus_Integration_of_GNSS_and_LiDAR_for_Urban_Navigation.md b/_posts/2024-01-01-Maximum_Consensus_Integration_of_GNSS_and_LiDAR_for_Urban_Navigation.md index 565be938..6d43fbd1 100644 --- a/_posts/2024-01-01-Maximum_Consensus_Integration_of_GNSS_and_LiDAR_for_Urban_Navigation.md +++ 
b/_posts/2024-01-01-Maximum_Consensus_Integration_of_GNSS_and_LiDAR_for_Urban_Navigation.md @@ -3,7 +3,8 @@ title: Maximum Consensus Integration of GNSS and LiDAR for Urban Navigation subtitle: Knowledge Transfer to Unmanned Autonomous Systems # author: XNG image: -tags: GNSS, LIDAR, Sensor fusion +tags: GNSS, LIDAR, Sensor fusion +research_direction: gnss order: --- diff --git a/_posts/2024-04-01-Sustainable_Window_Cleaning_for_PolyU_Jockey_Club_Innovation_Tower_with_Unmanned_Aerial_Vehicles (UAV)_An_Application_of_Autonomous_Systems_Enabled_Carbon_Reduction.md b/_posts/2024-04-01-Sustainable_Window_Cleaning_for_PolyU_Jockey_Club_Innovation_Tower_with_Unmanned_Aerial_Vehicles (UAV)_An_Application_of_Autonomous_Systems_Enabled_Carbon_Reduction.md index 27a70caf..172407ef 100644 --- a/_posts/2024-04-01-Sustainable_Window_Cleaning_for_PolyU_Jockey_Club_Innovation_Tower_with_Unmanned_Aerial_Vehicles (UAV)_An_Application_of_Autonomous_Systems_Enabled_Carbon_Reduction.md +++ b/_posts/2024-04-01-Sustainable_Window_Cleaning_for_PolyU_Jockey_Club_Innovation_Tower_with_Unmanned_Aerial_Vehicles (UAV)_An_Application_of_Autonomous_Systems_Enabled_Carbon_Reduction.md @@ -3,7 +3,8 @@ title: Sustainable Window Cleaning for PolyU Jockey Club Innovation Tower with U subtitle: Knowledge Transfer to Unmanned Autonomous Systems # author: XNG image: images/project/uav_clean.png -tags: Unmanned Aerial Vehicle +tags: Unmanned Aerial Vehicle +research_direction: drones order: --- diff --git a/_posts/2024-04-08-Development_of_an_Assisted_Navigation_and_Collision_Avoidance_System_using_AI_and_Location-based_Service.md b/_posts/2024-04-08-Development_of_an_Assisted_Navigation_and_Collision_Avoidance_System_using_AI_and_Location-based_Service.md index b9e13b19..ed5bd982 100644 --- a/_posts/2024-04-08-Development_of_an_Assisted_Navigation_and_Collision_Avoidance_System_using_AI_and_Location-based_Service.md +++ 
b/_posts/2024-04-08-Development_of_an_Assisted_Navigation_and_Collision_Avoidance_System_using_AI_and_Location-based_Service.md @@ -4,6 +4,7 @@ subtitle: Knowledge Transfer to Unmanned Autonomous Systems # author: XNG image: images/project/stf/demo_gif.gif tags: Positioning Services, Multi-Vehicle Collaborative Sensing, AI aided GNSS, GNSS Signal Tracing, Sensor Integration +research_direction: fusion order: --- ## Abstract diff --git a/_posts/2024-04-08-Safe-assured_Learning-based_Deep_SE(3)_Motion_Joint_Planning_and_Control_for_Unmanned_Aerial_Vehicles.md b/_posts/2024-04-08-Safe-assured_Learning-based_Deep_SE(3)_Motion_Joint_Planning_and_Control_for_Unmanned_Aerial_Vehicles.md index 17a86157..997a9a10 100644 --- a/_posts/2024-04-08-Safe-assured_Learning-based_Deep_SE(3)_Motion_Joint_Planning_and_Control_for_Unmanned_Aerial_Vehicles.md +++ b/_posts/2024-04-08-Safe-assured_Learning-based_Deep_SE(3)_Motion_Joint_Planning_and_Control_for_Unmanned_Aerial_Vehicles.md @@ -3,7 +3,8 @@ title: Safe-assured Learning-based Deep SE(3) Motion Joint Planning and Control subtitle: Knowledge Transfer to Unmanned Autonomous Systems # author: XNG image: -tags: Advanced Vehicle Safety Systems, Automated Vehicle Operation, Motion Planning, Navigation, Aerial, Marine and Surface Intelligent Vehicles +tags: Advanced Vehicle Safety Systems, Automated Vehicle Operation, Motion Planning, Navigation, Aerial, Marine and Surface Intelligent Vehicles +research_direction: drones order: --- diff --git a/_posts/2024-10-14-AI_assisted_inertial_navigation_system.md b/_posts/2024-10-14-AI_assisted_inertial_navigation_system.md index 7505c715..fcdeaba5 100644 --- a/_posts/2024-10-14-AI_assisted_inertial_navigation_system.md +++ b/_posts/2024-10-14-AI_assisted_inertial_navigation_system.md @@ -3,7 +3,8 @@ title: AI assisted inertial navigation system subtitle: Knowledge Transfer to Unmanned Autonomous Systems # author: XNG image: images/project/Honor.png -tags: Inertial Navigation System, AI +tags: 
Inertial Navigation System, AI +research_direction: fusion order: --- diff --git a/_posts/2024-12-09-Reliable_UAV_Perception_and_Perching_Solutions_in_Urban_Streets.md b/_posts/2024-12-09-Reliable_UAV_Perception_and_Perching_Solutions_in_Urban_Streets.md index 81d42a32..50d819a1 100644 --- a/_posts/2024-12-09-Reliable_UAV_Perception_and_Perching_Solutions_in_Urban_Streets.md +++ b/_posts/2024-12-09-Reliable_UAV_Perception_and_Perching_Solutions_in_Urban_Streets.md @@ -4,6 +4,7 @@ subtitle: Smart Street light Poles with UAV Airports # author: ZHAO Jiaqi image: images/project/UAV_Perching/Smart_Street_light_Poles_with_UAV_Airports.png tags: Landing, UAV Perching, UAV Airport +research_direction: drones order: --- Develop a comprehensive UAV perception and Perching solution, focusing on the integration of smart streetlight poles with a UAV takeoff, landing, and battery exchange platform diff --git a/_posts/2025-01-01-Our_Autonomous_Platforms.md b/_posts/2025-01-01-Our_Autonomous_Platforms.md index 9a893b8b..d8ad4b63 100644 --- a/_posts/2025-01-01-Our_Autonomous_Platforms.md +++ b/_posts/2025-01-01-Our_Autonomous_Platforms.md @@ -1,63 +1,146 @@ --- title: Our Autonomous Platforms -# subtitle: Knowledge Transfer to Unmanned Autonomous Systems +# subtitle: End-to-End AI-Powered Self-Driving Systems # author: Zhang Ziqi image: images/project/Vehicle/ADV.png tags: Autonomous-Driving +research_direction: vehicles order: --- -Demonstration of our Autonomous Driving Vehicles and their onboard sensor platforms. + +Our cutting-edge research platforms for end-to-end AI self-driving, where neural networks learn to drive directly from sensor data to control outputs. + +## What is End-to-End AI Self-Driving? + +End-to-end AI self-driving represents a paradigm shift in autonomous vehicle technology. 
Unlike traditional modular pipelines that break down driving into separate perception, prediction, planning, and control modules, end-to-end approaches use deep neural networks to learn the entire driving task holistically—directly mapping raw sensor inputs to vehicle control commands. + +This revolutionary approach offers several key advantages: + +**Direct Sensor-to-Control Learning**: Neural networks process multi-modal sensor data (cameras, LiDAR, GNSS) and output steering angles, throttle, and brake commands in a single forward pass, eliminating the error propagation inherent in modular systems. + +**Learned Representations**: Rather than hand-crafting features and rules, the network automatically discovers optimal internal representations of the driving environment, capturing subtle patterns that human engineers might miss. + +**Data-Driven Adaptation**: End-to-end models continuously improve through exposure to diverse driving scenarios, learning complex behaviors like defensive driving, traffic flow prediction, and context-aware decision-making from demonstration data. + +**Unified Optimization**: The entire driving pipeline is optimized jointly using gradient-based learning, ensuring that perception and control work synergistically rather than as isolated components. + +Our research explores multiple end-to-end architectures—from imitation learning systems that mimic expert drivers to reinforcement learning agents that discover optimal policies through trial and error in simulation, then transfer to real-world deployment. ## Introduction -An autonomous car, also known as a self-driving vehicle, is a sophisticated mode of transportation that can perceive its environment and navigate without human intervention. These vehicles employ a variety of advanced technologies to achieve safe and efficient driving, making them a significant innovation in modern transportation. 
+Autonomous vehicles represent the future of intelligent transportation, leveraging end-to-end AI architectures to transform raw sensor data into safe, human-like driving decisions. Our laboratory develops and deploys advanced self-driving systems that embody the latest breakthroughs in deep learning, computer vision, and robotics. + +At the core of our autonomous platforms is an integrated AI pipeline that processes multi-modal sensor streams—LiDAR point clouds, camera images, and GNSS/INS data—through sophisticated neural network architectures. These systems learn to simultaneously perceive the environment, predict future trajectories, and execute driving maneuvers in real-time, handling complex urban scenarios with human-level performance. + +The autonomous driving vehicle operates under comprehensive CANBUS control integrated with ROS2 middleware. Our AI control stack communicates seamlessly with the vehicle's MCU, translating high-level neural network outputs into low-level CAN signals for precise actuation. This architecture enables full drive-by-wire control including: + +- **Longitudinal control**: Acceleration and braking commands derived from learned policies +- **Lateral control**: Steering angles predicted by end-to-end neural networks +- **Mode management**: Automated gear shifting (D/P/R/N) based on mission planning +- **Safety systems**: AI-monitored lighting, indicators, and fail-safe mechanisms + +This platform serves as our testbed for advancing AI-powered autonomous driving, from imitation learning and reinforcement learning to vision-language models for natural language navigation. + +## End-to-End AI Architecture Components + +Our autonomous driving system implements a comprehensive end-to-end AI architecture comprising the following key components: + +### 1. 
Multi-Modal Perception Network +**Function**: Fuses data from cameras, LiDAR, and GNSS/INS into unified spatial-temporal representations + +**Architecture**: Vision backbone (ResNet, EfficientNet, or Vision Transformers) for image feature extraction; PointNet++/VoxelNet for 3D point cloud processing; Multi-scale feature pyramid networks for detecting objects at various distances; Temporal fusion modules (ConvLSTM, 3D CNNs) for motion prediction + +**Outputs**: Bird's-eye-view (BEV) semantic maps, 3D object detections, drivable area segmentation, lane boundary predictions + +### 2. World Model & Prediction +**Function**: Learns predictive models of how the environment evolves over time + +**Architecture**: Recurrent neural networks (GRU/LSTM) or Transformers for sequential prediction; Probabilistic trajectory forecasting for surrounding vehicles and pedestrians; Occupancy grid prediction for future scene states; Attention mechanisms for modeling agent-agent interactions + +**Outputs**: Multi-modal future trajectory distributions, predicted collision risks, uncertainty estimates -A critical aspect of autonomous vehicles is their ability to sense and localize themselves within their surroundings. This capability is essential for navigating complex environments, avoiding obstacles, and making real-time driving decisions. Accurate sensing and localization allow autonomous cars to interpret data from their surroundings and respond appropriately to dynamic conditions. +### 3. Planning & Decision-Making Network +**Function**: Generates safe, comfortable, and efficient driving trajectories -The autonomous driving vehicle operates under the comprehensive control of a CANBUS system. The host computer establishes a connection with the MCU, which is equipped with integrated ROS messaging capabilities. This integration allows the system to convert ROS messages into CAN signals, which are then transmitted to the MCU. 
+**Architecture**: Hierarchical planning with high-level route planning and low-level trajectory optimization; Imitation learning from expert demonstrations (Behavioral Cloning, GAIL, DAgger); Reinforcement learning for reward-driven policy optimization (PPO, SAC, TD3); Cost volume networks for evaluating trajectory candidates; Attention-based reasoning for traffic rule compliance + +**Outputs**: Reference trajectories (waypoints with velocity profiles), discrete actions (lane changes, stops) + +### 4. Control Network +**Function**: Executes planned trajectories through precise vehicle control + +**Architecture**: PID controllers enhanced with learned gain scheduling; Model Predictive Control (MPC) with learned dynamics models; Direct end-to-end control networks (steering/throttle/brake prediction); Residual learning to compensate for model uncertainties + +**Outputs**: Low-level commands (steering angle, throttle percentage, brake pressure) + +### 5. Safety & Verification Layer +**Function**: Ensures AI decisions meet safety constraints and override when necessary + +**Components**: Learned safety filters using reachability analysis; Rule-based fallback systems for edge cases; Uncertainty-aware decision-making (epistemic and aleatoric uncertainty); Real-time monitoring and anomaly detection; Redundant sensor validation and fault diagnosis + +**Outputs**: Safety scores, intervention flags, fail-safe commands + +### 6. 
Continuous Learning Pipeline +**Function**: Enables the system to improve from real-world deployment data + +**Components**: On-vehicle data logging (sensor streams, AI decisions, interventions); Offline reinforcement learning from logged experience; Active learning for identifying informative scenarios; Sim-to-real transfer learning using domain adaptation; Federated learning across vehicle fleet + +**Outputs**: Updated model weights, identified edge cases, performance metrics -This architecture provides us with extensive access to the vehicle's functionalities. We can not only relay vital velocity information but also manage gear settings, including Drive (D), Park (P), Reverse (R), and Neutral (N). Additionally, the system enables control of various lighting functions, enhancing both safety and operational efficiency. Overall, this setup ensures seamless communication between components, facilitating precise control and monitoring of the vehicle’s performance. ## Sensor Platform -Currently, our lab has two autonomous vehicles deployed on the PolyU Main Campus and the PolyU-Wuxi Research Institute. Both vehicles are equipped with unique sensors, including LiDAR, cameras, and integrated GNSS/INS, for localization and navigation. +Our laboratory operates two autonomous vehicle testbeds—one at PolyU Main Campus and another at PolyU-Wuxi Research Institute—both equipped with production-grade sensor suites for multi-modal AI training and validation. 
+ +The sensor configuration enables comprehensive environmental perception: -Here is the sensor suite: +| Sensor Type | Brand/Model | Specifications | AI Application | +|-------------|-------------|----------------|----------------| +| **LiDAR** | Robosense RS-LiDAR-32 | 32 channels, 200m range, 360° FOV, 30° vertical FOV, 10-20Hz | 3D point cloud processing for obstacle detection, semantic segmentation, and occupancy prediction | +| **Cameras** | HikRobot Event Camera | 1280×720 resolution, 120dB HDR, 60fps, global shutter | Vision-based perception, lane detection, traffic sign recognition, end-to-end driving policy learning | +| **GNSS/INS**| CHCNav GNSS/INS | Dual-frequency RTK, integrated IMU, cm-level accuracy | Ground-truth localization for supervised learning, map-based planning, sensor fusion validation | -| Sensor Type | Brand/Model | Parameters | -|-------------|-------------|------------| -| **LiDAR** | Robosense RS-LiDAR-32 | 32 laser channels, 200m range, 360° horizontal FOV, 30° vertical FOV, 10Hz-20Hz scanning frequency | -| **Cameras** | HikRobot Event camera | 1280x720 resolution, 120dB dynamic range, 60fps frame rate, global shutter | -| **GNSS/INS**| CHCNav GNSS/INS | Dual-frequency GNSS receiver, integrated IMU, centimeter-level accuracy, real-time kinematic (RTK) support | +This sensor fusion architecture provides redundant, complementary data streams that feed our end-to-end AI models, enabling robust perception under diverse weather and lighting conditions. +## AI-Driven Autonomous Driving Demonstrations -## ADV Demo Video +### Real-World Testing: Campus Deployment -### Testing
-

ADV in PolyU Campus

+

End-to-End AI Navigation — PolyU Campus

- -

ADV in PolyU-Wuxi Research Institute

+ +

Autonomous Operation — PolyU-Wuxi Research Institute

+### AI Training Pipeline: CARLA Simulation - -### Carla Simulation Video +Our AI models are pre-trained and validated in high-fidelity simulation environments before real-world deployment. Using CARLA simulator, we generate diverse driving scenarios for imitation learning, reinforcement learning, and domain adaptation research.
- Team Banner -

Carla Simulation

+

CARLA Simulation Environment — End-to-End AI Policy Learning

+ + + +## Research Team + +**Principal Investigator:** +[Dr. Wen Weisong](https://polyu-taslab.github.io/members/Wen_Weisong.html) — Assistant Professor, Department of Aeronautical and Aviation Engineering, The Hong Kong Polytechnic University -### Researcher +**Core Researchers:** +[Mr. Zhang Ziqi](https://polyu-taslab.github.io/members/Zhang_Ziqi.html) — PhD Student, End-to-End Learning & Sensor Fusion +[Dr. Huang Feng](https://polyu-taslab.github.io/members/Huang_Feng.html) — Postdoctoral Researcher, Navigation & Localization + +--- -[Dr. Weisong Wen](https://polyu-taslab.github.io/members/Wen_Weisong.html), [Mr. Zhang Ziqi](https://polyu-taslab.github.io/members/Zhang_Ziqi.html), [Mr. Huang Feng](https://polyu-taslab.github.io/members/Huang_Feng.html) +**Research Focus:** End-to-End Deep Learning, Vision-Language Navigation, Multi-Modal Sensor Fusion, Sim-to-Real Transfer, Safe Reinforcement Learning, Imitation Learning, World Models for Autonomous Driving \ No newline at end of file diff --git a/_posts/2025-02-16-Safety-certified_Multi-source_Fusion_Positioning_for_Autonomous_Vehicles_in_Complex_Scenarios.md b/_posts/2025-02-16-Safety-certified_Multi-source_Fusion_Positioning_for_Autonomous_Vehicles_in_Complex_Scenarios.md index 4ff1b8a9..0c21e171 100644 --- a/_posts/2025-02-16-Safety-certified_Multi-source_Fusion_Positioning_for_Autonomous_Vehicles_in_Complex_Scenarios.md +++ b/_posts/2025-02-16-Safety-certified_Multi-source_Fusion_Positioning_for_Autonomous_Vehicles_in_Complex_Scenarios.md @@ -3,7 +3,8 @@ title: Safety-certified Multi-source Fusion Positioning for Autonomous Vehicles subtitle: Knowledge Transfer to Unmanned Autonomous Systems # author: XNG image: -tags: Localization, Sensor fusion, Safety, Autonomous Vehicle +tags: Localization, Sensor fusion, Safety, Autonomous Vehicle +research_direction: fusion order: --- diff --git a/_sass/custom.scss b/_sass/custom.scss new file mode 100644 index 00000000..c8f75f41 --- /dev/null +++ b/_sass/custom.scss 
@@ -0,0 +1,179 @@ +--- +title: Projects +nav: + order: 3 + tooltip: +--- + +# {% include icon.html icon="fa-solid fa-wrench" %}Projects + +
+

Our Projects

+

Explore our latest projects and initiatives.

+
+ +
+
+ +
+
+
+ +
+
+ +
+
+ +
+
+ +{% include section.html %} + +{% include search-box.html %} + +{% include tags.html tags=site.tags %} + +{% include search-info.html %} + +{% include list.html data="posts" component="post-excerpt" %} + + + + \ No newline at end of file diff --git a/_styles/-theme.scss b/_styles/-theme.scss index 0caecc61..f83d93a6 100644 --- a/_styles/-theme.scss +++ b/_styles/-theme.scss @@ -27,25 +27,25 @@ :root { // font families - --title: "Barlow", sans-serif; - --heading: "Barlow", sans-serif; - --body: "Barlow", sans-serif; + --title: "Barlow", Arial, sans-serif; + --heading: "Barlow", Arial, sans-serif; + --body: "Barlow", Arial, sans-serif; --code: "Roboto Mono", monospace; // font sizes - --large: 1.2rem; - --xl: 1.4rem; - --xxl: 1.6rem; + --large: 1.15rem; + --xl: 1.3rem; + --xxl: 1.5rem; // font weights --thin: 200; --regular: 400; - --semi-bold: 500; - --bold: 600; + --semi-bold: 600; + --bold: 700; // text line spacing - --spacing: 2; - --compact: 1.5; + --spacing: 1.55; + --compact: 1.4; // effects --rounded: 3px; diff --git a/_styles/body.scss b/_styles/body.scss index 91ecffcf..d34ef67f 100644 --- a/_styles/body.scss +++ b/_styles/body.scss @@ -10,6 +10,7 @@ body { background: var(--background); color: var(--text); font-family: var(--body); + font-size: 15px; text-align: center; line-height: var(--compact); } diff --git a/_styles/bold.scss b/_styles/bold.scss index 01c72f68..01efe1e8 100644 --- a/_styles/bold.scss +++ b/_styles/bold.scss @@ -5,3 +5,9 @@ b, strong { font-weight: var(--bold); } + +// Blue bold emphasis for key terms +.blue { + color: var(--primary); + font-weight: var(--bold); +} \ No newline at end of file diff --git a/_styles/citation.scss b/_styles/citation.scss index dc6c95e2..11fad92b 100644 --- a/_styles/citation.scss +++ b/_styles/citation.scss @@ -10,40 +10,41 @@ $wrap: 800px; .citation { display: flex; - margin: 20px 0; - border-radius: var(--rounded); + flex-direction: row; + margin: 10px 0; + padding: 14px 18px; + border-radius: 6px; 
background: var(--background); - overflow: hidden; - box-shadow: var(--shadow); + overflow: visible; + border-left: 3px solid var(--primary); + box-shadow: 0 1px 4px rgba(0, 0, 0, 0.06); + transition: box-shadow 0.2s, transform 0.15s; + align-items: flex-start; } -.citation-image { - position: relative; - width: $thumb-size; - flex-shrink: 0; - // box-shadow: var(--shadow); +.citation:hover { + box-shadow: 0 3px 12px rgba(0, 0, 0, 0.10); + transform: translateY(-1px); } -.citation-image img { - position: absolute; - inset: 0; - width: 100%; - height: 100%; - object-fit: contain; +/* Hide thumbnail images for cleaner academic look */ +.citation-image { + display: none; } .citation-text { position: relative; display: inline-flex; flex-wrap: wrap; - gap: 10px; + gap: 4px; max-width: 100%; height: min-content; - padding: 20px; - padding-left: 30px; + padding: 0; + padding-left: 0; text-align: left; overflow-wrap: break-word; z-index: 0; + font-size: 0.93em; } .citation-title, @@ -54,50 +55,73 @@ $wrap: 800px; } .citation-title { - font-weight: var(--semi-bold); + font-size: 1em; + font-weight: 600; + line-height: 1.4; + color: var(--primary); +} + +.citation-title:hover { + text-decoration: underline; } .citation-text > .icon { - position: absolute; - top: 20px; - right: 20px; - color: var(--light-gray); - opacity: 0.5; - font-size: 30px; - z-index: -1; + display: none; +} + +.citation-authors { + font-size: 0.88em; + color: #444; + line-height: 1.4; } .citation-publisher { text-transform: capitalize; + font-style: italic; +} + +.citation-details { + font-size: 0.84em; + color: #777; + line-height: 1.4; } .citation-description { - color: var(--gray); + font-size: 0.85em; + color: #666; + line-height: 1.45; + margin-top: 2px; } .citation-buttons { display: flex; flex-wrap: wrap; - gap: 10px; + gap: 6px; + margin-top: 2px; } .citation-buttons .button { margin: 0; + font-size: 0.82em; + padding: 2px 8px; + border-radius: 4px; + background: #f0f7ff; + color: 
var(--primary); + border: 1px solid #d6e8f7; +} + +.citation-buttons .button:hover { + background: var(--primary); + color: #fff; } .citation-text > .tags { display: inline-flex; justify-content: flex-start; - margin: 0; + margin: 2px 0 0 0; } -@container (max-width: #{$wrap}) { - .citation { - flex-direction: column; - } - - .citation-image { - width: unset; - height: $thumb-size; - } +.citation-text > .tags .tag { + font-size: 0.78em; + padding: 1px 8px; } diff --git a/_styles/code.scss b/_styles/code.scss index 4a50657e..6ea03189 100644 --- a/_styles/code.scss +++ b/_styles/code.scss @@ -5,7 +5,7 @@ pre, code, pre *, code * { - font-family: var(--code); + font-family: var(--code), 'Fira Mono', 'Menlo', 'Monaco', 'Consolas', 'Liberation Mono', 'Courier New', monospace; } // inline code diff --git a/_styles/feature.scss b/_styles/feature.scss index 3d2a53f9..8e8f40ff 100644 --- a/_styles/feature.scss +++ b/_styles/feature.scss @@ -7,8 +7,8 @@ $wrap: 800px; display: flex; justify-content: center; align-items: center; - gap: 40px; - margin: 40px 0; + gap: 24px; + margin: 20px 0; } .feature-image { diff --git a/_styles/footer.scss b/_styles/footer.scss index d0d52774..b7d890b6 100644 --- a/_styles/footer.scss +++ b/_styles/footer.scss @@ -6,10 +6,11 @@ footer { justify-content: center; align-items: center; flex-direction: column; - gap: 20px; - padding: 40px; + gap: 14px; + padding: 24px 30px; line-height: var(--spacing); box-shadow: var(--shadow); + font-size: 0.88rem; } footer a { diff --git a/_styles/header.scss b/_styles/header.scss index d6b435b1..4a318bb1 100644 --- a/_styles/header.scss +++ b/_styles/header.scss @@ -99,13 +99,16 @@ nav { justify-content: center; align-items: center; flex-wrap: wrap; - gap: 10px; + gap: 8px; font-family: var(--heading); + font-size: 0.88rem; + font-weight: var(--semi-bold); text-transform: uppercase; + letter-spacing: 0.5px; } nav > a { - padding: 5px; + padding: 4px 6px; } nav > a:hover { diff --git a/_styles/heading.scss 
b/_styles/heading.scss index f739e58a..281127e7 100644 --- a/_styles/heading.scss +++ b/_styles/heading.scss @@ -7,45 +7,47 @@ h3, h4, h5, h6 { - margin: 40px 0 20px 0; + margin: 20px 0 8px 0; font-family: var(--heading); - font-weight: var(--semi-bold); + font-weight: var(--bold); text-align: left; - letter-spacing: 1px; + letter-spacing: 0.3px; } h1 { - margin: 40px 0; - font-size: 1.6rem; - font-weight: var(--regular); + margin: 20px 0; + font-size: 1.5rem; + font-weight: var(--semi-bold); text-transform: uppercase; text-align: center; + letter-spacing: 1px; } h2 { - font-size: 1.6rem; - padding-bottom: 5px; + font-size: 1.35rem; + padding-bottom: 4px; border-bottom: solid 1px var(--light-gray); - font-weight: var(--regular); + font-weight: var(--semi-bold); } h3 { - font-size: 1.5rem; + font-size: 1.15rem; + font-weight: var(--semi-bold); } h4 { - font-size: 1.3rem; + font-size: 1.05rem; } h5 { - font-size: 1.15rem; + font-size: 0.95rem; } h6 { - font-size: 1rem; + font-size: 0.9rem; } :where(h1, h2, h3, h4, h5, h6) > .icon { - margin-right: 1em; + margin-right: 0.6em; color: var(--light-gray); } diff --git a/_styles/list.scss b/_styles/list.scss index d769a6a3..f28cd718 100644 --- a/_styles/list.scss +++ b/_styles/list.scss @@ -3,8 +3,8 @@ ul, ol { - margin: 20px 0; - padding-left: 40px; + margin: 6px 0; + padding-left: 24px; } ul { @@ -12,10 +12,11 @@ ul { } li { - margin: 5px 0; - padding-left: 10px; + margin: 2px 0; + padding-left: 4px; text-align: justify; line-height: var(--spacing); + font-size: 0.95rem; ul, ol { diff --git a/_styles/paragraph.scss b/_styles/paragraph.scss index 08b05a33..f9b612ea 100644 --- a/_styles/paragraph.scss +++ b/_styles/paragraph.scss @@ -2,7 +2,8 @@ --- p { - margin: 20px 0; + margin: 8px 0; text-align: justify; line-height: var(--spacing); + font-size: 0.95rem; } diff --git a/_styles/post-excerpt.scss b/_styles/post-excerpt.scss index 27c7a1dc..d5cfec42 100644 --- a/_styles/post-excerpt.scss +++ b/_styles/post-excerpt.scss @@ 
-1,7 +1,7 @@ --- --- -$thumb-size: 200px; +$thumb-size: 160px; $wrap: 800px; .post-excerpt-container { @@ -10,18 +10,24 @@ $wrap: 800px; .post-excerpt { display: flex; - margin: 20px 0; - border-radius: var(--rounded); + margin: 12px 0; + border-radius: 6px; background: var(--background); overflow: hidden; - box-shadow: var(--shadow); + border-left: 3px solid var(--primary); + box-shadow: 0 1px 4px rgba(0, 0, 0, 0.06); + transition: box-shadow 0.2s, transform 0.15s; +} + +.post-excerpt:hover { + box-shadow: 0 3px 12px rgba(0, 0, 0, 0.10); + transform: translateY(-1px); } .post-excerpt-image { position: relative; width: $thumb-size; flex-shrink: 0; - // box-shadow: var(--shadow); } .post-excerpt-image img { @@ -35,8 +41,8 @@ $wrap: 800px; .post-excerpt-text { display: flex; flex-wrap: wrap; - gap: 20px; - padding: 20px 30px; + gap: 6px; + padding: 14px 20px; text-align: left; } @@ -46,15 +52,27 @@ $wrap: 800px; .post-excerpt-text > a:first-child { width: 100%; - font-weight: var(--semi-bold); + font-weight: 600; + font-size: 1.05em; + color: var(--primary); + line-height: 1.4; +} + +.post-excerpt-text > a:first-child:hover { + text-decoration: underline; } .post-excerpt-text > div { justify-content: flex-start; + font-size: 0.85em; + color: #777; } .post-excerpt-text > p { width: 100%; + font-size: 0.9em; + line-height: 1.5; + color: #444; } @container (max-width: #{$wrap}) { diff --git a/_styles/rule.scss b/_styles/rule.scss index abf797b8..ec4eca01 100644 --- a/_styles/rule.scss +++ b/_styles/rule.scss @@ -2,7 +2,7 @@ --- hr { - margin: 40px 0; + margin: 20px 0; background: var(--light-gray); border: none; height: 1px; diff --git a/_styles/search-box.scss b/_styles/search-box.scss index 5f20a783..091d0dc2 100644 --- a/_styles/search-box.scss +++ b/_styles/search-box.scss @@ -24,3 +24,88 @@ color: var(--black); border: none; } + +/* Smart search suggestion styles */ +.search-suggestions { + position: absolute; + top: 100%; + left: 0; + width: 100%; + background: 
#fff; + border: 1px solid #ccc; + border-radius: 0 0 8px 8px; + box-shadow: 0 2px 8px rgba(0,0,0,0.08); + z-index: 10; + max-height: 220px; + overflow-y: auto; + display: none; + font-size: 1rem; +} + +.suggestion-item { + display: flex; + flex-direction: column; + gap: 2px; + padding: 12px 18px; + cursor: pointer; + color: var(--black); + transition: background 0.2s; + border-bottom: 1px solid #f0f0f0; +} +.suggestion-item:last-child { + border-bottom: none; +} +.suggestion-item:hover { + background: var(--light-gray, #f5f5f5); +} +.suggestion-title { + font-weight: 600; + color: var(--primary, #1a73e8); + font-size: 1.05em; +} +.suggestion-tags { + font-size: 0.92em; + color: var(--secondary, #666); + margin-top: 2px; + display: flex; + flex-wrap: wrap; + gap: 6px; +} +.suggestion-tag { + background: var(--light-gray, #f5f5f5); + color: var(--primary, #1a73e8); + border-radius: 999px; + padding: 2px 10px; + font-size: 0.9em; +} + +.search-box .search-input { + border-radius: 8px; + border: 1.5px solid #bdbdbd; + padding: 10px 14px; + font-size: 1.08rem; + box-sizing: border-box; + outline: none; + transition: border-color 0.2s, box-shadow 0.2s; +} +.search-box .search-input:focus { + border-color: var(--primary, #1a73e8); + box-shadow: 0 0 0 2px rgba(26,115,232,0.08); +} + +.search-box button { + border-radius: 8px; + border: 1.5px solid #bdbdbd; + background: #f5f5f5; + color: var(--black); + font-size: 1.08rem; + cursor: pointer; + padding: 0 16px; + height: 38px; + margin-left: 8px; + transition: background 0.2s, border-color 0.2s; +} +.search-box button:hover { + background: var(--light-gray, #eaeaea); + border-color: var(--primary, #1a73e8); +} diff --git a/_styles/section.scss b/_styles/section.scss index 332deb65..a332681e 100644 --- a/_styles/section.scss +++ b/_styles/section.scss @@ -2,7 +2,7 @@ --- $page: 1000px; -$padding: 40px; +$padding: 30px; section { padding: $padding max($padding, calc((100% - $page) / 2)); diff --git a/blog/index.md 
b/blog/index.md index 1f1ce0d9..67ed8017 100644 --- a/blog/index.md +++ b/blog/index.md @@ -1,8 +1,8 @@ --- title: Blog -nav: - order: 5 - tooltip: Knowledge sharing +# nav: +# order: 5 +# tooltip: Knowledge sharing --- # {% include icon.html icon="fa-solid fa-feather-pointed" %}Blog diff --git a/contact/index.md b/contact/index.md deleted file mode 100644 index 45849407..00000000 --- a/contact/index.md +++ /dev/null @@ -1,81 +0,0 @@ ---- -title: Join Us! -nav: - order: 6 - tooltip: Email, address, and location ---- - -# {% include icon.html icon="fa-regular fa-envelope" %}Join Us - - -### Openings - -We regularly have multiple openings for Postdoc/PhD/MPhil/RA/Internships (All year round) to work on research related to trustworthy autonomous systems in general, including UAV and self-driving cars. If you are a PolyU student (Undergraduate and MSc students seeking URIS or dissertation supervision) interested in working with me, feel free to drop me an email (together with your transcript and brief introduction) or walk into my office at room R820! - -**Postdoc/RA positions** (Regular quotas): High precise perception positioning control with multi-sensory integration, autonomous systems, unmanned aerial vehicles (UAV), semantic aided positioning, map update and qualification, hardware-software co-design for next-generation navigation chips, urban GNSS positioning, GNSS RTK, PPP, PPP-RTK, multi-agent collaborative positioning. For those interested, please send us your CV, representative publications list, and research statement/proposal. (We will reply to you within one week if you are shortlisted for an interview). For any candidate, you MUST have at least one of the following properties: (1) a strong publication record! Or (2) strong capabilities in coding (at least C++ or Python) or hardware. Or (3) strong capabilities in preparing research proposals. 
- -**PhD/MPhil topics** (Regular quotas): Safety-certifiable positioning, control, and perception for autonomous systems. For more topics we are working on, please refer to our project page. For those interested, please send us your CV, representative publications list, and research proposal. (We will reply to you within one week if you are shortlisted for an interview). - - -{% - include button.html - type="email" - text="welson.wen@polyu.edu.hk" - link="welson.wen@polyu.edu.hk" -%} -{% - include button.html - type="phone" - text="(852) 3400 8234" - link="+852 3400 8234" -%} -{% - include button.html - type="address" - tooltip="Our location on Google Maps for easy navigation" - link="https://maps.app.goo.gl/Aj8Zj2xQ8KzHSRtr9" -%} - -{% include section.html %} - -{% capture col1 %} - -{% - include figure.html - image="images/AboutPolyU_Campus3.png" - caption=" " -%} - -{% endcapture %} - -{% capture col2 %} - -{% - include figure.html - image="images/AboutPolyU_Campus5.jpg" - caption=" " -%} - -{% endcapture %} - -{% include cols.html col1=col1 col2=col2 %} - -{% include section.html %} - - - -{% capture col1 %} - -{% endcapture %} - -{% capture col2 %} - -{% endcapture %} - -{% capture col3 %} - -{% endcapture %} - -{% include cols.html col1=col1 col2=col2 col3=col3 %} - - diff --git a/images/news/0116_linxai/1.jpg b/images/news/0116_linxai/1.jpg new file mode 100644 index 00000000..d5c5ca2a Binary files /dev/null and b/images/news/0116_linxai/1.jpg differ diff --git a/images/news/0116_linxai/2.jpg b/images/news/0116_linxai/2.jpg new file mode 100644 index 00000000..2c145475 Binary files /dev/null and b/images/news/0116_linxai/2.jpg differ diff --git a/images/news/0116_linxai/f96fcea426e5e7dc867a622af30f2335.jpg b/images/news/0116_linxai/f96fcea426e5e7dc867a622af30f2335.jpg new file mode 100644 index 00000000..d5c5ca2a Binary files /dev/null and b/images/news/0116_linxai/f96fcea426e5e7dc867a622af30f2335.jpg differ diff --git a/images/news/0116_linxai/readme.md 
b/images/news/0116_linxai/readme.md new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/images/news/0116_linxai/readme.md @@ -0,0 +1 @@ + diff --git a/images/news/0119_simpleai/1.png b/images/news/0119_simpleai/1.png new file mode 100644 index 00000000..f0df8d8a Binary files /dev/null and b/images/news/0119_simpleai/1.png differ diff --git a/images/news/0119_simpleai/2.png b/images/news/0119_simpleai/2.png new file mode 100644 index 00000000..8d19c5d2 Binary files /dev/null and b/images/news/0119_simpleai/2.png differ diff --git a/images/news/0119_simpleai/3.png b/images/news/0119_simpleai/3.png new file mode 100644 index 00000000..e9dbe8eb Binary files /dev/null and b/images/news/0119_simpleai/3.png differ diff --git a/images/news/0119_simpleai/4.png b/images/news/0119_simpleai/4.png new file mode 100644 index 00000000..fe07a83f Binary files /dev/null and b/images/news/0119_simpleai/4.png differ diff --git a/images/news/0119_simpleai/readme.md b/images/news/0119_simpleai/readme.md new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/images/news/0119_simpleai/readme.md @@ -0,0 +1 @@ + diff --git a/images/news/0130Sandbox/pic1.jpeg b/images/news/0130Sandbox/pic1.jpeg new file mode 100644 index 00000000..4d7d53a4 Binary files /dev/null and b/images/news/0130Sandbox/pic1.jpeg differ diff --git a/images/news/0130Sandbox/pic2.jpg b/images/news/0130Sandbox/pic2.jpg new file mode 100644 index 00000000..a28448b6 Binary files /dev/null and b/images/news/0130Sandbox/pic2.jpg differ diff --git a/images/news/0130Sandbox/pic4.jpeg b/images/news/0130Sandbox/pic4.jpeg new file mode 100644 index 00000000..c4d38935 Binary files /dev/null and b/images/news/0130Sandbox/pic4.jpeg differ diff --git a/images/news/0130Sandbox/pic4.jpg b/images/news/0130Sandbox/pic4.jpg new file mode 100644 index 00000000..0e69791d Binary files /dev/null and b/images/news/0130Sandbox/pic4.jpg differ diff --git a/images/news/0914MarsTalk/Chen.jpg 
b/images/news/0914MarsTalk/Chen.jpg new file mode 100644 index 00000000..5a107f4e Binary files /dev/null and b/images/news/0914MarsTalk/Chen.jpg differ diff --git a/images/news/0914MarsTalk/GroupTas.jpg b/images/news/0914MarsTalk/GroupTas.jpg new file mode 100644 index 00000000..a5071388 Binary files /dev/null and b/images/news/0914MarsTalk/GroupTas.jpg differ diff --git a/images/news/0914MarsTalk/GroupTas2.jpg b/images/news/0914MarsTalk/GroupTas2.jpg new file mode 100644 index 00000000..986b022f Binary files /dev/null and b/images/news/0914MarsTalk/GroupTas2.jpg differ diff --git a/images/news/0914MarsTalk/WenWeisong.jpg b/images/news/0914MarsTalk/WenWeisong.jpg new file mode 100644 index 00000000..668ee6e0 Binary files /dev/null and b/images/news/0914MarsTalk/WenWeisong.jpg differ diff --git a/images/news/0914MarsTalk/marstalk.jpg b/images/news/0914MarsTalk/marstalk.jpg new file mode 100644 index 00000000..a724b6a0 Binary files /dev/null and b/images/news/0914MarsTalk/marstalk.jpg differ diff --git a/images/news/0916TALK/image1.png b/images/news/0916TALK/image1.png new file mode 100755 index 00000000..982a2481 Binary files /dev/null and b/images/news/0916TALK/image1.png differ diff --git a/images/news/0916TALK/image2.png b/images/news/0916TALK/image2.png new file mode 100755 index 00000000..66c2207d Binary files /dev/null and b/images/news/0916TALK/image2.png differ diff --git a/images/news/0916TALK/image3.jpg b/images/news/0916TALK/image3.jpg new file mode 100755 index 00000000..cbeb98ae Binary files /dev/null and b/images/news/0916TALK/image3.jpg differ diff --git a/images/news/0916TALK/image4.png b/images/news/0916TALK/image4.png new file mode 100644 index 00000000..250f80ba Binary files /dev/null and b/images/news/0916TALK/image4.png differ diff --git a/images/news/0929CampusFlight/CAD_exam.jpg b/images/news/0929CampusFlight/CAD_exam.jpg new file mode 100644 index 00000000..4754f252 Binary files /dev/null and b/images/news/0929CampusFlight/CAD_exam.jpg differ 
diff --git a/images/news/0929CampusFlight/Drone_Flight.jpg b/images/news/0929CampusFlight/Drone_Flight.jpg new file mode 100644 index 00000000..1b7cad41 Binary files /dev/null and b/images/news/0929CampusFlight/Drone_Flight.jpg differ diff --git a/images/news/0929CampusFlight/Drone_Flight_2.jpg b/images/news/0929CampusFlight/Drone_Flight_2.jpg new file mode 100644 index 00000000..8d8440f0 Binary files /dev/null and b/images/news/0929CampusFlight/Drone_Flight_2.jpg differ diff --git a/images/news/0929CampusFlight/Examine_drone.jpg b/images/news/0929CampusFlight/Examine_drone.jpg new file mode 100644 index 00000000..d399a507 Binary files /dev/null and b/images/news/0929CampusFlight/Examine_drone.jpg differ diff --git a/images/news/0930SouthernPower/image.png b/images/news/0930SouthernPower/image.png new file mode 100644 index 00000000..7d513a0f Binary files /dev/null and b/images/news/0930SouthernPower/image.png differ diff --git a/images/news/1010ZYYTITIS/1.png b/images/news/1010ZYYTITIS/1.png new file mode 100644 index 00000000..0a2571ac Binary files /dev/null and b/images/news/1010ZYYTITIS/1.png differ diff --git a/images/news/1010ZYYTITIS/framework.png b/images/news/1010ZYYTITIS/framework.png new file mode 100644 index 00000000..df2327d8 Binary files /dev/null and b/images/news/1010ZYYTITIS/framework.png differ diff --git a/images/news/1010ZYYTITIS/test.png b/images/news/1010ZYYTITIS/test.png new file mode 100644 index 00000000..47a985e7 Binary files /dev/null and b/images/news/1010ZYYTITIS/test.png differ diff --git a/images/news/1017NeiMengGuVisit/image1.jpg b/images/news/1017NeiMengGuVisit/image1.jpg new file mode 100644 index 00000000..7139e240 Binary files /dev/null and b/images/news/1017NeiMengGuVisit/image1.jpg differ diff --git a/images/news/1017NeiMengGuVisit/image2.jpg b/images/news/1017NeiMengGuVisit/image2.jpg new file mode 100644 index 00000000..08ef9a9e Binary files /dev/null and b/images/news/1017NeiMengGuVisit/image2.jpg differ diff --git 
a/images/news/1017NeiMengGuVisit/image3.jpg b/images/news/1017NeiMengGuVisit/image3.jpg new file mode 100644 index 00000000..1c03fa2b Binary files /dev/null and b/images/news/1017NeiMengGuVisit/image3.jpg differ diff --git a/images/news/1017NeiMengGuVisit/image4.jpg b/images/news/1017NeiMengGuVisit/image4.jpg new file mode 100644 index 00000000..96fc336c Binary files /dev/null and b/images/news/1017NeiMengGuVisit/image4.jpg differ diff --git a/images/news/1017NeiMengGuVisit/image5.jpg b/images/news/1017NeiMengGuVisit/image5.jpg new file mode 100644 index 00000000..89cacf18 Binary files /dev/null and b/images/news/1017NeiMengGuVisit/image5.jpg differ diff --git a/images/news/1017NeiMengGuVisit/image6.jpg b/images/news/1017NeiMengGuVisit/image6.jpg new file mode 100644 index 00000000..f3605a69 Binary files /dev/null and b/images/news/1017NeiMengGuVisit/image6.jpg differ diff --git a/images/news/1103fangwu/fangwu1.jpg b/images/news/1103fangwu/fangwu1.jpg new file mode 100644 index 00000000..ffcd8f3b Binary files /dev/null and b/images/news/1103fangwu/fangwu1.jpg differ diff --git a/images/news/1103fangwu/fangwu2.jpg b/images/news/1103fangwu/fangwu2.jpg new file mode 100644 index 00000000..c6262d2a Binary files /dev/null and b/images/news/1103fangwu/fangwu2.jpg differ diff --git a/images/news/1110_heu/heu1.jpg b/images/news/1110_heu/heu1.jpg new file mode 100644 index 00000000..7995277a Binary files /dev/null and b/images/news/1110_heu/heu1.jpg differ diff --git a/images/news/1110_heu/heu2.jpg b/images/news/1110_heu/heu2.jpg new file mode 100644 index 00000000..7ff7f737 Binary files /dev/null and b/images/news/1110_heu/heu2.jpg differ diff --git a/images/news/1110_heu/heu3.jpg b/images/news/1110_heu/heu3.jpg new file mode 100644 index 00000000..028cbaff Binary files /dev/null and b/images/news/1110_heu/heu3.jpg differ diff --git a/images/news/1110_heu/heu4.jpg b/images/news/1110_heu/heu4.jpg new file mode 100644 index 00000000..e7b5307b Binary files /dev/null and 
b/images/news/1110_heu/heu4.jpg differ diff --git a/images/news/1110_heu/heu5.jpg b/images/news/1110_heu/heu5.jpg new file mode 100644 index 00000000..dd752cd9 Binary files /dev/null and b/images/news/1110_heu/heu5.jpg differ diff --git a/images/news/1112HK_BJ_sym/1.jpg b/images/news/1112HK_BJ_sym/1.jpg new file mode 100644 index 00000000..067627ed Binary files /dev/null and b/images/news/1112HK_BJ_sym/1.jpg differ diff --git a/images/news/1112HK_BJ_sym/2.jpg b/images/news/1112HK_BJ_sym/2.jpg new file mode 100644 index 00000000..5ae6a5f6 Binary files /dev/null and b/images/news/1112HK_BJ_sym/2.jpg differ diff --git a/images/news/1112HK_BJ_sym/3.jpg b/images/news/1112HK_BJ_sym/3.jpg new file mode 100644 index 00000000..9c27490f Binary files /dev/null and b/images/news/1112HK_BJ_sym/3.jpg differ diff --git a/images/news/1112HK_BJ_sym/4.jpg b/images/news/1112HK_BJ_sym/4.jpg new file mode 100644 index 00000000..c7c2626d Binary files /dev/null and b/images/news/1112HK_BJ_sym/4.jpg differ diff --git a/images/news/1213Jinjiang/1.jpg b/images/news/1213Jinjiang/1.jpg new file mode 100644 index 00000000..555ad4c7 Binary files /dev/null and b/images/news/1213Jinjiang/1.jpg differ diff --git a/images/news/1213Jinjiang/2.jpg b/images/news/1213Jinjiang/2.jpg new file mode 100644 index 00000000..5283ce38 Binary files /dev/null and b/images/news/1213Jinjiang/2.jpg differ diff --git a/images/news/1213Jinjiang/3.jpg b/images/news/1213Jinjiang/3.jpg new file mode 100644 index 00000000..e0a6f840 Binary files /dev/null and b/images/news/1213Jinjiang/3.jpg differ diff --git a/images/news/1213Jinjiang/4.jpg b/images/news/1213Jinjiang/4.jpg new file mode 100644 index 00000000..c682cb9f Binary files /dev/null and b/images/news/1213Jinjiang/4.jpg differ diff --git a/images/news/1213Jinjiang/5.jpg b/images/news/1213Jinjiang/5.jpg new file mode 100644 index 00000000..9001e8f8 Binary files /dev/null and b/images/news/1213Jinjiang/5.jpg differ diff --git a/images/news/1213Jinjiang/readme.md 
b/images/news/1213Jinjiang/readme.md new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/images/news/1213Jinjiang/readme.md @@ -0,0 +1 @@ + diff --git a/images/news/1218/demo.mp4 b/images/news/1218/demo.mp4 new file mode 100644 index 00000000..aa725aee Binary files /dev/null and b/images/news/1218/demo.mp4 differ diff --git a/images/news/1218/silent.jpg b/images/news/1218/silent.jpg new file mode 100644 index 00000000..3ce53e47 Binary files /dev/null and b/images/news/1218/silent.jpg differ diff --git a/images/news/1218/silent2.jpg b/images/news/1218/silent2.jpg new file mode 100644 index 00000000..98ad79b7 Binary files /dev/null and b/images/news/1218/silent2.jpg differ diff --git a/images/news/1219/image1.jpg b/images/news/1219/image1.jpg new file mode 100644 index 00000000..14f51292 Binary files /dev/null and b/images/news/1219/image1.jpg differ diff --git a/images/news/1219/image2.jpg b/images/news/1219/image2.jpg new file mode 100644 index 00000000..fa37f9f6 Binary files /dev/null and b/images/news/1219/image2.jpg differ diff --git a/images/news/1219/image3.jpg b/images/news/1219/image3.jpg new file mode 100644 index 00000000..14f51292 Binary files /dev/null and b/images/news/1219/image3.jpg differ diff --git a/images/news/1219/image4.jpg b/images/news/1219/image4.jpg new file mode 100644 index 00000000..fc20f3b7 Binary files /dev/null and b/images/news/1219/image4.jpg differ diff --git a/images/news/20251103_Zhengxi/zhengxi_oral_defense.jpg b/images/news/20251103_Zhengxi/zhengxi_oral_defense.jpg new file mode 100644 index 00000000..8686e034 Binary files /dev/null and b/images/news/20251103_Zhengxi/zhengxi_oral_defense.jpg differ diff --git a/images/news/20260113_RinoaiMoU/MoU.png b/images/news/20260113_RinoaiMoU/MoU.png new file mode 100644 index 00000000..8ee06d8f Binary files /dev/null and b/images/news/20260113_RinoaiMoU/MoU.png differ diff --git a/images/news/20260204_CFSO_WuxiRI/1.jpg b/images/news/20260204_CFSO_WuxiRI/1.jpg new file mode 
100644 index 00000000..0fc54592 Binary files /dev/null and b/images/news/20260204_CFSO_WuxiRI/1.jpg differ diff --git a/images/news/20260204_CFSO_WuxiRI/2.png b/images/news/20260204_CFSO_WuxiRI/2.png new file mode 100644 index 00000000..54538a5e Binary files /dev/null and b/images/news/20260204_CFSO_WuxiRI/2.png differ diff --git a/images/news/20260204_CFSO_WuxiRI/3.png b/images/news/20260204_CFSO_WuxiRI/3.png new file mode 100644 index 00000000..cf3b2f77 Binary files /dev/null and b/images/news/20260204_CFSO_WuxiRI/3.png differ diff --git a/images/news/20260204_CFSO_WuxiRI/4.jpg b/images/news/20260204_CFSO_WuxiRI/4.jpg new file mode 100644 index 00000000..0fc54592 Binary files /dev/null and b/images/news/20260204_CFSO_WuxiRI/4.jpg differ diff --git a/images/news/20260204_CFSO_WuxiRI/5.jpg b/images/news/20260204_CFSO_WuxiRI/5.jpg new file mode 100644 index 00000000..f9884ee4 Binary files /dev/null and b/images/news/20260204_CFSO_WuxiRI/5.jpg differ diff --git a/images/news/20260204_CFSO_WuxiRI/pic_note.txt b/images/news/20260204_CFSO_WuxiRI/pic_note.txt new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/images/news/20260204_CFSO_WuxiRI/pic_note.txt @@ -0,0 +1 @@ + diff --git a/images/news/2026ICRA/Poster_WS_RobotMeetsGNSSRanging.png b/images/news/2026ICRA/Poster_WS_RobotMeetsGNSSRanging.png new file mode 100644 index 00000000..970629b5 Binary files /dev/null and b/images/news/2026ICRA/Poster_WS_RobotMeetsGNSSRanging.png differ diff --git a/images/news/2026ICRA/system_framework.png b/images/news/2026ICRA/system_framework.png new file mode 100644 index 00000000..b8b07970 Binary files /dev/null and b/images/news/2026ICRA/system_framework.png differ diff --git a/images/news/260128/260128.jpeg b/images/news/260128/260128.jpeg new file mode 100644 index 00000000..4e7df9c5 Binary files /dev/null and b/images/news/260128/260128.jpeg differ diff --git a/images/news/IROS2025/oral_present.jpg b/images/news/IROS2025/oral_present.jpg new file mode 100644 index 
00000000..44f6dba4 Binary files /dev/null and b/images/news/IROS2025/oral_present.jpg differ diff --git a/images/news/IROS2025/poster.jpg b/images/news/IROS2025/poster.jpg new file mode 100644 index 00000000..9fd429a1 Binary files /dev/null and b/images/news/IROS2025/poster.jpg differ diff --git a/images/news/IROS2025/poster_present.jpg b/images/news/IROS2025/poster_present.jpg new file mode 100644 index 00000000..4d7c00d7 Binary files /dev/null and b/images/news/IROS2025/poster_present.jpg differ diff --git a/images/news/IROS2025/robot.jpg b/images/news/IROS2025/robot.jpg new file mode 100644 index 00000000..b125de46 Binary files /dev/null and b/images/news/IROS2025/robot.jpg differ diff --git a/images/news/ITSC2025/group_photo.jpg b/images/news/ITSC2025/group_photo.jpg new file mode 100644 index 00000000..56cdf628 Binary files /dev/null and b/images/news/ITSC2025/group_photo.jpg differ diff --git a/images/news/ITSC2025/hsu.png b/images/news/ITSC2025/hsu.png new file mode 100644 index 00000000..0cc7cb67 Binary files /dev/null and b/images/news/ITSC2025/hsu.png differ diff --git a/images/news/ITSC2025/join.JPG b/images/news/ITSC2025/join.JPG new file mode 100644 index 00000000..27f57ac0 Binary files /dev/null and b/images/news/ITSC2025/join.JPG differ diff --git a/images/news/ITSC2025/kousik.jpg b/images/news/ITSC2025/kousik.jpg new file mode 100644 index 00000000..97de3547 Binary files /dev/null and b/images/news/ITSC2025/kousik.jpg differ diff --git a/images/news/ITSC2025/kousik.png b/images/news/ITSC2025/kousik.png new file mode 100644 index 00000000..0f2893c2 Binary files /dev/null and b/images/news/ITSC2025/kousik.png differ diff --git a/images/news/ITSC2025/shan.JPG b/images/news/ITSC2025/shan.JPG new file mode 100644 index 00000000..86942ad4 Binary files /dev/null and b/images/news/ITSC2025/shan.JPG differ diff --git a/images/news/ITSC2025/tim.png b/images/news/ITSC2025/tim.png new file mode 100644 index 00000000..c4d33ff4 Binary files /dev/null and 
b/images/news/ITSC2025/tim.png differ diff --git a/images/news/ITSC2025/zhangfu.png b/images/news/ITSC2025/zhangfu.png new file mode 100644 index 00000000..ba41c3ad Binary files /dev/null and b/images/news/ITSC2025/zhangfu.png differ diff --git a/images/news/ITSC2025/zhou.JPG b/images/news/ITSC2025/zhou.JPG new file mode 100644 index 00000000..75cd7338 Binary files /dev/null and b/images/news/ITSC2025/zhou.JPG differ diff --git a/images/news/Shougang/shougang1.png b/images/news/Shougang/shougang1.png new file mode 100644 index 00000000..ddba5bf2 Binary files /dev/null and b/images/news/Shougang/shougang1.png differ diff --git a/images/news/Shougang/shougang2.jpg b/images/news/Shougang/shougang2.jpg new file mode 100644 index 00000000..6cda0e1b Binary files /dev/null and b/images/news/Shougang/shougang2.jpg differ diff --git a/images/news/Shougang/shougang3.jpg b/images/news/Shougang/shougang3.jpg new file mode 100644 index 00000000..6d502d43 Binary files /dev/null and b/images/news/Shougang/shougang3.jpg differ diff --git a/images/news/Shougang/shougang4.jpg b/images/news/Shougang/shougang4.jpg new file mode 100644 index 00000000..a8b419e7 Binary files /dev/null and b/images/news/Shougang/shougang4.jpg differ diff --git a/images/news/Ubeat/image1.png b/images/news/Ubeat/image1.png new file mode 100644 index 00000000..7490ab2c Binary files /dev/null and b/images/news/Ubeat/image1.png differ diff --git a/images/news/Ubeat/image2.png b/images/news/Ubeat/image2.png new file mode 100644 index 00000000..bd484ea5 Binary files /dev/null and b/images/news/Ubeat/image2.png differ diff --git a/images/news/Ubeat/image3.png b/images/news/Ubeat/image3.png new file mode 100644 index 00000000..0923d9f3 Binary files /dev/null and b/images/news/Ubeat/image3.png differ diff --git a/images/news/nanjingjiangning/1.jpg b/images/news/nanjingjiangning/1.jpg new file mode 100644 index 00000000..fbdf4a3e Binary files /dev/null and b/images/news/nanjingjiangning/1.jpg differ diff --git 
a/images/news/nanjingjiangning/2.jpg b/images/news/nanjingjiangning/2.jpg new file mode 100644 index 00000000..a88a915e Binary files /dev/null and b/images/news/nanjingjiangning/2.jpg differ diff --git a/images/news/ruijie_rttlio.png b/images/news/ruijie_rttlio.png new file mode 100644 index 00000000..09d9cc5c Binary files /dev/null and b/images/news/ruijie_rttlio.png differ diff --git a/images/opensource/HDMap/garage.png b/images/opensource/HDMap/garage.png new file mode 100644 index 00000000..46bae70d Binary files /dev/null and b/images/opensource/HDMap/garage.png differ diff --git a/images/opensource/HDMap/garage_half.gif b/images/opensource/HDMap/garage_half.gif new file mode 100644 index 00000000..3355baca Binary files /dev/null and b/images/opensource/HDMap/garage_half.gif differ diff --git a/images/opensource/TDL-GNSS/TDL_structure.png b/images/opensource/TDL-GNSS/TDL_structure.png new file mode 100644 index 00000000..cf5b2a93 Binary files /dev/null and b/images/opensource/TDL-GNSS/TDL_structure.png differ diff --git a/images/opensource/TasFusion/board.png b/images/opensource/TasFusion/board.png new file mode 100644 index 00000000..96d1abe2 Binary files /dev/null and b/images/opensource/TasFusion/board.png differ diff --git a/images/opensource/TasFusion/demo.gif b/images/opensource/TasFusion/demo.gif new file mode 100644 index 00000000..5545affe Binary files /dev/null and b/images/opensource/TasFusion/demo.gif differ diff --git a/images/opensource/TasFusion/longdata.png b/images/opensource/TasFusion/longdata.png new file mode 100644 index 00000000..83913d9d Binary files /dev/null and b/images/opensource/TasFusion/longdata.png differ diff --git a/images/opensource/kltdataset/NLOS_crop.gif b/images/opensource/kltdataset/NLOS_crop.gif new file mode 100644 index 00000000..8713c039 Binary files /dev/null and b/images/opensource/kltdataset/NLOS_crop.gif differ diff --git a/images/opensource/zhengxi/framework.png b/images/opensource/zhengxi/framework.png new file 
mode 100644 index 00000000..2836ccd9 Binary files /dev/null and b/images/opensource/zhengxi/framework.png differ diff --git a/images/opensource/zhengxi/framework2.png b/images/opensource/zhengxi/framework2.png new file mode 100644 index 00000000..906fb328 Binary files /dev/null and b/images/opensource/zhengxi/framework2.png differ diff --git a/images/project/3DLA-GNSS.jpg b/images/project/3DLA-GNSS.jpg new file mode 100644 index 00000000..2fe6fe08 Binary files /dev/null and b/images/project/3DLA-GNSS.jpg differ diff --git a/images/project/AGV_demo.png b/images/project/AGV_demo.png new file mode 100644 index 00000000..a97940ba Binary files /dev/null and b/images/project/AGV_demo.png differ diff --git a/images/project/E2EDrone.png b/images/project/E2EDrone.png new file mode 100644 index 00000000..37609c47 Binary files /dev/null and b/images/project/E2EDrone.png differ diff --git a/images/project/E2EDrone2.png b/images/project/E2EDrone2.png new file mode 100644 index 00000000..57076704 Binary files /dev/null and b/images/project/E2EDrone2.png differ diff --git a/images/project/E2EHL.png b/images/project/E2EHL.png new file mode 100644 index 00000000..34f0ff5a Binary files /dev/null and b/images/project/E2EHL.png differ diff --git a/images/project/E2ELV.png b/images/project/E2ELV.png new file mode 100644 index 00000000..40009e1d Binary files /dev/null and b/images/project/E2ELV.png differ diff --git a/images/project/EBAIEdu.png b/images/project/EBAIEdu.png new file mode 100644 index 00000000..ce4bd0dc Binary files /dev/null and b/images/project/EBAIEdu.png differ diff --git a/images/project/demo_20220923.jpg b/images/project/demo_20220923.jpg new file mode 100644 index 00000000..d5498f77 Binary files /dev/null and b/images/project/demo_20220923.jpg differ diff --git a/images/project/funding.jpg b/images/project/funding.jpg new file mode 100644 index 00000000..bec4a4bf Binary files /dev/null and b/images/project/funding.jpg differ diff --git 
a/images/project/multi-sensor.jpg b/images/project/multi-sensor.jpg new file mode 100644 index 00000000..7557a4ff Binary files /dev/null and b/images/project/multi-sensor.jpg differ diff --git a/images/project/robotics_education.jpg b/images/project/robotics_education.jpg new file mode 100644 index 00000000..2a83513f Binary files /dev/null and b/images/project/robotics_education.jpg differ diff --git a/images/project/visualSafety.png b/images/project/visualSafety.png new file mode 100644 index 00000000..ce01d2de Binary files /dev/null and b/images/project/visualSafety.png differ diff --git a/images/team/Akida.jpg b/images/team/Akida.jpg new file mode 100644 index 00000000..66ae6dc4 Binary files /dev/null and b/images/team/Akida.jpg differ diff --git a/images/team/LiHeng.jpg b/images/team/LiHeng.jpg new file mode 100644 index 00000000..0528a31b Binary files /dev/null and b/images/team/LiHeng.jpg differ diff --git a/images/team/fengchiZHU.jpg b/images/team/fengchiZHU.jpg new file mode 100644 index 00000000..9c64422f Binary files /dev/null and b/images/team/fengchiZHU.jpg differ diff --git a/images/team/guangyanGuo.jpg b/images/team/guangyanGuo.jpg new file mode 100644 index 00000000..e04da31d Binary files /dev/null and b/images/team/guangyanGuo.jpg differ diff --git a/images/team/hongchang.jpg b/images/team/hongchang.jpg new file mode 100644 index 00000000..19009001 Binary files /dev/null and b/images/team/hongchang.jpg differ diff --git a/images/team/jianhaojiao_pict_2023.jpg b/images/team/jianhaojiao_pict_2023.jpg new file mode 100644 index 00000000..947ca729 Binary files /dev/null and b/images/team/jianhaojiao_pict_2023.jpg differ diff --git a/images/team/wang_junzhe.png b/images/team/wang_junzhe.png new file mode 100644 index 00000000..b887d04d Binary files /dev/null and b/images/team/wang_junzhe.png differ diff --git a/images/team/wang_xiangru.jpg b/images/team/wang_xiangru.jpg index e4fa8a6e..4acbffad 100644 Binary files a/images/team/wang_xiangru.jpg and 
b/images/team/wang_xiangru.jpg differ diff --git a/images/team/yang_mokui.jpg b/images/team/yang_mokui.jpg new file mode 100644 index 00000000..94406e69 Binary files /dev/null and b/images/team/yang_mokui.jpg differ diff --git a/images/team/zhongqi_wang.jpg b/images/team/zhongqi_wang.jpg new file mode 100644 index 00000000..eb14d4df Binary files /dev/null and b/images/team/zhongqi_wang.jpg differ diff --git a/index.md b/index.md index fe5bff19..4f91793f 100644 --- a/index.md +++ b/index.md @@ -3,84 +3,136 @@ # PolyU TAS LAB's Website -
-The Trustworthy AI and Autonomous Systems (TAS) Laboratory is at the forefront of pioneering advancements in autonomous systems (such as UAV and self-driving cars) technology, emphasizing the importance of safety, reliability, and ethical standards. Our laboratory is home to a diverse group of researchers and engineers who specialize in artificial intelligence, robotics, cybersecurity, and human-system interaction. Together, we are committed to developing autonomous systems that inspire confidence and trust among users and stakeholders. Through collaborative efforts with industry partners, academic institutions, and policymakers, our team addresses the complex challenges of integrating autonomous systems into society, ensuring they operate transparently and responsibly. +
+The Trustworthy AI and Autonomous Systems (TAS) Laboratory is at the forefront of pioneering advancements in autonomous systems (such as UAV and self-driving cars) technology, emphasizing the importance of safety, reliability, and ethical standards. Our laboratory is home to a diverse group of researchers and engineers who specialize in artificial intelligence, robotics, cybersecurity, and human-system interaction. Together, we are committed to developing autonomous systems that inspire confidence and trust among users and stakeholders. Through collaborative efforts with industry partners, academic institutions, and policymakers, our team addresses the complex challenges of integrating autonomous systems into society, ensuring they operate transparently and responsibly.
- {% include section.html %} -## Highlights - -{% capture text %} - - - -{% - include button.html - link="research" - text="See our publications" - icon="fa-solid fa-arrow-right" - flip=true - style="bare" -%} - -{% endcapture %} - -{% - include feature.html - image="images/coding.jpg" - link="research" - title="Our Research" - text=text -%} - -{% capture text %} - - +## Research Topics -{% - include button.html - link="projects" - text="Browse our projects" - icon="fa-solid fa-arrow-right" - flip=true - style="bare" -%} - -{% endcapture %} - -{% - include feature.html - image="images/project/all_set.jpg" - link="projects" - title="Our Projects" - flip=true - style="bare" - text=text -%} +
+Our research aims to build algorithm foundations for embodied AI that enable trustworthy perception, navigation, and control of autonomous systems. We develop practical embodied AI-driven autonomous systems — including drones, intelligent vehicles, and legged/humanoid robots — with end-to-end learning and safety certification capabilities, enabling them to perceive, reason, and interact with the physical world safely and reliably for the future society. Our work spans large AI models for autonomous systems, foundation models and vision-language-action models for robotic perception and control, AI-enabled multi-sensor fusion, and software-hardware co-design for efficient embodied AI systems. +
-{% capture text %} + + +
+ +
+

🛰️ 3D LiDAR Aided GNSS Positioning

+

AI-driven GNSS positioning (RTK, PPP, PPP-RTK), 3D LiDAR aided NLOS/multipath mitigation, multi-sensor fusion for robust urban navigation.

+
+
+ +
+

🔒 Safety-Certifiable Multi-Sensor Fusion

+

Safety-certifiable AI for autonomous navigation, AI-enabled multi-sensor fusion (LiDAR/Camera/IMU/GNSS), integrity monitoring and navigation-control joint optimization.

+
+
+ +
+

🚗 End-to-End Autonomous Vehicles

+

End-to-end learning for self-driving, safety certification for logistics applications, V2X-assisted connected autonomous driving.

+
+
+ +
+

🤖 Embodied AI for Legged/Humanoid Robotics

+

Large AI models and vision-language-action models for robotic perception and control, bio-inspired embodied intelligence, multimodal learning for legged/humanoid robots.

+
+
+ +
+

🚁 Embodied Drones for City Maintenance

+

Intelligent drones and UAV swarm systems, aerial manipulation for urban infrastructure, software-hardware co-design for efficient embodied AI drone systems.

+
+
+ +
+

🎓 Embodied AI for Robotics Education

+

AI-powered robotics education platforms, hands-on project-based learning with drones and ground robots, GitHub-based collaborative learning pedagogy.

+
+
+
+{% include section.html %} +## Videos -{% - include button.html - link="team" - text="Meet our team" - icon="fa-solid fa-arrow-right" - flip=true - style="bare" -%} +
+ + + + + +
-{% endcapture %} +{% include section.html %} -{% - include feature.html - image="images/team/team.png" - link="team" - title="Our Team" - text=text -%} +## Visitor Map - \ No newline at end of file +
+
+ +
+
diff --git a/news/index.md b/news/index.md index a0e2d9df..32cf2c38 100644 --- a/news/index.md +++ b/news/index.md @@ -1,11 +1,11 @@ --- -title: Events & News +title: News nav: order: 1 - tooltip: Recent News + tooltip: Events and news --- -# {% include icon.html icon="fa-light fa-bullhorn" %}Events & News +# {% include icon.html icon="fa-solid fa-newspaper" %}News diff --git a/openings/index.md b/openings/index.md new file mode 100644 index 00000000..867f176f --- /dev/null +++ b/openings/index.md @@ -0,0 +1,99 @@ +--- +title: Openings +nav: + order: 7 + tooltip: Openings and contact +--- + +# {% include icon.html icon="fa-regular fa-envelope" %}Openings + +We regularly have multiple openings for Postdoc/PhD/MPhil/RA/Internships (all year round) to work on research related to AI-driven trustworthy autonomous systems, with a focus on end-to-end autonomous UAVs and end-to-end self-driving cars. If you are a PolyU student (Undergraduate and MSc students seeking URIS or dissertation supervision) interested in working with me, feel free to drop me an email at welson.wen@polyu.edu.hk (together with your transcript and brief introduction) or walk into my office at room R820! + +--- + +#### Postdoc/PhD/MPhil/RA Research Directions + +- Embodied AI and foundation models for robotics (drones, autonomous vehicles, ground robots) +- High-precision positioning with multi-sensor fusion (LiDAR/Camera/IMU/GNSS) and integrity monitoring +- End-to-end learning for self-driving cars and autonomous UAVs +- Trustworthy and safety-certifiable AI for navigation and control +- Software-hardware co-design for efficient embodied AI systems +- Vision-language-action models and multimodal learning for autonomous systems + +For more specific topics, please refer to our [TAS Lab website](https://polyu-taslab.github.io/) and [research page](https://polyu-taslab.github.io/research/). 
+ +--- + +#### Application Requirements + +For those interested, please send the following materials to welson.wen@polyu.edu.hk: + +1. CV (with education background, publications, awards, and coding experience) +2. Representative publications list (if any) +3. A detailed research proposal (~6 pages) including abstract, background and literature review, research objectives, proposed methodology, expected outcomes, timeline, and references. + +We will reply to you within one week if you are shortlisted for an interview. + +For any candidate, you MUST have at least two of the following: + +1. A strong publication record in top-tier AI/robotics venues (e.g., NeurIPS, ICML, ICRA, IROS, CoRL, CVPR, ICCV); +2. Strong capabilities in coding (proficient in C++ and/or Python, experience with PyTorch/TensorFlow/ROS); +3. Awards or demonstrated excellence in robotics competitions (e.g., RoboMaster, ICRA competitions) are strongly preferred for PhD/MPhil applicants. + +--- + +#### What We Offer + +- Access to cutting-edge UAV platforms, self-driving car testbeds, and GPU computing clusters +- Collaboration with leading industry partners (Huawei, Tencent, Meituan, HONOR) +- Opportunities to publish in top AI/robotics conferences and journals +- A vibrant, diverse, and inclusive research environment with 30+ lab members +- Funding support for conference travel and research equipment + +**Application materials:** CV + Publications/Coding portfolio + Research statement → welson.wen@polyu.edu.hk + +{% + include button.html + type="email" + text="welson.wen@polyu.edu.hk" + link="welson.wen@polyu.edu.hk" +%} +{% + include button.html + type="phone" + text="(852) 3400 8234" + link="+852 3400 8234" +%} +{% + include button.html + type="address" + tooltip="Our location on Google Maps for easy navigation" + link="https://maps.app.goo.gl/Aj8Zj2xQ8KzHSRtr9" +%} + +{% include section.html %} + +{% capture col1 %} + +{% + include figure.html + image="images/AboutPolyU_Campus3.png" + 
width="66%" + caption=" " +%} + +{% endcapture %} + +{% capture col2 %} + +{% + include figure.html + image="images/AboutPolyU_Campus5.jpg" + width="66%" + caption=" " +%} + +{% endcapture %} + +{% include cols.html col1=col1 col2=col2 %} + diff --git a/opensource/index.md b/opensource/index.md new file mode 100644 index 00000000..113ab1da --- /dev/null +++ b/opensource/index.md @@ -0,0 +1,20 @@ +--- +title: Dataset & Code +nav: + order: 6 + tooltip: Open-source datasets and code +--- + +# {% include icon.html icon="fa-solid fa-code" %}Dataset & Code + +
+We are committed to open science and reproducible research by sharing our datasets, software packages, and code with the broader research community. Below are the open-source tools and resources developed by TAS Lab, spanning GNSS positioning, multi-sensor fusion, visual localization, and HD mapping. All repositories are publicly available on the TAS Lab GitHub. +
+ +{% include section.html %} + +{% include search-box.html %} + +{% include search-info.html %} + +{% include list.html data="opensource" component="post-excerpt" %} diff --git a/projects/index.md b/projects/index.md deleted file mode 100644 index 9084d468..00000000 --- a/projects/index.md +++ /dev/null @@ -1,22 +0,0 @@ ---- -title: Projects -nav: - order: 3 - tooltip: ---- - -# {% include icon.html icon="fa-solid fa-wrench" %}Projects - - - - - -{% include section.html %} - -{% include search-box.html %} - -{% include tags.html tags=site.tags %} - -{% include search-info.html %} - -{% include list.html data="posts" component="post-excerpt" %} \ No newline at end of file diff --git a/publications/index.md b/publications/index.md new file mode 100644 index 00000000..66ab9c4e --- /dev/null +++ b/publications/index.md @@ -0,0 +1,21 @@ +--- +title: Publications +nav: + order: 2 + tooltip: Journal and conference papers +--- + +# {% include icon.html icon="fa-solid fa-book" %}Publications + +More on [Google Scholar](https://scholar.google.com/citations?user=N-AFqt8AAAAJ&hl=en){:target="_blank"} \| *: Corresponding author +{:.center} + +{% include section.html %} + +## All + +{% include search-box.html %} + +{% include search-info.html %} + +{% include list.html data="citations" component="citation" style="rich" %} diff --git a/research/papers/2004.10572v2.pdf b/publications/papers/2004.10572v2.pdf similarity index 100% rename from research/papers/2004.10572v2.pdf rename to publications/papers/2004.10572v2.pdf diff --git a/research/papers/2024/Bai2024InvariantEKF.pdf b/publications/papers/2024/Bai2024InvariantEKF.pdf similarity index 100% rename from research/papers/2024/Bai2024InvariantEKF.pdf rename to publications/papers/2024/Bai2024InvariantEKF.pdf diff --git a/research/papers/2024/BaiShiyu2024Factor_Graph_Optimization-Based_Smartphone_IMU-Only_Indoor_SLAM_With_Multihypothesis_Turning_Behavior_Loop_Closures.pdf 
b/publications/papers/2024/BaiShiyu2024Factor_Graph_Optimization-Based_Smartphone_IMU-Only_Indoor_SLAM_With_Multihypothesis_Turning_Behavior_Loop_Closures.pdf similarity index 100% rename from research/papers/2024/BaiShiyu2024Factor_Graph_Optimization-Based_Smartphone_IMU-Only_Indoor_SLAM_With_Multihypothesis_Turning_Behavior_Loop_Closures.pdf rename to publications/papers/2024/BaiShiyu2024Factor_Graph_Optimization-Based_Smartphone_IMU-Only_Indoor_SLAM_With_Multihypothesis_Turning_Behavior_Loop_Closures.pdf diff --git a/research/papers/2024/BaiShiyu2024Toward_Persistent_Spatial_Awareness_A_Review_of_Pedestrian_Dead_Reckoning-Centric_Indoor_Positioning_With_Smartphones.pdf b/publications/papers/2024/BaiShiyu2024Toward_Persistent_Spatial_Awareness_A_Review_of_Pedestrian_Dead_Reckoning-Centric_Indoor_Positioning_With_Smartphones.pdf similarity index 100% rename from research/papers/2024/BaiShiyu2024Toward_Persistent_Spatial_Awareness_A_Review_of_Pedestrian_Dead_Reckoning-Centric_Indoor_Positioning_With_Smartphones.pdf rename to publications/papers/2024/BaiShiyu2024Toward_Persistent_Spatial_Awareness_A_Review_of_Pedestrian_Dead_Reckoning-Centric_Indoor_Positioning_With_Smartphones.pdf diff --git a/research/papers/2024/ChenZhiqiang2024ICRARELEAD.pdf b/publications/papers/2024/ChenZhiqiang2024ICRARELEAD.pdf similarity index 100% rename from research/papers/2024/ChenZhiqiang2024ICRARELEAD.pdf rename to publications/papers/2024/ChenZhiqiang2024ICRARELEAD.pdf diff --git a/research/papers/2024/Ivan2024GNSS24-0239.pdf b/publications/papers/2024/Ivan2024GNSS24-0239.pdf similarity index 100% rename from research/papers/2024/Ivan2024GNSS24-0239.pdf rename to publications/papers/2024/Ivan2024GNSS24-0239.pdf diff --git a/research/papers/2024/Jiachen2024Safety-Quantifiable_Planar-Feature-based_LiDAR_Localization.pdf b/publications/papers/2024/Jiachen2024Safety-Quantifiable_Planar-Feature-based_LiDAR_Localization.pdf similarity index 100% rename from 
research/papers/2024/Jiachen2024Safety-Quantifiable_Planar-Feature-based_LiDAR_Localization.pdf rename to publications/papers/2024/Jiachen2024Safety-Quantifiable_Planar-Feature-based_LiDAR_Localization.pdf diff --git a/research/papers/2024/Liyuan2024GNSS24-0200.pdf b/publications/papers/2024/Liyuan2024GNSS24-0200.pdf similarity index 100% rename from research/papers/2024/Liyuan2024GNSS24-0200.pdf rename to publications/papers/2024/Liyuan2024GNSS24-0200.pdf diff --git a/research/papers/2024/PeiwenYang2024TightlyJoined.pdf b/publications/papers/2024/PeiwenYang2024TightlyJoined.pdf similarity index 100% rename from research/papers/2024/PeiwenYang2024TightlyJoined.pdf rename to publications/papers/2024/PeiwenYang2024TightlyJoined.pdf diff --git a/research/papers/2024/SiyuanDu2024A_Novel_Consistent-Robust_SINS_GNSS_NHC_Integrated_Navigation_Method_for_Autonomous_Vehicles_Under_Intermittent_GNSS_Outage.pdf b/publications/papers/2024/SiyuanDu2024A_Novel_Consistent-Robust_SINS_GNSS_NHC_Integrated_Navigation_Method_for_Autonomous_Vehicles_Under_Intermittent_GNSS_Outage.pdf similarity index 100% rename from research/papers/2024/SiyuanDu2024A_Novel_Consistent-Robust_SINS_GNSS_NHC_Integrated_Navigation_Method_for_Autonomous_Vehicles_Under_Intermittent_GNSS_Outage.pdf rename to publications/papers/2024/SiyuanDu2024A_Novel_Consistent-Robust_SINS_GNSS_NHC_Integrated_Navigation_Method_for_Autonomous_Vehicles_Under_Intermittent_GNSS_Outage.pdf diff --git a/research/papers/2024/WenyuYang2024LiDAR_Stereo_Visual_Inertial_Pose_Estimation_Based_on_Feedforward_and_Feedbacks.pdf b/publications/papers/2024/WenyuYang2024LiDAR_Stereo_Visual_Inertial_Pose_Estimation_Based_on_Feedforward_and_Feedbacks.pdf similarity index 100% rename from research/papers/2024/WenyuYang2024LiDAR_Stereo_Visual_Inertial_Pose_Estimation_Based_on_Feedforward_and_Feedbacks.pdf rename to publications/papers/2024/WenyuYang2024LiDAR_Stereo_Visual_Inertial_Pose_Estimation_Based_on_Feedforward_and_Feedbacks.pdf diff 
--git a/research/papers/2024/Xia2024_Meas._Sci._Technol._35_126302.pdf b/publications/papers/2024/Xia2024_Meas._Sci._Technol._35_126302.pdf similarity index 100% rename from research/papers/2024/Xia2024_Meas._Sci._Technol._35_126302.pdf rename to publications/papers/2024/Xia2024_Meas._Sci._Technol._35_126302.pdf diff --git a/research/papers/2024/XiaoXia2024Integrity.pdf b/publications/papers/2024/XiaoXia2024Integrity.pdf similarity index 100% rename from research/papers/2024/XiaoXia2024Integrity.pdf rename to publications/papers/2024/XiaoXia2024Integrity.pdf diff --git a/research/papers/2024/XuRuijie2024Nonlinearity-Aware_ZUPT-Aided_Pedestrian_Inertial_Navigation.pdf b/publications/papers/2024/XuRuijie2024Nonlinearity-Aware_ZUPT-Aided_Pedestrian_Inertial_Navigation.pdf similarity index 100% rename from research/papers/2024/XuRuijie2024Nonlinearity-Aware_ZUPT-Aided_Pedestrian_Inertial_Navigation.pdf rename to publications/papers/2024/XuRuijie2024Nonlinearity-Aware_ZUPT-Aided_Pedestrian_Inertial_Navigation.pdf diff --git a/research/papers/2024/YangPenggao2024Subspace-based_Adaptive_GMM_Error_Modeling_for_Fault-Aware_Pseudorange-based_Positioning_in_Urban_Canyons.pdf b/publications/papers/2024/YangPenggao2024Subspace-based_Adaptive_GMM_Error_Modeling_for_Fault-Aware_Pseudorange-based_Positioning_in_Urban_Canyons.pdf similarity index 100% rename from research/papers/2024/YangPenggao2024Subspace-based_Adaptive_GMM_Error_Modeling_for_Fault-Aware_Pseudorange-based_Positioning_in_Urban_Canyons.pdf rename to publications/papers/2024/YangPenggao2024Subspace-based_Adaptive_GMM_Error_Modeling_for_Fault-Aware_Pseudorange-based_Positioning_in_Urban_Canyons.pdf diff --git a/research/papers/2024/YihanZhong2024Trajectory_Smoothing_Using_GNSS_PDR_Integration_via_Factor_Graph_Optimization_in_Urban_Canyons.pdf b/publications/papers/2024/YihanZhong2024Trajectory_Smoothing_Using_GNSS_PDR_Integration_via_Factor_Graph_Optimization_in_Urban_Canyons.pdf similarity index 100% rename from 
research/papers/2024/YihanZhong2024Trajectory_Smoothing_Using_GNSS_PDR_Integration_via_Factor_Graph_Optimization_in_Urban_Canyons.pdf rename to publications/papers/2024/YihanZhong2024Trajectory_Smoothing_Using_GNSS_PDR_Integration_via_Factor_Graph_Optimization_in_Urban_Canyons.pdf diff --git a/research/papers/2024/Zhengxi2024Tightly-coupled_Visual_Inertial_Map_Integration.pdf b/publications/papers/2024/Zhengxi2024Tightly-coupled_Visual_Inertial_Map_Integration.pdf similarity index 100% rename from research/papers/2024/Zhengxi2024Tightly-coupled_Visual_Inertial_Map_Integration.pdf rename to publications/papers/2024/Zhengxi2024Tightly-coupled_Visual_Inertial_Map_Integration.pdf diff --git a/research/papers/2024/Zhu_2024_Meas._Sci._Technol._35_086303.pdf b/publications/papers/2024/Zhu_2024_Meas._Sci._Technol._35_086303.pdf similarity index 100% rename from research/papers/2024/Zhu_2024_Meas._Sci._Technol._35_086303.pdf rename to publications/papers/2024/Zhu_2024_Meas._Sci._Technol._35_086303.pdf diff --git a/research/papers/2024/runzhihu2024pyrtklib2409.12996v1.pdf b/publications/papers/2024/runzhihu2024pyrtklib2409.12996v1.pdf similarity index 100% rename from research/papers/2024/runzhihu2024pyrtklib2409.12996v1.pdf rename to publications/papers/2024/runzhihu2024pyrtklib2409.12996v1.pdf diff --git a/research/papers/2211.15127v1.pdf b/publications/papers/2211.15127v1.pdf similarity index 100% rename from research/papers/2211.15127v1.pdf rename to publications/papers/2211.15127v1.pdf diff --git a/research/papers/2212.05477v1.pdf b/publications/papers/2212.05477v1.pdf similarity index 100% rename from research/papers/2212.05477v1.pdf rename to publications/papers/2212.05477v1.pdf diff --git a/research/papers/2311.02327v1.pdf b/publications/papers/2311.02327v1.pdf similarity index 100% rename from research/papers/2311.02327v1.pdf rename to publications/papers/2311.02327v1.pdf diff --git a/research/papers/2402.11790v2.pdf b/publications/papers/2402.11790v2.pdf similarity 
index 100% rename from research/papers/2402.11790v2.pdf rename to publications/papers/2402.11790v2.pdf diff --git a/research/papers/2404.14724v2.pdf b/publications/papers/2404.14724v2.pdf similarity index 100% rename from research/papers/2404.14724v2.pdf rename to publications/papers/2404.14724v2.pdf diff --git a/research/papers/2507.03987v1.pdf b/publications/papers/2507.03987v1.pdf similarity index 100% rename from research/papers/2507.03987v1.pdf rename to publications/papers/2507.03987v1.pdf diff --git a/publications/papers/2509.17198v1.pdf b/publications/papers/2509.17198v1.pdf new file mode 100644 index 00000000..869bda7e Binary files /dev/null and b/publications/papers/2509.17198v1.pdf differ diff --git a/publications/papers/2509.21496v1.pdf b/publications/papers/2509.21496v1.pdf new file mode 100644 index 00000000..2875f164 Binary files /dev/null and b/publications/papers/2509.21496v1.pdf differ diff --git a/publications/papers/2510.00524v1.pdf b/publications/papers/2510.00524v1.pdf new file mode 100644 index 00000000..d1612079 Binary files /dev/null and b/publications/papers/2510.00524v1.pdf differ diff --git a/publications/papers/2510.04278v1.pdf b/publications/papers/2510.04278v1.pdf new file mode 100644 index 00000000..9cee2f9d Binary files /dev/null and b/publications/papers/2510.04278v1.pdf differ diff --git a/publications/papers/2510.08880v1.pdf b/publications/papers/2510.08880v1.pdf new file mode 100644 index 00000000..2253c3b7 Binary files /dev/null and b/publications/papers/2510.08880v1.pdf differ diff --git a/publications/papers/2512.20224v1.pdf b/publications/papers/2512.20224v1.pdf new file mode 100644 index 00000000..e44d1102 Binary files /dev/null and b/publications/papers/2512.20224v1.pdf differ diff --git a/research/papers/3DIO_Low-Drift_3-D_Deep-Inertial_Odometry_for_Indoor_Localization_Using_an_IMU.pdf b/publications/papers/3DIO_Low-Drift_3-D_Deep-Inertial_Odometry_for_Indoor_Localization_Using_an_IMU.pdf similarity index 100% rename from 
research/papers/3DIO_Low-Drift_3-D_Deep-Inertial_Odometry_for_Indoor_Localization_Using_an_IMU.pdf rename to publications/papers/3DIO_Low-Drift_3-D_Deep-Inertial_Odometry_for_Indoor_Localization_Using_an_IMU.pdf diff --git a/research/papers/3DVAGRTK.pdf b/publications/papers/3DVAGRTK.pdf similarity index 100% rename from research/papers/3DVAGRTK.pdf rename to publications/papers/3DVAGRTK.pdf diff --git a/research/papers/3D_LiDAR_Aided_GNSS_NLOS_Mitigation_in_Urban_Canyons.pdf b/publications/papers/3D_LiDAR_Aided_GNSS_NLOS_Mitigation_in_Urban_Canyons.pdf similarity index 100% rename from research/papers/3D_LiDAR_Aided_GNSS_NLOS_Mitigation_in_Urban_Canyons.pdf rename to publications/papers/3D_LiDAR_Aided_GNSS_NLOS_Mitigation_in_Urban_Canyons.pdf diff --git a/research/papers/3D_LiDAR_Aided_GNSS_and_Its_Tightly_Coupled_Integration_with_INS_Via_FGO.pdf b/publications/papers/3D_LiDAR_Aided_GNSS_and_Its_Tightly_Coupled_Integration_with_INS_Via_FGO.pdf similarity index 100% rename from research/papers/3D_LiDAR_Aided_GNSS_and_Its_Tightly_Coupled_Integration_with_INS_Via_FGO.pdf rename to publications/papers/3D_LiDAR_Aided_GNSS_and_Its_Tightly_Coupled_Integration_with_INS_Via_FGO.pdf diff --git a/research/papers/3D_Point_Clouds_Data_Super_Resolution-Aided_LiDAR_Odometry_for_Vehicular_Positioning_in_Urban_Canyons.pdf b/publications/papers/3D_Point_Clouds_Data_Super_Resolution-Aided_LiDAR_Odometry_for_Vehicular_Positioning_in_Urban_Canyons.pdf similarity index 100% rename from research/papers/3D_Point_Clouds_Data_Super_Resolution-Aided_LiDAR_Odometry_for_Vehicular_Positioning_in_Urban_Canyons.pdf rename to publications/papers/3D_Point_Clouds_Data_Super_Resolution-Aided_LiDAR_Odometry_for_Vehicular_Positioning_in_Urban_Canyons.pdf diff --git a/research/papers/3MDA.pdf b/publications/papers/3MDA.pdf similarity index 100% rename from research/papers/3MDA.pdf rename to publications/papers/3MDA.pdf diff --git a/research/papers/7594.pdf b/publications/papers/7594.pdf similarity 
index 100% rename from research/papers/7594.pdf rename to publications/papers/7594.pdf diff --git a/research/papers/A-Fault-Detection-Algorithm-for-LiDAR-IMU-Integrated-Localization-Systems-with-Non-Gaussian-Noises.pdf b/publications/papers/A-Fault-Detection-Algorithm-for-LiDAR-IMU-Integrated-Localization-Systems-with-Non-Gaussian-Noises.pdf similarity index 100% rename from research/papers/A-Fault-Detection-Algorithm-for-LiDAR-IMU-Integrated-Localization-Systems-with-Non-Gaussian-Noises.pdf rename to publications/papers/A-Fault-Detection-Algorithm-for-LiDAR-IMU-Integrated-Localization-Systems-with-Non-Gaussian-Noises.pdf diff --git a/research/papers/AGPC-SLAM.pdf b/publications/papers/AGPC-SLAM.pdf similarity index 100% rename from research/papers/AGPC-SLAM.pdf rename to publications/papers/AGPC-SLAM.pdf diff --git a/research/papers/ANGV.pdf b/publications/papers/ANGV.pdf similarity index 100% rename from research/papers/ANGV.pdf rename to publications/papers/ANGV.pdf diff --git a/research/papers/Adaptive_Multi-Sensor_Integrated_Navigation_System_Aided_by_Continuous_Error_Map_from_RSU_for_Autonomous_Vehicles_in_Urban_Areas.pdf b/publications/papers/Adaptive_Multi-Sensor_Integrated_Navigation_System_Aided_by_Continuous_Error_Map_from_RSU_for_Autonomous_Vehicles_in_Urban_Areas.pdf similarity index 100% rename from research/papers/Adaptive_Multi-Sensor_Integrated_Navigation_System_Aided_by_Continuous_Error_Map_from_RSU_for_Autonomous_Vehicles_in_Urban_Areas.pdf rename to publications/papers/Adaptive_Multi-Sensor_Integrated_Navigation_System_Aided_by_Continuous_Error_Map_from_RSU_for_Autonomous_Vehicles_in_Urban_Areas.pdf diff --git a/research/papers/An_Improved_Inertial_Preintegration_Model_in_Factor_Graph_Optimization_for_High_Accuracy_Positioning_of_Intelligent_Vehicles.pdf b/publications/papers/An_Improved_Inertial_Preintegration_Model_in_Factor_Graph_Optimization_for_High_Accuracy_Positioning_of_Intelligent_Vehicles.pdf similarity index 100% rename from 
research/papers/An_Improved_Inertial_Preintegration_Model_in_Factor_Graph_Optimization_for_High_Accuracy_Positioning_of_Intelligent_Vehicles.pdf rename to publications/papers/An_Improved_Inertial_Preintegration_Model_in_Factor_Graph_Optimization_for_High_Accuracy_Positioning_of_Intelligent_Vehicles.pdf diff --git a/research/papers/CDGP.pdf b/publications/papers/CDGP.pdf similarity index 100% rename from research/papers/CDGP.pdf rename to publications/papers/CDGP.pdf diff --git a/research/papers/CGN3.pdf b/publications/papers/CGN3.pdf similarity index 100% rename from research/papers/CGN3.pdf rename to publications/papers/CGN3.pdf diff --git a/research/papers/CGPA.pdf b/publications/papers/CGPA.pdf similarity index 100% rename from research/papers/CGPA.pdf rename to publications/papers/CGPA.pdf diff --git a/research/papers/CN3L.pdf b/publications/papers/CN3L.pdf similarity index 100% rename from research/papers/CN3L.pdf rename to publications/papers/CN3L.pdf diff --git a/research/papers/Continuous_Error_Map_Aided_Adaptive_MMulti-Sensor_Integration_for_Connected_Autonomous_Vehicles_in_Urban_Scenarios.pdf b/publications/papers/Continuous_Error_Map_Aided_Adaptive_MMulti-Sensor_Integration_for_Connected_Autonomous_Vehicles_in_Urban_Scenarios.pdf similarity index 100% rename from research/papers/Continuous_Error_Map_Aided_Adaptive_MMulti-Sensor_Integration_for_Connected_Autonomous_Vehicles_in_Urban_Scenarios.pdf rename to publications/papers/Continuous_Error_Map_Aided_Adaptive_MMulti-Sensor_Integration_for_Connected_Autonomous_Vehicles_in_Urban_Scenarios.pdf diff --git a/research/papers/Degeneration-Aware_Outlier_Mitigation_for_Visual_Inertial_Integrated_Navigation_System_in_Urban_Canyons.pdf b/publications/papers/Degeneration-Aware_Outlier_Mitigation_for_Visual_Inertial_Integrated_Navigation_System_in_Urban_Canyons.pdf similarity index 100% rename from 
research/papers/Degeneration-Aware_Outlier_Mitigation_for_Visual_Inertial_Integrated_Navigation_System_in_Urban_Canyons.pdf rename to publications/papers/Degeneration-Aware_Outlier_Mitigation_for_Visual_Inertial_Integrated_Navigation_System_in_Urban_Canyons.pdf diff --git a/research/papers/Dynamic_Object-Aware_LiDAR_Odometry_Aided_by_Joint_Weightings_Estimation_in_Urban_Areas.pdf b/publications/papers/Dynamic_Object-Aware_LiDAR_Odometry_Aided_by_Joint_Weightings_Estimation_in_Urban_Areas.pdf similarity index 100% rename from research/papers/Dynamic_Object-Aware_LiDAR_Odometry_Aided_by_Joint_Weightings_Estimation_in_Urban_Areas.pdf rename to publications/papers/Dynamic_Object-Aware_LiDAR_Odometry_Aided_by_Joint_Weightings_Estimation_in_Urban_Areas.pdf diff --git a/research/papers/EGNR.pdf b/publications/papers/EGNR.pdf similarity index 100% rename from research/papers/EGNR.pdf rename to publications/papers/EGNR.pdf diff --git a/research/papers/ESMT.pdf b/publications/papers/ESMT.pdf similarity index 100% rename from research/papers/ESMT.pdf rename to publications/papers/ESMT.pdf diff --git a/research/papers/Enhancing_GNSS_Positioning_Accuracy_for_Road_Monitoring_Systems_A_Factor_Graph_Optimization_Approach_Aided_by_Geospatial_Information.pdf b/publications/papers/Enhancing_GNSS_Positioning_Accuracy_for_Road_Monitoring_Systems_A_Factor_Graph_Optimization_Approach_Aided_by_Geospatial_Information.pdf similarity index 100% rename from research/papers/Enhancing_GNSS_Positioning_Accuracy_for_Road_Monitoring_Systems_A_Factor_Graph_Optimization_Approach_Aided_by_Geospatial_Information.pdf rename to publications/papers/Enhancing_GNSS_Positioning_Accuracy_for_Road_Monitoring_Systems_A_Factor_Graph_Optimization_Approach_Aided_by_Geospatial_Information.pdf diff --git a/research/papers/Enhancing_Smartphone_based.pdf b/publications/papers/Enhancing_Smartphone_based.pdf similarity index 100% rename from research/papers/Enhancing_Smartphone_based.pdf rename to 
publications/papers/Enhancing_Smartphone_based.pdf diff --git a/research/papers/Extending-Navigation-Service-Under-Sensor-Failures-An-Approach-by-Integrating-System-Identification-and-Vehicle-Dynamic-Model.pdf b/publications/papers/Extending-Navigation-Service-Under-Sensor-Failures-An-Approach-by-Integrating-System-Identification-and-Vehicle-Dynamic-Model.pdf similarity index 100% rename from research/papers/Extending-Navigation-Service-Under-Sensor-Failures-An-Approach-by-Integrating-System-Identification-and-Vehicle-Dynamic-Model.pdf rename to publications/papers/Extending-Navigation-Service-Under-Sensor-Failures-An-Approach-by-Integrating-System-Identification-and-Vehicle-Dynamic-Model.pdf diff --git a/research/papers/Factor_Graph_Optimization-based_Indoor_Pedestrian_SLAM_with_Probabilistic_Exact_Activity_Loop_Closures_using_Smartphone.pdf b/publications/papers/Factor_Graph_Optimization-based_Indoor_Pedestrian_SLAM_with_Probabilistic_Exact_Activity_Loop_Closures_using_Smartphone.pdf similarity index 100% rename from research/papers/Factor_Graph_Optimization-based_Indoor_Pedestrian_SLAM_with_Probabilistic_Exact_Activity_Loop_Closures_using_Smartphone.pdf rename to publications/papers/Factor_Graph_Optimization-based_Indoor_Pedestrian_SLAM_with_Probabilistic_Exact_Activity_Loop_Closures_using_Smartphone.pdf diff --git a/research/papers/Fisheye_Camera_Aided_GNSS_NLOS_Detection_and_Learning-Based_Pseudorange_Bias_Correction_for_Intelligent_Vehicles_in_Urban_Canyons.pdf b/publications/papers/Fisheye_Camera_Aided_GNSS_NLOS_Detection_and_Learning-Based_Pseudorange_Bias_Correction_for_Intelligent_Vehicles_in_Urban_Canyons.pdf similarity index 100% rename from research/papers/Fisheye_Camera_Aided_GNSS_NLOS_Detection_and_Learning-Based_Pseudorange_Bias_Correction_for_Intelligent_Vehicles_in_Urban_Canyons.pdf rename to publications/papers/Fisheye_Camera_Aided_GNSS_NLOS_Detection_and_Learning-Based_Pseudorange_Bias_Correction_for_Intelligent_Vehicles_in_Urban_Canyons.pdf 
diff --git a/research/papers/GLIA.pdf b/publications/papers/GLIA.pdf similarity index 100% rename from research/papers/GLIA.pdf rename to publications/papers/GLIA.pdf diff --git a/research/papers/GLIO_Tightly-Coupled_GNSS_LiDAR_IMU_Integration_for_Continuous_and_Drift-Free_State_Estimation_of_Intelligent_Vehicles_in_Urban_Areas.pdf b/publications/papers/GLIO_Tightly-Coupled_GNSS_LiDAR_IMU_Integration_for_Continuous_and_Drift-Free_State_Estimation_of_Intelligent_Vehicles_in_Urban_Areas.pdf similarity index 100% rename from research/papers/GLIO_Tightly-Coupled_GNSS_LiDAR_IMU_Integration_for_Continuous_and_Drift-Free_State_Estimation_of_Intelligent_Vehicles_in_Urban_Areas.pdf rename to publications/papers/GLIO_Tightly-Coupled_GNSS_LiDAR_IMU_Integration_for_Continuous_and_Drift-Free_State_Estimation_of_Intelligent_Vehicles_in_Urban_Areas.pdf diff --git a/research/papers/GNEB.pdf b/publications/papers/GNEB.pdf similarity index 100% rename from research/papers/GNEB.pdf rename to publications/papers/GNEB.pdf diff --git a/research/papers/GNSS-RTK_Adaptively_Integrated.pdf b/publications/papers/GNSS-RTK_Adaptively_Integrated.pdf similarity index 100% rename from research/papers/GNSS-RTK_Adaptively_Integrated.pdf rename to publications/papers/GNSS-RTK_Adaptively_Integrated.pdf diff --git a/research/papers/GNSS_6297.pdf b/publications/papers/GNSS_6297.pdf similarity index 100% rename from research/papers/GNSS_6297.pdf rename to publications/papers/GNSS_6297.pdf diff --git a/research/papers/GNSS_Outliers_Mitigation_in_Urban_Areas_Using_Sparse_Estimation_Based_on_Factor_Graph_Optimization.pdf b/publications/papers/GNSS_Outliers_Mitigation_in_Urban_Areas_Using_Sparse_Estimation_Based_on_Factor_Graph_Optimization.pdf similarity index 100% rename from research/papers/GNSS_Outliers_Mitigation_in_Urban_Areas_Using_Sparse_Estimation_Based_on_Factor_Graph_Optimization.pdf rename to 
publications/papers/GNSS_Outliers_Mitigation_in_Urban_Areas_Using_Sparse_Estimation_Based_on_Factor_Graph_Optimization.pdf diff --git a/research/papers/Graph-Based_Indoor_3D_Pedestrian_Location_Tracking_With_Inertial-Only_Perception.pdf b/publications/papers/Graph-Based_Indoor_3D_Pedestrian_Location_Tracking_With_Inertial-Only_Perception.pdf similarity index 100% rename from research/papers/Graph-Based_Indoor_3D_Pedestrian_Location_Tracking_With_Inertial-Only_Perception.pdf rename to publications/papers/Graph-Based_Indoor_3D_Pedestrian_Location_Tracking_With_Inertial-Only_Perception.pdf diff --git a/research/papers/Huang_Coarse-To-Fine_Lidar-Based_Slam.pdf b/publications/papers/Huang_Coarse-To-Fine_Lidar-Based_Slam.pdf similarity index 100% rename from research/papers/Huang_Coarse-To-Fine_Lidar-Based_Slam.pdf rename to publications/papers/Huang_Coarse-To-Fine_Lidar-Based_Slam.pdf diff --git a/research/papers/Huang_Coarse-To-Fine_Lidar-Based_Slam.png b/publications/papers/Huang_Coarse-To-Fine_Lidar-Based_Slam.png similarity index 100% rename from research/papers/Huang_Coarse-To-Fine_Lidar-Based_Slam.png rename to publications/papers/Huang_Coarse-To-Fine_Lidar-Based_Slam.png diff --git "a/research/papers/IET Intelligent Trans Sys - 2022 - Zhong - Low\342\200\220cost solid\342\200\220state LiDAR inertial\342\200\220based localization with prior map for.pdf" "b/publications/papers/IET Intelligent Trans Sys - 2022 - Zhong - Low\342\200\220cost solid\342\200\220state LiDAR inertial\342\200\220based localization with prior map for.pdf" similarity index 100% rename from "research/papers/IET Intelligent Trans Sys - 2022 - Zhong - Low\342\200\220cost solid\342\200\220state LiDAR inertial\342\200\220based localization with prior map for.pdf" rename to "publications/papers/IET Intelligent Trans Sys - 2022 - Zhong - Low\342\200\220cost solid\342\200\220state LiDAR inertial\342\200\220based localization with prior map for.pdf" diff --git 
a/research/papers/IET_Intelligent_Trans_Sys_2024.pdf b/publications/papers/IET_Intelligent_Trans_Sys_2024.pdf similarity index 100% rename from research/papers/IET_Intelligent_Trans_Sys_2024.pdf rename to publications/papers/IET_Intelligent_Trans_Sys_2024.pdf diff --git a/research/papers/ITM_fullpaper-submit_v3_1.pdf b/publications/papers/ITM_fullpaper-submit_v3_1.pdf similarity index 100% rename from research/papers/ITM_fullpaper-submit_v3_1.pdf rename to publications/papers/ITM_fullpaper-submit_v3_1.pdf diff --git a/research/papers/IVDMS.pdf b/publications/papers/IVDMS.pdf similarity index 100% rename from research/papers/IVDMS.pdf rename to publications/papers/IVDMS.pdf diff --git a/research/papers/LDEUDL.pdf b/publications/papers/LDEUDL.pdf similarity index 100% rename from research/papers/LDEUDL.pdf rename to publications/papers/LDEUDL.pdf diff --git a/publications/papers/Learning_Safe_Optimal_and_Real-Time_Flight_Interaction_With_Deep_Confidence-Enhanced_Reachability_Guarantee.pdf b/publications/papers/Learning_Safe_Optimal_and_Real-Time_Flight_Interaction_With_Deep_Confidence-Enhanced_Reachability_Guarantee.pdf new file mode 100644 index 00000000..7ec3e2c8 Binary files /dev/null and b/publications/papers/Learning_Safe_Optimal_and_Real-Time_Flight_Interaction_With_Deep_Confidence-Enhanced_Reachability_Guarantee.pdf differ diff --git a/research/papers/Lidar_Feature_outlier.pdf b/publications/papers/Lidar_Feature_outlier.pdf similarity index 100% rename from research/papers/Lidar_Feature_outlier.pdf rename to publications/papers/Lidar_Feature_outlier.pdf diff --git a/research/papers/Lidar_aided_cycle.pdf b/publications/papers/Lidar_aided_cycle.pdf similarity index 100% rename from research/papers/Lidar_aided_cycle.pdf rename to publications/papers/Lidar_aided_cycle.pdf diff --git a/research/papers/MACG copy.pdf b/publications/papers/MACG copy.pdf similarity index 100% rename from research/papers/MACG copy.pdf rename to publications/papers/MACG copy.pdf diff 
--git a/research/papers/MACG.pdf b/publications/papers/MACG.pdf similarity index 100% rename from research/papers/MACG.pdf rename to publications/papers/MACG.pdf diff --git a/research/papers/Multisensor_Plug-and-Play_Navigation_Based_on_Resilient_Information_Filter.pdf b/publications/papers/Multisensor_Plug-and-Play_Navigation_Based_on_Resilient_Information_Filter.pdf similarity index 100% rename from research/papers/Multisensor_Plug-and-Play_Navigation_Based_on_Resilient_Information_Filter.pdf rename to publications/papers/Multisensor_Plug-and-Play_Navigation_Based_on_Resilient_Information_Filter.pdf diff --git a/research/papers/NINS.pdf b/publications/papers/NINS.pdf similarity index 100% rename from research/papers/NINS.pdf rename to publications/papers/NINS.pdf diff --git a/research/papers/Ng_3D_Mapping_Aided.pdf b/publications/papers/Ng_3D_Mapping_Aided.pdf similarity index 100% rename from research/papers/Ng_3D_Mapping_Aided.pdf rename to publications/papers/Ng_3D_Mapping_Aided.pdf diff --git a/research/papers/Nonlinearity-Aware_ZUPT-Aided_Pedestrian_Inertial_Navigation_Based_on_Cubature_Kalman_Filter_in_Urban_Canyons.pdf b/publications/papers/Nonlinearity-Aware_ZUPT-Aided_Pedestrian_Inertial_Navigation_Based_on_Cubature_Kalman_Filter_in_Urban_Canyons.pdf similarity index 100% rename from research/papers/Nonlinearity-Aware_ZUPT-Aided_Pedestrian_Inertial_Navigation_Based_on_Cubature_Kalman_Filter_in_Urban_Canyons.pdf rename to publications/papers/Nonlinearity-Aware_ZUPT-Aided_Pedestrian_Inertial_Navigation_Based_on_Cubature_Kalman_Filter_in_Urban_Canyons.pdf diff --git a/research/papers/ODAG.pdf b/publications/papers/ODAG.pdf similarity index 100% rename from research/papers/ODAG.pdf rename to publications/papers/ODAG.pdf diff --git a/research/papers/Online_Dynamic_Model_Calibration_for_Reliable_Control_of_Quadrotor_Based_on_Factor_Graph_Optimization.pdf 
b/publications/papers/Online_Dynamic_Model_Calibration_for_Reliable_Control_of_Quadrotor_Based_on_Factor_Graph_Optimization.pdf similarity index 100% rename from research/papers/Online_Dynamic_Model_Calibration_for_Reliable_Control_of_Quadrotor_Based_on_Factor_Graph_Optimization.pdf rename to publications/papers/Online_Dynamic_Model_Calibration_for_Reliable_Control_of_Quadrotor_Based_on_Factor_Graph_Optimization.pdf diff --git a/research/papers/PANG.pdf b/publications/papers/PANG.pdf similarity index 100% rename from research/papers/PANG.pdf rename to publications/papers/PANG.pdf diff --git a/research/papers/PAVI.pdf b/publications/papers/PAVI.pdf similarity index 100% rename from research/papers/PAVI.pdf rename to publications/papers/PAVI.pdf diff --git a/research/papers/PCGI.pdf b/publications/papers/PCGI.pdf similarity index 100% rename from research/papers/PCGI.pdf rename to publications/papers/PCGI.pdf diff --git a/research/papers/PEAG.pdf b/publications/papers/PEAG.pdf similarity index 100% rename from research/papers/PEAG.pdf rename to publications/papers/PEAG.pdf diff --git a/research/papers/Perception_aided_Visual-Inertial_Integrated_Positioning_in_Dynamic_Urban_Areas.pdf b/publications/papers/Perception_aided_Visual-Inertial_Integrated_Positioning_in_Dynamic_Urban_Areas.pdf similarity index 100% rename from research/papers/Perception_aided_Visual-Inertial_Integrated_Positioning_in_Dynamic_Urban_Areas.pdf rename to publications/papers/Perception_aided_Visual-Inertial_Integrated_Positioning_in_Dynamic_Urban_Areas.pdf diff --git a/research/papers/Point_Wise_or_Feature_Wise.pdf b/publications/papers/Point_Wise_or_Feature_Wise.pdf similarity index 100% rename from research/papers/Point_Wise_or_Feature_Wise.pdf rename to publications/papers/Point_Wise_or_Feature_Wise.pdf diff --git a/research/papers/RGCP.pdf b/publications/papers/RGCP.pdf similarity index 100% rename from research/papers/RGCP.pdf rename to publications/papers/RGCP.pdf diff --git 
a/research/papers/RVIN.pdf b/publications/papers/RVIN.pdf similarity index 100% rename from research/papers/RVIN.pdf rename to publications/papers/RVIN.pdf diff --git a/research/papers/Roadside_Infrastructure_assisted_LiDAR_Inertial-based_Mapping_for_Intelligent_Vehicles_in_Urban_Areas.pdf b/publications/papers/Roadside_Infrastructure_assisted_LiDAR_Inertial-based_Mapping_for_Intelligent_Vehicles_in_Urban_Areas.pdf similarity index 100% rename from research/papers/Roadside_Infrastructure_assisted_LiDAR_Inertial-based_Mapping_for_Intelligent_Vehicles_in_Urban_Areas.pdf rename to publications/papers/Roadside_Infrastructure_assisted_LiDAR_Inertial-based_Mapping_for_Intelligent_Vehicles_in_Urban_Areas.pdf diff --git a/research/papers/Safe-Assured_Learning-Based_Deep_SE3_Motion_Joint_Planning_and_Control_for_UAV_Interactions_with_Dynamic_Environments.pdf b/publications/papers/Safe-Assured_Learning-Based_Deep_SE3_Motion_Joint_Planning_and_Control_for_UAV_Interactions_with_Dynamic_Environments.pdf similarity index 100% rename from research/papers/Safe-Assured_Learning-Based_Deep_SE3_Motion_Joint_Planning_and_Control_for_UAV_Interactions_with_Dynamic_Environments.pdf rename to publications/papers/Safe-Assured_Learning-Based_Deep_SE3_Motion_Joint_Planning_and_Control_for_UAV_Interactions_with_Dynamic_Environments.pdf diff --git a/research/papers/Safety-Quantifiable_Line_Feature-Based_Monocular_Visual_Localization_With_3D_Prior_Map.pdf b/publications/papers/Safety-Quantifiable_Line_Feature-Based_Monocular_Visual_Localization_With_3D_Prior_Map.pdf similarity index 100% rename from research/papers/Safety-Quantifiable_Line_Feature-Based_Monocular_Visual_Localization_With_3D_Prior_Map.pdf rename to publications/papers/Safety-Quantifiable_Line_Feature-Based_Monocular_Visual_Localization_With_3D_Prior_Map.pdf diff --git a/research/papers/Self-Attention-Enhanced_PSW-LSTM_for_3-D_Indoor_Pedestrian_Positioning_With_Integrated_Wi-Fi_Magnetometer_and_Barometer_Sensors.pdf 
b/publications/papers/Self-Attention-Enhanced_PSW-LSTM_for_3-D_Indoor_Pedestrian_Positioning_With_Integrated_Wi-Fi_Magnetometer_and_Barometer_Sensors.pdf similarity index 100% rename from research/papers/Self-Attention-Enhanced_PSW-LSTM_for_3-D_Indoor_Pedestrian_Positioning_With_Integrated_Wi-Fi_Magnetometer_and_Barometer_Sensors.pdf rename to publications/papers/Self-Attention-Enhanced_PSW-LSTM_for_3-D_Indoor_Pedestrian_Positioning_With_Integrated_Wi-Fi_Magnetometer_and_Barometer_Sensors.pdf diff --git a/research/papers/TCGI.pdf b/publications/papers/TCGI.pdf similarity index 100% rename from research/papers/TCGI.pdf rename to publications/papers/TCGI.pdf diff --git a/research/papers/Tightly-coupled_Line_Feature-aided_Visual_Inertial_Localization_within_Lightweight_3D_Prior_Map_for_Intelligent_Vehicles.pdf b/publications/papers/Tightly-coupled_Line_Feature-aided_Visual_Inertial_Localization_within_Lightweight_3D_Prior_Map_for_Intelligent_Vehicles.pdf similarity index 100% rename from research/papers/Tightly-coupled_Line_Feature-aided_Visual_Inertial_Localization_within_Lightweight_3D_Prior_Map_for_Intelligent_Vehicles.pdf rename to publications/papers/Tightly-coupled_Line_Feature-aided_Visual_Inertial_Localization_within_Lightweight_3D_Prior_Map_for_Intelligent_Vehicles.pdf diff --git a/research/papers/Tightly_Coupled_Integration.pdf b/publications/papers/Tightly_Coupled_Integration.pdf similarity index 100% rename from research/papers/Tightly_Coupled_Integration.pdf rename to publications/papers/Tightly_Coupled_Integration.pdf diff --git a/research/papers/Tightly_Joining_Positioning_and_Control_for_Trustworthy_Unmanned_Aerial_Vehicles_Based_on_Factor_Graph_Optimization_in_Urban_Transportation.pdf b/publications/papers/Tightly_Joining_Positioning_and_Control_for_Trustworthy_Unmanned_Aerial_Vehicles_Based_on_Factor_Graph_Optimization_in_Urban_Transportation.pdf similarity index 100% rename from 
research/papers/Tightly_Joining_Positioning_and_Control_for_Trustworthy_Unmanned_Aerial_Vehicles_Based_on_Factor_Graph_Optimization_in_Urban_Transportation.pdf rename to publications/papers/Tightly_Joining_Positioning_and_Control_for_Trustworthy_Unmanned_Aerial_Vehicles_Based_on_Factor_Graph_Optimization_in_Urban_Transportation.pdf diff --git a/research/papers/Time-correlated.pdf b/publications/papers/Time-correlated.pdf similarity index 100% rename from research/papers/Time-correlated.pdf rename to publications/papers/Time-correlated.pdf diff --git a/research/papers/Towards_Robust_GNSS_Positioning_and_Real-time_Kinematic_Using_Factor_Graph_Optimization.pdf b/publications/papers/Towards_Robust_GNSS_Positioning_and_Real-time_Kinematic_Using_Factor_Graph_Optimization.pdf similarity index 100% rename from research/papers/Towards_Robust_GNSS_Positioning_and_Real-time_Kinematic_Using_Factor_Graph_Optimization.pdf rename to publications/papers/Towards_Robust_GNSS_Positioning_and_Real-time_Kinematic_Using_Factor_Graph_Optimization.pdf diff --git a/research/papers/UELM.pdf b/publications/papers/UELM.pdf similarity index 100% rename from research/papers/UELM.pdf rename to publications/papers/UELM.pdf diff --git a/research/papers/USFC.pdf b/publications/papers/USFC.pdf similarity index 100% rename from research/papers/USFC.pdf rename to publications/papers/USFC.pdf diff --git a/research/papers/UrbanLoco.pdf b/publications/papers/UrbanLoco.pdf similarity index 100% rename from research/papers/UrbanLoco.pdf rename to publications/papers/UrbanLoco.pdf diff --git a/research/papers/UrbanNav.pdf b/publications/papers/UrbanNav.pdf similarity index 100% rename from research/papers/UrbanNav.pdf rename to publications/papers/UrbanNav.pdf diff --git a/research/papers/Urban_GNSS_Positioning_for_Consumer_Electronics_3D_Mapping_and_Advanced_Signal_Processing.pdf b/publications/papers/Urban_GNSS_Positioning_for_Consumer_Electronics_3D_Mapping_and_Advanced_Signal_Processing.pdf similarity 
index 100% rename from research/papers/Urban_GNSS_Positioning_for_Consumer_Electronics_3D_Mapping_and_Advanced_Signal_Processing.pdf rename to publications/papers/Urban_GNSS_Positioning_for_Consumer_Electronics_3D_Mapping_and_Advanced_Signal_Processing.pdf diff --git a/research/papers/Wen_3D_Lidar_Aided.pdf b/publications/papers/Wen_3D_Lidar_Aided.pdf similarity index 100% rename from research/papers/Wen_3D_Lidar_Aided.pdf rename to publications/papers/Wen_3D_Lidar_Aided.pdf diff --git a/research/papers/Wen_Factor_graph_for_GNSS.pdf b/publications/papers/Wen_Factor_graph_for_GNSS.pdf similarity index 100% rename from research/papers/Wen_Factor_graph_for_GNSS.pdf rename to publications/papers/Wen_Factor_graph_for_GNSS.pdf diff --git a/research/papers/Wen_Integrity_Monitoring_Gnss.pdf b/publications/papers/Wen_Integrity_Monitoring_Gnss.pdf similarity index 100% rename from research/papers/Wen_Integrity_Monitoring_Gnss.pdf rename to publications/papers/Wen_Integrity_Monitoring_Gnss.pdf diff --git a/research/papers/Xia_2024_Meas._Sci._Technol._35_126302.pdf b/publications/papers/Xia_2024_Meas._Sci._Technol._35_126302.pdf similarity index 100% rename from research/papers/Xia_2024_Meas._Sci._Technol._35_126302.pdf rename to publications/papers/Xia_2024_Meas._Sci._Technol._35_126302.pdf diff --git a/research/papers/Zhang_Continuous_Gnss-Rtk_Aided.pdf b/publications/papers/Zhang_Continuous_Gnss-Rtk_Aided.pdf similarity index 100% rename from research/papers/Zhang_Continuous_Gnss-Rtk_Aided.pdf rename to publications/papers/Zhang_Continuous_Gnss-Rtk_Aided.pdf diff --git a/research/papers/Zhong_Low_cost_Lidar.pdf b/publications/papers/Zhong_Low_cost_Lidar.pdf similarity index 100% rename from research/papers/Zhong_Low_cost_Lidar.pdf rename to publications/papers/Zhong_Low_cost_Lidar.pdf diff --git a/research/papers/isprs-archives-XLVIII-1-W1-2023-175-2023.pdf b/publications/papers/isprs-archives-XLVIII-1-W1-2023-175-2023.pdf similarity index 100% rename from 
research/papers/isprs-archives-XLVIII-1-W1-2023-175-2023.pdf rename to publications/papers/isprs-archives-XLVIII-1-W1-2023-175-2023.pdf diff --git a/research/papers/isprs-archives-XLVIII-1-W1-2023-227-2023.pdf b/publications/papers/isprs-archives-XLVIII-1-W1-2023-227-2023.pdf similarity index 100% rename from research/papers/isprs-archives-XLVIII-1-W1-2023-227-2023.pdf rename to publications/papers/isprs-archives-XLVIII-1-W1-2023-227-2023.pdf diff --git a/research/papers/isprs-archives-XLVIII-1-W1-2023-473-2023.pdf b/publications/papers/isprs-archives-XLVIII-1-W1-2023-473-2023.pdf similarity index 100% rename from research/papers/isprs-archives-XLVIII-1-W1-2023-473-2023.pdf rename to publications/papers/isprs-archives-XLVIII-1-W1-2023-473-2023.pdf diff --git a/research/papers/mstad7487f6_lr.jpg b/publications/papers/mstad7487f6_lr.jpg similarity index 100% rename from research/papers/mstad7487f6_lr.jpg rename to publications/papers/mstad7487f6_lr.jpg diff --git a/research/papers/navi.602.full.pdf b/publications/papers/navi.602.full.pdf similarity index 100% rename from research/papers/navi.602.full.pdf rename to publications/papers/navi.602.full.pdf diff --git a/research/papers/navi.660.full.pdf b/publications/papers/navi.660.full.pdf similarity index 100% rename from research/papers/navi.660.full.pdf rename to publications/papers/navi.660.full.pdf diff --git a/research/papers/navi.660.pdf b/publications/papers/navi.660.pdf similarity index 100% rename from research/papers/navi.660.pdf rename to publications/papers/navi.660.pdf diff --git a/research/papers/navi.684.full.pdf b/publications/papers/navi.684.full.pdf similarity index 100% rename from research/papers/navi.684.full.pdf rename to publications/papers/navi.684.full.pdf diff --git a/research/papers/pyrtklib_An_Open-Source_Package_for_Tightly_Coupled_Deep_Learning_and_GNSS_Integration_for_Positioning_in_Urban_Canyons.pdf 
b/publications/papers/pyrtklib_An_Open-Source_Package_for_Tightly_Coupled_Deep_Learning_and_GNSS_Integration_for_Positioning_in_Urban_Canyons.pdf similarity index 100% rename from research/papers/pyrtklib_An_Open-Source_Package_for_Tightly_Coupled_Deep_Learning_and_GNSS_Integration_for_Positioning_in_Urban_Canyons.pdf rename to publications/papers/pyrtklib_An_Open-Source_Package_for_Tightly_Coupled_Deep_Learning_and_GNSS_Integration_for_Positioning_in_Urban_Canyons.pdf diff --git a/research/papers/remotesensing-13-02371-v2-1.pdf b/publications/papers/remotesensing-13-02371-v2-1.pdf similarity index 100% rename from research/papers/remotesensing-13-02371-v2-1.pdf rename to publications/papers/remotesensing-13-02371-v2-1.pdf diff --git a/research/papers/s11804_023_00367_1.pdf b/publications/papers/s11804_023_00367_1.pdf similarity index 100% rename from research/papers/s11804_023_00367_1.pdf rename to publications/papers/s11804_023_00367_1.pdf diff --git a/research/drones.md b/research/drones.md new file mode 100644 index 00000000..d1e41645 --- /dev/null +++ b/research/drones.md @@ -0,0 +1,138 @@ +--- +title: Embodied Drones for City Maintenance +--- + +# 🚁 Embodied Drones for City Maintenance and Manipulation + +
+Maintaining urban infrastructure in dense city environments — particularly external wall cleaning of high-rise buildings and structural inspection in urban canyons — presents significant challenges that demand intelligent, physically interactive drone systems. This research develops embodied drone platforms that combine autonomous navigation in GPS-degraded urban canyons with contact-based manipulation capabilities for real-world city maintenance tasks. +
+ +
+Our approach addresses three fundamental challenges: +
    +
  1. Autonomous Inspection in Urban Canyons — Dense urban environments with tall buildings, narrow streets, and GPS-degraded conditions pose severe challenges for drone navigation. We develop AI-driven multi-sensor fusion algorithms (LiDAR/Camera/IMU/GNSS) and robust localization methods that enable drones to navigate safely and precisely in complex urban canyon environments. Our systems provide centimeter-level positioning for close-proximity inspection of building facades, bridges, and other urban structures.
  2. +
  3. External Wall Cleaning with Drones — High-rise external wall cleaning is one of the most hazardous tasks in urban maintenance. We develop drone-based cleaning systems that integrate aerial manipulation with contact-aware flight control, enabling drones to approach building surfaces, maintain stable contact, and perform cleaning operations autonomously. Our force-controlled manipulation strategies ensure safe and effective cleaning while accommodating varying surface geometries, wind disturbances, and dynamic environmental conditions.
  4. +
  5. Software-Hardware Co-Design for Maintenance Drones — We pursue an integrated approach to drone system design, jointly optimizing the AI software stack (perception, planning, contact control) with the hardware platform (airframe, cleaning/manipulation end-effectors, onboard compute) to achieve reliable embodied AI performance under the strict size, weight, and power (SWaP) constraints of aerial platforms.
  6. +
+
+ +

+ Embodied Drones +

+
Embodied Drones for City Maintenance and Manipulation
+ +← Back to all Research Directions + +{% include section.html %} + +## Demo Videos + +

+ +

+
Intelligent Cleaning UAV Demonstration — PolyU Wuxi Research Institute
+ +

+ +

+
UAV System Demonstration — TAS Lab, PolyU
+ +{% include section.html %} + +## Key Research Directions + + + +{% include section.html %} + +## Target Applications + + + +{% include section.html %} + +## Recent News + + + +{% include section.html %} + +## Press Coverage Photos + +
RTHK Interview
RTHK Interview
TVB Coverage
TVB Coverage
Ming Pao Coverage
Ming Pao Coverage
+ +{% include section.html %} + +## Selected Publications (*: Corresponding author) + + + + + +

→ Full publication list

+ +{% include section.html %} + +## Acknowledgement and Collaborators + +
+This research is supported by The Hong Kong Polytechnic University, the Department of Science and Technology of Guangdong Province (Drone System and Offshore Wind Turbines Inspection), Esri China (HK) Limited (Vision-Language-Action Models for Intelligent UAV Systems), and Meituan (Vision Aided GNSS-RTK Positioning for UAV System in Urban Canyons). We collaborate with leading research groups and industry partners in intelligent drone systems and urban maintenance solutions. +
+ +{% include section.html %} + +{% assign posts = site.posts | where: "research_direction", "drones" | sort: "date" | reverse %} + +## Projects ({{ posts.size }}) + +{% for post in posts %} + {% include post-excerpt.html title=post.title url=post.url image=post.image content=post.content excerpt=post.excerpt date=post.date author=post.author tags=post.tags last_modified_at=post.last_modified_at %} +{% endfor %} diff --git a/research/education.md b/research/education.md new file mode 100644 index 00000000..3971cbd8 --- /dev/null +++ b/research/education.md @@ -0,0 +1,122 @@ +--- +title: Embodied AI for Robotics Education +--- + +# Embodied AI for Robotics Education + +← Back to all Research Directions + + + +--- + +## Abstract + +
+Embodied AI for Robotics Education +
+ +
+At the TAS Lab, we believe that cutting-edge research should go hand-in-hand with innovative education. Our research in Embodied AI for Robotics Education focuses on developing AI-powered educational platforms and hands-on learning experiences that bridge the gap between academic research and industry-ready skills. We leverage our expertise in autonomous systems — including drones, ground robots, and intelligent vehicles — to create immersive, project-based curricula that empower the next generation of roboticists and AI engineers. +
+ +
+Our educational research integrates embodied AI concepts into university courses and outreach programs, enabling students to interact with real robotic hardware and state-of-the-art AI algorithms. From GNSS-based navigation labs to end-to-end autonomous driving projects, we design curricula that combine theoretical foundations with practical implementations, fostering both deep understanding and engineering competency. +
+ +--- + +## Key Research Directions + + + +--- + +## Courses and Teaching Platforms + +
+Our education research is tightly integrated with the following courses at The Hong Kong Polytechnic University: +
+ + + +--- + +## Video Lectures + +
+ +

AAE4203 Guidance and Navigation — Lecture Series on YouTube

+
+ +--- + +## Robotics Competitions + +
+TAS Lab actively supports and supervises students in robotics competitions, providing mentorship, technical resources, and hands-on training. Competitions are a vital platform for students to apply their theoretical knowledge, develop teamwork skills, and push the boundaries of autonomous systems engineering. +
+ + + +--- + +## Student Supervision Highlights + + + +--- + +## Acknowledgement and Collaborators + +
+Our robotics education initiatives are supported by The Hong Kong Polytechnic University, the Department of Aeronautical and Aviation Engineering, and the Faculty of Engineering. We are grateful to our collaborators at Wuhan University, Beihang University, UC Berkeley, and industry partners for adopting and contributing to our open-source educational resources. +
+ +

+ Funding and Collaborators +

diff --git a/research/fusion.md b/research/fusion.md new file mode 100644 index 00000000..ed7bbe11 --- /dev/null +++ b/research/fusion.md @@ -0,0 +1,160 @@ +--- +title: Safety-certifiable Multi-Sensor Fusion +--- + +# 🔒 Safety-certifiable Multi-Sensor Fusion for Robotic Navigation in Urban Scenes + +
+The visual/LiDAR SLAM methods are challenged in complex urban scenarios, especially when safety certification is required for autonomous systems. In this project, we aim to study the mechanism of the impacts caused by dynamic scenarios on the visual/LiDAR SLAM methods, and develop safety-certifiable navigation algorithms that can quantify and guarantee the reliability of localization results. We try to answer the questions of how dynamic objects affect the state estimation of visual/LiDAR SLAM methods, how to improve robustness, and how to provide safety-quantifiable localization for robotics in complex urban environments. +
+ +

+ Multi-Sensor Fusion +

+
GNSS/LiDAR/Visual/INS Integration for Robotics Navigation
+ +

+ Safety-certifiable Visual Localization +

+
Safety-certifiable Visual Localization with 3D Prior Map
+ +← Back to all Research Directions + +{% include section.html %} + +## Recent News + + + +{% include section.html %} + +## Video Demonstration + +

+ +

+
Safety-quantifiable Line Feature-based Monocular Visual Localization with 3D Prior Map
+ +

+ +

+
Multi-sensor Integration Navigation System for Autonomous Driving
+ +

+ +

+
Demonstration: Low-cost Solid-state LiDAR/Inertial Based Localization with Prior Map
+ +

+ +

+
Presentation in ION GNSS+ 2021: A Coarse-to-Fine LiDAR-Based SLAM with Dynamic Object Removal
+ +

+ +

+
Presentation in ION GNSS+ 2021: Continuous GNSS-RTK Aided by LiDAR/Inertial Odometry
+ +{% include section.html %} + +## Related Papers (*: Corresponding author) + + + +

2025

+ + + +

2024

+ + + +

2023

+ + + +

2018–2022

+ + + +

→ Full publication list

+ +{% include section.html %} + +## Acknowledgement and Collaborators + +
+This research is supported by government and industry partners, including Hong Kong Polytechnic University, Guangdong Basic and Applied Basic Research Foundation, and Huawei Technologies. We collaborate with leading research groups in multi-sensor fusion and safety-certifiable navigation. +
+ +

+ Funding and Collaborators +

+ +{% include section.html %} + +{% assign posts = site.posts | where: "research_direction", "fusion" | sort: "date" | reverse %} + +## Projects ({{ posts.size }}) + +{% for post in posts %} + {% include post-excerpt.html title=post.title url=post.url image=post.image content=post.content excerpt=post.excerpt date=post.date author=post.author tags=post.tags last_modified_at=post.last_modified_at %} +{% endfor %} diff --git a/research/gnss.md b/research/gnss.md new file mode 100644 index 00000000..0663d5f9 --- /dev/null +++ b/research/gnss.md @@ -0,0 +1,183 @@ +--- +title: 3D LiDAR Aided GNSS Positioning +--- + +# 🛰️ 3D LiDAR Aided GNSS Positioning for Robotics Navigation + +
+Positioning in urban environments is becoming essential due to the increasing demand for autonomous driving vehicles (ADV). The global navigation satellite system (GNSS) is currently one of the principal means of providing globally-referenced positioning for ADV localization. With the increased availability of multiple satellite constellations, GNSS can provide satisfactory performance in open-sky areas. However, the positioning accuracy is significantly degraded in highly-urbanized cities such as Hong Kong, due to signal reflection caused by static buildings and dynamic objects such as double-decker buses. If the direct line-of-sight (LOS) is blocked, and reflected signals from the same satellite are received, the notorious non-line-of-sight (NLOS) receptions occur. According to a recent review paper, NLOS is currently the major difficulty in the use of GNSS in intelligent transportation systems. +
+ +
+Inspired by the strong perception capability of ADV using onboard sensors (such as 3D LiDAR), we continuously developed the perception-aided NLOS mitigation methods where the 3D LiDAR is employed to timely reconstruct the surrounding environments to identify the NLOS receptions. The idea was also reported in the industrial magazine in 2018. The work was further improved in 2020, where several drawbacks are relaxed and was awarded the Best Presentation Award in the session of Navigation in Urban Environments. Interestingly, this award is selected by the session chairs from Waymo and Swift Navigation. Meanwhile, the idea is transferred into industrial applications for high-accuracy offline mapping applications. Recently, we extended the LiDAR aided GNSS NLOS mitigation to the GNSS Real-time Kinematic (RTK), leading to sub-meter level accuracy. Unfortunately, the fixed rate of the RTK is still not guaranteed as: +
+ +
+ +
+ +

+ 3D LiDAR Aided GNSS Positioning +

+
3D LiDAR Aided GNSS Positioning for Robotics Urban Navigation
+ +← Back to all Research Directions + +{% include section.html %} + +## Recent News + + + +{% include section.html %} + +## Video Demonstration + +

+ +

+
Demonstration: 3D LiDAR Aided NLOS Exclusion for GNSS Real-time Kinematic (RTK) Positioning in Urban Canyons
+ +

+ +

+
Presentation in ION GNSS+ 2021: 3D LiDAR Aided NLOS Exclusion for GNSS RTK Positioning
+ +

+ +

+
Demonstration: 3D LiDAR Aided NLOS Exclusion for GNSS Single Point Positioning
+ +

+ +

+
Presentation in ION GNSS+ 2020: 3D LiDAR Aided GNSS and Its Tightly Coupled Integration with INS
+ +

+ +

+
Presentation in ION GNSS+ 2021: Continuous GNSS-RTK Aided by LiDAR/Inertial Odometry
+ +{% include section.html %} + +## Related Papers (*: Corresponding author) + + + +

2025

+ + + +

2024

+ + + +

2023

+ + + +

2021–2022

+ + + +

2018–2020

+ + + +

→ Full publication list

+ +{% include section.html %} + +## Press Coverage + + + +{% include section.html %} + +## Acknowledgement and Collaborators + +
+This research was funded by government and industry partners, including Hong Kong Polytechnic University, Guangdong Basic and Applied Basic Research Foundation, Riemann Laboratory, and Huawei Technologies. +
+ +

+ Funding and Collaborators +

+ +{% include section.html %} + +{% assign posts = site.posts | where: "research_direction", "gnss" | sort: "date" | reverse %} + +## Projects ({{ posts.size }}) + +{% for post in posts %} + {% include post-excerpt.html title=post.title url=post.url image=post.image content=post.content excerpt=post.excerpt date=post.date author=post.author tags=post.tags last_modified_at=post.last_modified_at %} +{% endfor %} diff --git a/research/humanoid.md b/research/humanoid.md new file mode 100644 index 00000000..5decf5fe --- /dev/null +++ b/research/humanoid.md @@ -0,0 +1,114 @@ +--- +title: Embodied AI for Humanoid/Legged Robotics +--- + +# 🤖 Embodied AI for Humanoid/Legged Robotics + +
+Humanoid and legged robots represent the next frontier of embodied AI — machines that can perceive, reason, and physically interact with the world in a human-like manner. This research focuses on developing large AI models and vision-language-action (VLA) frameworks that enable humanoid and legged robots to autonomously navigate, manipulate, and collaborate in complex real-world environments. +
+ +
+Our approach integrates three core pillars: +
    +
  1. Foundation Models for Robotic Perception and Control — We develop vision-language-action models that bridge high-level semantic understanding with low-level motor control, enabling robots to interpret natural language instructions and execute complex manipulation and locomotion tasks. Our models leverage large-scale pre-training on multimodal data (vision, language, proprioception) and are fine-tuned for real-world deployment on humanoid platforms.
  2. +
  3. Bio-Inspired Embodied Intelligence — Drawing inspiration from biological locomotion and sensorimotor systems, we design control architectures that enable robust and adaptive walking, running, climbing, and manipulation on diverse terrains. Our work combines reinforcement learning, model predictive control, and sim-to-real transfer to achieve agile and stable locomotion for legged robots in unstructured environments.
  4. +
  5. Multimodal Learning for Humanoid Robots — We investigate how robots can learn from multimodal sensory inputs (RGB-D cameras, IMUs, tactile sensors, force/torque sensors) to build rich world models that support whole-body planning and contact-rich manipulation. Our research enables humanoid robots to perform tasks in human-centric environments such as homes, offices, and warehouses.
  6. +
+
+ +

+ Embodied AI for Humanoid/Legged Robotics +

+
Embodied AI for Humanoid/Legged Robotics
+ +← Back to all Research Directions + +{% include section.html %} + +## Demo Video + +

+ +

+
Embodied AI for Humanoid/Legged Robotics — TAS Lab, PolyU
+ +{% include section.html %} + +## Key Research Directions + + + +{% include section.html %} + +## Target Applications + + + +{% include section.html %} + +## Selected Publications (*: Corresponding author) + + + + + +

→ Full publication list

+ +{% include section.html %} + +## Acknowledgement and Collaborators + +
+This research is supported by The Hong Kong Polytechnic University and industry partners. We collaborate with leading research groups in embodied AI and robotics worldwide. +
+ +{% include section.html %} + +{% assign posts = site.posts | where: "research_direction", "humanoid" | sort: "date" | reverse %} + +## Projects ({{ posts.size }}) + +{% for post in posts %} + {% include post-excerpt.html title=post.title url=post.url image=post.image content=post.content excerpt=post.excerpt date=post.date author=post.author tags=post.tags last_modified_at=post.last_modified_at %} +{% endfor %} diff --git a/research/index.md b/research/index.md index 4f3eaf54..413243c9 100644 --- a/research/index.md +++ b/research/index.md @@ -1,35 +1,190 @@ --- -title: Publications +title: Research nav: - order: 2 - tooltip: Published works + order: 3 + tooltip: Research projects and directions --- -# {% include icon.html icon="fa-solid fa-microscope" %}Publications +# {% include icon.html icon="fa-solid fa-flask" %}Research Topics - +
+Our research aims to build algorithm foundations for embodied AI that enable trustworthy perception, navigation, and control of autonomous systems. We develop practical embodied AI-driven autonomous systems — including drones, intelligent vehicles, and legged/humanoid robots — with end-to-end learning and safety certification capabilities, enabling them to perceive, reason, and interact with the physical world safely and reliably for the future society. Our work spans large AI models for autonomous systems, foundation models and vision-language-action models for robotic perception and control, AI-enabled multi-sensor fusion, and software-hardware co-design for efficient embodied AI systems. +
+ +
+Research Directions:
+1) 3D LiDAR Aided GNSS Positioning — AI-driven GNSS positioning (RTK, PPP, PPP-RTK), 3D LiDAR aided NLOS/multipath mitigation, multi-sensor fusion for robust urban navigation;
+2) Safety-certifiable Multi-Sensor Fusion — safety-certifiable AI for autonomous navigation, AI-enabled multi-sensor fusion (LiDAR/Camera/IMU/GNSS), integrity monitoring and navigation-control joint optimization;
+3) End-to-End and Safety-Certifiable Autonomous Vehicles — end-to-end learning for self-driving, safety certification for logistics applications, V2X-assisted connected autonomous driving;
+4) Embodied AI for Humanoid/Legged Robotics — large AI models and vision-language-action models for robotic perception and control, bio-inspired embodied intelligence, multimodal learning for humanoid/legged robots;
+5) Embodied Drones for City Maintenance and Manipulation — intelligent drones and UAV swarm systems, aerial manipulation for urban infrastructure, software-hardware co-design for efficient embodied AI drone systems;
+6) Embodied AI for Robotics Education — AI-powered robotics education platforms, hands-on project-based learning with drones and ground robots, GitHub-based collaborative learning pedagogy, bridging academic research and industry-ready skills. +
{% include section.html %} -## Videos + - -
- - -
+
-
- - -
+ +
+
+ 3D LiDAR Aided GNSS Positioning for Robotics Navigation +
+
+

3D LiDAR Aided GNSS Positioning for Robotics Navigation

+ › Find out more +
+
+
-{% include section.html %} + +
+
+ Safety-certifiable Multi-Sensor Fusion +
+
+

Safety-certifiable Multi-Sensor Fusion for Robotic Navigation in Urban Scenes

+ › Find out more +
+
+
-## All + +
+
+ End-to-End Autonomous Vehicles +
+
+

End-to-End and Safety-Certifiable Autonomous Vehicles for Logistics Applications

+ › Find out more +
+
+
-{% include search-box.html %} + +
+
+ Embodied AI for Humanoid/Legged Robotics +
+
+

Embodied AI for Humanoid/Legged Robotics

+ › Find out more +
+
+
-{% include search-info.html %} + +
+
+ Embodied Drones for City Maintenance and Manipulation +
+
+

Embodied Drones for City Maintenance and Manipulation

+ › Find out more +
+
+
-{% include list.html data="citations" component="citation" style="rich" %} + +
+
+ Embodied AI for Robotics Education +
+
+

Embodied AI for Robotics Education

+ › Find out more +
+
+
+ +
diff --git a/research/vehicles.md b/research/vehicles.md new file mode 100644 index 00000000..4daf1bc4 --- /dev/null +++ b/research/vehicles.md @@ -0,0 +1,128 @@ +--- +title: End-to-End Autonomous Vehicles +--- + +# 🚗 End-to-End and Safety-Certifiable Autonomous Vehicles for Logistics Applications + +
+Autonomous vehicles hold transformative potential for logistics and urban mobility, yet deploying them safely in real-world environments remains a grand challenge. This research focuses on developing end-to-end learning frameworks and safety-certifiable navigation systems for autonomous vehicles in logistics applications — from campus delivery and last-mile transportation to urban freight operations. +
+ +
+Our approach integrates three core elements: +
    +
  1. End-to-End Autonomous Driving — We develop neural network architectures that learn to drive directly from raw sensor inputs (LiDAR, camera, IMU, GNSS) to control outputs, enabling autonomous vehicles to handle complex urban scenarios including dense traffic, dynamic obstacles, and GPS-degraded environments. Our end-to-end pipelines unify perception, prediction, planning, and control into a single differentiable framework.
  2. +
  3. Safety Certification and Integrity Monitoring — Unlike conventional black-box approaches, our systems incorporate rigorous safety certification mechanisms. We design integrity monitoring algorithms that quantify the trustworthiness of navigation solutions in real time, enabling the vehicle to detect unsafe states and trigger fail-safe maneuvers. This is critical for logistics applications where reliability and regulatory compliance are paramount.
  4. +
  5. Real-World Deployment for Logistics — We bridge the gap between research and application by developing full-stack autonomous vehicle platforms for logistics use cases, including campus patrol, autonomous delivery, and connected fleet management. Our platforms feature multi-sensor fusion (GNSS-RTK/LiDAR/Camera/IMU), V2X communication, and robust localization in challenging urban canyon environments.
  6. +
+
+ +

+ End-to-End Autonomous Vehicles +

+
End-to-End and Safety-Certifiable Autonomous Vehicles for Logistics Applications
+ +

+ Autonomous Vehicle Platform +

+
Autonomous Vehicle Platform for Campus Logistics and Urban Navigation
+ +← Back to all Research Directions + +{% include section.html %} + +## Demo Videos & Photos + +

+ Campus UGV patrol demonstration +

+
Campus Security Patrol Demonstration with UGV — PolyU AAE/CFSO, Sept 2022
+ +

+ +

+
Autonomous Driving Test — TAS Lab, PolyU
+ +

Autonomous driving PolyU campus demo

Localization and Control

Perception and Control
+ +{% include section.html %} + +## News + + + +{% include section.html %} + +## Selected Publications (*: Corresponding author) + + + + + +

→ Full publication list

+ +{% include section.html %} + +## Acknowledgement and Collaborators + +
+This research is supported by government and industry partners, including The Hong Kong Polytechnic University, Guangdong Basic and Applied Basic Research Foundation, Hong Kong Smart Traffic Fund, Innovation and Technology Fund, Huawei Technologies, Meituan, Tencent, and iDriverplus. We also collaborate closely with the Mechanical Systems Control Lab at the University of California, Berkeley, and Chemnitz University of Technology in Germany.
+ +

+ Funding and Collaborators +

+ +{% include section.html %} + +{% assign posts = site.posts | where: "research_direction", "vehicles" | sort: "date" | reverse %} + +## Projects ({{ posts.size }}) + +{% for post in posts %} + {% include post-excerpt.html title=post.title url=post.url image=post.image content=post.content excerpt=post.excerpt date=post.date author=post.author tags=post.tags last_modified_at=post.last_modified_at %} +{% endfor %} diff --git a/teaching/index.md b/teaching/index.md new file mode 100644 index 00000000..1321d2d8 --- /dev/null +++ b/teaching/index.md @@ -0,0 +1,286 @@ +--- +title: Teaching +nav: + order: 5 + tooltip: Courses and student supervision +--- + +# {% include icon.html icon="fa-solid fa-chalkboard-teacher" %}Teaching + +
+Teaching is one of the most important parts of our academic mission. We are passionate about inspiring young students to explore the frontiers of aerospace engineering, AI, and autonomous systems. +
+ +{% include section.html %} + +## 📚 Courses + + + +
+ +
+
+ AAE4011 + S2 2024/25 +
+
+

Artificial Intelligence in Unmanned Autonomous Systems

+

Undergraduate course · PolyU AAE

+

Covers AI fundamentals for autonomous drones and ground robots, including perception, planning, and decision-making using deep learning and reinforcement learning.

+
+
+ +
+
+ AAE4203 + S1 2024/25 · S2 2023/24 · S1 2022/23 · S2 2021/22 +
+
+

Guidance and Navigation

+

Undergraduate course · PolyU AAE

+

GNSS positioning (SPP, DGNSS, RTK), visual navigation, state estimation using Kalman filtering and factor graph optimization. Includes video lectures and hands-on tutorials.

+ › View lecture videos on YouTube +
+
+ +
+
+ AAE3004 + S1 2023/24 +
+
+

Dynamical Systems and Control

+

Undergraduate course · PolyU AAE

+

Fundamentals of dynamical systems modeling, stability analysis, and feedback control design for aerospace and robotics applications.

+
+
+ +
+
+ AAE2004 + 2022 +
+
+

Introduction to Aviation System and Air Transport Regulation

+

Undergraduate course · PolyU AAE

+

Path planning for aerospace and aviation systems, introducing foundational concepts in aviation regulation and system design.

+
+
+ +
+
+ AAE4002 + 2021 – Present +
+
+

Undergraduate Capstone Project

+

Final Year Project supervision · PolyU AAE

+

Supervising undergraduate capstone projects on UAV systems, multi-sensor fusion, autonomous vehicles, and robotic perception.

+
+
+ +
+
+ AAE6102 + Invited Lecture +
+
+

Satellite Communication and Navigation

+

Postgraduate course · PolyU AAE

+

Invited lecture on advanced GNSS positioning techniques, multi-sensor integration, and AI-aided navigation in urban environments.

+
+
+ +
+ +{% include section.html %} + +## 💡 Teaching Innovations + +
+ +
+
🎓
+
Programme Development Leadership
+
As Associate Programme Leader, drafted MSc in Low-altitude Economy proposal to meet emerging industry needs in drone technology and urban air mobility.
+
+ +
+
💻
+
GitHub-based Learning Pedagogy
+
Pioneered GitHub-based collaborative learning with 50+ code examples; adopted by Wuhan University, Beihang University, and UC Berkeley.
+
+ +
+
🤖
+
Hands-on Project-Based Learning
+
ROS car projects, drone programming workshops (PX4/ArduPilot), MATLAB/Python demonstrations, and deep learning frameworks integration.
+
+ +
+ +{% include section.html %} + +## 🏅 Student Supervision Highlights + +
+ +
+ FYP + Reliable UAV Perception and Perching Solutions in Urban Areas — ZHAO Jiaqi, LI Mingjue To, FU Chenlei (AAE10, 2024/25) +
+ +
+ FYP + Handheld Multi-sensor Fusion Mapping System — QIN Qijun, WANG Yuteng (AAE11, 2024/25) +
+ +
+ URIS + A High-Definition Map with Traffic Signs Based on LiDAR-Visual-IMU Fusion SLAM — QIN Qijun (Merit Award, Best URIS Research Project 2024) +
+ +
+ FYP + An Adaptive Drilling Process for the Aircraft Skin — LAU Chun Ho, LEUNG Cheuk To, CHAN Hei Lam Joshua (DD01, 2022/23) +
+ +
+ FYP + UAS for Situation Awareness and Risk Assessment — LAM Yat Long, CHEN Yat Nam (AAE39, 2022/23) +
+ +
+ FYP + Person-following Mobile Robotics — MOHAMMAD Tamz (AAE33, 2021/22) +
+ +
diff --git a/team/index.md b/team/index.md index df8b51fa..b0753de1 100644 --- a/team/index.md +++ b/team/index.md @@ -7,61 +7,210 @@ nav: # {% include icon.html icon="fa-solid fa-users" %}Team - +
+ Team Banner +
-Our lab is made up of a highly engaged and collaborative team of researchers. We recognize that diverse teams do better research. We foster an environment where team members are treated equally, and where we respect and admire our differences. The team includes postdocs, students at all levels, staff, and our lab mascots. +Our lab is made up of a highly engaged and collaborative team of researchers. We recognize that diverse teams do better research. We foster an environment where team members are treated equally, and where we respect and admire our differences. The team includes postdocs, students at all levels, staff, and our lab mascots. +--- + + +{% assign pi_members = site.members | where: "role", "pi" %} +{% assign postdoc_members = site.members | where: "role", "postdoc" %} +{% assign phd_members = site.members | where: "role", "phd" %} +{% assign ms_members = site.members | where: "role", "ms" %} +{% assign phd_ms_count = phd_members.size | plus: ms_members.size %} +{% assign ra_members = site.members | where: "role", "ra" %} +{% assign under_members = site.members | where: "role", "under" %} +{% assign visiting_members = site.members | where: "role", "visiting" %} +{% assign alumni_members = site.members | where: "role", "alumni" %} + +
Faculty / Principal Investigator ({{ pi_members.size }})
+
+ {% include list_students.html data="members" component="portrait_students" filters="role == 'pi'" %} +
-
- Team Banner +
+

About Dr. Weisong Wen — Homepage

+

+Dr. Weisong Wen is an Assistant Professor at the Department of Aeronautical and Aviation Engineering, The Hong Kong Polytechnic University, and the Director of the Trustworthy AI and Autonomous Systems Laboratory (TAS Lab). He is also a member of IEEE and the Institute of Navigation (ION). Dr. Wen aims to build algorithm foundations for embodied AI that enable trustworthy perception, navigation, and control of autonomous systems. In particular, he aims to develop practical embodied AI-driven autonomous systems (drones, intelligent vehicles, and humanoid robots) with end-to-end learning and safety certification capabilities, enabling them to perceive, reason, and interact with the physical world safely and reliably for the future society. +

+

+Dr. Wen received a BEng degree in Mechanical Engineering from Beijing Information Science and Technology University (BISTU) in 2015, and an MEng degree from China Agricultural University (CAU) in 2017. He received a PhD degree from The Hong Kong Polytechnic University (PolyU) supervised by Dr. Li-Ta Hsu in 2020. He was also a visiting PhD student at the University of California, Berkeley (UC Berkeley) in 2018, supervised by Dr. Zhan and Prof. Tomizuka.

+

+He has published more than 62 SCI journal papers and 56 conference papers (total citations: 2,600+, h-index: 27) and has secured over HK$28M in research funding as PI. He was ranked among the World's Top 2% Most-cited Scientists by Stanford University in both 2023 and 2024. He won the Innovation Award from TechConnect 2021, the Best Presentation Award from ION in 2020, the Top Cited Paper Award from NAVIGATION (Journal of ION) in 2022, and the Faculty of Engineering Research Grant Achievement Award from PolyU in 2025. He is also an Associate Editor of IEEE Transactions on Vehicular Technology (JCR Q1, IF: 7.1).

-#### Faculty (Principal Investigator) -{% include list_pi.html data="members" component="portrait_pi" filters="role == 'pi'" %} -#### Postdoctoral Fellows -{% include list_students.html data="members" component="portrait_students" filters="role == 'postdoc'" %} -#### Ph.D./MPhil Students -{% include list_students.html data="members" component="portrait_students" filters="role == 'phd'" %} - -{% include list_students.html data="members" component="portrait_students" filters="role == 'ms'" %} -#### Research/Project Assistant -{% include list_students.html data="members" component="portrait_students" filters="role == 'ra'" %} -#### Undergraduate Students -{% include list_students.html data="members" component="portrait_students" filters="role == 'under'" %} -#### Visiting Scholar/Students -{% include list_students.html data="members" component="portrait_students" filters="role == 'visiting'" %} -#### Alumni -{% include list_students.html data="members" component="portrait_students" filters="role == 'alumni'" %} +
Postdoctoral Fellows ({{ postdoc_members.size }})
+
+ {% include list_students.html data="members" component="portrait_students" filters="role == 'postdoc'" %} +
+
Ph.D. / MPhil Students ({{ phd_ms_count }})
+
+ {% include list_students.html data="members" component="portrait_students" filters="role == 'phd'" %} + {% include list_students.html data="members" component="portrait_students" filters="role == 'ms'" %} +
+
Research / Project Assistant ({{ ra_members.size }})
+
+ {% include list_students.html data="members" component="portrait_students" filters="role == 'ra'" %} +
+
Undergraduate Students ({{ under_members.size }})
+
+ {% include list_students.html data="members" component="portrait_students" filters="role == 'under'" %} +
-#### Inclusion and diversity: +
Visiting Scholars / Students ({{ visiting_members.size }})
+
+ {% include list_students.html data="members" component="portrait_students" filters="role == 'visiting'" %} +
-To fulfill our mission to advance collaborative approaches and practical solutions to global poverty challenges, PolyU TAS Lab strives to foster diversity, equity, inclusion, and belonging in all we do. +
Alumni ({{ alumni_members.size }})
+
+ {% include list_students.html data="members" component="portrait_students" filters="role == 'alumni'" %} +
-We strive to do so as a moral imperative and also because: +--- -- Diversity drives richer ideas and solutions. +#### Inclusion and Diversity -- Equity ensures that all voices are heard and valued. +To fulfill our mission to advance collaborative approaches and practical solutions to global challenges, PolyU TAS Lab strives to foster diversity, equity, inclusion, and belonging in all we do. -- Inclusion results in a seat at the decision-making table. +We strive to do so as a moral imperative and also because: -- Belonging means that we all feel welcome and confident in our roles. +- Diversity drives richer ideas and solutions. +- Equity ensures that all voices are heard and valued. +- Inclusion results in a seat at the decision-making table. +- Belonging means that we all feel welcome and confident in our roles. As such, TAS Lab is committed to: -- Dedicating time and creating safe spaces for people to voice diverse perspectives in decision making, teaching, research, and in our work with community partners. - -- Acknowledging, working to understand, and repairing the power imbalances that have historically marginalized many voices, including in the field of international development. +- Dedicating time and creating safe spaces for people to voice diverse perspectives in decision making, teaching, research, and in our work with community partners. +- Acknowledging, working to understand, and repairing the power imbalances that have historically marginalized many voices, including in the field of international development. +- Progressively becoming more diverse, equitable, and inclusive, and ultimately becoming an anti-racist organization. -- Progressively becoming more diverse, equitable, and inclusive, and ultimately becoming an anti-racist organization. In this way, we aim for TAS Lab staff, students, and collaborators around the world to be able to design for a more equitable world. 
- +--- #### We are grateful for the continued support we receive from: @@ -75,7 +224,6 @@ In this way, we aim for TAS Lab staff, students, and collaborators around the wo
-
@@ -85,9 +233,7 @@ In this way, we aim for TAS Lab staff, students, and collaborators around the wo style="width: 100%; height: auto; object-fit: cover; max-width: 250px; margin: 30px auto; vertical-align: middle;"> -
-
@@ -99,4 +245,4 @@ In this way, we aim for TAS Lab staff, students, and collaborators around the wo style="width: 100%; height: auto; object-fit: cover; max-width: 120px; margin: 30px auto; vertical-align: middle;"> -
\ No newline at end of file +