AHCI RESEARCH GROUP
Publications
Papers published in international journals,
proceedings of conferences, workshops and books.
OUR RESEARCH
Scientific Publications
How to
You can use the tag cloud to select only the papers dealing with specific research topics.
You can expand the Abstract, Links and BibTeX record for each paper.
2023
Xu, M.; Niyato, D.; Chen, J.; Zhang, H.; Kang, J.; Xiong, Z.; Mao, S.; Han, Z.
Generative AI-Empowered Simulation for Autonomous Driving in Vehicular Mixed Reality Metaverses Journal Article
In: IEEE Journal on Selected Topics in Signal Processing, vol. 17, no. 5, pp. 1064–1079, 2023, ISSN: 1932-4553.
Abstract | Links | BibTeX | Tags: Auction theory, Autonomous driving, Autonomous Vehicles, Computation theory, Computational modelling, generative artificial intelligence, Job analysis, Metaverse, Metaverses, Mixed reality, Online systems, Roadside units, Task analysis
@article{xu_generative_2023,
  title    = {Generative {AI}-Empowered Simulation for Autonomous Driving in Vehicular Mixed Reality Metaverses},
  author   = {Xu, M. and Niyato, D. and Chen, J. and Zhang, H. and Kang, J. and Xiong, Z. and Mao, S. and Han, Z.},
  url      = {https://www.scopus.com/inward/record.uri?eid=2-s2.0-85164670036&doi=10.1109%2fJSTSP.2023.3293650&partnerID=40&md5=f28390de62f0f44c38a902e6c32dcd16},
  doi      = {10.1109/JSTSP.2023.3293650},
  issn     = {1932-4553},
  year     = {2023},
  date     = {2023-01-01},
  journal  = {IEEE Journal on Selected Topics in Signal Processing},
  volume   = {17},
  number   = {5},
  pages    = {1064--1079},
  abstract = {In the vehicular mixed reality (MR) Metaverse, the discrepancy between physical and virtual entities can be overcome by fusing the physical and virtual environments with multi-dimensional communications in autonomous driving systems. Assisted by digital twin (DT) technologies, connected autonomous vehicles (AVs), roadside units (RSUs), and virtual simulators can maintain the vehicular MR Metaverse via simulations for sharing data and making driving decisions collaboratively. However, it is challenging and costly to enable large-scale traffic and driving simulation via realistic data collection and fusion from the physical world for online prediction and offline training in autonomous driving systems. In this paper, we propose an autonomous driving architecture, where generative AI is leveraged to synthesize unlimited conditioned traffic and driving data via simulations for improving driving safety and traffic control efficiency. First, we propose a multi-task DT offloading model for the reliable execution of heterogeneous DT tasks with different requirements at RSUs. Then, based on the preferences of AV's DTs and real-world data, virtual simulators can synthesize unlimited conditioned driving and traffic datasets for improved robustness. Finally, we propose a multi-task enhanced auction-based mechanism to provide fine-grained incentives for RSUs on providing resources for autonomous driving. The property analysis and experimental results demonstrate that the proposed mechanism and architecture are strategy-proof and effective. © 2007-2012 IEEE.},
  keywords = {Auction theory, Autonomous driving, Autonomous Vehicles, Computation theory, Computational modelling, generative artificial intelligence, Job analysis, Metaverse, Metaverses, Mixed reality, Online systems, Roadside units, Task analysis},
  pubstate = {published},
  tppubtype = {article},
  internal-note = {Author given names are initials only in the export; replace with full names if known. Only initials were available in the source record.}
}
2019
Caggianese, Giuseppe; Colonnese, Valerio; Gallo, Luigi
Situated Visualization in Augmented Reality: Exploring Information Seeking Strategies Proceedings Article
In: 2019 15th International Conference on Signal-Image Technology Internet-Based Systems (SITIS), pp. 390–395, 2019.
Abstract | Links | BibTeX | Tags: Augmented Reality, Human computer interaction, Task analysis, Visualization
@inproceedings{caggianeseSituatedVisualizationAugmented2019,
  title    = {Situated Visualization in Augmented Reality: Exploring Information Seeking Strategies},
  author   = {Caggianese, Giuseppe and Colonnese, Valerio and Gallo, Luigi},
  doi      = {10.1109/SITIS.2019.00069},
  year     = {2019},
  date     = {2019-11-01},
  booktitle = {2019 15th International Conference on Signal-Image Technology Internet-Based Systems (SITIS)},
  pages    = {390--395},
  abstract = {In recent years augmented reality applications have been increasingly demonstrating the requirement for an interaction with information related to and directly shown in the surrounding environment. Situated information is visualized in its semantic and spatial context, building up an environment enhanced by an information level that dynamically adapts to the production of the information and to the actions of the user. The exploration and manipulation of this type of data through see-through augmented reality devices still represents a challenging task. The development of specific interaction strategies capable to mitigating the current limitations of augmented reality devices is essential. In this context, our contribution has been to design possible solutions to address some of these challenges allowing a dynamic interaction with situated information. Following the visual "information-seeking mantra" proposed by Shneiderman and introducing some "superpowers" for the users, in this work we present different strategies aimed at obtaining an overview and filtering, and acquiring details of a collection of situated data.},
  keywords = {Augmented Reality, Human computer interaction, Task analysis, Visualization},
  pubstate = {published},
  tppubtype = {inproceedings},
  internal-note = {Duplicate of entry caggianese_situated_2019 (same work, different citation key) — consolidate to a single key.}
}
Caggianese, Giuseppe; Colonnese, Valerio; Gallo, Luigi
Situated Visualization in Augmented Reality: Exploring Information Seeking Strategies Proceedings Article
In: 2019 15th International Conference on Signal-Image Technology Internet-Based Systems (SITIS), pp. 390–395, 2019.
Abstract | Links | BibTeX | Tags: Augmented Reality, Human computer interaction, Task analysis, Visualization
@inproceedings{caggianese_situated_2019,
  title    = {Situated Visualization in Augmented Reality: Exploring Information Seeking Strategies},
  author   = {Caggianese, Giuseppe and Colonnese, Valerio and Gallo, Luigi},
  doi      = {10.1109/SITIS.2019.00069},
  year     = {2019},
  date     = {2019-11-01},
  booktitle = {2019 15th International Conference on Signal-Image Technology Internet-Based Systems (SITIS)},
  pages    = {390--395},
  abstract = {In recent years augmented reality applications have been increasingly demonstrating the requirement for an interaction with information related to and directly shown in the surrounding environment. Situated information is visualized in its semantic and spatial context, building up an environment enhanced by an information level that dynamically adapts to the production of the information and to the actions of the user. The exploration and manipulation of this type of data through see-through augmented reality devices still represents a challenging task. The development of specific interaction strategies capable to mitigating the current limitations of augmented reality devices is essential. In this context, our contribution has been to design possible solutions to address some of these challenges allowing a dynamic interaction with situated information. Following the visual "information-seeking mantra" proposed by Shneiderman and introducing some "superpowers" for the users, in this work we present different strategies aimed at obtaining an overview and filtering, and acquiring details of a collection of situated data.},
  keywords = {Augmented Reality, Human computer interaction, Task analysis, Visualization},
  pubstate = {published},
  tppubtype = {inproceedings},
  internal-note = {Duplicate of entry caggianeseSituatedVisualizationAugmented2019 (same work, different citation key) — consolidate to a single key.}
}