core.package_info#
Module Contents#
Data#
API#
- core.package_info.MAJOR#
0
- core.package_info.MINOR#
16
- core.package_info.PATCH#
0
- core.package_info.PRE_RELEASE#
'rc0'
- core.package_info.VERSION#
(0, 16, 0, 'rc0')
- core.package_info.__shortversion__#
'0.16.0'
- core.package_info.__version__#
'0.16.0rc0'
- core.package_info.__package_name__#
'megatron_core'
- core.package_info.__contact_names__#
'NVIDIA'
- core.package_info.__contact_emails__#
'nemo-toolkit@nvidia.com'
- core.package_info.__homepage__#
'https://docs.nvidia.com/megatron-core/developer-guide/latest/user-guide/index.html'
- core.package_info.__repository_url__#
'https://github.com/NVIDIA/Megatron-LM/tree/main/megatron/core'
- core.package_info.__download_url__#
'https://github.com/NVIDIA/Megatron-LM/releases'
- core.package_info.__description__#
'Megatron Core - a library for efficient and scalable training of transformer based models'
- core.package_info.__license__#
'BSD-3'
- core.package_info.__keywords__#
'deep learning, machine learning, gpu, NLP, NLU, language, transformer, nvidia, pytorch, torch'
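The derived values `VERSION`, `__shortversion__`, and `__version__` are composed from the numeric components `MAJOR`, `MINOR`, `PATCH`, and `PRE_RELEASE` listed above. The following is a minimal sketch of that composition for illustration; the exact expressions are an assumption, not quoted from the module source:

```python
# Minimal sketch (assumption, not quoted source): how the version fields
# above typically compose the derived version strings.
MAJOR = 0
MINOR = 16
PATCH = 0
PRE_RELEASE = 'rc0'

VERSION = (MAJOR, MINOR, PATCH, PRE_RELEASE)

# Numeric part only, e.g. '0.16.0'
__shortversion__ = '.'.join(map(str, VERSION[:3]))

# Numeric part plus the pre-release tag, e.g. '0.16.0rc0'
__version__ = __shortversion__ + ''.join(VERSION[3:])
```

At runtime the same metadata can be read from the installed package, e.g. `from megatron.core import package_info; print(package_info.__version__)` (assuming Megatron Core and its dependencies are installed).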