{"id":8526,"date":"2024-11-05T15:36:05","date_gmt":"2024-11-05T20:36:05","guid":{"rendered":"https:\/\/labs.icahn.mssm.edu\/minervalab\/?page_id=8526"},"modified":"2026-02-23T13:41:35","modified_gmt":"2026-02-23T18:41:35","slug":"hardware-technical-specs","status":"publish","type":"page","link":"https:\/\/labs.icahn.mssm.edu\/minervalab\/resources\/hardware-technical-specs\/","title":{"rendered":"Hardware and Technical Specs"},"content":{"rendered":"<p>[et_pb_section bb_built=&#8221;1&#8243; fullwidth=&#8221;on&#8221; _builder_version=&#8221;4.16&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221; next_background_color=&#8221;#000000&#8243;][et_pb_fullwidth_menu menu_id=&#8221;15&#8243; menu_style=&#8221;centered&#8221; fullwidth_menu=&#8221;on&#8221; active_link_color=&#8221;#d80b8c&#8221; dropdown_menu_bg_color=&#8221;#221f72&#8243; dropdown_menu_line_color=&#8221;#221f72&#8243; dropdown_menu_active_link_color=&#8221;#d80b8c&#8221; _builder_version=&#8221;4.16&#8243; _module_preset=&#8221;default&#8221; menu_font=&#8221;|600|||||||&#8221; menu_text_color=&#8221;#FFFFFF&#8221; menu_font_size=&#8221;16px&#8221; background_color=&#8221;#221f72&#8243; background_layout=&#8221;dark&#8221; sticky_position=&#8221;top&#8221; global_colors_info=&#8221;{}&#8221;]<\/p>\n<p>[\/et_pb_fullwidth_menu][\/et_pb_section][et_pb_section bb_built=&#8221;1&#8243; _builder_version=&#8221;4.16&#8243; _module_preset=&#8221;default&#8221; custom_padding=&#8221;0px||0px||false|false&#8221; global_colors_info=&#8221;{}&#8221; prev_background_color=&#8221;#000000&#8243; next_background_color=&#8221;#000000&#8243;][et_pb_row _builder_version=&#8221;4.16&#8243; _module_preset=&#8221;default&#8221; custom_padding=&#8221;||0px||false|false&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_column type=&#8221;4_4&#8243; _builder_version=&#8221;4.16&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_text 
admin_label=&#8221;Breadcrumb&#8221; _builder_version=&#8221;4.16&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;]<\/p>\n<p><a href=\"https:\/\/labs.icahn.mssm.edu\/minervalab\/scientific-computing-and-data\/\">Scientific Computing and Data<\/a>\u00a0\/\u00a0<a href=\"https:\/\/labs.icahn.mssm.edu\/minervalab\/\">High Performance Computing<\/a> \/ Hardware and Technical Specs<\/p>\n<p>[\/et_pb_text][\/et_pb_column][\/et_pb_row][\/et_pb_section][et_pb_section bb_built=&#8221;1&#8243; _builder_version=&#8221;4.16&#8243; global_colors_info=&#8221;{}&#8221; prev_background_color=&#8221;#000000&#8243; next_background_color=&#8221;#000000&#8243;][et_pb_row _builder_version=&#8221;4.16&#8243; background_size=&#8221;initial&#8221; background_position=&#8221;top_left&#8221; background_repeat=&#8221;repeat&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_column type=&#8221;4_4&#8243; _builder_version=&#8221;4.16&#8243; custom_padding=&#8221;|||&#8221; global_colors_info=&#8221;{}&#8221; custom_padding__hover=&#8221;|||&#8221;][et_pb_text admin_label=&#8221;Hardware and Specs Before the Image&#8221; _builder_version=&#8221;4.27.4&#8243; header_font=&#8221;|600|||||||&#8221; header_text_color=&#8221;#221f72&#8243; header_2_text_color=&#8221;#221f72&#8243; header_2_font_size=&#8221;24px&#8221; background_size=&#8221;initial&#8221; background_position=&#8221;top_left&#8221; background_repeat=&#8221;repeat&#8221; global_colors_info=&#8221;{}&#8221; background_pattern_color=&#8221;rgba(0,0,0,0.2)&#8221; background_mask_color=&#8221;#ffffff&#8221; text_text_shadow_horizontal_length=&#8221;text_text_shadow_style,%91object Object%93&#8243; text_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; text_text_shadow_vertical_length=&#8221;text_text_shadow_style,%91object Object%93&#8243; text_text_shadow_vertical_length_tablet=&#8221;0px&#8221; text_text_shadow_blur_strength=&#8221;text_text_shadow_style,%91object Object%93&#8243; 
text_text_shadow_blur_strength_tablet=&#8221;1px&#8221; link_text_shadow_horizontal_length=&#8221;link_text_shadow_style,%91object Object%93&#8243; link_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; link_text_shadow_vertical_length=&#8221;link_text_shadow_style,%91object Object%93&#8243; link_text_shadow_vertical_length_tablet=&#8221;0px&#8221; link_text_shadow_blur_strength=&#8221;link_text_shadow_style,%91object Object%93&#8243; link_text_shadow_blur_strength_tablet=&#8221;1px&#8221; ul_text_shadow_horizontal_length=&#8221;ul_text_shadow_style,%91object Object%93&#8243; ul_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; ul_text_shadow_vertical_length=&#8221;ul_text_shadow_style,%91object Object%93&#8243; ul_text_shadow_vertical_length_tablet=&#8221;0px&#8221; ul_text_shadow_blur_strength=&#8221;ul_text_shadow_style,%91object Object%93&#8243; ul_text_shadow_blur_strength_tablet=&#8221;1px&#8221; ol_text_shadow_horizontal_length=&#8221;ol_text_shadow_style,%91object Object%93&#8243; ol_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; ol_text_shadow_vertical_length=&#8221;ol_text_shadow_style,%91object Object%93&#8243; ol_text_shadow_vertical_length_tablet=&#8221;0px&#8221; ol_text_shadow_blur_strength=&#8221;ol_text_shadow_style,%91object Object%93&#8243; ol_text_shadow_blur_strength_tablet=&#8221;1px&#8221; quote_text_shadow_horizontal_length=&#8221;quote_text_shadow_style,%91object Object%93&#8243; quote_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; quote_text_shadow_vertical_length=&#8221;quote_text_shadow_style,%91object Object%93&#8243; quote_text_shadow_vertical_length_tablet=&#8221;0px&#8221; quote_text_shadow_blur_strength=&#8221;quote_text_shadow_style,%91object Object%93&#8243; quote_text_shadow_blur_strength_tablet=&#8221;1px&#8221; header_text_shadow_horizontal_length=&#8221;header_text_shadow_style,%91object Object%93&#8243; header_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; 
header_text_shadow_vertical_length=&#8221;header_text_shadow_style,%91object Object%93&#8243; header_text_shadow_vertical_length_tablet=&#8221;0px&#8221; header_text_shadow_blur_strength=&#8221;header_text_shadow_style,%91object Object%93&#8243; header_text_shadow_blur_strength_tablet=&#8221;1px&#8221; header_2_text_shadow_horizontal_length=&#8221;header_2_text_shadow_style,%91object Object%93&#8243; header_2_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; header_2_text_shadow_vertical_length=&#8221;header_2_text_shadow_style,%91object Object%93&#8243; header_2_text_shadow_vertical_length_tablet=&#8221;0px&#8221; header_2_text_shadow_blur_strength=&#8221;header_2_text_shadow_style,%91object Object%93&#8243; header_2_text_shadow_blur_strength_tablet=&#8221;1px&#8221; header_3_text_shadow_horizontal_length=&#8221;header_3_text_shadow_style,%91object Object%93&#8243; header_3_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; header_3_text_shadow_vertical_length=&#8221;header_3_text_shadow_style,%91object Object%93&#8243; header_3_text_shadow_vertical_length_tablet=&#8221;0px&#8221; header_3_text_shadow_blur_strength=&#8221;header_3_text_shadow_style,%91object Object%93&#8243; header_3_text_shadow_blur_strength_tablet=&#8221;1px&#8221; header_4_text_shadow_horizontal_length=&#8221;header_4_text_shadow_style,%91object Object%93&#8243; header_4_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; header_4_text_shadow_vertical_length=&#8221;header_4_text_shadow_style,%91object Object%93&#8243; header_4_text_shadow_vertical_length_tablet=&#8221;0px&#8221; header_4_text_shadow_blur_strength=&#8221;header_4_text_shadow_style,%91object Object%93&#8243; header_4_text_shadow_blur_strength_tablet=&#8221;1px&#8221; header_5_text_shadow_horizontal_length=&#8221;header_5_text_shadow_style,%91object Object%93&#8243; header_5_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; header_5_text_shadow_vertical_length=&#8221;header_5_text_shadow_style,%91object 
Object%93&#8243; header_5_text_shadow_vertical_length_tablet=&#8221;0px&#8221; header_5_text_shadow_blur_strength=&#8221;header_5_text_shadow_style,%91object Object%93&#8243; header_5_text_shadow_blur_strength_tablet=&#8221;1px&#8221; header_6_text_shadow_horizontal_length=&#8221;header_6_text_shadow_style,%91object Object%93&#8243; header_6_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; header_6_text_shadow_vertical_length=&#8221;header_6_text_shadow_style,%91object Object%93&#8243; header_6_text_shadow_vertical_length_tablet=&#8221;0px&#8221; header_6_text_shadow_blur_strength=&#8221;header_6_text_shadow_style,%91object Object%93&#8243; header_6_text_shadow_blur_strength_tablet=&#8221;1px&#8221; box_shadow_horizontal_tablet=&#8221;0px&#8221; box_shadow_vertical_tablet=&#8221;0px&#8221; box_shadow_blur_tablet=&#8221;40px&#8221; box_shadow_spread_tablet=&#8221;0px&#8221; vertical_offset_tablet=&#8221;0&#8243; horizontal_offset_tablet=&#8221;0&#8243; z_index_tablet=&#8221;0&#8243;]<\/p>\n<h1>Hardware and Technical Specs<\/h1>\n<ul>\n<li>The Minerva supercomputer is maintained by Scientific Computing and Data (SCD) at the Icahn School of Medicine, Mount Sinai.<\/li>\n<li>Minerva was created in 2012 and has been upgraded several times (most recently in Nov. 2024 and Feb. 2026) and has over 20 petaflops of compute power.<\/li>\n<li>It consists of 25,584 Intel Platinum processors in different generations including 2.1 GHz, 2.3GHz, 2.6 GHz, and 2.9 GHz compute cores (112 cores or 96 cores or 64 cores or 48 cores per node with two sockets in each node) with 1.5 terabytes (TB) or 2 TB of memory per node. 
In addition, Minerva also has 408 graphics processing units (GPUs), including 48 Nvidia B200s, 236 H100 Nvidia GPUs, 32 L40S Nvidia servers, 44 A100 Nvidia GPUs, 48 V100 Nvidia GPUs, 452 TB of total memory, and 32 petabytes of spinning storage accessed via IBM\u2019s Spectrum Scale\/General Parallel File System (GPFS).<\/li>\n<li>Minerva has contributed to over 2,100 peer-reviewed publications since 2012.<\/li>\n<\/ul>\n<p>The following diagram shows the overall Minerva configuration:<\/p>\n<p>[\/et_pb_text][et_pb_image src=&#8221;https:\/\/labs.icahn.mssm.edu\/minervalab\/wp-content\/uploads\/sites\/342\/2025\/02\/Minerva-diagram-2025.jpg&#8221; title_text=&#8221;Minerva-diagram-2025&#8243; align=&#8221;center&#8221; _builder_version=&#8221;4.27.4&#8243; _module_preset=&#8221;default&#8221; width=&#8221;80%&#8221; max_width=&#8221;80%&#8221; custom_margin=&#8221;50px||50px||false|false&#8221; global_colors_info=&#8221;{}&#8221;]<\/p>\n<p>[\/et_pb_image][et_pb_text admin_label=&#8221;Chimera&#8221; _builder_version=&#8221;4.27.4&#8243; header_font=&#8221;|600|||||||&#8221; header_text_color=&#8221;#221f72&#8243; header_2_text_color=&#8221;#221f72&#8243; header_2_font_size=&#8221;24px&#8221; background_size=&#8221;initial&#8221; background_position=&#8221;top_left&#8221; background_repeat=&#8221;repeat&#8221; global_colors_info=&#8221;{}&#8221; background_pattern_color=&#8221;rgba(0,0,0,0.2)&#8221; background_mask_color=&#8221;#ffffff&#8221; text_text_shadow_horizontal_length=&#8221;text_text_shadow_style,%91object Object%93&#8243; text_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; text_text_shadow_vertical_length=&#8221;text_text_shadow_style,%91object Object%93&#8243; text_text_shadow_vertical_length_tablet=&#8221;0px&#8221; text_text_shadow_blur_strength=&#8221;text_text_shadow_style,%91object Object%93&#8243; text_text_shadow_blur_strength_tablet=&#8221;1px&#8221; link_text_shadow_horizontal_length=&#8221;link_text_shadow_style,%91object 
Object%93&#8243; link_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; link_text_shadow_vertical_length=&#8221;link_text_shadow_style,%91object Object%93&#8243; link_text_shadow_vertical_length_tablet=&#8221;0px&#8221; link_text_shadow_blur_strength=&#8221;link_text_shadow_style,%91object Object%93&#8243; link_text_shadow_blur_strength_tablet=&#8221;1px&#8221; ul_text_shadow_horizontal_length=&#8221;ul_text_shadow_style,%91object Object%93&#8243; ul_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; ul_text_shadow_vertical_length=&#8221;ul_text_shadow_style,%91object Object%93&#8243; ul_text_shadow_vertical_length_tablet=&#8221;0px&#8221; ul_text_shadow_blur_strength=&#8221;ul_text_shadow_style,%91object Object%93&#8243; ul_text_shadow_blur_strength_tablet=&#8221;1px&#8221; ol_text_shadow_horizontal_length=&#8221;ol_text_shadow_style,%91object Object%93&#8243; ol_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; ol_text_shadow_vertical_length=&#8221;ol_text_shadow_style,%91object Object%93&#8243; ol_text_shadow_vertical_length_tablet=&#8221;0px&#8221; ol_text_shadow_blur_strength=&#8221;ol_text_shadow_style,%91object Object%93&#8243; ol_text_shadow_blur_strength_tablet=&#8221;1px&#8221; quote_text_shadow_horizontal_length=&#8221;quote_text_shadow_style,%91object Object%93&#8243; quote_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; quote_text_shadow_vertical_length=&#8221;quote_text_shadow_style,%91object Object%93&#8243; quote_text_shadow_vertical_length_tablet=&#8221;0px&#8221; quote_text_shadow_blur_strength=&#8221;quote_text_shadow_style,%91object Object%93&#8243; quote_text_shadow_blur_strength_tablet=&#8221;1px&#8221; header_text_shadow_horizontal_length=&#8221;header_text_shadow_style,%91object Object%93&#8243; header_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; header_text_shadow_vertical_length=&#8221;header_text_shadow_style,%91object Object%93&#8243; header_text_shadow_vertical_length_tablet=&#8221;0px&#8221; 
header_text_shadow_blur_strength=&#8221;header_text_shadow_style,%91object Object%93&#8243; header_text_shadow_blur_strength_tablet=&#8221;1px&#8221; header_2_text_shadow_horizontal_length=&#8221;header_2_text_shadow_style,%91object Object%93&#8243; header_2_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; header_2_text_shadow_vertical_length=&#8221;header_2_text_shadow_style,%91object Object%93&#8243; header_2_text_shadow_vertical_length_tablet=&#8221;0px&#8221; header_2_text_shadow_blur_strength=&#8221;header_2_text_shadow_style,%91object Object%93&#8243; header_2_text_shadow_blur_strength_tablet=&#8221;1px&#8221; header_3_text_shadow_horizontal_length=&#8221;header_3_text_shadow_style,%91object Object%93&#8243; header_3_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; header_3_text_shadow_vertical_length=&#8221;header_3_text_shadow_style,%91object Object%93&#8243; header_3_text_shadow_vertical_length_tablet=&#8221;0px&#8221; header_3_text_shadow_blur_strength=&#8221;header_3_text_shadow_style,%91object Object%93&#8243; header_3_text_shadow_blur_strength_tablet=&#8221;1px&#8221; header_4_text_shadow_horizontal_length=&#8221;header_4_text_shadow_style,%91object Object%93&#8243; header_4_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; header_4_text_shadow_vertical_length=&#8221;header_4_text_shadow_style,%91object Object%93&#8243; header_4_text_shadow_vertical_length_tablet=&#8221;0px&#8221; header_4_text_shadow_blur_strength=&#8221;header_4_text_shadow_style,%91object Object%93&#8243; header_4_text_shadow_blur_strength_tablet=&#8221;1px&#8221; header_5_text_shadow_horizontal_length=&#8221;header_5_text_shadow_style,%91object Object%93&#8243; header_5_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; header_5_text_shadow_vertical_length=&#8221;header_5_text_shadow_style,%91object Object%93&#8243; header_5_text_shadow_vertical_length_tablet=&#8221;0px&#8221; header_5_text_shadow_blur_strength=&#8221;header_5_text_shadow_style,%91object 
Object%93&#8243; header_5_text_shadow_blur_strength_tablet=&#8221;1px&#8221; header_6_text_shadow_horizontal_length=&#8221;header_6_text_shadow_style,%91object Object%93&#8243; header_6_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; header_6_text_shadow_vertical_length=&#8221;header_6_text_shadow_style,%91object Object%93&#8243; header_6_text_shadow_vertical_length_tablet=&#8221;0px&#8221; header_6_text_shadow_blur_strength=&#8221;header_6_text_shadow_style,%91object Object%93&#8243; header_6_text_shadow_blur_strength_tablet=&#8221;1px&#8221; box_shadow_horizontal_tablet=&#8221;0px&#8221; box_shadow_vertical_tablet=&#8221;0px&#8221; box_shadow_blur_tablet=&#8221;40px&#8221; box_shadow_spread_tablet=&#8221;0px&#8221; vertical_offset_tablet=&#8221;0&#8243; horizontal_offset_tablet=&#8221;0&#8243; z_index_tablet=&#8221;0&#8243;]<\/p>\n<h2>Compute Nodes<\/h2>\n<h4><span style=\"color: #221f72\"><strong>Chimera Partition<\/strong><\/span><\/h4>\n<table style=\"height: 485px;width: 1172px\">\n<tbody>\n<tr style=\"height: 17px\">\n<td style=\"width: 520.047px;height: 17px\"><u><i>Added in Nov. 
2024.<\/i><\/u><\/td>\n<td style=\"width: 635.953px;height: 17px\"><u><i>Nodes purchased prior to 2024 and integrated to new NDR network via HDR 100Gb\/s.<\/i><\/u><\/td>\n<\/tr>\n<tr style=\"height: 440.25px\">\n<td style=\"width: 520.047px;height: 440.25px\">\n<ul>\n<li><b>4 login nodes <\/b>\u2013 Intel Emerald Rapids 8568Y+, 2.3GHz \u2013 96 cores with 512 GB memory per node.<\/li>\n<li><b>146 compute nodes* <\/b>\u2013 Intel Emerald Rapids 8568Y+, 2.3GHz\u2013 96 cores with 1.5 TB memory per node.\n<ul>\n<li>14,016 cores in total.<\/li>\n<\/ul>\n<\/li>\n<li><b>188 H100 in 47 nodes<\/b> \u2013 Intel ER 8568Y+, 2.3GHz\u2013 96 cores with 1.5 TB memory per node.\n<ul>\n<li>4 x H100-80GB(SXM5) NVLinked GPUs per node.<\/li>\n<\/ul>\n<\/li>\n<li><b>32 L40s GPUs in 4 nodes<\/b> \u2013 AMD Genoa 9334 2.7GHz \u2013 64 cores with 1.5TB memory per node.\n<ul>\n<li>8x L40s-48GB GPUs per node. L40s doesn\u2019t support FP64.<\/li>\n<\/ul>\n<\/li>\n<li><strong>3.84 TB Local NVME SSD (3.5TB usable) per node.<\/strong>\n<ul>\n<li>It can deliver a sustained read-write speed of 3.5 GB\/s in contrast with SATA SSDs that limit at 600 MB\/s.<\/li>\n<\/ul>\n<\/li>\n<li><strong>NDR InfiniBand<\/strong> fat tree fabric networking (400Gb\/s).\n<ul>\n<li>6 service nodes.<\/li>\n<li>295.5 TB memory in total.<\/li>\n<li>Direct water-cooling solution.<\/li>\n<li>New NFS storage (for users\u2019 home directories) \u2013140 TB usable.<\/li>\n<\/ul>\n<\/li>\n<\/ul>\n<\/td>\n<td style=\"width: 635.953px;height: 440.25px\">\n<ul>\n<li><b>33 high memory nodes<\/b> \u2013 Intel 8268 24C, 2.9GHz \u2013 1.5 TB memory.<\/li>\n<li><b>48 V100 GPUs in 12 nodes <\/b> \u2013 Intel 6142 16C, 2.6GHz \u2013 384 GB memory \u2013 4x V100-16 GB GPU.<\/li>\n<li><b>32 A100 GPUs in 8 nodes<\/b> \u2013 Intel 8268 24C, 2.9GHz \u2013 384 GB memory \u2013 4x A100-40 GB GPU.\u00a0 1.92TB SSD (1.8 TB usable) per node.<\/li>\n<li><b>8 A100 GPUs in 2 nodes<\/b> \u2013 Intel 8358 32C, 2.6GHz \u2013 2 TB memory \u2013 4x 
A100-80 GB GPU.\n<ul>\n<li>A100 is connected via NVLink.<\/li>\n<li>7.68 TB NVMe SSD (7.0TB usable) per node.<\/li>\n<\/ul>\n<\/li>\n<li><b>8 H100 GPUs in 2 nodes<\/b> &#8211; Intel 8358 32C, 2.6 GHz &#8211; 0.5 TB memory \u2013 4xH100-80GB GPU.\n<ul>\n<li>3.84 TB NVMe SSD (3.5 TB usable) per node.<\/li>\n<\/ul>\n<\/li>\n<li><strong>[Decommissioned on Nov. 5th 2024] 4 login nodes<\/strong> \u2013 Intel Xeon(R) Platinum 8168 24C, 2.7GHz \u2013 384 GB memory.<\/li>\n<li><strong>[Decommissioned on July 17th and Nov. 5th 2024] 275 compute nodes*<\/strong> \u2013 Intel 8168 24C, 2.7GHz \u2013 192 GB memory.\n<ul>\n<li>13,152 cores (48 cores per node).<\/li>\n<\/ul>\n<\/li>\n<li style=\"list-style-type: none;margin: -5px;padding: -5px\">*<em>Compute Node<\/em>\u00a0\u2014where you run your applications. Users do not have direct access to these machines. Access is managed through the LSF job scheduler.<\/li>\n<\/ul>\n<p>&nbsp;<\/p>\n<p>&nbsp;<\/p>\n<\/td>\n<\/tr>\n<tr style=\"height: 17px\">\n<td style=\"width: 520.047px;height: 17px\"><u><i>Added in Feb. 
2026.<\/i><\/u><\/td>\n<td style=\"width: 635.953px\"><b>\u00a0<\/b><\/td>\n<\/tr>\n<tr>\n<td style=\"width: 520.047px\">\n<ul>\n<li><b>48 B200 in 6 nodes<\/b> \u2013 Lenovo SR780a V3 DGX.\n<ul>\n<li>8x NVLinked B200 GPUs \u2013 192 GB memory per GPU, 9 TB GPU memory in total.<\/li>\n<li>112 Intel Xeon Platinum 8570 2.1GHz cores and 2 TB memory per node, 672 cores and 12 TB memory in total.<\/li>\n<li>25 TB NVMe SSD local storage per node.<\/li>\n<li>FP4 (4-bit floating point) format, enabling nearly an exaflop with FP4 for AI inference.<\/li>\n<\/ul>\n<\/li>\n<\/ul>\n<\/td>\n<td style=\"width: 635.953px\"><b>\u00a0<\/b><\/td>\n<\/tr>\n<\/tbody>\n<\/table>\n<p>[\/et_pb_text][et_pb_text admin_label=&#8221;Integration with Existing Chimera Nodes&#8221; _builder_version=&#8221;4.16&#8243; header_font=&#8221;|600|||||||&#8221; header_text_color=&#8221;#221f72&#8243; header_2_text_color=&#8221;#221f72&#8243; header_2_font_size=&#8221;24px&#8221; background_size=&#8221;initial&#8221; background_position=&#8221;top_left&#8221; background_repeat=&#8221;repeat&#8221; custom_margin=&#8221;10px||||false|false&#8221; global_colors_info=&#8221;{}&#8221;]<\/p>\n<ul>\n<li style=\"list-style-type: none;margin: -5px;padding: -5px\">\u00a0<\/li>\n<\/ul>\n<p>[\/et_pb_text][et_pb_text admin_label=&#8221;BODE2&#8243; _builder_version=&#8221;4.27.4&#8243; header_font=&#8221;|600|||||||&#8221; header_text_color=&#8221;#221f72&#8243; header_2_text_color=&#8221;#221f72&#8243; header_2_font_size=&#8221;24px&#8221; background_size=&#8221;initial&#8221; background_position=&#8221;top_left&#8221; background_repeat=&#8221;repeat&#8221; custom_margin=&#8221;10px||||false|false&#8221; global_colors_info=&#8221;{}&#8221;]<\/p>\n<p>&nbsp;<\/p>\n<table style=\"height: 54px;width: 1190px;border-color: #NaNNaNNaN;background-color: #ffffff\">\n<tbody>\n<tr style=\"height: 1px\">\n<td style=\"width: 430px;height: 1px\">\n<h4 style=\"text-align: center\"><span style=\"color: #221f72\"><strong>BODE2 
Partition<\/strong><\/span><\/h4>\n<\/td>\n<td style=\"width: 382.562px;height: 1px\">\n<h4 style=\"text-align: center\"><span style=\"color: #221f72\"><strong>CATS Partition<\/strong><\/span><\/h4>\n<\/td>\n<td style=\"width: 345.438px;height: 1px\">\n<h4 style=\"text-align: center\"><strong style=\"color: #221f72\">AIMS Partition<\/strong><\/h4>\n<\/td>\n<\/tr>\n<tr style=\"height: 13px\">\n<td style=\"width: 430px;height: 13px\">\n<h6 style=\"text-align: center\"><span style=\"color: #221f72\"><strong>[Decommissioned on July 17th and Nov. 5th, 2024].<\/strong><\/span><\/h6>\n<p><span style=\"color: #999999\">$2M S10 BODE2 awarded by NIH (Kovatch PI).<\/span><\/p>\n<ul>\n<li style=\"color: #999999\"><span style=\"color: #999999\">3,744 48-core 2.9 GHz Intel Cascade Lake 8268 processors in 78 nodes.<\/span><\/li>\n<\/ul>\n<\/td>\n<td style=\"width: 382.562px;height: 13px\">\n<p>$2M CATS awarded by NIH (Kovatch PI).<\/p>\n<ul type=\"disc\">\n<li class=\"m_4748730477779924083MsoListParagraph\">3,520 cores in 55 nodes \u2013 Intel IceLake 8358, 2.6GHz \u2013 64 cores with 1.5 TB memory per node.<\/li>\n<li class=\"m_4748730477779924083MsoListParagraph\">82.5 TB memory (collectively).<\/li>\n<li>Open to eligible NIH funded projects.<\/li>\n<\/ul>\n<\/td>\n<td style=\"width: 382.562px;height: 13px\">\n<p>$2M AIMS awarded by NIH (Kovatch PI), launched in Feb. 2026.<\/p>\n<ul type=\"disc\">\n<li class=\"m_4748730477779924083MsoListParagraph\">48 NVIDIA B200 GPUs in 6 nodes, 8 NVLinked per node, 9 TB total GPU memory.<\/li>\n<li class=\"m_4748730477779924083MsoListParagraph\">672 Intel Xeon Platinum 8570 cores, 12\u202fTB system memory, 25\u202fTB NVMe per node, supporting FP4 (4-bit) for near-exaflop AI inference.<\/li>\n<li>Open to eligible NIH funded GPU projects.<\/li>\n<\/ul>\n<\/td>\n<\/tr>\n<\/tbody>\n<\/table>\n<p>&nbsp;<\/p>\n<p>[\/et_pb_text][et_pb_text admin_label=&#8221;Summary&#8221; _builder_version=&#8221;4.27.4&#8243; header_font=&#8221;|600|||||||&#8221; 
header_text_color=&#8221;#221f72&#8243; header_2_text_color=&#8221;#221f72&#8243; header_2_font_size=&#8221;24px&#8221; background_size=&#8221;initial&#8221; background_position=&#8221;top_left&#8221; background_repeat=&#8221;repeat&#8221; custom_margin=&#8221;||||false|false&#8221; global_colors_info=&#8221;{}&#8221; background_pattern_color=&#8221;rgba(0,0,0,0.2)&#8221; background_mask_color=&#8221;#ffffff&#8221; text_text_shadow_horizontal_length=&#8221;text_text_shadow_style,%91object Object%93&#8243; text_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; text_text_shadow_vertical_length=&#8221;text_text_shadow_style,%91object Object%93&#8243; text_text_shadow_vertical_length_tablet=&#8221;0px&#8221; text_text_shadow_blur_strength=&#8221;text_text_shadow_style,%91object Object%93&#8243; text_text_shadow_blur_strength_tablet=&#8221;1px&#8221; link_text_shadow_horizontal_length=&#8221;link_text_shadow_style,%91object Object%93&#8243; link_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; link_text_shadow_vertical_length=&#8221;link_text_shadow_style,%91object Object%93&#8243; link_text_shadow_vertical_length_tablet=&#8221;0px&#8221; link_text_shadow_blur_strength=&#8221;link_text_shadow_style,%91object Object%93&#8243; link_text_shadow_blur_strength_tablet=&#8221;1px&#8221; ul_text_shadow_horizontal_length=&#8221;ul_text_shadow_style,%91object Object%93&#8243; ul_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; ul_text_shadow_vertical_length=&#8221;ul_text_shadow_style,%91object Object%93&#8243; ul_text_shadow_vertical_length_tablet=&#8221;0px&#8221; ul_text_shadow_blur_strength=&#8221;ul_text_shadow_style,%91object Object%93&#8243; ul_text_shadow_blur_strength_tablet=&#8221;1px&#8221; ol_text_shadow_horizontal_length=&#8221;ol_text_shadow_style,%91object Object%93&#8243; ol_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; ol_text_shadow_vertical_length=&#8221;ol_text_shadow_style,%91object Object%93&#8243; 
ol_text_shadow_vertical_length_tablet=&#8221;0px&#8221; ol_text_shadow_blur_strength=&#8221;ol_text_shadow_style,%91object Object%93&#8243; ol_text_shadow_blur_strength_tablet=&#8221;1px&#8221; quote_text_shadow_horizontal_length=&#8221;quote_text_shadow_style,%91object Object%93&#8243; quote_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; quote_text_shadow_vertical_length=&#8221;quote_text_shadow_style,%91object Object%93&#8243; quote_text_shadow_vertical_length_tablet=&#8221;0px&#8221; quote_text_shadow_blur_strength=&#8221;quote_text_shadow_style,%91object Object%93&#8243; quote_text_shadow_blur_strength_tablet=&#8221;1px&#8221; header_text_shadow_horizontal_length=&#8221;header_text_shadow_style,%91object Object%93&#8243; header_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; header_text_shadow_vertical_length=&#8221;header_text_shadow_style,%91object Object%93&#8243; header_text_shadow_vertical_length_tablet=&#8221;0px&#8221; header_text_shadow_blur_strength=&#8221;header_text_shadow_style,%91object Object%93&#8243; header_text_shadow_blur_strength_tablet=&#8221;1px&#8221; header_2_text_shadow_horizontal_length=&#8221;header_2_text_shadow_style,%91object Object%93&#8243; header_2_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; header_2_text_shadow_vertical_length=&#8221;header_2_text_shadow_style,%91object Object%93&#8243; header_2_text_shadow_vertical_length_tablet=&#8221;0px&#8221; header_2_text_shadow_blur_strength=&#8221;header_2_text_shadow_style,%91object Object%93&#8243; header_2_text_shadow_blur_strength_tablet=&#8221;1px&#8221; header_3_text_shadow_horizontal_length=&#8221;header_3_text_shadow_style,%91object Object%93&#8243; header_3_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; header_3_text_shadow_vertical_length=&#8221;header_3_text_shadow_style,%91object Object%93&#8243; header_3_text_shadow_vertical_length_tablet=&#8221;0px&#8221; header_3_text_shadow_blur_strength=&#8221;header_3_text_shadow_style,%91object 
Object%93&#8243; header_3_text_shadow_blur_strength_tablet=&#8221;1px&#8221; header_4_text_shadow_horizontal_length=&#8221;header_4_text_shadow_style,%91object Object%93&#8243; header_4_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; header_4_text_shadow_vertical_length=&#8221;header_4_text_shadow_style,%91object Object%93&#8243; header_4_text_shadow_vertical_length_tablet=&#8221;0px&#8221; header_4_text_shadow_blur_strength=&#8221;header_4_text_shadow_style,%91object Object%93&#8243; header_4_text_shadow_blur_strength_tablet=&#8221;1px&#8221; header_5_text_shadow_horizontal_length=&#8221;header_5_text_shadow_style,%91object Object%93&#8243; header_5_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; header_5_text_shadow_vertical_length=&#8221;header_5_text_shadow_style,%91object Object%93&#8243; header_5_text_shadow_vertical_length_tablet=&#8221;0px&#8221; header_5_text_shadow_blur_strength=&#8221;header_5_text_shadow_style,%91object Object%93&#8243; header_5_text_shadow_blur_strength_tablet=&#8221;1px&#8221; header_6_text_shadow_horizontal_length=&#8221;header_6_text_shadow_style,%91object Object%93&#8243; header_6_text_shadow_horizontal_length_tablet=&#8221;0px&#8221; header_6_text_shadow_vertical_length=&#8221;header_6_text_shadow_style,%91object Object%93&#8243; header_6_text_shadow_vertical_length_tablet=&#8221;0px&#8221; header_6_text_shadow_blur_strength=&#8221;header_6_text_shadow_style,%91object Object%93&#8243; header_6_text_shadow_blur_strength_tablet=&#8221;1px&#8221; box_shadow_horizontal_tablet=&#8221;0px&#8221; box_shadow_vertical_tablet=&#8221;0px&#8221; box_shadow_blur_tablet=&#8221;40px&#8221; box_shadow_spread_tablet=&#8221;0px&#8221; vertical_offset_tablet=&#8221;0&#8243; horizontal_offset_tablet=&#8221;0&#8243; z_index_tablet=&#8221;0&#8243;]<\/p>\n<h4 style=\"text-align: left\"><span style=\"color: #221f72\"><strong>Summary<\/strong><\/span><\/h4>\n<table style=\"height: 91px;width: 940.703px\">\n<tbody>\n<tr>\n<td style=\"width: 
428px\"><b>Total system memory\u00a0<\/b>(computes + GPU) =\u00a0<strong>452 TB\u00a0<\/strong><\/td>\n<td style=\"width: 493.703px\"><b>Total number of cores\u00a0<\/b>(computes + GPU) =\u00a0<strong>25,584 cores<\/strong><\/td>\n<\/tr>\n<tr>\n<td style=\"width: 428px\"><b>CPU Peak performance\u00a0<\/b>of all nodes = &gt; <strong>1.9 PFLOPS<\/strong><\/td>\n<td style=\"width: 493.703px\"><b>H100 Peak performance<\/b> based on FP64 Tensor cores = 15.2 PFLOPS.<\/td>\n<\/tr>\n<tr>\n<td style=\"width: 428px\">\u00a0<\/td>\n<td style=\"width: 493.703px\">\u00a0<\/td>\n<\/tr>\n<\/tbody>\n<\/table>\n<p>\u00a0<\/p>\n<p>[\/et_pb_text][et_pb_text admin_label=&#8221;File System Storage&#8221; _builder_version=&#8221;4.16&#8243; header_font=&#8221;|600|||||||&#8221; header_text_color=&#8221;#221f72&#8243; header_2_text_color=&#8221;#221f72&#8243; header_2_font_size=&#8221;24px&#8243; background_size=&#8221;initial&#8221; background_position=&#8221;top_left&#8221; background_repeat=&#8221;repeat&#8221; custom_margin=&#8221;10px||||false|false&#8221; global_colors_info=&#8221;{}&#8221;]<\/p>\n<h2>File System Storage<\/h2>\n<p>Minerva uses IBM\u2019s General Parallel File System (GPFS) because it has advantages that are specifically useful for informatics workflows that involve high speed metadata access, tiered storage, and sub-block allocation. Metadata is the information about the data in the file system, and it is stored in flash memory for fast access. A parallel file system was used for Minerva because NFS and other file systems cannot scale to the number of nodes or provide performance for the large number of files involved in typical genomics workflows.\u00a0<\/p>\n<p>Currently we have one parallel file system on Minerva, Arion, which users can access at \/sc\/arion. 
The Hydra file system was retired at the end of 2020.<\/p>\n<table style=\"border-collapse: collapse;width: 1079px;height: 157px\" border=\"0\" cellspacing=\"0\" cellpadding=\"0\">\n<colgroup>\n<col style=\"width: 65pt\" span=\"5\" width=\"65\" \/> <\/colgroup>\n<tbody>\n<tr style=\"height: 30.0pt\">\n<td class=\"xl64\" style=\"background-color: #00aeef;text-align: center;width: 157px\" height=\"30\"><strong><span style=\"color: #ffffff\">GPFS Name<\/span><\/strong><\/td>\n<td class=\"xl64\" style=\"background-color: #00aeef;text-align: center;width: 252.188px\"><strong><span style=\"color: #ffffff\">Lifetime<\/span><\/strong><\/td>\n<td class=\"xl64\" style=\"background-color: #00aeef;text-align: center;width: 212.812px\"><strong><span style=\"color: #ffffff\">Storage Type<\/span><\/strong><\/td>\n<td class=\"xl64\" style=\"background-color: #00aeef;width: 225px;text-align: right\"><strong><span style=\"color: #ffffff\">Raw PB<\/span><\/strong><\/td>\n<td class=\"xl64\" style=\"background-color: #00aeef;width: 220px;text-align: right\"><strong><span style=\"color: #ffffff\">Usable PB<\/span><\/strong><\/td>\n<\/tr>\n<tr style=\"height: 15.0pt\">\n<td class=\"xl63\" style=\"height: 15pt;width: 157px;text-align: center\" height=\"15\">Arion<\/td>\n<td class=\"xl63\" style=\"width: 252.188px;text-align: center\">2019 &#8211;<\/td>\n<td class=\"xl63\" style=\"width: 212.812px;text-align: center\">Lenovo DSS<\/td>\n<td class=\"xl63\" style=\"width: 225px;text-align: right\" align=\"right\">14<\/td>\n<td class=\"xl63\" style=\"width: 220px;text-align: right\" align=\"right\">9.6<\/td>\n<\/tr>\n<tr style=\"height: 30.0pt\">\n<td class=\"xl63\" style=\"height: 30pt;width: 157px;text-align: center\" height=\"30\">Arion<\/td>\n<td class=\"xl63\" style=\"width: 252.188px;text-align: center\">2019 &#8211;<\/td>\n<td class=\"xl63\" style=\"width: 212.812px;text-align: center\">Lenovo G201 flash<\/td>\n<td class=\"xl63\" style=\"width: 225px;text-align: right\" 
align=\"right\">0.12<\/td>\n<td class=\"xl63\" style=\"width: 220px;text-align: right\" align=\"right\">0.12<\/td>\n<\/tr>\n<tr style=\"height: 15.0pt\">\n<td class=\"xl63\" style=\"height: 15pt;width: 157px;text-align: center\" height=\"15\">Arion<\/td>\n<td class=\"xl63\" style=\"width: 252.188px;text-align: center\">2020 &#8211;<\/td>\n<td class=\"xl63\" style=\"width: 212.812px;text-align: center\">Lenovo DSS<\/td>\n<td class=\"xl63\" style=\"width: 225px;text-align: right\" align=\"right\">16<\/td>\n<td class=\"xl63\" style=\"width: 220px;text-align: right\" align=\"right\">11.2<\/td>\n<\/tr>\n<tr style=\"height: 15.0pt\">\n<td style=\"height: 15pt;width: 157px;text-align: center\" height=\"15\">Arion<\/td>\n<td style=\"width: 252.188px;text-align: center\">2021 &#8211;<\/td>\n<td class=\"xl63\" style=\"width: 212.812px;text-align: center\">Lenovo DSS<\/td>\n<td class=\"xl63\" style=\"width: 225px;text-align: right\" align=\"right\">16<\/td>\n<td class=\"xl63\" style=\"width: 220px;text-align: right\" align=\"right\">11.2<\/td>\n<\/tr>\n<tr style=\"height: 15.0pt\">\n<td style=\"height: 15pt;width: 157px\" height=\"15\">\u00a0<\/td>\n<td style=\"width: 252.188px\">\u00a0<\/td>\n<td class=\"xl63\" style=\"width: 212.812px;text-align: right\"><strong>Total<\/strong><\/td>\n<td class=\"xl63\" style=\"width: 225px;text-align: right\" align=\"right\"><strong>46<\/strong><\/td>\n<td class=\"xl63\" style=\"width: 220px;text-align: right\" align=\"right\"><strong>32<\/strong><\/td>\n<\/tr>\n<\/tbody>\n<\/table>\n<p>&nbsp;<\/p>\n<p>&nbsp;<\/p>\n<p>[\/et_pb_text][et_pb_text admin_label=&#8221;Acknowledgement&#8221; _builder_version=&#8221;4.16&#8243; header_font=&#8221;|600|||||||&#8221; header_text_color=&#8221;#221f72&#8243; header_2_text_color=&#8221;#221f72&#8243; header_2_font_size=&#8221;24px&#8221; background_size=&#8221;initial&#8221; background_position=&#8221;top_left&#8221; background_repeat=&#8221;repeat&#8221; custom_margin=&#8221;20px||||false|false&#8221; 
global_colors_info=&#8221;{}&#8221;]<\/p>\n<h2>Acknowledging Mount Sinai in Your Work<\/h2>\n<p>This work was supported by grant UL1TR004419 from the National Center for Advancing Translational Sciences, National Institutes of Health.<\/p>\n<p>Using the S10 BODE and CATS Minerva partitions requires acknowledgements of support by NIH in your publications. To assist, we have provided exact wording of acknowledgements required by NIH for use in publications and other work. <a href=\"https:\/\/labs.icahn.mssm.edu\/minervalab\/mount-sinai-data-warehouse-msdw\/acknowledge-scientific-computing-at-mount-sinai\/\">Click here to learn how to acknowledge Minerva and NIH support in your publications<\/a>.<\/p>\n<p>&nbsp;<\/p>\n<p>[\/et_pb_text][\/et_pb_column][\/et_pb_row][\/et_pb_section][et_pb_section bb_built=&#8221;1&#8243; _builder_version=&#8221;4.16&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221; prev_background_color=&#8221;#000000&#8243;][et_pb_row _builder_version=&#8221;4.16&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_column type=&#8221;4_4&#8243; _builder_version=&#8221;4.16&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;][et_pb_image _builder_version=&#8221;4.16&#8243; _module_preset=&#8221;default&#8221; global_colors_info=&#8221;{}&#8221;]<\/p>\n<p>[\/et_pb_image][\/et_pb_column][\/et_pb_row][\/et_pb_section]<\/p>\n","protected":false},"excerpt":{"rendered":"<p>Scientific Computing and Data\u00a0\/\u00a0High Performance Computing \/ Hardware and Technical Specs Hardware and Technical Specs The Minerva supercomputer is maintained by Scientific Computing and Data (SCD) at the Icahn School of Medicine, Mount Sinai. Minerva was created in 2012 and has been upgraded several times (most recently in Nov. 2024 and Feb. 
2026) and has [&hellip;]<\/p>\n","protected":false},"author":624,"featured_media":0,"parent":48,"menu_order":0,"comment_status":"closed","ping_status":"closed","template":"","meta":{"_et_pb_use_builder":"on","_et_pb_old_content":"","_et_gb_content_width":"","footnotes":""},"class_list":["post-8526","page","type-page","status-publish","hentry"],"aioseo_notices":[],"_links":{"self":[{"href":"https:\/\/labs.icahn.mssm.edu\/minervalab\/wp-json\/wp\/v2\/pages\/8526","targetHints":{"allow":["GET"]}}],"collection":[{"href":"https:\/\/labs.icahn.mssm.edu\/minervalab\/wp-json\/wp\/v2\/pages"}],"about":[{"href":"https:\/\/labs.icahn.mssm.edu\/minervalab\/wp-json\/wp\/v2\/types\/page"}],"author":[{"embeddable":true,"href":"https:\/\/labs.icahn.mssm.edu\/minervalab\/wp-json\/wp\/v2\/users\/624"}],"replies":[{"embeddable":true,"href":"https:\/\/labs.icahn.mssm.edu\/minervalab\/wp-json\/wp\/v2\/comments?post=8526"}],"version-history":[{"count":75,"href":"https:\/\/labs.icahn.mssm.edu\/minervalab\/wp-json\/wp\/v2\/pages\/8526\/revisions"}],"predecessor-version":[{"id":13426,"href":"https:\/\/labs.icahn.mssm.edu\/minervalab\/wp-json\/wp\/v2\/pages\/8526\/revisions\/13426"}],"up":[{"embeddable":true,"href":"https:\/\/labs.icahn.mssm.edu\/minervalab\/wp-json\/wp\/v2\/pages\/48"}],"wp:attachment":[{"href":"https:\/\/labs.icahn.mssm.edu\/minervalab\/wp-json\/wp\/v2\/media?parent=8526"}],"curies":[{"name":"wp","href":"https:\/\/api.w.org\/{rel}","templated":true}]}}