NimbleSM
NimbleSM is a solid mechanics simulation code for dynamic systems
nimble_kokkos::ModelData Class Reference

#include <nimble_kokkos_model_data.h>

Inheritance diagram for nimble_kokkos::ModelData:
nimble::ModelDataBase

Public Member Functions

 ModelData ()
 
 ~ModelData () override
 
int AllocateNodeData (nimble::Length length, std::string label, int num_objects) override
 Allocate data storage for a node-based quantity.
 
int GetFieldId (const std::string &field_label) const override
 Returns the field ID for a specific label.
 
void InitializeBlocks (nimble::DataManager &data_manager, const std::shared_ptr< nimble::MaterialFactoryBase > &material_factory_base) override
 Initialize the different blocks in the mesh.
 
void UpdateStates (const nimble::DataManager &data_manager) override
 Copy time state (n+1) into time state (n)
 
nimble::Viewify< 1 > GetScalarNodeData (int field_id) override
 Get view of scalar quantity defined on nodes.
 
nimble::Viewify< 2 > GetVectorNodeData (int field_id) override
 Get view of vector quantity defined on nodes.
 
void ComputeLumpedMass (nimble::DataManager &data_manager) override
 Compute the lumped mass.
 
void InitializeExodusOutput (nimble::DataManager &data_manager) override
 
void WriteExodusOutput (nimble::DataManager &data_manager, double time_current) override
 Write output of simulation in Exodus format.
 
void ComputeInternalForce (nimble::DataManager &data_manager, double time_previous, double time_current, bool is_output_step, const nimble::Viewify< 2 > &displacement, nimble::Viewify< 2 > &force) override
 Compute the internal force.
 
void UpdateWithNewVelocity (nimble::DataManager &data_manager, double dt) override
 Update model with new velocity.
 
void UpdateWithNewDisplacement (nimble::DataManager &data_manager, double dt) override
 Update model with new displacement.
 
int AllocateElementData (int block_id, nimble::Length length, std::string label, int num_objects)
 
int AllocateIntegrationPointData (int block_id, nimble::Length length, std::string label, int num_objects, std::vector< double > initial_value=std::vector< double >())
 
std::vector< int > GetBlockIds () const
 
std::vector< std::string > GetScalarNodeDataLabels () const
 
std::vector< std::string > GetVectorNodeDataLabels () const
 
std::vector< std::string > GetSymmetricTensorIntegrationPointDataLabels (int block_id) const
 
std::vector< std::string > GetFullTensorIntegrationPointDataLabels (int block_id) const
 
HostScalarNodeView GetHostScalarNodeData (int field_id)
 
HostVectorNodeView GetHostVectorNodeData (int field_id)
 
HostSymTensorIntPtView GetHostSymTensorIntegrationPointData (int block_id, int field_id, nimble::Step step)
 
HostFullTensorIntPtView GetHostFullTensorIntegrationPointData (int block_id, int field_id, nimble::Step step)
 
HostScalarElemView GetHostScalarElementData (int block_id, int field_id)
 
HostSymTensorElemView GetHostSymTensorElementData (int block_id, int field_id)
 
HostFullTensorElemView GetHostFullTensorElementData (int block_id, int field_id)
 
DeviceScalarNodeView GetDeviceScalarNodeData (int field_id)
 
DeviceVectorNodeView GetDeviceVectorNodeData (int field_id)
 
DeviceSymTensorIntPtView GetDeviceSymTensorIntegrationPointData (int block_id, int field_id, nimble::Step step)
 
DeviceFullTensorIntPtView GetDeviceFullTensorIntegrationPointData (int block_id, int field_id, nimble::Step step)
 
DeviceScalarIntPtView GetDeviceScalarIntegrationPointData (int block_id, int field_id, nimble::Step step)
 
DeviceVectorIntPtView GetDeviceVectorIntegrationPointData (int block_id, int field_id, nimble::Step step)
 
DeviceScalarElemView GetDeviceScalarElementData (int block_id, int field_id)
 
DeviceSymTensorElemView GetDeviceSymTensorElementData (int block_id, int field_id)
 
DeviceFullTensorElemView GetDeviceFullTensorElementData (int block_id, int field_id)
 
DeviceScalarNodeGatheredView GatherScalarNodeData (int field_id, int num_elements, int num_nodes_per_element, const DeviceElementConnectivityView &elem_conn_d, DeviceScalarNodeGatheredView gathered_view_d)
 
DeviceVectorNodeGatheredView GatherVectorNodeData (int field_id, int num_elements, int num_nodes_per_element, const DeviceElementConnectivityView &elem_conn_d, DeviceVectorNodeGatheredView gathered_view_d)
 
void ScatterScalarNodeData (int field_id, int num_elements, int num_nodes_per_element, const DeviceElementConnectivityView &elem_conn_d, const DeviceScalarNodeGatheredView &gathered_view_d)
 
void ScatterVectorNodeData (int field_id, int num_elements, int num_nodes_per_element, const DeviceElementConnectivityView &elem_conn_d, const DeviceVectorNodeGatheredView &gathered_view_d)
 
void ScatterScalarNodeDataUsingKokkosScatterView (int field_id, int num_elements, int num_nodes_per_element, const DeviceElementConnectivityView &elem_conn_d, const DeviceScalarNodeGatheredView &gathered_view_d)
 
- Public Member Functions inherited from nimble::ModelDataBase
 ModelDataBase ()=default
 Constructor.
 
virtual ~ModelDataBase ()=default
 Destructor.
 
int GetFieldIdChecked (const std::string &field_label) const
 
virtual void InitializeBlocks (nimble::DataManager &data_manager, const std::shared_ptr< nimble::MaterialFactoryBase > &material_factory_base)=0
 Initialize the different blocks in the mesh.
 
nimble::Viewify< 1 > GetScalarNodeData (const std::string &label)
 Get view of scalar quantity defined on nodes.
 
nimble::Viewify< 2 > GetVectorNodeData (const std::string &label)
 Get view of vector quantity defined on nodes.
 
virtual void ComputeExternalForce (nimble::DataManager &data_manager, double time_previous, double time_current, bool is_output_step)
 Compute the external force.
 
virtual void ApplyInitialConditions (nimble::DataManager &data_manager)
 Apply initial conditions.
 
virtual void ApplyKinematicConditions (nimble::DataManager &data_manager, double time_current, double time_previous)
 Apply kinematic conditions.
 
int GetDimension () const
 Get the spatial dimension.
 
void SetCriticalTimeStep (double time_step)
 Set the critical time step.
 
void SetDimension (int dim)
 Set spatial dimension.
 
void SetReferenceCoordinates (const nimble::GenesisMesh &mesh)
 Set reference coordinates.
 
double GetCriticalTimeStep () const
 Get the critical time step.
 
const std::vector< std::string > & GetNodeDataLabelsForOutput () const
 
const std::map< int, std::vector< std::string > > & GetElementDataLabels () const
 
const std::map< int, std::vector< std::string > > & GetElementDataLabelsForOutput () const
 
const std::map< int, std::vector< std::string > > & GetDerivedElementDataLabelsForOutput () const
 

Protected Types

using Data = std::unique_ptr<FieldBase>
 

Protected Member Functions

void InitializeGatheredVectors (const nimble::GenesisMesh &mesh_)
 
void InitializeBlockData (nimble::DataManager &data_manager)
 Initialize block data for material information.
 
template<FieldType ft>
Field< ft >::View GetDeviceElementData (int block_id, int field_id)
 
template<FieldType ft>
Field< ft >::View GetDeviceIntPointData (int block_id, int field_id, nimble::Step step)
 

Protected Attributes

std::map< std::string, int > field_label_to_field_id_map_
 
std::map< int, nimble_kokkos::Block > blocks_
 Blocks.
 
std::vector< Data > host_node_data_
 
std::vector< Data > device_node_data_
 
std::map< int, int > field_id_to_host_node_data_index_
 
std::map< int, int > field_id_to_device_node_data_index_
 
std::map< int, int > block_id_to_element_data_index_
 
std::vector< std::vector< Data > > host_element_data_
 
std::vector< std::vector< Data > > device_element_data_
 
std::vector< std::map< int, int > > field_id_to_host_element_data_index_
 
std::vector< std::map< int, int > > field_id_to_device_element_data_index_
 
std::map< int, int > block_id_to_integration_point_data_index_
 
std::vector< std::vector< Data > > host_integration_point_data_step_n_
 
std::vector< std::vector< Data > > host_integration_point_data_step_np1_
 
std::vector< std::vector< Data > > device_integration_point_data_step_n_
 
std::vector< std::vector< Data > > device_integration_point_data_step_np1_
 
std::vector< std::map< int, int > > field_id_to_host_integration_point_data_index_
 
std::vector< std::map< int, int > > field_id_to_device_integration_point_data_index_
 
std::unique_ptr< nimble_kokkos::ExodusOutputManager > exodus_output_manager_
 
std::vector< nimble_kokkos::DeviceVectorNodeGatheredView > gathered_reference_coordinate_d
 
std::vector< nimble_kokkos::DeviceVectorNodeGatheredView > gathered_displacement_d
 
std::vector< nimble_kokkos::DeviceVectorNodeGatheredView > gathered_internal_force_d
 
std::vector< nimble_kokkos::DeviceVectorNodeGatheredView > gathered_contact_force_d
 
nimble_kokkos::HostVectorNodeView displacement_h_
 
nimble_kokkos::DeviceVectorNodeView displacement_d_
 
nimble_kokkos::HostVectorNodeView velocity_h_
 
nimble_kokkos::DeviceVectorNodeView velocity_d_
 
std::vector< nimble::BlockData > block_data_
 
- Protected Attributes inherited from nimble::ModelDataBase
int dim_ = 3
 Problem dimension, either 2 or 3.
 
double critical_time_step_ = 0.0
 Critical time step.
 
std::vector< std::string > output_node_component_labels_
 Output labels for node data that will be written to disk.
 
std::map< int, std::vector< std::string > > element_component_labels_
 
std::map< int, std::vector< std::string > > output_element_component_labels_
 Output labels for element data that will be written to disk.
 
std::map< int, std::vector< std::string > > derived_output_element_data_labels_
 Output labels for derived element data that will be written to disk.
 

Member Typedef Documentation

◆ Data

using nimble_kokkos::ModelData::Data = std::unique_ptr<FieldBase>
protected

Constructor & Destructor Documentation

◆ ModelData()

nimble_kokkos::ModelData::ModelData ( )
  : exodus_output_manager_(std::unique_ptr<ExodusOutputManager>(new ExodusOutputManager())) {}

◆ ~ModelData()

nimble_kokkos::ModelData::~ModelData ( )
override
{}

Member Function Documentation

◆ AllocateElementData()

int nimble_kokkos::ModelData::AllocateElementData ( int block_id,
nimble::Length length,
std::string label,
int num_objects )
{
  int field_id;
  auto it = field_label_to_field_id_map_.find(label);
  if (it == field_label_to_field_id_map_.end()) {
    field_id = field_label_to_field_id_map_.size();
    field_label_to_field_id_map_[label] = field_id;
  } else {
    field_id = it->second;
  }

  // Register the block on first use and create its per-block storage.
  if (block_id_to_element_data_index_.find(block_id) == block_id_to_element_data_index_.end()) {
    block_id_to_element_data_index_[block_id] = host_element_data_.size();
    host_element_data_.emplace_back();
    device_element_data_.emplace_back();
    field_id_to_host_element_data_index_.emplace_back();
    field_id_to_device_element_data_index_.emplace_back();
  }
  int block_index = block_id_to_element_data_index_.at(block_id);

  if (length == nimble::SCALAR) {
    device_element_data_.at(block_index).emplace_back(new Field<FieldType::DeviceScalarElem>(label, num_objects));
  } else if (length == nimble::SYMMETRIC_TENSOR) {
    device_element_data_.at(block_index).emplace_back(new Field<FieldType::DeviceSymTensorElem>(label, num_objects));
  } else if (length == nimble::FULL_TENSOR) {
    device_element_data_.at(block_index).emplace_back(new Field<FieldType::DeviceFullTensorElem>(label, num_objects));
  } else {
    throw std::invalid_argument(
        "\nError: Invalid device data length in "
        "nimble_kokkos::ModelData::AllocateElementData().\n");
  }

  field_id_to_device_element_data_index_.at(block_index)[field_id] = device_element_data_.at(block_index).size() - 1;

  FieldBase* d_field = device_element_data_.at(block_index).back().get();

  if (d_field->type() == FieldType::DeviceScalarElem) {
    auto field = dynamic_cast<Field<FieldType::DeviceScalarElem>*>(d_field);
    Field<FieldType::DeviceScalarElem>::View d_view = field->data();
    auto h_view = Kokkos::create_mirror_view(d_view);
    host_element_data_.at(block_index).emplace_back(new Field<FieldType::HostScalarElem>(h_view));
  } else if (d_field->type() == FieldType::DeviceSymTensorElem) {
    auto field = dynamic_cast<Field<FieldType::DeviceSymTensorElem>*>(d_field);
    Field<FieldType::DeviceSymTensorElem>::View d_view = field->data();
    auto h_view = Kokkos::create_mirror_view(d_view);
    host_element_data_.at(block_index).emplace_back(new Field<FieldType::HostSymTensorElem>(h_view));
  } else if (d_field->type() == FieldType::DeviceFullTensorElem) {
    auto field = dynamic_cast<Field<FieldType::DeviceFullTensorElem>*>(d_field);
    Field<FieldType::DeviceFullTensorElem>::View d_view = field->data();
    auto h_view = Kokkos::create_mirror_view(d_view);
    host_element_data_.at(block_index).emplace_back(new Field<FieldType::HostFullTensorElem>(h_view));
  } else {
    throw std::invalid_argument(
        "\nError: Invalid host data length in "
        "nimble_kokkos::ModelData::AllocateElementData().\n");
  }

  field_id_to_host_element_data_index_.at(block_index)[field_id] = host_element_data_.at(block_index).size() - 1;

  return field_id;
}
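
The snippet below is an illustrative usage sketch, not part of the NimbleSM sources: it registers a per-element symmetric-tensor field for one block and retrieves the matching host-side view, assuming a ModelData instance, a valid block id, and the element count are already available. The helper name is hypothetical.

#include <nimble_kokkos_model_data.h>

// Hypothetical helper: allocate a volume-averaged stress field for one block
// and fetch the host mirror that will later be filled for output.
void AllocateVolumeAveragedStress(nimble_kokkos::ModelData& model_data, int block_id, int num_elements_in_block)
{
  // The returned id is keyed to the label "stress" and can be reused across blocks.
  int stress_id =
      model_data.AllocateElementData(block_id, nimble::SYMMETRIC_TENSOR, "stress", num_elements_in_block);

  // Host mirror of the device allocation (see the listing above).
  auto stress_h = model_data.GetHostSymTensorElementData(block_id, stress_id);
  (void)stress_h;  // populated by the application before output
}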

◆ AllocateIntegrationPointData()

int nimble_kokkos::ModelData::AllocateIntegrationPointData ( int block_id,
nimble::Length length,
std::string label,
int num_objects,
std::vector< double > initial_value = std::vector<double>() )
{
  bool set_initial_value = false;
  if (!initial_value.empty()) set_initial_value = true;

  int field_id;
  auto it = field_label_to_field_id_map_.find(label);
  if (it == field_label_to_field_id_map_.end()) {
    field_id = field_label_to_field_id_map_.size();
    field_label_to_field_id_map_[label] = field_id;
  } else {
    field_id = it->second;
  }

  // Register the block on first use and create its per-block storage.
  if (block_id_to_integration_point_data_index_.find(block_id) ==
      block_id_to_integration_point_data_index_.end()) {
    block_id_to_integration_point_data_index_[block_id] = host_integration_point_data_step_n_.size();
    host_integration_point_data_step_n_.emplace_back();
    host_integration_point_data_step_np1_.emplace_back();
    device_integration_point_data_step_n_.emplace_back();
    device_integration_point_data_step_np1_.emplace_back();
    field_id_to_host_integration_point_data_index_.emplace_back();
    field_id_to_device_integration_point_data_index_.emplace_back();
  }
  int block_index = block_id_to_integration_point_data_index_.at(block_id);

  if (length == nimble::SCALAR) {
    device_integration_point_data_step_n_.at(block_index)
        .emplace_back(new Field<FieldType::DeviceScalarNode>(label, num_objects));
    device_integration_point_data_step_np1_.at(block_index)
        .emplace_back(new Field<FieldType::DeviceScalarNode>(label, num_objects));
  } else if (length == nimble::VECTOR) {
    device_integration_point_data_step_n_.at(block_index)
        .emplace_back(new Field<FieldType::DeviceVectorNode>(label, num_objects));
    device_integration_point_data_step_np1_.at(block_index)
        .emplace_back(new Field<FieldType::DeviceVectorNode>(label, num_objects));
  } else if (length == nimble::SYMMETRIC_TENSOR) {
    device_integration_point_data_step_n_.at(block_index)
        .emplace_back(new Field<FieldType::DeviceSymTensorIntPt>(label, num_objects));
    device_integration_point_data_step_np1_.at(block_index)
        .emplace_back(new Field<FieldType::DeviceSymTensorIntPt>(label, num_objects));
  } else if (length == nimble::FULL_TENSOR) {
    device_integration_point_data_step_n_.at(block_index)
        .emplace_back(new Field<FieldType::DeviceFullTensorIntPt>(label, num_objects));
    device_integration_point_data_step_np1_.at(block_index)
        .emplace_back(new Field<FieldType::DeviceFullTensorIntPt>(label, num_objects));
  } else {
    throw std::invalid_argument(
        "\nError: Invalid device data length in "
        "nimble_kokkos::ModelData::AllocateIntegrationPointData().\n");
  }

  field_id_to_device_integration_point_data_index_.at(block_index)[field_id] =
      device_integration_point_data_step_n_.at(block_index).size() - 1;

  FieldBase* d_field_step_n = device_integration_point_data_step_n_.at(block_index).back().get();
  FieldBase* d_field_step_np1 = device_integration_point_data_step_np1_.at(block_index).back().get();

  if (d_field_step_n->type() == FieldType::DeviceScalarNode) {
    auto field_step_n = dynamic_cast<Field<FieldType::DeviceScalarNode>*>(d_field_step_n);
    Field<FieldType::DeviceScalarNode>::View d_view_step_n = field_step_n->data();
    auto h_view_step_n = Kokkos::create_mirror_view(d_view_step_n);
    host_integration_point_data_step_n_.at(block_index)
        .emplace_back(new Field<FieldType::HostScalarNode>(h_view_step_n));

    auto field_step_np1 = dynamic_cast<Field<FieldType::DeviceScalarNode>*>(d_field_step_np1);
    Field<FieldType::DeviceScalarNode>::View d_view_step_np1 = field_step_np1->data();
    auto h_view_step_np1 = Kokkos::create_mirror_view(d_view_step_np1);
    host_integration_point_data_step_np1_.at(block_index)
        .emplace_back(new Field<FieldType::HostScalarNode>(h_view_step_np1));

    if (set_initial_value) {
      int num_elem = h_view_step_n.extent(0);
      for (int i_elem = 0; i_elem < num_elem; ++i_elem) {
        h_view_step_n(i_elem) = initial_value.at(0);
        h_view_step_np1(i_elem) = initial_value.at(0);
      }
      Kokkos::deep_copy(d_view_step_n, h_view_step_n);
      Kokkos::deep_copy(d_view_step_np1, h_view_step_np1);
    }
  } else if (d_field_step_n->type() == FieldType::DeviceVectorNode) {
    auto field_step_n = dynamic_cast<Field<FieldType::DeviceVectorNode>*>(d_field_step_n);
    Field<FieldType::DeviceVectorNode>::View d_view_step_n = field_step_n->data();
    auto h_view_step_n = Kokkos::create_mirror_view(d_view_step_n);
    host_integration_point_data_step_n_.at(block_index)
        .emplace_back(new Field<FieldType::HostVectorNode>(h_view_step_n));

    auto field_step_np1 = dynamic_cast<Field<FieldType::DeviceVectorNode>*>(d_field_step_np1);
    Field<FieldType::DeviceVectorNode>::View d_view_step_np1 = field_step_np1->data();
    auto h_view_step_np1 = Kokkos::create_mirror_view(d_view_step_np1);
    host_integration_point_data_step_np1_.at(block_index)
        .emplace_back(new Field<FieldType::HostVectorNode>(h_view_step_np1));

    if (set_initial_value) {
      int num_elem = h_view_step_n.extent(0);
      int num_entries = 3;
      for (int i_elem = 0; i_elem < num_elem; ++i_elem) {
        for (int i_entry = 0; i_entry < num_entries; ++i_entry) {
          h_view_step_n(i_elem, i_entry) = initial_value.at(i_entry);
          h_view_step_np1(i_elem, i_entry) = initial_value.at(i_entry);
        }
      }
      Kokkos::deep_copy(d_view_step_n, h_view_step_n);
      Kokkos::deep_copy(d_view_step_np1, h_view_step_np1);
    }
  } else if (d_field_step_n->type() == FieldType::DeviceSymTensorIntPt) {
    auto field_step_n = dynamic_cast<Field<FieldType::DeviceSymTensorIntPt>*>(d_field_step_n);
    Field<FieldType::DeviceSymTensorIntPt>::View d_view_step_n = field_step_n->data();
    auto h_view_step_n = Kokkos::create_mirror_view(d_view_step_n);
    host_integration_point_data_step_n_.at(block_index)
        .emplace_back(new Field<FieldType::HostSymTensorIntPt>(h_view_step_n));

    auto field_step_np1 = dynamic_cast<Field<FieldType::DeviceSymTensorIntPt>*>(d_field_step_np1);
    Field<FieldType::DeviceSymTensorIntPt>::View d_view_step_np1 = field_step_np1->data();
    auto h_view_step_np1 = Kokkos::create_mirror_view(d_view_step_np1);
    host_integration_point_data_step_np1_.at(block_index)
        .emplace_back(new Field<FieldType::HostSymTensorIntPt>(h_view_step_np1));

    if (set_initial_value) {
      int num_elem = h_view_step_n.extent(0);
      int num_int_pt = h_view_step_n.extent(1);
      int num_entries = 6;
      for (int i_elem = 0; i_elem < num_elem; ++i_elem) {
        for (int i_int_pt = 0; i_int_pt < num_int_pt; ++i_int_pt) {
          for (int i_entry = 0; i_entry < num_entries; ++i_entry) {
            h_view_step_n(i_elem, i_int_pt, i_entry) = initial_value.at(i_entry);
            h_view_step_np1(i_elem, i_int_pt, i_entry) = initial_value.at(i_entry);
          }
        }
      }
      Kokkos::deep_copy(d_view_step_n, h_view_step_n);
      Kokkos::deep_copy(d_view_step_np1, h_view_step_np1);
    }
  } else if (d_field_step_n->type() == FieldType::DeviceFullTensorIntPt) {
    auto field_step_n = dynamic_cast<Field<FieldType::DeviceFullTensorIntPt>*>(d_field_step_n);
    Field<FieldType::DeviceFullTensorIntPt>::View d_view_step_n = field_step_n->data();
    auto h_view_step_n = Kokkos::create_mirror_view(d_view_step_n);
    host_integration_point_data_step_n_.at(block_index)
        .emplace_back(new Field<FieldType::HostFullTensorIntPt>(h_view_step_n));

    auto field_step_np1 = dynamic_cast<Field<FieldType::DeviceFullTensorIntPt>*>(d_field_step_np1);
    Field<FieldType::DeviceFullTensorIntPt>::View d_view_step_np1 = field_step_np1->data();
    auto h_view_step_np1 = Kokkos::create_mirror_view(d_view_step_np1);
    host_integration_point_data_step_np1_.at(block_index)
        .emplace_back(new Field<FieldType::HostFullTensorIntPt>(h_view_step_np1));

    if (set_initial_value) {
      int num_elem = h_view_step_n.extent(0);
      int num_int_pt = h_view_step_n.extent(1);
      int num_entries = 9;
      for (int i_elem = 0; i_elem < num_elem; ++i_elem) {
        for (int i_int_pt = 0; i_int_pt < num_int_pt; ++i_int_pt) {
          for (int i_entry = 0; i_entry < num_entries; ++i_entry) {
            h_view_step_n(i_elem, i_int_pt, i_entry) = initial_value.at(i_entry);
            h_view_step_np1(i_elem, i_int_pt, i_entry) = initial_value.at(i_entry);
          }
        }
      }
      Kokkos::deep_copy(d_view_step_n, h_view_step_n);
      Kokkos::deep_copy(d_view_step_np1, h_view_step_np1);
    }
  } else {
    throw std::invalid_argument(
        "\nError: Invalid host data length in "
        "nimble_kokkos::ModelData::AllocateElementData().\n");
  }

  field_id_to_host_integration_point_data_index_.at(block_index)[field_id] =
      host_integration_point_data_step_n_.at(block_index).size() - 1;

  return field_id;
}
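
For context, a short sketch (not part of the NimbleSM sources; the helper name is illustrative and a ModelData instance is assumed) that allocates a full-tensor integration-point field initialized to the identity, mirroring what InitializeBlocks() does for "deformation_gradient":

#include <nimble_kokkos_model_data.h>

#include <vector>

// Illustrative helper: the initial_value vector seeds both the step (n) and step (n+1) copies.
int AllocateDeformationGradient(nimble_kokkos::ModelData& model_data, int block_id, int num_elements_in_block)
{
  std::vector<double> initial_value(9, 0.0);
  initial_value[0] = initial_value[1] = initial_value[2] = 1.0;  // identity tensor (diagonal entries)
  return model_data.AllocateIntegrationPointData(
      block_id, nimble::FULL_TENSOR, "deformation_gradient", num_elements_in_block, initial_value);
}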

◆ AllocateNodeData()

int nimble_kokkos::ModelData::AllocateNodeData ( nimble::Length length,
std::string label,
int num_objects )
override virtual

Allocate data storage for a node-based quantity.

Parameters
length
label
num_objects
Returns
Field ID for the data allocated

Implements nimble::ModelDataBase.

{
  int field_id;
  auto it = field_label_to_field_id_map_.find(label);
  if (it == field_label_to_field_id_map_.end()) {
    field_id = field_label_to_field_id_map_.size();
    field_label_to_field_id_map_[label] = field_id;
  } else {
    field_id = it->second;
  }

  if (length == nimble::SCALAR) {
    // device_node_data_ is of type std::vector< std::unique_ptr< FieldBase > >
    device_node_data_.emplace_back(new Field<FieldType::DeviceScalarNode>(label, num_objects));
  } else if (length == nimble::VECTOR) {
    device_node_data_.emplace_back(new Field<FieldType::DeviceVectorNode>(label, num_objects));
  } else {
    throw std::invalid_argument(
        "\nError: Invalid device data length in "
        "nimble_kokkos::ModelData::AllocateNodeData().\n");
  }

  field_id_to_device_node_data_index_[field_id] = device_node_data_.size() - 1;

  FieldBase* d_field = device_node_data_.back().get();

  if (d_field->type() == FieldType::DeviceScalarNode) {
    auto field = dynamic_cast<Field<FieldType::DeviceScalarNode>*>(d_field);
    Field<FieldType::DeviceScalarNode>::View d_view = field->data();
    auto h_view = Kokkos::create_mirror_view(d_view);
    host_node_data_.emplace_back(new Field<FieldType::HostScalarNode>(h_view));
  } else if (d_field->type() == FieldType::DeviceVectorNode) {
    auto field = dynamic_cast<Field<FieldType::DeviceVectorNode>*>(d_field);
    Field<FieldType::DeviceVectorNode>::View d_view = field->data();
    auto h_view = Kokkos::create_mirror_view(d_view);
    host_node_data_.emplace_back(new Field<FieldType::HostVectorNode>(h_view));
  } else {
    throw std::invalid_argument(
        "\nError: Invalid host data length in "
        "nimble_kokkos::ModelData::AllocateNodeData().\n");
  }

  field_id_to_host_node_data_index_[field_id] = host_node_data_.size() - 1;

  return field_id;
}
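
A minimal sketch of typical nodal allocations (labels follow those used elsewhere on this page; the helper name is illustrative and not part of the NimbleSM sources):

#include <nimble_kokkos_model_data.h>

// Illustrative helper: allocate nodal storage and fetch the matching host/device views.
void AllocateNodalFields(nimble_kokkos::ModelData& model_data, int num_nodes)
{
  int lumped_mass_id  = model_data.AllocateNodeData(nimble::SCALAR, "lumped_mass", num_nodes);
  int displacement_id = model_data.AllocateNodeData(nimble::VECTOR, "displacement", num_nodes);

  auto lumped_mass_h  = model_data.GetHostScalarNodeData(lumped_mass_id);    // host mirror
  auto displacement_d = model_data.GetDeviceVectorNodeData(displacement_id); // device view
  (void)lumped_mass_h;
  (void)displacement_d;
}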

◆ ComputeInternalForce()

void nimble_kokkos::ModelData::ComputeInternalForce ( nimble::DataManager & data_manager,
double time_previous,
double time_current,
bool is_output_step,
const nimble::Viewify< 2 > & displacement,
nimble::Viewify< 2 > & force )
override virtual

Compute the internal force.

Parameters
[in] data_manager
[in] time_previous
[in] time_current
[in] is_output_step
[in] displacement
[out] internal_force Output for internal force

Reimplemented from nimble::ModelDataBase.

{
  //
  // This version does not use the Viewify objects displacement and force
  // It may need to be updated for quasi-static simulations
  //

  const auto& mesh = data_manager.GetMesh();
  const auto& field_ids = data_manager.GetFieldIDs();

  auto block_material_interface_factory = data_manager.GetBlockMaterialInterfaceFactory();

  nimble_kokkos::DeviceVectorNodeView internal_force_h = GetHostVectorNodeData(field_ids.internal_force);
  nimble_kokkos::DeviceVectorNodeView internal_force_d = GetDeviceVectorNodeData(field_ids.internal_force);
  Kokkos::deep_copy(internal_force_d, (double)(0.0));

  // Compute element-level kinematics
  constexpr int mpi_vector_dim = 3;

  int block_index = 0;
  for (auto& block_it : blocks_) {
    //
    const int block_id = block_it.first;
    const int num_elem_in_block = mesh.GetNumElementsInBlock(block_id);
    const int num_nodes_per_elem = mesh.GetNumNodesPerElement(block_id);

    nimble_kokkos::Block& block = block_it.second;
    nimble::Element* element_d = block.GetDeviceElement();

    auto elem_conn_d = block.GetDeviceElementConnectivityView();
    auto gathered_reference_coordinate_block_d = gathered_reference_coordinate_d.at(block_index);
    auto gathered_displacement_block_d = gathered_displacement_d.at(block_index);
    auto gathered_internal_force_block_d = gathered_internal_force_d.at(block_index);

    GatherVectorNodeData(
        field_ids.reference_coordinates,
        num_elem_in_block,
        num_nodes_per_elem,
        elem_conn_d,
        gathered_reference_coordinate_block_d);

    GatherVectorNodeData(
        field_ids.displacement, num_elem_in_block, num_nodes_per_elem, elem_conn_d, gathered_displacement_block_d);

    auto deformation_gradient_step_np1_d =
        GetDeviceFullTensorIntegrationPointData(block_id, field_ids.deformation_gradient, nimble::STEP_NP1);

    //
    // COMPUTE DEFORMATION GRADIENTS
    //
    Kokkos::parallel_for(
        "Deformation Gradient", num_elem_in_block, KOKKOS_LAMBDA(const int i_elem) {
          nimble_kokkos::DeviceVectorNodeGatheredSubView element_reference_coordinate_d =
              Kokkos::subview(gathered_reference_coordinate_block_d, i_elem, Kokkos::ALL(), Kokkos::ALL());
          nimble_kokkos::DeviceVectorNodeGatheredSubView element_displacement_d =
              Kokkos::subview(gathered_displacement_block_d, i_elem, Kokkos::ALL(), Kokkos::ALL());
          nimble_kokkos::DeviceFullTensorIntPtSubView element_deformation_gradient_step_np1_d =
              Kokkos::subview(deformation_gradient_step_np1_d, i_elem, Kokkos::ALL(), Kokkos::ALL());
          element_d->ComputeDeformationGradients(
              element_reference_coordinate_d, element_displacement_d, element_deformation_gradient_step_np1_d);
        });

    //
    // Insert the update for the state variables
    //

    block_index += 1;
  }

  if (block_material_interface_factory) {
    auto block_material_interface =
        block_material_interface_factory->create(time_previous, time_current, field_ids, block_data_, this);
    block_material_interface->ComputeStress();
  }

  //
  // Stress divergence
  //
  block_index = 0;
  for (auto& block_it : blocks_) {
    const int block_id = block_it.first;
    const int num_elem_in_block = mesh.GetNumElementsInBlock(block_id);
    const int num_nodes_per_elem = mesh.GetNumNodesPerElement(block_id);

    nimble_kokkos::Block& block = block_it.second;
    nimble::Element* element_d = block.GetDeviceElement();

    auto elem_conn_d = block.GetDeviceElementConnectivityView();
    nimble_kokkos::DeviceVectorNodeGatheredView gathered_reference_coordinate_block_d =
        gathered_reference_coordinate_d.at(block_index);
    nimble_kokkos::DeviceVectorNodeGatheredView gathered_displacement_block_d = gathered_displacement_d.at(block_index);
    nimble_kokkos::DeviceVectorNodeGatheredView gathered_internal_force_block_d =
        gathered_internal_force_d.at(block_index);

    nimble_kokkos::DeviceSymTensorIntPtView stress_step_np1_d =
        GetDeviceSymTensorIntegrationPointData(block_id, field_ids.stress, nimble::STEP_NP1);

    // COMPUTE NODAL FORCES
    Kokkos::parallel_for(
        "Force", num_elem_in_block, KOKKOS_LAMBDA(const int i_elem) {
          nimble_kokkos::DeviceVectorNodeGatheredSubView element_reference_coordinate_d =
              Kokkos::subview(gathered_reference_coordinate_block_d, i_elem, Kokkos::ALL, Kokkos::ALL);
          nimble_kokkos::DeviceVectorNodeGatheredSubView element_displacement_d =
              Kokkos::subview(gathered_displacement_block_d, i_elem, Kokkos::ALL, Kokkos::ALL);
          nimble_kokkos::DeviceSymTensorIntPtSubView element_stress_step_np1_d =
              Kokkos::subview(stress_step_np1_d, i_elem, Kokkos::ALL, Kokkos::ALL);
          nimble_kokkos::DeviceVectorNodeGatheredSubView element_internal_force_d =
              Kokkos::subview(gathered_internal_force_block_d, i_elem, Kokkos::ALL, Kokkos::ALL);
          element_d->ComputeNodalForces(
              element_reference_coordinate_d,
              element_displacement_d,
              element_stress_step_np1_d,
              element_internal_force_d);
        });

    ScatterVectorNodeData(
        field_ids.internal_force, num_elem_in_block, num_nodes_per_elem, elem_conn_d, gathered_internal_force_block_d);

    block_index += 1;
  }  // loop over blocks

  Kokkos::deep_copy(internal_force_h, internal_force_d);

  auto myVectorCommunicator = data_manager.GetVectorCommunicator();
  myVectorCommunicator->VectorReduction(mpi_vector_dim, internal_force_h);
}
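
A minimal sketch of how a caller might drive this method during an explicit step; it is not part of the NimbleSM sources, it assumes the "displacement" and "internal_force" fields have already been allocated, and the helper name is illustrative:

#include <nimble_kokkos_model_data.h>

// Illustrative helper: look up the nodal fields by label, then compute the internal force.
void ExplicitInternalForce(nimble_kokkos::ModelData& model_data, nimble::DataManager& data_manager,
                           double time_previous, double time_current, bool is_output_step)
{
  int disp_id  = model_data.GetFieldId("displacement");
  int force_id = model_data.GetFieldId("internal_force");

  nimble::Viewify<2> displacement   = model_data.GetVectorNodeData(disp_id);
  nimble::Viewify<2> internal_force = model_data.GetVectorNodeData(force_id);

  model_data.ComputeInternalForce(
      data_manager, time_previous, time_current, is_output_step, displacement, internal_force);
}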

◆ ComputeLumpedMass()

void nimble_kokkos::ModelData::ComputeLumpedMass ( nimble::DataManager & data_manager)
override virtual

Compute the lumped mass.

Parameters
data_manager Reference to the data manager

Implements nimble::ModelDataBase.

{
  const auto& mesh_ = data_manager.GetMesh();
  const auto& parser_ = data_manager.GetParser();
  auto& field_ids_ = data_manager.GetFieldIDs();
  auto vector_communicator = data_manager.GetVectorCommunicator();

  int num_nodes = static_cast<int>(mesh_.GetNumNodes());
  int num_blocks = static_cast<int>(mesh_.GetNumBlocks());

  std::vector<nimble_kokkos::DeviceScalarNodeGatheredView> gathered_lumped_mass_d(
      num_blocks, nimble_kokkos::DeviceScalarNodeGatheredView("gathered_lumped_mass", 1));
  int block_index = 0;
  for (const auto& block_it : blocks_) {
    int block_id = block_it.first;
    int num_elem_in_block = mesh_.GetNumElementsInBlock(block_id);
    Kokkos::resize(gathered_lumped_mass_d.at(block_index), num_elem_in_block);
    block_index += 1;
  }

  auto lumped_mass_h = GetHostScalarNodeData(field_ids_.lumped_mass);
  Kokkos::deep_copy(lumped_mass_h, (double)(0.0));

  auto lumped_mass_d = GetDeviceScalarNodeData(field_ids_.lumped_mass);
  Kokkos::deep_copy(lumped_mass_d, (double)(0.0));

  auto reference_coordinate = GetVectorNodeData("reference_coordinate");
  auto displacement = GetVectorNodeData("displacement");

  critical_time_step_ = std::numeric_limits<double>::max();

  block_index = 0;
  for (auto& block_it : blocks_) {
    int block_id = block_it.first;
    nimble_kokkos::Block& block = block_it.second;
    nimble::Element* element_d = block.GetDeviceElement();
    double density = block.GetDensity();
    int num_elem_in_block = mesh_.GetNumElementsInBlock(block_id);
    int num_nodes_per_elem = mesh_.GetNumNodesPerElement(block_id);
    int elem_conn_length = num_elem_in_block * num_nodes_per_elem;
    int const* elem_conn = mesh_.GetConnectivity(block_id);

    nimble_kokkos::HostElementConnectivityView elem_conn_h("element_connectivity_h", elem_conn_length);
    for (int i = 0; i < elem_conn_length; i++) { elem_conn_h(i) = elem_conn[i]; }
    auto&& elem_conn_d = block.GetDeviceElementConnectivityView();
    Kokkos::resize(elem_conn_d, elem_conn_length);
    Kokkos::deep_copy(elem_conn_d, elem_conn_h);

    auto gathered_reference_coordinate_block_d = gathered_reference_coordinate_d.at(block_index);
    auto gathered_lumped_mass_block_d = gathered_lumped_mass_d.at(block_index);

    GatherVectorNodeData(
        field_ids_.reference_coordinates,
        num_elem_in_block,
        num_nodes_per_elem,
        elem_conn_d,
        gathered_reference_coordinate_block_d);

    // COMPUTE LUMPED MASS
    Kokkos::parallel_for(
        "Lumped Mass", num_elem_in_block, KOKKOS_LAMBDA(const int i_elem) {
          auto element_reference_coordinate_d =
              Kokkos::subview(gathered_reference_coordinate_block_d, i_elem, Kokkos::ALL, Kokkos::ALL);
          auto element_lumped_mass_d = Kokkos::subview(gathered_lumped_mass_block_d, i_elem, Kokkos::ALL);
          element_d->ComputeLumpedMass(density, element_reference_coordinate_d, element_lumped_mass_d);
        });

    // SCATTER TO NODE DATA
    ScatterScalarNodeData(
        field_ids_.lumped_mass, num_elem_in_block, num_nodes_per_elem, elem_conn_d, gathered_lumped_mass_block_d);

    double block_critical_time_step =
        block.ComputeCriticalTimeStep(reference_coordinate, displacement, num_elem_in_block, elem_conn);
    if (block_critical_time_step < critical_time_step_) { critical_time_step_ = block_critical_time_step; }

    block_index += 1;
  }
  Kokkos::deep_copy(lumped_mass_h, lumped_mass_d);

  // MPI vector reduction on lumped mass
  std::vector<double> mpi_scalar_buffer(num_nodes);
  for (unsigned int i = 0; i < num_nodes; i++) { mpi_scalar_buffer[i] = lumped_mass_h(i); }
  vector_communicator->VectorReduction(1, mpi_scalar_buffer.data());
  for (int i = 0; i < num_nodes; i++) { lumped_mass_h(i) = mpi_scalar_buffer[i]; }
}
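
A short usage sketch (not part of the NimbleSM sources): lump the mass once after initialization and derive a stable explicit time step from the critical value the call records. The helper name and the 0.9 safety factor are arbitrary illustrations.

#include <nimble_kokkos_model_data.h>

// Illustrative helper: ComputeLumpedMass() also updates the critical time step.
double LumpMassAndPickTimeStep(nimble_kokkos::ModelData& model_data, nimble::DataManager& data_manager)
{
  model_data.ComputeLumpedMass(data_manager);
  return 0.9 * model_data.GetCriticalTimeStep();  // safety factor is an arbitrary example
}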

◆ GatherScalarNodeData()

DeviceScalarNodeGatheredView nimble_kokkos::ModelData::GatherScalarNodeData ( int field_id,
int num_elements,
int num_nodes_per_element,
const DeviceElementConnectivityView & elem_conn_d,
DeviceScalarNodeGatheredView gathered_view_d )
{
  int index = field_id_to_device_node_data_index_.at(field_id);
  FieldBase* base_field_ptr = device_node_data_.at(index).get();
  auto derived_field_ptr = dynamic_cast<Field<FieldType::DeviceScalarNode>*>(base_field_ptr);
  auto data = derived_field_ptr->data();
  Kokkos::parallel_for(
      "GatherScalarNodeData", num_elements, KOKKOS_LAMBDA(const int i_elem) {
        for (int i_node = 0; i_node < num_nodes_per_element; i_node++) {
          gathered_view_d(i_elem, i_node) = data(elem_conn_d(num_nodes_per_element * i_elem + i_node));
        }
      });
  return gathered_view_d;
}

◆ GatherVectorNodeData()

DeviceVectorNodeGatheredView nimble_kokkos::ModelData::GatherVectorNodeData ( int field_id,
int num_elements,
int num_nodes_per_element,
const DeviceElementConnectivityView & elem_conn_d,
DeviceVectorNodeGatheredView gathered_view_d )
{
  int index = field_id_to_device_node_data_index_.at(field_id);
  FieldBase* base_field_ptr = device_node_data_.at(index).get();
  auto derived_field_ptr = dynamic_cast<Field<FieldType::DeviceVectorNode>*>(base_field_ptr);
  auto data = derived_field_ptr->data();
  Kokkos::parallel_for(
      "GatherVectorNodeData", num_elements, KOKKOS_LAMBDA(const int i_elem) {
        for (int i_node = 0; i_node < num_nodes_per_element; i_node++) {
          int node_index = elem_conn_d(num_nodes_per_element * i_elem + i_node);
          for (int i_coord = 0; i_coord < 3; i_coord++) {
            gathered_view_d(i_elem, i_node, i_coord) = data(node_index, i_coord);
          }
        }
      });
  return gathered_view_d;
}
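
For orientation, a sketch of the gather/operate/scatter idiom used by ComputeLumpedMass() and ComputeInternalForce(); it is not part of the NimbleSM sources, the connectivity and gathered views are assumed to be sized by the caller, and the helper name is illustrative:

#include <nimble_kokkos_model_data.h>

// Illustrative helper: gather nodal vectors into per-element storage, run element kernels,
// then scatter-accumulate the per-element results back to the nodal field.
void GatherOperateScatter(nimble_kokkos::ModelData& model_data, int field_id,
                          int num_elements, int num_nodes_per_element,
                          const nimble_kokkos::DeviceElementConnectivityView& elem_conn_d,
                          nimble_kokkos::DeviceVectorNodeGatheredView gathered_d)
{
  gathered_d = model_data.GatherVectorNodeData(
      field_id, num_elements, num_nodes_per_element, elem_conn_d, gathered_d);

  // ... element-level kernels operate on gathered_d here ...

  model_data.ScatterVectorNodeData(
      field_id, num_elements, num_nodes_per_element, elem_conn_d, gathered_d);
}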

◆ GetBlockIds()

std::vector< int > nimble_kokkos::ModelData::GetBlockIds ( ) const
{
  std::vector<int> block_ids;
  for (auto const& entry : block_id_to_integration_point_data_index_) { block_ids.push_back(entry.first); }
  return block_ids;
}

◆ GetDeviceElementData()

template<FieldType ft>
Field< ft >::View nimble_kokkos::ModelData::GetDeviceElementData ( int block_id,
int field_id )
protected
{
  int block_index = block_id_to_element_data_index_.at(block_id);
  int data_index = field_id_to_device_element_data_index_.at(block_index).at(field_id);
  auto base_field_ptr = device_element_data_.at(block_index).at(data_index).get();
  auto derived_field_ptr = dynamic_cast<Field<ft>*>(base_field_ptr);
  return derived_field_ptr->data();
}

◆ GetDeviceFullTensorElementData()

DeviceFullTensorElemView nimble_kokkos::ModelData::GetDeviceFullTensorElementData ( int block_id,
int field_id )
{
  return GetDeviceElementData<FieldType::DeviceFullTensorElem>(block_id, field_id);
}

◆ GetDeviceFullTensorIntegrationPointData()

DeviceFullTensorIntPtView nimble_kokkos::ModelData::GetDeviceFullTensorIntegrationPointData ( int block_id,
int field_id,
nimble::Step step )
{
  return GetDeviceIntPointData<FieldType::DeviceFullTensorIntPt>(block_id, field_id, step);
}

◆ GetDeviceIntPointData()

template<FieldType ft>
Field< ft >::View nimble_kokkos::ModelData::GetDeviceIntPointData ( int block_id,
int field_id,
nimble::Step step )
protected
{
  int block_index = block_id_to_integration_point_data_index_.at(block_id);
  int data_index = field_id_to_device_integration_point_data_index_.at(block_index).at(field_id);
  FieldBase* base_field_ptr = nullptr;
  if (step == nimble::STEP_N) {
    base_field_ptr = device_integration_point_data_step_n_.at(block_index).at(data_index).get();
  } else if (step == nimble::STEP_NP1) {
    base_field_ptr = device_integration_point_data_step_np1_.at(block_index).at(data_index).get();
  }
  auto derived_field_ptr = dynamic_cast<Field<ft>*>(base_field_ptr);
  return derived_field_ptr->data();
}

◆ GetDeviceScalarElementData()

DeviceScalarElemView nimble_kokkos::ModelData::GetDeviceScalarElementData ( int block_id,
int field_id )
{
  return GetDeviceElementData<FieldType::DeviceScalarElem>(block_id, field_id);
}

◆ GetDeviceScalarIntegrationPointData()

DeviceScalarIntPtView nimble_kokkos::ModelData::GetDeviceScalarIntegrationPointData ( int block_id,
int field_id,
nimble::Step step )
{
  return GetDeviceIntPointData<FieldType::DeviceScalarIntPt>(block_id, field_id, step);
}

◆ GetDeviceScalarNodeData()

DeviceScalarNodeView nimble_kokkos::ModelData::GetDeviceScalarNodeData ( int field_id)
{
  int index = field_id_to_device_node_data_index_.at(field_id);
  FieldBase* base_field_ptr = device_node_data_.at(index).get();
  auto derived_field_ptr = dynamic_cast<Field<FieldType::DeviceScalarNode>*>(base_field_ptr);
  return derived_field_ptr->data();
}

◆ GetDeviceSymTensorElementData()

DeviceSymTensorElemView nimble_kokkos::ModelData::GetDeviceSymTensorElementData ( int block_id,
int field_id )
{
  return GetDeviceElementData<FieldType::DeviceSymTensorElem>(block_id, field_id);
}

◆ GetDeviceSymTensorIntegrationPointData()

DeviceSymTensorIntPtView nimble_kokkos::ModelData::GetDeviceSymTensorIntegrationPointData ( int block_id,
int field_id,
nimble::Step step )
{
  return GetDeviceIntPointData<FieldType::DeviceSymTensorIntPt>(block_id, field_id, step);
}

◆ GetDeviceVectorIntegrationPointData()

DeviceVectorIntPtView nimble_kokkos::ModelData::GetDeviceVectorIntegrationPointData ( int block_id,
int field_id,
nimble::Step step )
{
  return GetDeviceIntPointData<FieldType::DeviceVectorIntPt>(block_id, field_id, step);
}

◆ GetDeviceVectorNodeData()

DeviceVectorNodeView nimble_kokkos::ModelData::GetDeviceVectorNodeData ( int field_id)
{
  int index = field_id_to_device_node_data_index_.at(field_id);
  FieldBase* base_field_ptr = device_node_data_.at(index).get();
  auto derived_field_ptr = dynamic_cast<Field<FieldType::DeviceVectorNode>*>(base_field_ptr);
  return derived_field_ptr->data();
}

◆ GetFieldId()

int nimble_kokkos::ModelData::GetFieldId ( const std::string & field_label) const
inline override virtual

Returns the field ID for a specific label.

Parameters
field_label Label for a stored quantity
Returns
Field ID to identify the data storage

Implements nimble::ModelDataBase.

{
  return field_label_to_field_id_map_.at(field_label);
}
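
A minimal sketch (not part of the NimbleSM sources): resolve a label to its id once and reuse the id with the accessor methods; since the lookup uses std::map::at(), an unknown label throws std::out_of_range. The helper name is illustrative.

#include <nimble_kokkos_model_data.h>

// Illustrative helper: id-based access avoids repeated string lookups in hot loops.
void ReadDisplacement(nimble_kokkos::ModelData& model_data)
{
  int displacement_id = model_data.GetFieldId("displacement");
  auto displacement_h = model_data.GetHostVectorNodeData(displacement_id);
  (void)displacement_h;
}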

◆ GetFullTensorIntegrationPointDataLabels()

std::vector< std::string > nimble_kokkos::ModelData::GetFullTensorIntegrationPointDataLabels ( int block_id) const
{
  int block_index = block_id_to_integration_point_data_index_.at(block_id);
  std::vector<std::string> ipt_data_labels;
  for (auto const& entry : field_label_to_field_id_map_) {
    std::string const& field_label = entry.first;
    int field_id = entry.second;
    for (auto const& ipt_entry : field_id_to_device_integration_point_data_index_.at(block_index)) {
      int ipt_data_field_id = ipt_entry.first;
      if (field_id == ipt_data_field_id) {
        int ipt_data_index = ipt_entry.second;
        if (device_integration_point_data_step_np1_.at(block_index).at(ipt_data_index)->type() ==
            FieldType::DeviceFullTensorIntPt) {
          if (std::find(ipt_data_labels.begin(), ipt_data_labels.end(), field_label) == ipt_data_labels.end()) {
            ipt_data_labels.push_back(field_label);
          }
        }
      }
    }
  }
  return ipt_data_labels;
}

◆ GetHostFullTensorElementData()

HostFullTensorElemView nimble_kokkos::ModelData::GetHostFullTensorElementData ( int block_id,
int field_id )
{
  int block_index = block_id_to_element_data_index_.at(block_id);
  int data_index = field_id_to_host_element_data_index_.at(block_index).at(field_id);
  FieldBase* base_field_ptr = nullptr;
  base_field_ptr = host_element_data_.at(block_index).at(data_index).get();
  auto derived_field_ptr = dynamic_cast<Field<FieldType::HostFullTensorElem>*>(base_field_ptr);
  return derived_field_ptr->data();
}

◆ GetHostFullTensorIntegrationPointData()

HostFullTensorIntPtView nimble_kokkos::ModelData::GetHostFullTensorIntegrationPointData ( int block_id,
int field_id,
nimble::Step step )
{
  int block_index = block_id_to_integration_point_data_index_.at(block_id);
  int data_index = field_id_to_host_integration_point_data_index_.at(block_index).at(field_id);
  FieldBase* base_field_ptr = nullptr;
  if (step == nimble::STEP_N) {
    base_field_ptr = host_integration_point_data_step_n_.at(block_index).at(data_index).get();
  } else if (step == nimble::STEP_NP1) {
    base_field_ptr = host_integration_point_data_step_np1_.at(block_index).at(data_index).get();
  }
  auto derived_field_ptr = dynamic_cast<Field<FieldType::HostFullTensorIntPt>*>(base_field_ptr);
  return derived_field_ptr->data();
}

◆ GetHostScalarElementData()

HostScalarElemView nimble_kokkos::ModelData::GetHostScalarElementData ( int block_id,
int field_id )
{
  int block_index = block_id_to_element_data_index_.at(block_id);
  int data_index = field_id_to_host_element_data_index_.at(block_index).at(field_id);
  FieldBase* base_field_ptr = nullptr;
  base_field_ptr = host_element_data_.at(block_index).at(data_index).get();
  auto derived_field_ptr = dynamic_cast<Field<FieldType::HostScalarElem>*>(base_field_ptr);
  return derived_field_ptr->data();
}

◆ GetHostScalarNodeData()

HostScalarNodeView nimble_kokkos::ModelData::GetHostScalarNodeData ( int field_id)
{
  int index = field_id_to_host_node_data_index_.at(field_id);
  FieldBase* base_field_ptr = host_node_data_.at(index).get();
  auto derived_field_ptr = dynamic_cast<Field<FieldType::HostScalarNode>*>(base_field_ptr);
  return derived_field_ptr->data();
}

◆ GetHostSymTensorElementData()

HostSymTensorElemView nimble_kokkos::ModelData::GetHostSymTensorElementData ( int block_id,
int field_id )
{
  int block_index = block_id_to_element_data_index_.at(block_id);
  int data_index = field_id_to_host_element_data_index_.at(block_index).at(field_id);
  FieldBase* base_field_ptr = nullptr;
  base_field_ptr = host_element_data_.at(block_index).at(data_index).get();
  auto derived_field_ptr = dynamic_cast<Field<FieldType::HostSymTensorElem>*>(base_field_ptr);
  return derived_field_ptr->data();
}

◆ GetHostSymTensorIntegrationPointData()

HostSymTensorIntPtView nimble_kokkos::ModelData::GetHostSymTensorIntegrationPointData ( int block_id,
int field_id,
nimble::Step step )
{
  int block_index = block_id_to_integration_point_data_index_.at(block_id);
  int data_index = field_id_to_host_integration_point_data_index_.at(block_index).at(field_id);
  FieldBase* base_field_ptr = nullptr;
  if (step == nimble::STEP_N) {
    base_field_ptr = host_integration_point_data_step_n_.at(block_index).at(data_index).get();
  } else if (step == nimble::STEP_NP1) {
    base_field_ptr = host_integration_point_data_step_np1_.at(block_index).at(data_index).get();
  }
  auto derived_field_ptr = dynamic_cast<Field<FieldType::HostSymTensorIntPt>*>(base_field_ptr);
  return derived_field_ptr->data();
}

◆ GetHostVectorNodeData()

HostVectorNodeView nimble_kokkos::ModelData::GetHostVectorNodeData ( int field_id)
{
  int index = field_id_to_host_node_data_index_.at(field_id);
  FieldBase* base_field_ptr = host_node_data_.at(index).get();
  auto derived_field_ptr = dynamic_cast<Field<FieldType::HostVectorNode>*>(base_field_ptr);
  return derived_field_ptr->data();
}

◆ GetScalarNodeData()

nimble::Viewify< 1 > nimble_kokkos::ModelData::GetScalarNodeData ( int field_id)
override virtual

Get view of scalar quantity defined on nodes.

Parameters
field_id The field id (see DataManager::GetFieldIDs())
Returns
Viewify<1> object for scalar quantity

Implements nimble::ModelDataBase.

{
  auto field_view = GetHostScalarNodeData(field_id);
  auto field_size = static_cast<int>(field_view.extent(0));
  auto field_stride = static_cast<int>(field_view.stride_0());
  return {field_view.data(), {field_size}, {field_stride}};
}

◆ GetScalarNodeDataLabels()

std::vector< std::string > nimble_kokkos::ModelData::GetScalarNodeDataLabels ( ) const
{
  std::vector<std::string> node_data_labels;
  for (auto const& entry : field_label_to_field_id_map_) {
    std::string const& field_label = entry.first;
    int field_id = entry.second;
    for (auto const& node_entry : field_id_to_host_node_data_index_) {
      int node_data_field_id = node_entry.first;
      if (field_id == node_data_field_id) {
        int node_data_index = node_entry.second;
        if (host_node_data_.at(node_data_index)->type() == FieldType::HostScalarNode)
          node_data_labels.push_back(field_label);
      }
    }
  }
  return node_data_labels;
}

◆ GetSymmetricTensorIntegrationPointDataLabels()

std::vector< std::string > nimble_kokkos::ModelData::GetSymmetricTensorIntegrationPointDataLabels ( int block_id) const
{
  int block_index = block_id_to_integration_point_data_index_.at(block_id);
  int num_blocks = static_cast<int>(block_id_to_integration_point_data_index_.size());
  std::vector<std::string> ipt_data_labels;
  for (auto const& entry : field_label_to_field_id_map_) {
    std::string const& field_label = entry.first;
    int field_id = entry.second;
    for (auto const& ipt_entry : field_id_to_device_integration_point_data_index_.at(block_index)) {
      int ipt_data_field_id = ipt_entry.first;
      if (field_id == ipt_data_field_id) {
        int ipt_data_index = ipt_entry.second;
        if (device_integration_point_data_step_np1_.at(block_index).at(ipt_data_index)->type() ==
            FieldType::DeviceSymTensorIntPt) {
          if (std::find(ipt_data_labels.begin(), ipt_data_labels.end(), field_label) == ipt_data_labels.end()) {
            ipt_data_labels.push_back(field_label);
          }
        }
      }
    }
  }
  return ipt_data_labels;
}

◆ GetVectorNodeData()

nimble::Viewify< 2 > nimble_kokkos::ModelData::GetVectorNodeData ( int field_id)
override virtual

Get view of vector quantity defined on nodes.

Parameters
field_id The field id (see DataManager::GetFieldIDs())
Returns
Viewify<2> object for vector quantity

Implements nimble::ModelDataBase.

{
  auto field_view = GetHostVectorNodeData(field_id);
  auto size0 = static_cast<int>(field_view.extent(0));
  auto size1 = static_cast<int>(field_view.extent(1));
  auto stride0 = static_cast<int>(field_view.stride_0());
  auto stride1 = static_cast<int>(field_view.stride_1());
  return {field_view.data(), {size0, size1}, {stride0, stride1}};
}
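
A small sketch of working through the returned handle; it is not part of the NimbleSM sources and assumes nimble::Viewify<2> exposes operator()(i, j) element access and that the field was allocated with AllocateNodeData(). The helper name and the caller-supplied loop bounds are illustrative.

#include <nimble_kokkos_model_data.h>

// Illustrative helper: zero a nodal vector field through its Viewify<2> handle.
// Assumption: Viewify<2> provides operator()(i, j) access to the underlying host view.
void ZeroVectorField(nimble_kokkos::ModelData& model_data, int field_id, int num_nodes, int dim)
{
  nimble::Viewify<2> v = model_data.GetVectorNodeData(field_id);
  for (int i = 0; i < num_nodes; ++i)
    for (int j = 0; j < dim; ++j) v(i, j) = 0.0;
}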

◆ GetVectorNodeDataLabels()

std::vector< std::string > nimble_kokkos::ModelData::GetVectorNodeDataLabels ( ) const
{
  std::vector<std::string> node_data_labels;
  for (auto const& entry : field_label_to_field_id_map_) {
    std::string const& field_label = entry.first;
    int field_id = entry.second;
    for (auto const& node_entry : field_id_to_host_node_data_index_) {
      int node_data_field_id = node_entry.first;
      if (field_id == node_data_field_id) {
        int node_data_index = node_entry.second;
        if (host_node_data_.at(node_data_index)->type() == FieldType::HostVectorNode)
          node_data_labels.push_back(field_label);
      }
    }
  }
  return node_data_labels;
}

◆ InitializeBlockData()

void nimble_kokkos::ModelData::InitializeBlockData ( nimble::DataManager & data_manager)
protected

Initialize block data for material information.

Parameters
data_manager
{
  //
  // Build up block data for stress computation
  //
  const auto& mesh = data_manager.GetMesh();
  for (auto&& block_it : blocks_) {
    int block_id = block_it.first;
    nimble_kokkos::Block& block = block_it.second;
    nimble::Material* material_d = block.GetDeviceMaterialModel();
    int num_elem_in_block = mesh.GetNumElementsInBlock(block_id);
    int num_integration_points_per_element = block.GetHostElement()->NumIntegrationPointsPerElement();
    block_data_.emplace_back(&block, material_d, block_id, num_elem_in_block, num_integration_points_per_element);
  }
}
nimble::Material * GetDeviceMaterialModel()
Definition nimble_kokkos_block.h:116
std::shared_ptr< nimble::Element > GetHostElement()
Definition nimble_kokkos_block.h:98

◆ InitializeBlocks()

void nimble_kokkos::ModelData::InitializeBlocks ( nimble::DataManager & data_manager,
const std::shared_ptr< nimble::MaterialFactoryBase > & material_factory_base )
override

Initialize the different blocks in the mesh.

Parameters
    data_manager             Reference to the data manager
    material_factory_base    Shared pointer to the material factory
897{
898 bool store_unrotated_stress(true);
899
900 const auto& mesh_ = data_manager.GetMesh();
901 const auto& parser_ = data_manager.GetParser();
902 auto& field_ids_ = data_manager.GetFieldIDs();
903
904 auto material_factory_ptr = dynamic_cast<nimble_kokkos::MaterialFactory*>(material_factory_base.get());
905 const auto num_blocks = static_cast<int>(mesh_.GetNumBlocks());
906
907 //
908 // Blocks
909 //
910 std::vector<int> block_ids = mesh_.GetBlockIds();
911 for (int i = 0; i < num_blocks; i++) {
912 int block_id = block_ids.at(i);
913 std::string const& model_material_parameters = parser_.GetModelMaterialParameters(block_id);
914 int num_elements_in_block = mesh_.GetNumElementsInBlock(block_id);
915 blocks_[block_id] = nimble_kokkos::Block();
916 blocks_.at(block_id).Initialize(model_material_parameters, num_elements_in_block, *material_factory_ptr);
917 //
918 // MPI version use model_data.DeclareElementData(block_id,
919 // data_labels_and_lengths);
920 //
921 std::vector<double> initial_value(9, 0.0);
922 initial_value[0] = initial_value[1] = initial_value[2] = 1.0;
923 field_ids_.deformation_gradient = AllocateIntegrationPointData(
924 block_id, nimble::FULL_TENSOR, "deformation_gradient", num_elements_in_block, initial_value);
925 // volume-averaged quantities for I/O are stored as element data
926 AllocateElementData(block_id, nimble::FULL_TENSOR, "deformation_gradient", num_elements_in_block);
927
928 field_ids_.stress =
929 AllocateIntegrationPointData(block_id, nimble::SYMMETRIC_TENSOR, "stress", num_elements_in_block);
930 if (store_unrotated_stress) {
931 field_ids_.unrotated_stress =
932 AllocateIntegrationPointData(block_id, nimble::SYMMETRIC_TENSOR, "unrotated_stress", num_elements_in_block);
933 }
934
935 // volume-averaged quantities for I/O are stored as element data
936 AllocateElementData(block_id, nimble::SYMMETRIC_TENSOR, "stress", num_elements_in_block);
937
938 if (parser_.GetOutputFieldString().find("volume") != std::string::npos) {
939 AllocateElementData(block_id, nimble::SCALAR, "volume", num_elements_in_block);
940 }
941
942 if (blocks_.at(block_id).GetMaterialPointer()->NumStateVariables() > 0) {
943 //
944 // Verify which state variables are actually needed
945 //
946 field_ids_.state_sym_tensor =
947 AllocateIntegrationPointData(block_id, nimble::SYMMETRIC_TENSOR, "state_sym_tensor", num_elements_in_block);
948 field_ids_.state_full_tensor =
949 AllocateIntegrationPointData(block_id, nimble::FULL_TENSOR, "state_full_tensor", num_elements_in_block);
950 field_ids_.state_scalar =
951 AllocateIntegrationPointData(block_id, nimble::SCALAR, "state_scalar", num_elements_in_block);
952 field_ids_.state_vec3D =
953 AllocateIntegrationPointData(block_id, nimble::VECTOR, "state_vec3D", num_elements_in_block);
954 }
955 }
956
957 // Initialize gathered containers when using explicit scheme
958 if (parser_.TimeIntegrationScheme() == "explicit") InitializeGatheredVectors(mesh_);
959
960 InitializeBlockData(data_manager);
961}
void InitializeBlockData(nimble::DataManager &data_manager)
Initialize block data for material information.
Definition nimble_kokkos_model_data.cc:1289
int AllocateElementData(int block_id, nimble::Length length, std::string label, int num_objects)
Definition nimble_kokkos_model_data.cc:645
int AllocateIntegrationPointData(int block_id, nimble::Length length, std::string label, int num_objects, std::vector< double > initial_value=std::vector< double >())
Definition nimble_kokkos_model_data.cc:708
void InitializeGatheredVectors(const nimble::GenesisMesh &mesh_)
Definition nimble_kokkos_model_data.cc:1044

◆ InitializeExodusOutput()

void nimble_kokkos::ModelData::InitializeExodusOutput ( nimble::DataManager & data_manager)
overridevirtual

Reimplemented from nimble::ModelDataBase.

1307{
1308 const auto& mesh_ = data_manager.GetMesh();
1309 const auto& parser_ = data_manager.GetParser();
1310
1311 // Initialize the exodus-output-manager
1312 exodus_output_manager_->SpecifyOutputFields(*this, parser_.GetOutputFieldString());
1313
1314 output_node_component_labels_ = std::move(exodus_output_manager_->GetNodeDataLabelsForOutput());
1315 output_element_component_labels_ = std::move(exodus_output_manager_->GetElementDataLabelsForOutput());
1316
1318 std::vector<int> block_ids = mesh_.GetBlockIds();
1319 for (auto block_id : block_ids) {
1320 derived_output_element_data_labels_[block_id] = std::vector<std::string>(); // TODO eliminate this
1321 }
1322
1323 auto& field_ids = data_manager.GetFieldIDs();
1324 displacement_h_ = GetHostVectorNodeData(field_ids.displacement);
1325 displacement_d_ = GetDeviceVectorNodeData(field_ids.displacement);
1326
1327 velocity_h_ = GetHostVectorNodeData(field_ids.velocity);
1328 velocity_d_ = GetDeviceVectorNodeData(field_ids.velocity);
1329}
std::map< int, std::vector< std::string > > output_element_component_labels_
Output labels for element data that will be written to disk.
Definition nimble_model_data_base.h:341
std::vector< std::string > output_node_component_labels_
Output labels for node data that will be written to disk.
Definition nimble_model_data_base.h:334
std::map< int, std::vector< std::string > > derived_output_element_data_labels_
Output labels for derived element data that will be written to disk.
Definition nimble_model_data_base.h:344
nimble_kokkos::DeviceVectorNodeView displacement_d_
Definition nimble_kokkos_model_data.h:350
nimble_kokkos::DeviceVectorNodeView velocity_d_
Definition nimble_kokkos_model_data.h:353
nimble_kokkos::HostVectorNodeView velocity_h_
Definition nimble_kokkos_model_data.h:352
nimble_kokkos::HostVectorNodeView displacement_h_
Definition nimble_kokkos_model_data.h:349

◆ InitializeGatheredVectors()

void nimble_kokkos::ModelData::InitializeGatheredVectors ( const nimble::GenesisMesh & mesh_)
protected
1045{
1046 int num_blocks = static_cast<int>(mesh_.GetNumBlocks());
1047
1048 gathered_reference_coordinate_d.resize(
1049     num_blocks, nimble_kokkos::DeviceVectorNodeGatheredView("gathered_reference_coordinates", 1));
1050 gathered_displacement_d.resize(num_blocks, nimble_kokkos::DeviceVectorNodeGatheredView("gathered_displacement", 1));
1051 gathered_internal_force_d.resize(
1052     num_blocks, nimble_kokkos::DeviceVectorNodeGatheredView("gathered_internal_force", 1));
1053 gathered_contact_force_d.resize(num_blocks, nimble_kokkos::DeviceVectorNodeGatheredView("gathered_contact_force", 1));
1054
1055 int block_index = 0;
1056 for (const auto& block_it : blocks_) {
1057 int block_id = block_it.first;
1058 int num_elem_in_block = mesh_.GetNumElementsInBlock(block_id);
1059 Kokkos::resize(gathered_reference_coordinate_d.at(block_index), num_elem_in_block);
1060 Kokkos::resize(gathered_displacement_d.at(block_index), num_elem_in_block);
1061 Kokkos::resize(gathered_internal_force_d.at(block_index), num_elem_in_block);
1062 Kokkos::resize(gathered_contact_force_d.at(block_index), num_elem_in_block);
1063 block_index += 1;
1064 }
1065}
unsigned int GetNumBlocks() const
Definition nimble_genesis_mesh.h:135
int GetNumElementsInBlock(int block_id) const
Definition nimble_genesis_mesh.cc:460
std::vector< nimble_kokkos::DeviceVectorNodeGatheredView > gathered_contact_force_d
Definition nimble_kokkos_model_data.h:346

◆ ScatterScalarNodeData()

void nimble_kokkos::ModelData::ScatterScalarNodeData ( int field_id,
int num_elements,
int num_nodes_per_element,
const DeviceElementConnectivityView & elem_conn_d,
const DeviceScalarNodeGatheredView & gathered_view_d )
1667{
1668 int index = field_id_to_device_node_data_index_.at(field_id);
1669 FieldBase* base_field_ptr = device_node_data_.at(index).get();
1670 auto derived_field_ptr = dynamic_cast<Field<FieldType::DeviceScalarNode>*>(base_field_ptr);
1671 Field<FieldType::DeviceScalarNode>::AtomicView data = derived_field_ptr->data();
1672 Kokkos::parallel_for(
1673 "ScatterScalarNodeData", num_elements, KOKKOS_LAMBDA(const int i_elem) {
1674 for (int i_node = 0; i_node < num_nodes_per_element; i_node++) {
1675 data(elem_conn_d(num_nodes_per_element * i_elem + i_node)) += gathered_view_d(i_elem, i_node);
1676 }
1677 });
1678}
Kokkos::View< double *, kokkos_layout, kokkos_device, Kokkos::MemoryTraits< Kokkos::Atomic > > AtomicView
Definition nimble_kokkos_defs.h:201

◆ ScatterScalarNodeDataUsingKokkosScatterView()

void nimble_kokkos::ModelData::ScatterScalarNodeDataUsingKokkosScatterView ( int field_id,
int num_elements,
int num_nodes_per_element,
const DeviceElementConnectivityView & elem_conn_d,
const DeviceScalarNodeGatheredView & gathered_view_d )
1711{
1712 int index = field_id_to_device_node_data_index_.at(field_id);
1713 FieldBase* base_field_ptr = device_node_data_.at(index).get();
1714 auto derived_field_ptr = dynamic_cast<Field<FieldType::DeviceScalarNode>*>(base_field_ptr);
1715 auto data = derived_field_ptr->data();
1716 auto scatter_view =
1717 Kokkos::Experimental::create_scatter_view(data); // DJL it is a terrible idea to allocate this here
1718 scatter_view.reset();
1719 Kokkos::parallel_for(
1720 "GatherVectorNodeData", num_elements, KOKKOS_LAMBDA(const int i_elem) {
1721 auto scattered_access = scatter_view.access();
1722 for (int i_node = 0; i_node < num_nodes_per_element; i_node++) {
1723 scattered_access(elem_conn_d(num_nodes_per_element * i_elem + i_node)) += gathered_view_d(i_elem, i_node);
1724 }
1725 });
1726 Kokkos::Experimental::contribute(data, scatter_view);
1727}
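Compared to ScatterScalarNodeData, which accumulates through an atomic view, this variant lets Kokkos choose between atomics and data duplication depending on the backend. Below is a self-contained sketch of the same ScatterView pattern with made-up connectivity and sizes, purely for illustration:

  #include <Kokkos_Core.hpp>
  #include <Kokkos_ScatterView.hpp>

  void scatter_example()
  {
    const int num_elems = 4, nodes_per_elem = 2, num_nodes = 5;
    Kokkos::View<int*>     conn("conn", num_elems * nodes_per_elem);         // element-to-node connectivity
    Kokkos::View<double**> gathered("gathered", num_elems, nodes_per_elem);  // per-element contributions
    Kokkos::View<double*>  nodal("nodal", num_nodes);                        // assembled nodal quantity

    auto scatter = Kokkos::Experimental::create_scatter_view(nodal);
    scatter.reset();  // zero the scatter contributions, as in the listing above
    Kokkos::parallel_for(
        "scatter_example", num_elems, KOKKOS_LAMBDA(const int e) {
          auto access = scatter.access();
          for (int n = 0; n < nodes_per_elem; ++n)
            access(conn(e * nodes_per_elem + n)) += gathered(e, n);
        });
    Kokkos::Experimental::contribute(nodal, scatter);  // fold contributions back into `nodal`
  }

In the member function itself the ScatterView is created inside the call (flagged as undesirable in the source comment); a production variant would typically hold it as a member and only reset() it before each assembly.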

◆ ScatterVectorNodeData()

void nimble_kokkos::ModelData::ScatterVectorNodeData ( int field_id,
int num_elements,
int num_nodes_per_element,
const DeviceElementConnectivityView & elem_conn_d,
const DeviceVectorNodeGatheredView & gathered_view_d )
1687{
1688 int index = field_id_to_device_node_data_index_.at(field_id);
1689 FieldBase* base_field_ptr = device_node_data_.at(index).get();
1690 auto derived_field_ptr = dynamic_cast<Field<FieldType::DeviceVectorNode>*>(base_field_ptr);
1691 Field<FieldType::DeviceVectorNode>::AtomicView data = derived_field_ptr->data();
1692 Kokkos::parallel_for(
1693 "ScatterVectorNodeData", num_elements, KOKKOS_LAMBDA(const int i_elem) {
1694 for (int i_node = 0; i_node < num_nodes_per_element; i_node++) {
1695 int node_index = elem_conn_d(num_nodes_per_element * i_elem + i_node);
1696 for (int i_coord = 0; i_coord < 3; i_coord++) {
1697 data(node_index, i_coord) += gathered_view_d(i_elem, i_node, i_coord);
1698 }
1699 }
1700 });
1701}
Kokkos::View< double *[3], kokkos_layout, kokkos_device, Kokkos::MemoryTraits< Kokkos::Atomic > > AtomicView
Definition nimble_kokkos_defs.h:251

◆ UpdateStates()

void nimble_kokkos::ModelData::UpdateStates ( const nimble::DataManager & data_manager)
overridevirtual

Copy time state (n+1) into time state (n)

Parameters
    data_manager    Reference to the data manager

Implements nimble::ModelDataBase.

965{
966 const auto& field_ids_ = data_manager.GetFieldIDs();
967
968 // Copy STEP_NP1 data to STEP_N
969 int block_index = 0;
970 for (auto& block_it : blocks_) {
971 int block_id = block_it.first;
972 auto deformation_gradient_step_n_d =
973 GetDeviceFullTensorIntegrationPointData(block_id, field_ids_.deformation_gradient, nimble::STEP_N);
974 auto unrotated_stress_step_n_d =
975 GetDeviceSymTensorIntegrationPointData(block_id, field_ids_.unrotated_stress, nimble::STEP_N);
976 auto stress_step_n_d = GetDeviceSymTensorIntegrationPointData(block_id, field_ids_.stress, nimble::STEP_N);
977 auto deformation_gradient_step_np1_d =
978 GetDeviceFullTensorIntegrationPointData(block_id, field_ids_.deformation_gradient, nimble::STEP_NP1);
979 auto unrotated_stress_step_np1_d =
980 GetDeviceSymTensorIntegrationPointData(block_id, field_ids_.unrotated_stress, nimble::STEP_NP1);
981 auto stress_step_np1_d = GetDeviceSymTensorIntegrationPointData(block_id, field_ids_.stress, nimble::STEP_NP1);
982 Kokkos::deep_copy(deformation_gradient_step_n_d, deformation_gradient_step_np1_d);
983 Kokkos::deep_copy(unrotated_stress_step_n_d, unrotated_stress_step_np1_d);
984 Kokkos::deep_copy(stress_step_n_d, stress_step_np1_d);
985 //
986 if (blocks_.at(block_id).GetMaterialPointer()->NumStateVariables() > 0) {
987 if (field_ids_.state_sym_tensor >= 0) {
988 auto state_sym_tensor_n =
989 GetDeviceFullTensorIntegrationPointData(block_id, field_ids_.state_sym_tensor, nimble::STEP_N);
990 auto state_sym_tensor_np1 =
991 GetDeviceFullTensorIntegrationPointData(block_id, field_ids_.state_sym_tensor, nimble::STEP_NP1);
992 Kokkos::deep_copy(state_sym_tensor_n, state_sym_tensor_np1);
993 }
994 //
995 if (field_ids_.state_full_tensor >= 0) {
996 auto state_full_tensor_n =
997 GetDeviceFullTensorIntegrationPointData(block_id, field_ids_.state_full_tensor, nimble::STEP_N);
998 auto state_full_tensor_np1 =
999 GetDeviceFullTensorIntegrationPointData(block_id, field_ids_.state_full_tensor, nimble::STEP_NP1);
1000 Kokkos::deep_copy(state_full_tensor_n, state_full_tensor_np1);
1001 }
1002 //
1003 if (field_ids_.state_scalar >= 0) {
1004 auto state_scalar_n =
1005 GetDeviceFullTensorIntegrationPointData(block_id, field_ids_.state_scalar, nimble::STEP_N);
1006 auto state_scalar_np1 =
1007 GetDeviceFullTensorIntegrationPointData(block_id, field_ids_.state_scalar, nimble::STEP_NP1);
1008 Kokkos::deep_copy(state_scalar_n, state_scalar_np1);
1009 }
1010 //
1011 if (field_ids_.state_vec3D >= 0) {
1012 auto state_vector_n = GetDeviceFullTensorIntegrationPointData(block_id, field_ids_.state_vec3D, nimble::STEP_N);
1013 auto state_vector_np1 =
1014 GetDeviceFullTensorIntegrationPointData(block_id, field_ids_.state_vec3D, nimble::STEP_NP1);
1015 Kokkos::deep_copy(state_vector_n, state_vector_np1);
1016 }
1017 }
1018 //
1019 block_index += 1;
1020 }
1021}

◆ UpdateWithNewDisplacement()

void nimble_kokkos::ModelData::UpdateWithNewDisplacement ( nimble::DataManager & data_manager,
double dt )
overridevirtual

Update model with new displacement.

Parameters
    [in]  data_manager    Reference to the data manager
    [in]  dt              Current time step
Note
This routine will synchronize the host and device displacements.

Reimplemented from nimble::ModelDataBase.

1738{
1739 Kokkos::deep_copy(displacement_d_, displacement_h_);
1740}

◆ UpdateWithNewVelocity()

void nimble_kokkos::ModelData::UpdateWithNewVelocity ( nimble::DataManager & data_manager,
double dt )
overridevirtual

Update model with new velocity.

Parameters
    [in]  data_manager    Reference to the data manager
    [in]  dt              Current time step
Note
This routine will synchronize the host and device velocities.

Reimplemented from nimble::ModelDataBase.

1732{
1733 Kokkos::deep_copy(velocity_d_, velocity_h_);
1734}
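Both update routines only mirror host data to the device. A hypothetical usage sketch (the acceleration view and time-step handling are illustrative, not the actual NimbleSM integrator): the host-side velocity is advanced first, then UpdateWithNewVelocity pushes the result to the device copy used by the kernels.

  // `model_data`, `data_manager`, `field_ids`, `acceleration_h`, and `dt` are assumed from context.
  auto velocity_h = model_data.GetHostVectorNodeData(field_ids.velocity);
  for (size_t n = 0; n < velocity_h.extent(0); ++n)
    for (int d = 0; d < 3; ++d)
      velocity_h(n, d) += dt * acceleration_h(n, d);   // illustrative host-side update
  model_data.UpdateWithNewVelocity(data_manager, dt);  // deep_copy of velocity: host -> device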

◆ WriteExodusOutput()

void nimble_kokkos::ModelData::WriteExodusOutput ( nimble::DataManager & data_manager,
double time_current )
overridevirtual

Write output of simulation in Exodus format.

Parameters
    [in]  data_manager    Reference to the data manager
    [in]  time_current    Current time value

Reimplemented from nimble::ModelDataBase.

1333{
1334 auto mesh_ = data_manager.GetMesh();
1335 const auto& parser_ = data_manager.GetParser();
1336 auto exodus_output = data_manager.GetExodusOutput();
1337
1338 Kokkos::deep_copy(displacement_d_, displacement_h_);
1339 Kokkos::deep_copy(velocity_d_, velocity_h_);
1340
1341 exodus_output_manager_->ComputeElementData(
1343
1344 auto const& node_data_output = exodus_output_manager_->GetNodeDataForOutput(*this);
1345 auto const& elem_data_output = exodus_output_manager_->GetElementDataForOutput(*this);
1346
1347 std::vector<double> glbl_data;
1348 std::map<int, std::vector<std::vector<double>>> drvd_elem_data;
1349
1350 exodus_output->WriteStep(
1351 time_current,
1352 glbl_data,
1353 node_data_output,
1355 elem_data_output,
1357 drvd_elem_data);
1358}
std::shared_ptr< nimble::ExodusOutput > GetExodusOutput()
Definition nimble_data_manager.h:158
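A hypothetical driver-level sketch of how the two Exodus routines fit together; the step loop, output_frequency, and time bookkeeping are illustrative and not part of this class:

  // `model_data`, `data_manager`, `num_steps`, `output_frequency`, and `dt` are assumed from context.
  model_data.InitializeExodusOutput(data_manager);
  double time_current = 0.0;
  for (int step = 0; step < num_steps; ++step) {
    // ... advance the solution by dt ...
    time_current += dt;
    if (step % output_frequency == 0)
      model_data.WriteExodusOutput(data_manager, time_current);
  }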

Member Data Documentation

◆ block_data_

std::vector<nimble::BlockData> nimble_kokkos::ModelData::block_data_
protected

◆ block_id_to_element_data_index_

std::map<int, int> nimble_kokkos::ModelData::block_id_to_element_data_index_
protected

◆ block_id_to_integration_point_data_index_

std::map<int, int> nimble_kokkos::ModelData::block_id_to_integration_point_data_index_
protected

◆ blocks_

std::map<int, nimble_kokkos::Block> nimble_kokkos::ModelData::blocks_
protected

Blocks.

◆ device_element_data_

std::vector<std::vector<Data> > nimble_kokkos::ModelData::device_element_data_
protected

◆ device_integration_point_data_step_n_

std::vector<std::vector<Data> > nimble_kokkos::ModelData::device_integration_point_data_step_n_
protected

◆ device_integration_point_data_step_np1_

std::vector<std::vector<Data> > nimble_kokkos::ModelData::device_integration_point_data_step_np1_
protected

◆ device_node_data_

std::vector<Data> nimble_kokkos::ModelData::device_node_data_
protected

◆ displacement_d_

nimble_kokkos::DeviceVectorNodeView nimble_kokkos::ModelData::displacement_d_
protected

◆ displacement_h_

nimble_kokkos::HostVectorNodeView nimble_kokkos::ModelData::displacement_h_
protected

◆ exodus_output_manager_

std::unique_ptr<nimble_kokkos::ExodusOutputManager> nimble_kokkos::ModelData::exodus_output_manager_
protected

◆ field_id_to_device_element_data_index_

std::vector<std::map<int, int> > nimble_kokkos::ModelData::field_id_to_device_element_data_index_
protected

◆ field_id_to_device_integration_point_data_index_

std::vector<std::map<int, int> > nimble_kokkos::ModelData::field_id_to_device_integration_point_data_index_
protected

◆ field_id_to_device_node_data_index_

std::map<int, int> nimble_kokkos::ModelData::field_id_to_device_node_data_index_
protected

◆ field_id_to_host_element_data_index_

std::vector<std::map<int, int> > nimble_kokkos::ModelData::field_id_to_host_element_data_index_
protected

◆ field_id_to_host_integration_point_data_index_

std::vector<std::map<int, int> > nimble_kokkos::ModelData::field_id_to_host_integration_point_data_index_
protected

◆ field_id_to_host_node_data_index_

std::map<int, int> nimble_kokkos::ModelData::field_id_to_host_node_data_index_
protected

◆ field_label_to_field_id_map_

std::map<std::string, int> nimble_kokkos::ModelData::field_label_to_field_id_map_
protected

◆ gathered_contact_force_d

std::vector<nimble_kokkos::DeviceVectorNodeGatheredView> nimble_kokkos::ModelData::gathered_contact_force_d
protected

◆ gathered_displacement_d

std::vector<nimble_kokkos::DeviceVectorNodeGatheredView> nimble_kokkos::ModelData::gathered_displacement_d
protected

◆ gathered_internal_force_d

std::vector<nimble_kokkos::DeviceVectorNodeGatheredView> nimble_kokkos::ModelData::gathered_internal_force_d
protected

◆ gathered_reference_coordinate_d

std::vector<nimble_kokkos::DeviceVectorNodeGatheredView> nimble_kokkos::ModelData::gathered_reference_coordinate_d
protected

◆ host_element_data_

std::vector<std::vector<Data> > nimble_kokkos::ModelData::host_element_data_
protected

◆ host_integration_point_data_step_n_

std::vector<std::vector<Data> > nimble_kokkos::ModelData::host_integration_point_data_step_n_
protected

◆ host_integration_point_data_step_np1_

std::vector<std::vector<Data> > nimble_kokkos::ModelData::host_integration_point_data_step_np1_
protected

◆ host_node_data_

std::vector<Data> nimble_kokkos::ModelData::host_node_data_
protected

◆ velocity_d_

nimble_kokkos::DeviceVectorNodeView nimble_kokkos::ModelData::velocity_d_
protected

◆ velocity_h_

nimble_kokkos::HostVectorNodeView nimble_kokkos::ModelData::velocity_h_
protected

The documentation for this class was generated from the following files: