Commit 1b989e2f authored by Van Dung NGUYEN

add new function

parent 0321a1a2
1 merge request: !309 Master
@@ -785,6 +785,10 @@ void BimaterialHomogenization::computeBimaterial(const fullMatrix<double>& C1, c
bool ok = fullMatrixOperation::invertMatrix(A,invA,stiff,DinvADA);
if (!ok)
{
C1.print("C1");
C2.print("C2");
normal.print("normal");
printf("f1 = %e, f2 = %e\n",f1,f2);
A.print("A");
}
//
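The added block only runs when fullMatrixOperation::invertMatrix reports failure: it prints every operand (C1, C2, normal, the fractions f1, f2 and A) so the singular interface operator can be reproduced offline. A minimal self-contained sketch of the same fail-and-dump pattern, using a hypothetical 2x2 inverse instead of the project's fullMatrix API:

#include <cmath>
#include <cstdio>

// Hypothetical stand-in for fullMatrixOperation::invertMatrix (an assumption,
// not the project's API): report failure instead of aborting on a singular input.
static bool invert2x2(const double A[2][2], double invA[2][2])
{
  const double det = A[0][0]*A[1][1] - A[0][1]*A[1][0];
  if (std::fabs(det) < 1.e-14) return false;
  invA[0][0] =  A[1][1]/det; invA[0][1] = -A[0][1]/det;
  invA[1][0] = -A[1][0]/det; invA[1][1] =  A[0][0]/det;
  return true;
}

int main()
{
  const double A[2][2] = {{1., 2.}, {2., 4.}}; // singular on purpose
  double invA[2][2];
  if (!invert2x2(A, invA))
  {
    // same diagnostic idea as the patch: dump the operand before giving up
    std::printf("inversion failed, A =\n[%e %e]\n[%e %e]\n",
                A[0][0], A[0][1], A[1][0], A[1][1]);
  }
  return 0;
}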
@@ -1561,6 +1565,26 @@ void TrainingDeepMaterialNetwork::setTrainingSample(int row, const fullMatrix<do
}
};
void TrainingDeepMaterialNetwork::setTrainingSample(int row, const fullMatrix<double>& C1, const fullMatrix<double>& C2, const fullMatrix<double>& C3, const fullMatrix<double>& C4, const fullMatrix<double>& Ceff)
{
  if (_homo->withPlaneStrain())
  {
    fullMatrixOperation::reducePlaneStrain(C1,_XTrain[row][0]);
    fullMatrixOperation::reducePlaneStrain(C2,_XTrain[row][1]);
    fullMatrixOperation::reducePlaneStrain(C3,_XTrain[row][2]);
    fullMatrixOperation::reducePlaneStrain(C4,_XTrain[row][3]);
    fullMatrixOperation::reducePlaneStrain(Ceff,_YTrain[row]);
  }
  else
  {
    fullMatrixOperation::reduceMat99ToMat66(C1,_XTrain[row][0]);
    fullMatrixOperation::reduceMat99ToMat66(C2,_XTrain[row][1]);
    fullMatrixOperation::reduceMat99ToMat66(C3,_XTrain[row][2]);
    fullMatrixOperation::reduceMat99ToMat66(C4,_XTrain[row][3]);
    fullMatrixOperation::reduceMat99ToMat66(Ceff,_YTrain[row]);
  }
};
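This new overload mirrors the existing one-, two- and three-phase variants: each 9x9 phase stiffness is reduced before storage, to the plane-strain form when _homo->withPlaneStrain() is true and to 6x6 otherwise, with the homogenized Ceff going to _YTrain[row]. A hedged usage sketch; the helper, the header name and the assumption that the training arrays are already sized for four phases are illustrative, not part of the patch:

#include "TrainingDeepMaterialNetwork.h" // header name is an assumption

// Hypothetical helper: fills Ns training rows through the new four-phase overload.
// How the tensors are obtained (RVE data, analytical laminates, ...) is left out.
void fillFourPhaseTrainingSet(TrainingDeepMaterialNetwork& dmn, int Ns)
{
  fullMatrix<double> C1(9,9), C2(9,9), C3(9,9), C4(9,9), Ceff(9,9);
  for (int row = 0; row < Ns; ++row)
  {
    // ... fill C1..C4 and Ceff for this sample ...
    dmn.setTrainingSample(row, C1, C2, C3, C4, Ceff); // stored reduced: plane strain or 6x6
  }
}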
void TrainingDeepMaterialNetwork::setTestSample(int row, const fullMatrix<double>& C1, const fullMatrix<double>& Ceff)
{
static fullMatrix<double> C2;
@@ -1615,6 +1639,26 @@ void TrainingDeepMaterialNetwork::setTestSample(int row, const fullMatrix<double
}
};
void TrainingDeepMaterialNetwork::setTestSample(int row, const fullMatrix<double>& C1, const fullMatrix<double>& C2, const fullMatrix<double>& C3, const fullMatrix<double>& C4, const fullMatrix<double>& Ceff)
{
  if (_homo->withPlaneStrain())
  {
    fullMatrixOperation::reducePlaneStrain(C1,_XTest[row][0]);
    fullMatrixOperation::reducePlaneStrain(C2,_XTest[row][1]);
    fullMatrixOperation::reducePlaneStrain(C3,_XTest[row][2]);
    fullMatrixOperation::reducePlaneStrain(C4,_XTest[row][3]);
    fullMatrixOperation::reducePlaneStrain(Ceff,_YTest[row]);
  }
  else
  {
    fullMatrixOperation::reduceMat99ToMat66(C1,_XTest[row][0]);
    fullMatrixOperation::reduceMat99ToMat66(C2,_XTest[row][1]);
    fullMatrixOperation::reduceMat99ToMat66(C3,_XTest[row][2]);
    fullMatrixOperation::reduceMat99ToMat66(C4,_XTest[row][3]);
    fullMatrixOperation::reduceMat99ToMat66(Ceff,_YTest[row]);
  }
};
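The test-set overload follows the same reduction path into _XTest and _YTest. A sketch of loading four-phase test data; testDataSize and setTestSample are the entry points shown in this commit, while the helper and object names are assumptions:

// Hypothetical helper: allocates and fills Nt four-phase test samples.
void fillFourPhaseTestSet(TrainingDeepMaterialNetwork& dmn, int Nt)
{
  dmn.testDataSize(Nt, 4); // four phases per sample
  fullMatrix<double> C1(9,9), C2(9,9), C3(9,9), C4(9,9), Ceff(9,9);
  for (int row = 0; row < Nt; ++row)
  {
    // ... fill C1..C4 and the reference Ceff for this sample ...
    dmn.setTestSample(row, C1, C2, C3, C4, Ceff);
  }
}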
void TrainingDeepMaterialNetwork::testDataSize(int Nt, int numPhase)
{
printf("size of test set = %d numPhase = %d\n",Nt,numPhase);
......
@@ -277,10 +277,12 @@ class TrainingDeepMaterialNetwork
void setTrainingSample(int row, const fullMatrix<double>& C1, const fullMatrix<double>& Ceff);
void setTrainingSample(int row, const fullMatrix<double>& C1, const fullMatrix<double>& C2, const fullMatrix<double>& Ceff);
void setTrainingSample(int row, const fullMatrix<double>& C1, const fullMatrix<double>& C2, const fullMatrix<double>& C3, const fullMatrix<double>& Ceff);
void setTrainingSample(int row, const fullMatrix<double>& C1, const fullMatrix<double>& C2, const fullMatrix<double>& C3, const fullMatrix<double>& C4, const fullMatrix<double>& Ceff);
void testDataSize(int Ns, int numPhase);
void setTestSample(int row, const fullMatrix<double>& C1, const fullMatrix<double>& Ceff);
void setTestSample(int row, const fullMatrix<double>& C1, const fullMatrix<double>& C2, const fullMatrix<double>& Ceff);
void setTestSample(int row, const fullMatrix<double>& C1, const fullMatrix<double>& C2, const fullMatrix<double>& C3, const fullMatrix<double>& Ceff);
void setTestSample(int row, const fullMatrix<double>& C1, const fullMatrix<double>& C2, const fullMatrix<double>& C3, const fullMatrix<double>& C4, const fullMatrix<double>& Ceff);
void train(double lr, int maxEpoch,
std::string loss = "mare",
std::string historyFileName="history.csv",
......
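With four-phase samples in place, training goes through the existing train entry point. A hypothetical call using only the parameters visible in this hunk; the learning rate and epoch count are placeholders and the remaining defaulted arguments keep their defaults:

// dmn is the TrainingDeepMaterialNetwork set up above (assumed)
dmn.train(1.e-3 /*lr*/, 1000 /*maxEpoch*/, "mare", "history.csv");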