// Main driver for an MPI-parallelised phase-transition scan over several
// lattice sizes (Monte Carlo / Ising-model style).
// NOTE(review): this chunk is an incomplete extraction — the numerals fused
// into many lines are original source line numbers, and several original
// lines (the argc guard, else/closing braces, the per-point simulation call,
// timing setup, MPI_Finalize) are missing.  Comments below describe only what
// the visible code establishes; gaps are flagged as assumptions.
24int main(
int argc,
char **argv)
// Usage message — presumably printed when fewer than 4 CLI arguments are
// given (the guard condition itself is not visible here).  TODO confirm.
27 std::cout <<
"You need at least 4 arguments" << std::endl;
// Parse CLI arguments: temperature range [start, end], number of sample
// points, and Monte Carlo cycles per point.  dt is the temperature step.
// N is declared but set elsewhere — presumably N = L*L spins; verify.
32 double start = atof(argv[1]), end = atof(argv[2]);
33 int points = atoi(argv[3]), N;
34 int lattice_sizes[] = {20, 40, 60, 80, 100};
35 double dt = (end - start) / points;
36 int cycles = atoi(argv[4]);
// MPI bookkeeping: this process's rank and the total process count.
42 int rank, cluster_size;
45 MPI_Init(&argc, &argv);
48 MPI_Comm_size(MPI_COMM_WORLD, &cluster_size);
49 MPI_Comm_rank(MPI_COMM_WORLD, &rank);
// Split `points` temperature samples across ranks: the first `remainder`
// ranks take one extra point; i_start is each rank's first temperature.
51 int remainder = points % cluster_size;
55 if (rank < remainder) {
56 i_points = points / cluster_size + 1;
57 i_start = start + dt * i_points * rank;
// (the `} else {` line between the two branches is missing from this view)
60 i_points = points / cluster_size;
61 i_start = start + dt * (i_points * rank + remainder);
// Per-rank work-distribution report: "Rank r: local_points,local_start".
65 std::cout <<
"Rank " << rank <<
": " << i_points <<
',' << i_start <<
'\n';
// Outer loop over lattice sizes; inner loop over this rank's local
// temperature points.  The simulation call inside the inner loop
// (presumably filling i_data[i]) is not visible in this extraction.
67 for (
int L : lattice_sizes) {
69 for (
size_t i = 0; i < i_points; i++) {
// Gather on rank 0: copy its own local results into `data`, then receive
// each other rank's block as raw bytes (MPI_CHAR) of data_t records.
74 std::copy_n(i_data, i_points, data);
75 for (
size_t i = 1; i < cluster_size; i++) {
// NOTE(review): this condition tests `rank` (always 0 here) rather than
// the sender index `i`, so the `+ 1`-sized branch is never taken on the
// gathering rank — likely should be `i < remainder`.  TODO confirm
// against the full source.
76 if (rank < remainder) {
77 MPI_Recv((
void *)i_data,
78 sizeof(
data_t) * (points / cluster_size + 1),
79 MPI_CHAR, i, MPI_ANY_TAG, MPI_COMM_WORLD,
// (the MPI_STATUS_IGNORE argument line is missing from this extraction)
// NOTE(review): destination stride is points/cluster_size while the
// copied count is points/cluster_size + 1, so consecutive blocks would
// overlap; the stride presumably needs the `+ 1` too.  TODO confirm.
81 std::copy_n(i_data, points / cluster_size + 1,
82 data + (points / cluster_size) * i);
// Smaller-block branch: ranks beyond `remainder` send one fewer point;
// their data lands after the `remainder` extra points already placed.
85 MPI_Recv((
void *)i_data,
86 sizeof(
data_t) * (points / cluster_size), MPI_CHAR,
87 i, MPI_ANY_TAG, MPI_COMM_WORLD, MPI_STATUS_IGNORE);
88 std::copy_n(i_data, points / cluster_size,
89 data + (points / cluster_size) * i + remainder);
// One CSV output file per lattice size under output/phase_transition/;
// the directory is created first via the project's utils helpers.
92 std::stringstream outfile;
93 outfile <<
"output/phase_transition/size_" << L <<
".txt";
94 utils::mkpath(utils::dirname(outfile.str()));
95 ofile.open(outfile.str());
// Post-process each temperature point: heat capacity
// CV = (<E^2> - <E>^2) / (N T^2) and susceptibility
// X = (<M^2> - <|M|>^2) / (N T); E and M are normalised per spin by N.
99 using utils::scientific_format;
100 for (
size_t i = 0; i < points; i++) {
101 temp = start + dt * i;
102 CV = (data[i].E2 - data[i].E * data[i].E)
103 / ((
double)N * temp * temp);
104 X = (data[i].M2 - data[i].M_abs * data[i].M_abs)
105 / ((
double)N * temp);
// Emit one CSV row: T, E/N, |M|/N, CV, X in scientific format.
107 ofile << scientific_format(temp) <<
','
108 << scientific_format(data[i].E / N) <<
','
109 << scientific_format(data[i].M_abs / N) <<
','
110 << scientific_format(CV) <<
',' << scientific_format(X)
// Non-root ranks: ship local results to rank 0 as raw bytes, using the
// sender's rank as the message tag (receiver uses MPI_ANY_TAG anyway).
116 MPI_Send(i_data, i_points *
sizeof(
data_t), MPI_CHAR, 0, rank,
// Wall-clock report; t0/t1 are set by timing code not visible here.
124 std::cout <<
"Time: " << t1 - t0 <<
" seconds\n";
Header for the data_t type.
Functions for Monte Carlo simulations.
data_t monte_carlo_parallel(int L, double T, int cycles)
Execute the Metropolis algorithm in parallel for a given number of Monte Carlo cycles.
int main()
The main function.
Useful function prototypes and macros.