diff --git a/source/Applications/DLR-F16/F16BombadilTest10e-6.cfg b/source/Applications/DLR-F16/F16BombadilTest10e-6.cfg
index d9c64f2230860cdf5f68ea0b447ea2afe30a976c..cb62161f5b894002642bc1eefe613efcf94486c2 100644
--- a/source/Applications/DLR-F16/F16BombadilTest10e-6.cfg
+++ b/source/Applications/DLR-F16/F16BombadilTest10e-6.cfg
@@ -53,11 +53,11 @@ startDistance = -1.0
 refineDistance = 0.3
 
 newStart = false
-restartStep = 30
-restartStepStart = 20
+restartStep = 10
+restartStepStart = 50
 
-outTime = 10
-endTime = 30
+outTime = 60
+endTime = 60
 
 logToFile = false
 
diff --git a/source/Applications/DLR-F16/f16.cpp b/source/Applications/DLR-F16/f16.cpp
index 02cc28c0a036a379d5da8d29972ae483dea38d6d..bcb150b5253a79343cc4d56c1fb60848999e2ccf 100644
--- a/source/Applications/DLR-F16/f16.cpp
+++ b/source/Applications/DLR-F16/f16.cpp
@@ -153,7 +153,7 @@ void run(string configname)
       //////////////////////////////////////////////////////////////////////////
       //restart
       UbSchedulerPtr rSch(new UbScheduler(restartStep, restartStepStart));
-      //RestartCoProcessor rp(grid, rSch, comm, pathOut, RestartCoProcessor::TXT);
+      //RestartCoProcessor rp(grid, rSch, comm, pathOut, RestartCoProcessor::BINARY);
       MPIIORestartCoProcessor rcp(grid, rSch, pathOut, comm);
       //////////////////////////////////////////////////////////////////////////
 
@@ -319,10 +319,10 @@ void run(string configname)
             {
                boost::dynamic_pointer_cast<D3Q27TriFaceMeshInteractor>(fngIntrBodyPart)->refineBlockGridToLevel(refineLevel, startDistance, refineDistance);
             }
-            //else
-            //{
-            //   boost::dynamic_pointer_cast<D3Q27TriFaceMeshInteractor>(fngIntrWhole)->refineBlockGridToLevel(refineLevel, startDistance, refineDistance);
-            //}
+            else
+            {
+               boost::dynamic_pointer_cast<D3Q27TriFaceMeshInteractor>(fngIntrWhole)->refineBlockGridToLevel(refineLevel, startDistance, refineDistance);
+            }
 
             //boost::dynamic_pointer_cast<D3Q27TriFaceMeshInteractor>(triBand1Interactor)->refineBlockGridToLevel(refineLevel, 0.0, refineDistance);
             //boost::dynamic_pointer_cast<D3Q27TriFaceMeshInteractor>(triBand2Interactor)->refineBlockGridToLevel(refineLevel, 0.0, refineDistance);
@@ -330,12 +330,12 @@ void run(string configname)
             //boost::dynamic_pointer_cast<D3Q27TriFaceMeshInteractor>(triBand4Interactor)->refineBlockGridToLevel(refineLevel, 0.0, refineDistance);
 
 
-            GbObject3DPtr fngBox(new GbCuboid3D(fngMeshWhole->getX1Minimum(), fngMeshWhole->getX2Minimum(), fngMeshWhole->getX3Minimum(),
-                                                fngMeshWhole->getX1Maximum(), fngMeshWhole->getX2Maximum(), fngMeshWhole->getX3Maximum()));
-            if (myid==0) GbSystem3D::writeGeoObject(fngBox.get(), pathOut+"/geo/fngBox", WbWriterVtkXmlASCII::getInstance());
+            //GbObject3DPtr fngBox(new GbCuboid3D(fngMeshWhole->getX1Minimum(), fngMeshWhole->getX2Minimum(), fngMeshWhole->getX3Minimum(),
+            //                                    fngMeshWhole->getX1Maximum(), fngMeshWhole->getX2Maximum(), fngMeshWhole->getX3Maximum()));
+            //if (myid==0) GbSystem3D::writeGeoObject(fngBox.get(), pathOut+"/geo/fngBox", WbWriterVtkXmlASCII::getInstance());
 
-            RefineCrossAndInsideGbObjectBlockVisitor refVisitor0(fngBox, refineLevel);
-            grid->accept(refVisitor0);
+            //RefineCrossAndInsideGbObjectBlockVisitor refVisitor0(fngBox, refineLevel);
+            //grid->accept(refVisitor0);
 
             
             GbObject3DPtr bandTopBox(new GbCuboid3D(meshBand1->getX1Minimum(), meshBand1->getX2Minimum(), meshBand1->getX3Minimum(), 
@@ -626,7 +626,7 @@ void run(string configname)
       }
       else
       {
-         rcp.restart();
+         rcp.restart(restartStepStart);
          grid->setTimeStep(restartStepStart);
 
          {
diff --git a/source/VirtualFluidsCore/CoProcessors/MPIIORestartCoProcessor.cpp b/source/VirtualFluidsCore/CoProcessors/MPIIORestartCoProcessor.cpp
index 2d0bb5aefa3f2bf932c0350c29c5b09a9b64dae6..80f9cf48e4e861a0ee0b8c96e5b57368fc6b3cda 100644
--- a/source/VirtualFluidsCore/CoProcessors/MPIIORestartCoProcessor.cpp
+++ b/source/VirtualFluidsCore/CoProcessors/MPIIORestartCoProcessor.cpp
@@ -4,9 +4,10 @@
 //#include "LBMKernel.h"
 #include "CompressibleCumulantLBMKernel.h"
 #include "D3Q27EsoTwist3DSplittedVector.h"
+#include <UbSystem.h>
 
-//! \brief BLOCK_SIZE defines the quantity of the BoundaryCondition-structures written as one block to the file
-//! \details To avoid overflow in the parameter \a count of the function MPI_File_write_at 
+//! BLOCK_SIZE defines the number of BoundaryCondition structures written as one block to the file.
+//! To avoid overflow in the parameter \a count of the function MPI_File_write_at,
-//! structures BoundaryCondition are being written in blocks containing each of them BLOCK_SIZE structures
+//! BoundaryCondition structures are written in blocks, each containing BLOCK_SIZE structures
 #define BLOCK_SIZE 1024
 
@@ -17,6 +18,8 @@ MPIIORestartCoProcessor::MPIIORestartCoProcessor(Grid3DPtr grid, UbSchedulerPtr
                                          path(path),
                                          comm(comm)
 {
+   UbSystem::makeDirectory(path + "/mpi_io_cp");
+
    memset(&blockParamStr, 0, sizeof(blockParamStr));
 
    //-------------------------   define MPI types  ---------------------------------
@@ -107,19 +110,26 @@ void MPIIORestartCoProcessor::process(double step)
 {
 	if(scheduler->isDue(step))
 	{
-		writeBlocks();
-		writeDataSet();
-		writeBoundaryConds();
+      if(comm->isRoot()) UBLOG(logINFO,"MPIIORestartCoProcessor save step: " << step);
+      if(comm->isRoot()) UBLOG(logINFO,"Save check point - start");
+		writeBlocks((int)step);
+		writeDataSet((int)step);
+		writeBoundaryConds((int)step);
+      if(comm->isRoot()) UBLOG(logINFO,"Save check point - end");
 	}
 }
 //////////////////////////////////////////////////////////////////////////
-void MPIIORestartCoProcessor::writeBlocks()
+void MPIIORestartCoProcessor::writeBlocks(int step)
 {
 	int rank, size;
 	MPI_Comm_rank(MPI_COMM_WORLD, &rank);
 	MPI_Comm_size(MPI_COMM_WORLD, &size);
 	MPI_File file_handler;
-	int error = MPI_File_open(MPI_COMM_WORLD, "outputBlocks.bin", MPI_MODE_CREATE | MPI_MODE_RDWR , MPI_INFO_NULL, &file_handler);
+
+   UbSystem::makeDirectory(path + "/mpi_io_cp/mpi_io_cp_" + UbSystem::toString(step));
+   std::string filename = path + "/mpi_io_cp/mpi_io_cp_" + UbSystem::toString(step) + "/cpBlocks.bin";
+	int error = MPI_File_open(MPI_COMM_WORLD, filename.c_str(), MPI_MODE_CREATE | MPI_MODE_RDWR , MPI_INFO_NULL, &file_handler);
+   if(error) throw UbException(UB_EXARGS,"couldn't open file "+filename);
 
    int blocksCount = 0; // quantity of blocks in the grid, max 2147483648 blocks!
    int minInitLevel = this->grid->getCoarsestInitializedLevel();
@@ -382,7 +392,7 @@ void MPIIORestartCoProcessor::writeBlocks()
    delete gridParameters;
 }
 
-void MPIIORestartCoProcessor::writeDataSet()
+void MPIIORestartCoProcessor::writeDataSet(int step)
 {
 	int rank, size;
 	MPI_Comm_rank(MPI_COMM_WORLD, &rank);
@@ -476,7 +486,9 @@ void MPIIORestartCoProcessor::writeDataSet()
 	}
 
 	MPI_File file_handler;
-	int error = MPI_File_open(MPI_COMM_WORLD, "outputDataSet.bin", MPI_MODE_CREATE | MPI_MODE_RDWR , MPI_INFO_NULL, &file_handler);
+   std::string filename = path + "/mpi_io_cp/mpi_io_cp_" + UbSystem::toString(step) + "/cpDataSet.bin";
+	int error = MPI_File_open(MPI_COMM_WORLD, filename.c_str(), MPI_MODE_CREATE | MPI_MODE_RDWR , MPI_INFO_NULL, &file_handler);
+   if(error) throw UbException(UB_EXARGS,"couldn't open file "+filename);
 
-   // each process writes the quantity of it's blocks
+   // each process writes the quantity of its blocks
    MPI_File_write_at(file_handler, rank * sizeof(int), &blocksCount, 1, MPI_INT, MPI_STATUS_IGNORE);
@@ -490,7 +502,7 @@ void MPIIORestartCoProcessor::writeDataSet()
 	delete[] dataSetArray;
 }
 
-void MPIIORestartCoProcessor::writeBoundaryConds()
+void MPIIORestartCoProcessor::writeBoundaryConds(int step)
 {
 	int rank, size;
 	MPI_Comm_rank(MPI_COMM_WORLD, &rank);
@@ -611,7 +623,9 @@ void MPIIORestartCoProcessor::writeBoundaryConds()
 	}
 
 	MPI_File file_handler;
-	int error = MPI_File_open(MPI_COMM_WORLD, "outputBoundCond.bin", MPI_MODE_CREATE | MPI_MODE_RDWR , MPI_INFO_NULL, &file_handler);
+   std::string filename = path + "/mpi_io_cp/mpi_io_cp_" + UbSystem::toString(step) + "/cpBC.bin";
+	int error = MPI_File_open(MPI_COMM_WORLD, filename.c_str(), MPI_MODE_CREATE | MPI_MODE_RDWR , MPI_INFO_NULL, &file_handler);
+   if(error) throw UbException(UB_EXARGS,"couldn't open file "+filename);
 
-   // each process writes the quantity of it's blocks
+   // each process writes the quantity of its blocks
    MPI_File_write_at(file_handler, rank * sizeof(int), &blocksCount, 1, MPI_INT, MPI_STATUS_IGNORE);	//	blocks quantity
@@ -635,23 +649,27 @@ void MPIIORestartCoProcessor::writeBoundaryConds()
 }
 
 //------------------------------------------- READ -----------------------------------------------
-void MPIIORestartCoProcessor::restart()
+void MPIIORestartCoProcessor::restart(int step)
 {
-	readBlocks();
-	readDataSet();
-	readBoundaryConds();
-
+   if(comm->isRoot()) UBLOG(logINFO,"MPIIORestartCoProcessor restart step: " << step);
+   if(comm->isRoot()) UBLOG(logINFO,"Load check point - start");
+	readBlocks(step);
+	readDataSet(step);
+	readBoundaryConds(step);
+   if(comm->isRoot()) UBLOG(logINFO,"Load check point - end");
 	this->reconnect(grid);
 }
 
-void MPIIORestartCoProcessor::readBlocks()
+void MPIIORestartCoProcessor::readBlocks(int step)
 {
 	int rank, size;
 	MPI_Comm_rank(MPI_COMM_WORLD, &rank);
 	MPI_Comm_size(MPI_COMM_WORLD, &size);
 
 	MPI_File file_handler;
-	int error = MPI_File_open(MPI_COMM_WORLD, "outputBlocks.bin", MPI_MODE_CREATE | MPI_MODE_RDWR , MPI_INFO_NULL, &file_handler);
+   std::string filename = path + "/mpi_io_cp/mpi_io_cp_" + UbSystem::toString(step) + "/cpBlocks.bin";
+	int error = MPI_File_open(MPI_COMM_WORLD, filename.c_str(), MPI_MODE_CREATE | MPI_MODE_RDWR , MPI_INFO_NULL, &file_handler);
+   if(error) throw UbException(UB_EXARGS,"couldn't open file "+filename);
 
    // read count of blocks
    int blocksCount = 0;
@@ -793,14 +811,16 @@ void MPIIORestartCoProcessor::readBlocks()
 	delete [] block3dArray;
 }
 
-void MPIIORestartCoProcessor::readDataSet()
+void MPIIORestartCoProcessor::readDataSet(int step)
 {
 	int rank, size;
 	MPI_Comm_rank(MPI_COMM_WORLD, &rank);
 	MPI_Comm_size(MPI_COMM_WORLD, &size);
 
 	MPI_File file_handler;
-	int error = MPI_File_open(MPI_COMM_WORLD, "outputDataSet.bin", MPI_MODE_CREATE | MPI_MODE_RDWR , MPI_INFO_NULL, &file_handler);
+   std::string filename = path + "/mpi_io_cp/mpi_io_cp_" + UbSystem::toString(step) + "/cpDataSet.bin";
+	int error = MPI_File_open(MPI_COMM_WORLD, filename.c_str(), MPI_MODE_CREATE | MPI_MODE_RDWR , MPI_INFO_NULL, &file_handler);
+   if(error) throw UbException(UB_EXARGS,"couldn't open file "+filename);
 
    // read count of blocks
    int blocksCount = 0;
@@ -909,14 +929,16 @@ void MPIIORestartCoProcessor::readDataSet()
 	delete [] dataSetArray;
 }
 
-void MPIIORestartCoProcessor::readBoundaryConds()
+void MPIIORestartCoProcessor::readBoundaryConds(int step)
 {
 	int rank, size;
 	MPI_Comm_rank(MPI_COMM_WORLD, &rank);
 	MPI_Comm_size(MPI_COMM_WORLD, &size);
 
 	MPI_File file_handler;
-	int error = MPI_File_open(MPI_COMM_WORLD, "outputBoundCond.bin", MPI_MODE_CREATE | MPI_MODE_RDWR , MPI_INFO_NULL, &file_handler);
+   std::string filename = path + "/mpi_io_cp/mpi_io_cp_" + UbSystem::toString(step) + "/cpBC.bin";
+   int error = MPI_File_open(MPI_COMM_WORLD, filename.c_str(), MPI_MODE_CREATE | MPI_MODE_RDWR , MPI_INFO_NULL, &file_handler);
+   if(error) throw UbException(UB_EXARGS,"couldn't open file "+filename);
 
 	int blocksCount = 0;
    int dataCount1000 = 0;
diff --git a/source/VirtualFluidsCore/CoProcessors/MPIIORestartCoProcessor.h b/source/VirtualFluidsCore/CoProcessors/MPIIORestartCoProcessor.h
index a626c364205e04c46ebb7cb31c02eeccae074786..9609d76afd5ae82feb0575427b26547cfc7b73ea 100644
--- a/source/VirtualFluidsCore/CoProcessors/MPIIORestartCoProcessor.h
+++ b/source/VirtualFluidsCore/CoProcessors/MPIIORestartCoProcessor.h
@@ -137,19 +137,19 @@ public:
    //! Each timestep writes the grid into the files
    void process(double step);
    //! Reads the grid from the files before grid reconstruction
-   void restart();
+   void restart(int step);
-   //! Writes the blocks of the grid into the file outputBlocks.bin
-   void writeBlocks();
+   //! Writes the blocks of the grid into the file cpBlocks.bin of the checkpoint for the given step
+   void writeBlocks(int step);
-   //! Writes the datasets of the blocks into the file outputDataSet.bin
-   void writeDataSet();
+   //! Writes the datasets of the blocks into the file cpDataSet.bin of the checkpoint for the given step
+   void writeDataSet(int step);
-   //! Writes the boundary conditions of the blocks into the file outputBoundCond.bin
-   void writeBoundaryConds();
+   //! Writes the boundary conditions of the blocks into the file cpBC.bin of the checkpoint for the given step
+   void writeBoundaryConds(int step);
-   //! Reads the blocks of the grid from the file outputBlocks.bin
-   void readBlocks();
+   //! Reads the blocks of the grid from the file cpBlocks.bin of the checkpoint for the given step
+   void readBlocks(int step);
-   //! Reads the datasets of the blocks from the file outputDataSet.bin
-   void readDataSet();
+   //! Reads the datasets of the blocks from the file cpDataSet.bin of the checkpoint for the given step
+   void readDataSet(int step);
-   //! Reads the boundary conditions of the blocks from the file outputBoundCond.bin
-   void readBoundaryConds();
+   //! Reads the boundary conditions of the blocks from the file cpBC.bin of the checkpoint for the given step
+   void readBoundaryConds(int step);
 
 protected:
    std::string path;