I am inserting 1000 CT DICOM datasets into a std::map. Each CT file is about 48 KB. Initially the application's memory footprint is about 3 MB; after loading the 1000 CT files it grows to around 50 MB, and at the start of the next iteration it is back at about 3 MB.
However, after roughly the 80th pass through the while loop the memory is no longer released: it stays at a minimum of 33+ MB (from the 80th iteration onwards), even while the process is idle.
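If it is useful, a small Windows-only helper along the lines of the sketch below (not part of the real code, just an illustration, assuming the process runs on Windows and links Psapi.lib) could log the process working set from inside the program, which is one way to track these figures per iteration.
Code:
#include <iostream>

#include <windows.h>
#include <psapi.h>   // link with Psapi.lib

// Sketch only (not taken from the original program): prints the current
// working set of this process in megabytes.
void log_memory_usage()
{
    PROCESS_MEMORY_COUNTERS pmc = {};
    if (GetProcessMemoryInfo(GetCurrentProcess(), &pmc,
                             static_cast<DWORD>(sizeof(pmc))))
    {
        std::cout << "Working set: "
                  << pmc.WorkingSetSize / (1024 * 1024) << " MB" << std::endl;
    }
}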
Here is the sample code.
Code:
#include <algorithm>
#include <iostream>
#include <iterator>
#include <map>
#include <memory>
#include <mutex>
#include <string>
#include <vector>

#include <boost/chrono.hpp>
#include <boost/filesystem.hpp>
#include <boost/thread.hpp>

#include <dcmtk/dcmdata/dcdatset.h>
#include <dcmtk/dcmdata/dcdeftag.h>
#include <dcmtk/dcmdata/dcfilefo.h>

namespace fs = boost::filesystem;

typedef std::shared_ptr<DcmDataset> DcmDatasetPtr_;
typedef std::map<std::string, DcmDatasetPtr_> image_data_map_;

std::mutex c_mutex;

// Loads one DICOM file, deep-copies its dataset into a shared_ptr and stores it
// in the map, keyed by the SOP Instance UID.
void prepare_image_(const std::string& fname, image_data_map_& img_data_map)
{
    DcmFileFormat file;
    OFCondition status = file.loadFile(fname.c_str());
    if (!status.good())
        return;

    DcmDataset* dataset = file.getDataset();

    OFString sop_instance_uid;
    dataset->findAndGetOFString(DCM_SOPInstanceUID, sop_instance_uid);

    // Deep copy of the dataset, owned by a shared_ptr.
    DcmDatasetPtr_ data_ptr = std::make_shared<DcmDataset>(*dataset);
    {
        std::lock_guard<std::mutex> lock(c_mutex);
        img_data_map.insert(std::make_pair(sop_instance_uid.c_str(), data_ptr));
    }

    // Attempts to release memory early (did not change the observed behaviour).
    dataset->clear();
    data_ptr.reset();
}

void preparelist(const std::vector<fs::path>& pathList, image_data_map_& img_map)
{
    for (const auto& filePath : pathList)
    {
        std::string f_path = filePath.string();
        prepare_image_(f_path, img_map);
    }
}

int main(int argc, char* argv[])
{
    std::string dir_path = "E:/1000_CT";

    std::vector<fs::path> pathList;
    std::copy(fs::recursive_directory_iterator(dir_path),
              fs::recursive_directory_iterator(),
              std::back_inserter(pathList));

    int i = 1;
    while (true)
    {
        {
            // The map (and every dataset it owns) is destroyed at the end of this scope.
            image_data_map_ img_map;
            preparelist(pathList, img_map);
        }

        std::cout << "Images are Prepared : " << i << std::endl;
        i++;

        boost::this_thread::sleep_for(boost::chrono::seconds(3));
    }
}
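For comparison, the stripped-down sketch below follows the same pattern without DCMTK: it fills a map with 1000 shared_ptr-owned buffers inside a scope, destroys the map, and sleeps. The 48 KB buffer size and the count of 1000 are only placeholders I chose to mimic the real data; this snippet is not part of the application, it is just meant to show whether the plateau also appears when DCMTK is taken out of the picture.
Code:
#include <chrono>
#include <iostream>
#include <map>
#include <memory>
#include <string>
#include <thread>
#include <vector>

int main()
{
    int i = 1;
    while (true)
    {
        {
            // Same shape as image_data_map_: key -> shared_ptr to a ~48 KB payload.
            // 1000 entries of 48 KB are placeholders mimicking the CT datasets.
            std::map<std::string, std::shared_ptr<std::vector<char>>> img_map;
            for (int n = 0; n < 1000; ++n)
            {
                img_map.emplace(std::to_string(n),
                                std::make_shared<std::vector<char>>(48 * 1024));
            }
        } // img_map and every payload it owns are destroyed here.

        std::cout << "Iteration " << i++ << " done" << std::endl;
        std::this_thread::sleep_for(std::chrono::seconds(3));
    }
}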
Thanks.