安装对应版本的cmake
我安装的是cmake 3.16.3。安装步骤可以参考网上的cmake源码编译安装教程(原文引用的博客园链接已失效/被截断)。
cmake安装完之后,可以用
ctest --version
测试一下是否成功安装了ctest,这个之后的编译环节需要。在clion中配置相应的cmake路径,此处应该有很多教程,不再赘述。
安装HDF5相关的文件
我下载的是HDF5 1.10.8版本,可从HDF Group官网下载(完整下载地址见下方的wget命令)。
因为我们是在服务器上使用,所以下载CMake-hdf5-1.10.8.tar.gz即可。
下载压缩包
wget https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-1.10/hdf5-1.10.8/src/CMake-hdf5-1.10.8.tar.gz
解压缩这个文件
tar -zxvf CMake-hdf5-1.10.8.tar.gz
cd CMake-hdf5-1.10.8
运行编译脚本
sh build-unix.sh
然后会发现当前目录下会多了一个
HDF5-1.10.8-Linux.tar.gz
文件,对它进行解压缩:tar -zxvf HDF5-1.10.8-Linux.tar.gz
不断进入当前文件夹的子文件夹中,即
(.....)/CMake-hdf5-1.10.8/HDF5-1.10.8-Linux/HDF_Group/HDF5/1.10.8
。此时可以看到有bin、include、lib、share四个文件夹。其中share文件夹中的cmake文件夹正是我们需要的。在clion中配置一个环境变量(Settings -> Cmake -> Environment -> +),
HDF5_DIR
=>(.....)/CMake-hdf5-1.10.8/HDF5-1.10.8-Linux/HDF_Group/HDF5/1.10.8/share/cmake
。用官网给出的文件测试环境
此时环境应该就算配置完成,可以使用官网给出的文件进行测试,
如下是CMakeLists.txt文件
cmake_minimum_required (VERSION 3.10.2) project (myproj C CXX Fortran) set (FIND_HDF_COMPONENTS C CXX shared) find_package (HDF5 NAMES "hdf5" COMPONENTS ${FIND_HDF_COMPONENTS}) if (HDF5_FOUND) if (HDF5_shared_C_FOUND) set (LINK_LIBS ${LINK_LIBS} ${HDF5_C_SHARED_LIBRARY}) endif () if (HDF5_shared_CXX_FOUND) set (LINK_LIBS ${LINK_LIBS} ${HDF5_CXX_SHARED_LIBRARY}) endif () else () message (FATAL_ERROR " HDF5 is Required") endif () INCLUDE_DIRECTORIES (${HDF5_INCLUDE_DIR}) # Add your application HERE add_executable (runapp h5_write.c) target_link_libraries (runapp PRIVATE ${LINK_LIBS})
如下是h5_write.c文件(HDF5官方示例)
/* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
 * Copyright by The HDF Group.                                               *
 * Copyright by the Board of Trustees of the University of Illinois.         *
 * All rights reserved.                                                      *
 *                                                                           *
 * This file is part of HDF5.  The full HDF5 copyright notice, including     *
 * terms governing use, modification, and redistribution, is contained in    *
 * the COPYING file, which can be found at the root of the source code       *
 * distribution tree, or in https://support.hdfgroup.org/ftp/HDF5/releases.  *
 * If you do not have access to either file, you may request a copy from     *
 * help@hdfgroup.org.                                                        *
 * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * */

/*
 * This example writes data to the HDF5 file.
 * Data conversion is performed during write operation.
 */

#include "hdf5.h"

#define H5FILE_NAME "SDS.h5"
#define DATASETNAME "IntArray"
#define NX 5 /* dataset dimensions */
#define NY 6
#define RANK 2

int main(void)
{
    hid_t   file, dataset;       /* file and dataset handles */
    hid_t   datatype, dataspace; /* handles */
    hsize_t dimsf[2];            /* dataset dimensions */
    herr_t  status;
    int     data[NX][NY];        /* data to write */
    int     i, j;

    /*
     * Data and output buffer initialization: data[j][i] = i + j, i.e.
     *   0 1 2 3 4 5
     *   1 2 3 4 5 6
     *   2 3 4 5 6 7
     *   3 4 5 6 7 8
     *   4 5 6 7 8 9
     */
    for (j = 0; j < NX; j++)
        for (i = 0; i < NY; i++)
            data[j][i] = i + j;

    /*
     * Create a new file using H5F_ACC_TRUNC access,
     * default file creation properties, and default file
     * access properties.
     */
    file = H5Fcreate(H5FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);

    /*
     * Describe the size of the array and create the data space for fixed
     * size dataset.
     */
    dimsf[0]  = NX;
    dimsf[1]  = NY;
    dataspace = H5Screate_simple(RANK, dimsf, NULL);

    /*
     * Define datatype for the data in the file.
     * We will store little endian INT numbers.
     */
    datatype = H5Tcopy(H5T_NATIVE_INT);
    status   = H5Tset_order(datatype, H5T_ORDER_LE);

    /*
     * Create a new dataset within the file using defined dataspace and
     * datatype and default dataset creation properties.
     */
    dataset = H5Dcreate2(file, DATASETNAME, datatype, dataspace,
                         H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

    /*
     * Write the data to the dataset using default transfer properties.
     */
    status = H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL,
                      H5P_DEFAULT, data);

    /*
     * Close/release resources.
     */
    H5Sclose(dataspace);
    H5Tclose(datatype);
    H5Dclose(dataset);
    H5Fclose(file);

    return 0;
}
编译并运行该工程即可(生成的可执行文件为runapp,运行后会在工作目录下生成SDS.h5)。
**粗体** _斜体_ [链接](http://example.com) `代码` - 列表 > 引用
。你还可以使用@
来通知其他用户。