The C++ H5Tcopy code examples collected in this tutorial are practical, real-world snippets; we hope they help you.
This article collects typical usage examples of the H5Tcopy function in C++. If you have been wondering exactly how to call H5Tcopy, how it is used in practice, or what working examples look like, the hand-picked code samples below should help. A total of 28 H5Tcopy code examples are listed, sorted by popularity by default.

Example 1: create_nbit_dsets_float

/*-------------------------------------------------------------------------
 * Function:    create_nbit_dsets_float
 *
 * Purpose:     Create a dataset of FLOAT datatype with nbit filter
 *
 * Return:      Success:        0
 *              Failure:        -1
 *
 * Programmer:  Raymond Lu
 *              29 March 2011
 *
 * Modifications:
 *
 *-------------------------------------------------------------------------
 */
int
create_nbit_dsets_float(hid_t fid, hid_t fsid, hid_t msid)
{
    hid_t       dataset = -1;         /* dataset handles */
    hid_t       datatype = -1;
    hid_t       dcpl = -1;
    size_t      precision, offset;
    float       data[NX][NY];         /* data to write */
    float       fillvalue = -2.2f;
    hsize_t     chunk[RANK] = {CHUNK0, CHUNK1};
    int         i, j;

    /*
     * Data and output buffer initialization.
     */
    for (j = 0; j < NX; j++) {
        for (i = 0; i < NY; i++)
            data[j][i] = ((float)(i + j + 1))/3;
    }

    /*
     * Create the dataset creation property list, add the Scale-Offset
     * filter, set the chunk size, and set the fill value.
     */
    if((dcpl = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        TEST_ERROR
    if(H5Pset_nbit(dcpl) < 0)
        TEST_ERROR
    if(H5Pset_chunk(dcpl, RANK, chunk) < 0)
        TEST_ERROR
    if(H5Pset_fill_value(dcpl, H5T_NATIVE_FLOAT, &fillvalue) < 0)
        TEST_ERROR

    /* Define user-defined single-precision floating-point type for dataset.
     * A 20-bit little-endian data type. */
    if((datatype = H5Tcopy(H5T_IEEE_F32LE)) < 0)
        TEST_ERROR
    if(H5Tset_fields(datatype, (size_t)26, (size_t)20, (size_t)6, (size_t)7, (size_t)13) < 0)
        TEST_ERROR
    offset = 7;
    if(H5Tset_offset(datatype, offset) < 0)
        TEST_ERROR
    precision = 20;
    if(H5Tset_precision(datatype, precision) < 0)
        TEST_ERROR
    if(H5Tset_size(datatype, (size_t)4) < 0)
        TEST_ERROR
    if(H5Tset_ebias(datatype, (size_t)31) < 0)
        TEST_ERROR

    /*
     * Create a new dataset within the file using defined dataspace,
     * user-defined datatype, and default dataset creation properties.
     */
    if((dataset = H5Dcreate2(fid, DATASETNAME22, datatype, fsid,
                             H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
        TEST_ERROR

    /*
     * Write the data to the dataset using default transfer properties.
     */
    if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
        TEST_ERROR

    /* Close dataset */
    if(H5Dclose(dataset) < 0)
        TEST_ERROR

    /* Now create a dataset with a big-endian type */
    if(H5Tset_order(datatype, H5T_ORDER_BE) < 0)
        TEST_ERROR
    if((dataset = H5Dcreate2(fid, DATASETNAME23, datatype, fsid,
                             H5P_DEFAULT, dcpl, H5P_DEFAULT)) < 0)
        TEST_ERROR
    if(H5Dwrite(dataset, H5T_NATIVE_FLOAT, msid, fsid, H5P_DEFAULT, data) < 0)
        TEST_ERROR
    if(H5Dclose(dataset) < 0)
        TEST_ERROR

    /*
     * Close/release resources.
     */
    if(H5Pclose(dcpl) < 0)
        TEST_ERROR
// ... (part of the code is omitted here in the original) ...
Developer: Starlink, Project: hdf5, Lines: 101
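All of the examples on this page revolve around the same basic pattern: H5Tcopy returns a new, modifiable copy of a (usually built-in) datatype, which must eventually be released with H5Tclose. The minimal, self-contained sketch below is not taken from any of the projects listed here; the fixed string size of 16 bytes is purely illustrative.

#include "hdf5.h"

int main(void)
{
    /* Copy a built-in type so it can be modified without touching the original. */
    hid_t str_type = H5Tcopy(H5T_C_S1);
    if (str_type < 0)
        return 1;

    /* The copy is mutable: give this string type a fixed size of 16 bytes. */
    if (H5Tset_size(str_type, 16) < 0)
        return 1;

    /* ... use str_type when creating datasets, attributes, or table fields ... */

    /* Copies returned by H5Tcopy must be released explicitly. */
    H5Tclose(str_type);
    return 0;
}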
Example 2: main

int main(int argc, char *argv[])
{
    (void)argc;
    (void)argv;

    typedef struct rt {
        int channels;
        char date[DATELEN];
        char time[TIMELEN];
    } rt;

    // H5Fis_hdf5("/dev/null");

    /*
     * Create a new file using H5ACC_TRUNC access,
     * default file creation properties, and default file
     * access properties.
     * Then close the file.
     */
    const int NRECORDS = 1;
    const int NFIELDS = 3;
    char fName[] = "tmp.h5";

    /* Calculate the size and the offsets of our struct members in memory */
    size_t rt_offset[NFIELDS] = { HOFFSET( rt, channels ),
                                  HOFFSET( rt, date ),
                                  HOFFSET( rt, time )};

    rt p_data;
    p_data.channels = 1;
    strcpy( p_data.date, "1234-Dec-31");
    strcpy( p_data.time, "12:34:56");

    hid_t file_id = H5Fcreate(fName, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);

    /* Define field information */
    const char *field_names[NFIELDS] = { "channels", "date", "time" };
    hid_t field_type[NFIELDS];

    /* Initialize the field field_type */
    hid_t string_type1 = H5Tcopy( H5T_C_S1 );
    hid_t string_type2 = H5Tcopy( H5T_C_S1 );
    H5Tset_size( string_type1, strlen(p_data.date));
    H5Tset_size( string_type2, strlen(p_data.time));
    field_type[0] = H5T_NATIVE_INT;
    field_type[1] = string_type1;
    field_type[2] = string_type2;

    std::ostringstream desc;
    desc << "Description of " << fName;

    herr_t status = H5TBmake_table( desc.str().c_str(), file_id, "description",
                                    (hsize_t)NFIELDS, (hsize_t)NRECORDS, sizeof(rt),
                                    field_names, rt_offset, field_type,
                                    10, NULL, 0, &p_data );

    if (status < 0) {
        perror("Exception while writing description in stfio::exportHDF5File");
        H5Fclose(file_id);
        H5close();
        exit(-1);
    }

    H5Fclose(file_id);

    return(0);
}
Developer: 3togo, Project: mxe, Lines: 68
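Note that, as written, Example 2 never closes string_type1 and string_type2. Every successful H5Tcopy call returns a fresh datatype identifier, and leaving such identifiers open keeps file resources alive until H5close or process exit. A minimal cleanup addition for that example, placed before the final H5Fclose, would be:

    /* Release the datatype copies created with H5Tcopy in Example 2. */
    H5Tclose(string_type1);
    H5Tclose(string_type2);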
Example 3: H5PTcreate_fl

hid_t H5PTcreate_fl ( hid_t loc_id,
                      const char *dset_name,
                      hid_t dtype_id,
                      hsize_t chunk_size,
                      int compression )
{
    htbl_t * table = NULL;
    hid_t dset_id = H5I_BADID;
    hid_t space_id = H5I_BADID;
    hid_t plist_id = H5I_BADID;
    hsize_t dims[1];
    hsize_t dims_chunk[1];
    hsize_t maxdims[1];
    hid_t ret_value;

    /* Register the packet table ID type if this is the first table created */
    if(H5PT_ptable_id_type < 0)
        if((H5PT_ptable_id_type = H5Iregister_type((size_t)H5PT_HASH_TABLE_SIZE, 0, (H5I_free_t)free)) < 0)
            goto out;

    /* Get memory for the table identifier */
    table = (htbl_t *)malloc(sizeof(htbl_t));

    /* Create a simple data space with unlimited size */
    dims[0] = 0;
    dims_chunk[0] = chunk_size;
    maxdims[0] = H5S_UNLIMITED;
    if((space_id = H5Screate_simple(1, dims, maxdims)) < 0)
        goto out;

    /* Modify dataset creation properties to enable chunking */
    plist_id = H5Pcreate(H5P_DATASET_CREATE);
    if(H5Pset_chunk(plist_id, 1, dims_chunk) < 0)
        goto out;
    if(compression >= 0 && compression <= 9)
        if(H5Pset_deflate(plist_id, (unsigned)compression) < 0)
            goto out;

    /* Create the dataset. */
    if((dset_id = H5Dcreate2(loc_id, dset_name, dtype_id, space_id, H5P_DEFAULT, plist_id, H5P_DEFAULT)) < 0)
        goto out;

    /* Terminate access to the data space. */
    if(H5Sclose(space_id) < 0)
        goto out;

    /* End access to the property list */
    if(H5Pclose(plist_id) < 0)
        goto out;

    /* Create the table identifier */
    table->dset_id = dset_id;
    if((table->type_id = H5Tcopy(dtype_id)) < 0)
        goto out;
    H5PT_create_index(table);
    table->size = 0;

    /* Get an ID for this table */
    ret_value = H5Iregister(H5PT_ptable_id_type, table);
    if(ret_value != H5I_INVALID_HID)
        H5PT_ptable_count++;
    else
        H5PT_close(table);

    return ret_value;

out:
    H5E_BEGIN_TRY
    H5Sclose(space_id);
    H5Pclose(plist_id);
    H5Dclose(dset_id);
    if(table)
        free(table);
    H5E_END_TRY
    return H5I_INVALID_HID;
}
Developer: BlackGinger, Project: ExocortexCrate, Lines: 79
Example 4: gen_newgrat_file

/*
 * Generate HDF5 file with latest format with
 * NUM_GRPS groups and NUM_ATTRS attributes for the dataset
 *
 */
static void gen_newgrat_file(const char *fname)
{
    hid_t fapl;          /* File access property */
    hid_t fid;           /* File id */
    hid_t gid;           /* Group id */
    hid_t tid;           /* Datatype id */
    hid_t sid;           /* Dataspace id */
    hid_t attr_id;       /* Attribute id */
    hid_t did;           /* Dataset id */
    char name[30];       /* Group name */
    char attrname[30];   /* Attribute name */
    int i;               /* Local index variable */

    /* Get a copy file access property list */
    if((fapl = H5Pcreate(H5P_FILE_ACCESS)) < 0)
        goto error;

    /* Set to use latest library format */
    if(H5Pset_libver_bounds(fapl, H5F_LIBVER_LATEST, H5F_LIBVER_LATEST) < 0)
        goto error;

    /* Create dataset */
    if((fid = H5Fcreate(NEWGRAT_FILE, H5F_ACC_TRUNC, H5P_DEFAULT, fapl)) < 0)
        goto error;

    /* Create NUM_GRPS groups in the root group */
    for(i = 1; i <= NUM_GRPS; i++) {
        sprintf(name, "%s%d", GROUP_NAME, i);
        if((gid = H5Gcreate2(fid, name, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
            goto error;
        if(H5Gclose(gid) < 0)
            goto error;
    } /* end for */

    /* Create a datatype to commit and use */
    if((tid = H5Tcopy(H5T_NATIVE_INT)) < 0)
        goto error;

    /* Create dataspace for dataset */
    if((sid = H5Screate(H5S_SCALAR)) < 0)
        goto error;

    /* Create dataset */
    if((did = H5Dcreate2(fid, DATASET_NAME, tid, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        goto error;

    /* Create NUM_ATTRS for the dataset */
    for(i = 1; i <= NUM_ATTRS; i++) {
        sprintf(attrname, "%s%d", ATTR_NAME, i);
        if((attr_id = H5Acreate2(did, attrname, tid, sid, H5P_DEFAULT, H5P_DEFAULT)) < 0)
            goto error;
        if(H5Aclose(attr_id) < 0)
            goto error;
    } /* end for */

    /* Close dataset, dataspace, datatype, file */
    if(H5Dclose(did) < 0)
        goto error;
    if(H5Sclose(sid) < 0)
        goto error;
    if(H5Tclose(tid) < 0)
        goto error;
    if(H5Fclose(fid) < 0)
        goto error;

error:
    H5E_BEGIN_TRY {
        H5Aclose(attr_id);
        H5Dclose(did);
        H5Tclose(tid);
        H5Sclose(sid);
        H5Gclose(gid);
        H5Fclose(fid);
    } H5E_END_TRY;
} /* gen_newgrat_file() */
Developer: AndyHuang7601, Project: EpicGames-UnrealEngine, Lines: 82
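Example 4's comment speaks of a datatype "to commit and use", but the copy is only used transiently and never stored in the file. If the copied type actually had to be saved as a named (committed) datatype, a minimal sketch would look like the following; the link path "/persistent_int" is illustrative and not part of the original example.

    hid_t tid = H5Tcopy(H5T_NATIVE_INT);
    /* Commit the copy to the file so other objects can reference it by name.
     * "/persistent_int" is a hypothetical path, not taken from Example 4. */
    if (H5Tcommit2(fid, "/persistent_int", tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT) < 0)
        goto error;
    if (H5Tclose(tid) < 0)
        goto error;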
Example 5: test_multi

// ... (part of the code is omitted here in the original) ...

    /* Before any data is written, the raw data file is empty.  So
     * the file size is only the size of b-tree + HADDR_MAX/4.
     */
    if(file_size < HADDR_MAX/4 || file_size > HADDR_MAX/2)
        TEST_ERROR;

    if((dset=H5Dcreate2(file, dname, H5T_NATIVE_INT, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        TEST_ERROR;

    for(i=0; i<MULTI_SIZE; i++)
        for(j=0; j<MULTI_SIZE; j++)
            buf[i][j] = i*10000+j;
    if(H5Dwrite(dset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf) < 0)
        TEST_ERROR;

    if((fapl2=H5Pcreate(H5P_FILE_ACCESS)) < 0)
        TEST_ERROR;
    if(H5Pset_multi_type(fapl2, H5FD_MEM_SUPER) < 0)
        TEST_ERROR;
    if(H5Fget_vfd_handle(file, fapl2, (void **)&fhandle) < 0)
        TEST_ERROR;
    if(*fhandle<0)
        TEST_ERROR;

    if(H5Pset_multi_type(fapl2, H5FD_MEM_DRAW) < 0)
        TEST_ERROR;
    if(H5Fget_vfd_handle(file, fapl2, (void **)&fhandle2) < 0)
        TEST_ERROR;
    if(*fhandle2<0)
        TEST_ERROR;

    /* Check file size API */
    if(H5Fget_filesize(file, &file_size) < 0)
        TEST_ERROR;

    /* After the data is written, the file size is huge because the
     * beginning of raw data file is set at HADDR_MAX/2.  It's supposed
     * to be (HADDR_MAX/2 + 128*128*4)
     */
    if(file_size < HADDR_MAX/2 || file_size > HADDR_MAX)
        TEST_ERROR;

    if(H5Sclose(space) < 0)
        TEST_ERROR;
    if(H5Dclose(dset) < 0)
        TEST_ERROR;
    if(H5Pclose(fapl2) < 0)
        TEST_ERROR;

    /* Create and write attribute for the root group. */
    if((root = H5Gopen2(file, "/", H5P_DEFAULT)) < 0)
        FAIL_STACK_ERROR

    /* Attribute string. */
    if((atype = H5Tcopy(H5T_C_S1)) < 0)
        TEST_ERROR;
    if(H5Tset_size(atype, strlen(meta) + 1) < 0)
        TEST_ERROR;
    if(H5Tset_strpad(atype, H5T_STR_NULLTERM) < 0)
        TEST_ERROR;

    /* Create and write attribute */
    if((aspace = H5Screate_simple(1, adims, NULL)) < 0)
        TEST_ERROR;
    if((attr = H5Acreate2(root, "Metadata", atype, aspace, H5P_DEFAULT, H5P_DEFAULT)) < 0)
        TEST_ERROR;
    if(H5Awrite(attr, atype, meta) < 0)
        TEST_ERROR;

    /* Close IDs */
    if(H5Tclose(atype) < 0)
        TEST_ERROR;
    if(H5Sclose(aspace) < 0)
        TEST_ERROR;
    if(H5Aclose(attr) < 0)
        TEST_ERROR;

    if(H5Fclose(file) < 0)
        TEST_ERROR;

    h5_cleanup(FILENAME, fapl);
    PASSED();

    return 0;

error:
    H5E_BEGIN_TRY {
        H5Sclose(space);
        H5Dclose(dset);
        H5Pclose(fapl);
        H5Pclose(fapl2);
        H5Fclose(file);
    } H5E_END_TRY;

    return -1;
}
Developer: chaako, Project: sceptic3D, Lines: 101
Example 6: add_attrs

/* adds different types of attributes to an object.
   returns the number of attributes added to the objects.
 */
int add_attrs(hid_t oid, int idx)
{
    char name[32];
    int i0, i1, i2, j, nattrs=0;
    hid_t aid, tid, tid1, sid;
    hvl_t i_vlen[4];
    hobj_ref_t ref;
    zipcode_t cmp_data[4];
    unsigned int i = 0xffffffff;
    long long l = -2147483647;
    float f = 123456789.987654321;
    double d = 987654321.123456789;
    char *s[7] = {"Parting", "is such", "sweeter", "sorrow."};
    float f_array[4] = {1.0, 2.22, 3.333, 4.444};
    char *s_vlen[4] = {"Parting", "is such", "sweet", "sorrow."};
    hsize_t dims1[1]={1}, dims2[1]={4}, dims3[2]={3,5};
    int int3d[4][3][5];
    size_t offset = 0;

    for (i0=0; i0<4; i0++) {
        i_vlen[i0].len = (i0+1);
        i_vlen[i0].p = (int *)calloc(i_vlen[i0].len, sizeof(int));
        for (j=0; j<i_vlen[i0].len; j++)
            ((int *)i_vlen[i0].p)[j] = i0*100+j;
        for (i1=0; i1<3; i1++) {
            for (i2=0; i2<5; i2++)
                int3d[i0][i1][i2] = i0*i1-i1*i2+i0*i2;
        }
    }

    cmp_data[0].zipcode = 01001;
    cmp_data[0].city = "Agawam, Massachusetts";
    cmp_data[1].zipcode = 99950;
    cmp_data[1].city = "Ketchikan, Alaska";
    cmp_data[2].zipcode = 00501;
    cmp_data[2].city = "Holtsville, New York";
    cmp_data[3].zipcode = 61820;
    cmp_data[3].city = "Champaign, Illinois";

    /* 1 scalar point */
    sid = H5Screate (H5S_SCALAR);
    sprintf(name, "%05d scalar int", idx);
    nattrs += add_attr(oid, name, H5T_NATIVE_UINT, sid, &i);
    sprintf(name, "%05d scalar ulong", idx);
    nattrs += add_attr(oid, name, H5T_NATIVE_INT64, sid, &l);
    sprintf(name, "%05d scalar str", idx);
    tid = H5Tcopy (H5T_C_S1);
    H5Tset_size (tid, H5T_VARIABLE);
    nattrs += add_attr(oid, name, tid, sid, &s[2]);
    H5Tclose(tid);
    H5Sclose(sid);

    /* 4 single point */
    sid = H5Screate_simple (1, dims1, NULL);
    H5Rcreate(&ref, oid, ".", H5R_OBJECT, -1);
    sprintf(name, "%05d single float", idx);
    nattrs += add_attr(oid, name, H5T_NATIVE_FLOAT, sid, &f);
    sprintf(name, "%05d single double", idx);
    nattrs += add_attr(oid, name, H5T_NATIVE_DOUBLE, sid, &d);
    sprintf(name, "%05d single obj_ref", idx);
    nattrs += add_attr(oid, name, H5T_STD_REF_OBJ, sid, &ref);
    H5Sclose(sid);

    /* 7 fixed length 1D array */
    sid = H5Screate_simple (1, dims1, NULL);
    tid = H5Tarray_create (H5T_NATIVE_FLOAT, 1, dims2);
    sprintf(name, "%05d array float", idx);
    nattrs += add_attr(oid, name, tid, sid, &f_array[0]);
    H5Tclose(tid);
    tid = H5Tcopy (H5T_C_S1);
    H5Tset_size (tid, strlen(s[0])+1);
    tid1 = H5Tarray_create (tid, 1, dims2);
    sprintf(name, "%05d array str", idx);
    nattrs += add_attr(oid, name, tid1, sid, s);
    H5Tclose(tid1);
    H5Tclose(tid);
    H5Sclose(sid);

    /* 9 fixed length 2D int arrays */
    sid = H5Screate_simple (1, dims2, NULL);
    tid = H5Tarray_create (H5T_NATIVE_INT, 2, dims3);
    sprintf(name, "%05d array int 2D", idx);
    nattrs += add_attr(oid, name, tid, sid, int3d[0][0]);
    H5Tclose(tid);
    H5Sclose(sid);

    /* 10 variable length arrays */
    sid = H5Screate_simple (1, dims2, NULL);
    tid = H5Tcopy (H5T_C_S1);
    H5Tset_size (tid, H5T_VARIABLE);
    sprintf(name, "%05d vlen strings", idx);
    nattrs += add_attr(oid, name, tid, sid, s_vlen);
    H5Tclose(tid);
    tid = H5Tvlen_create (H5T_NATIVE_INT);
    sprintf(name, "%05d vlen int array", idx);
// ... (part of the code is omitted here in the original) ...
Developer: FilipeMaia, Project: hdf5, Lines: 101
Example 7: read_file

// ... (part of the code is omitted here in the original) ...

        if(All.ICFormat == 1 || All.ICFormat == 2)
            my_fread(CommBuffer, bytes_per_blockelement, pc, fd);

#ifdef HAVE_HDF5
        if(All.ICFormat == 3)
        {
            get_dataset_name(blocknr, buf);
            hdf5_dataset = H5Dopen(hdf5_grp[type], buf);

            dims[0] = header.npart[type];
            dims[1] = get_values_per_blockelement(blocknr);
            if(dims[1] == 1)
                rank = 1;
            else
                rank = 2;

            hdf5_dataspace_in_file = H5Screate_simple(rank, dims, NULL);

            dims[0] = pc;
            hdf5_dataspace_in_memory = H5Screate_simple(rank, dims, NULL);

            start[0] = pcsum;
            start[1] = 0;

            count[0] = pc;
            count[1] = get_values_per_blockelement(blocknr);
            pcsum += pc;

            H5Sselect_hyperslab(hdf5_dataspace_in_file, H5S_SELECT_SET, start, NULL, count, NULL);

            switch (get_datatype_in_block(blocknr))
            {
            case 0:
                hdf5_datatype = H5Tcopy(H5T_NATIVE_UINT);
                break;
            case 1:
                hdf5_datatype = H5Tcopy(H5T_NATIVE_FLOAT);
                break;
            case 2:
                hdf5_datatype = H5Tcopy(H5T_NATIVE_UINT64);
                break;
            }

            H5Dread(hdf5_dataset, hdf5_datatype, hdf5_dataspace_in_memory,
                    hdf5_dataspace_in_file, H5P_DEFAULT, CommBuffer);

            H5Tclose(hdf5_datatype);
            H5Sclose(hdf5_dataspace_in_memory);
            H5Sclose(hdf5_dataspace_in_file);
            H5Dclose(hdf5_dataset);
        }
#endif
    }

    if(ThisTask == readTask && task != readTask)
        MPI_Ssend(CommBuffer, bytes_per_blockelement * pc, MPI_BYTE, task, TAG_PDATA, MPI_COMM_WORLD);

    if(ThisTask != readTask && task == ThisTask)
        MPI_Recv(CommBuffer, bytes_per_blockelement * pc, MPI_BYTE, readTask, TAG_PDATA,
                 MPI_COMM_WORLD, &status);

    if(ThisTask == task)
    {
        empty_read_buffer(blocknr, nstart + offset, pc, type);
Developer: Christian-Schultz, Project: dark_energy, Lines: 66
Example 8: H5Fcreate

bool stfio::exportHDF5File(const std::string& fName, const Recording& WData, ProgressInfo& progDlg) {

    hid_t file_id = H5Fcreate(fName.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);

    const int NRECORDS = 1;
    const int NFIELDS = 3;

    /* Calculate the size and the offsets of our struct members in memory */
    size_t rt_offset[NFIELDS] = { HOFFSET( rt, channels ),
                                  HOFFSET( rt, date ),
                                  HOFFSET( rt, time )};

    /* Define an array of root tables */
    rt p_data;
    p_data.channels = WData.size();
    struct tm t = WData.GetDateTime();
    std::size_t date_length = snprintf(p_data.date, DATELEN, "%04i-%02i-%02i",
                                       t.tm_year+1900, t.tm_mon+1, t.tm_mday);
    std::size_t time_length = snprintf(p_data.time, TIMELEN, "%02i:%02i:%02i",
                                       t.tm_hour, t.tm_min, t.tm_sec);
    // ensure that an undefined string is set to "\0", and that the terminating \0 is counted in string length
    p_data.date[date_length++] = 0;
    p_data.time[time_length++] = 0;

    /* Define field information */
    const char *field_names[NFIELDS] = { "channels", "date", "time" };
    hid_t field_type[NFIELDS];

    /* Initialize the field field_type */
    hid_t string_type1 = H5Tcopy( H5T_C_S1 );
    hid_t string_type2 = H5Tcopy( H5T_C_S1 );
    H5Tset_size( string_type1, date_length);
    H5Tset_size( string_type2, time_length);
    field_type[0] = H5T_NATIVE_INT;
    field_type[1] = string_type1;
    field_type[2] = string_type2;

    std::ostringstream desc;
    desc << "Description of " << fName;

    herr_t status = H5TBmake_table( desc.str().c_str(), file_id, "description",
                                    (hsize_t)NFIELDS, (hsize_t)NRECORDS, sizeof(rt),
                                    field_names, rt_offset, field_type, 10, NULL, 0, &p_data );

    if (status < 0) {
        std::string errorMsg("Exception while writing description in stfio::exportHDF5File");
        H5Fclose(file_id);
        H5close();
        throw std::runtime_error(errorMsg);
    }

    hid_t comment_group = H5Gcreate2( file_id, "/comment", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

    /* File comment. */
    std::string description(WData.GetFileDescription());
    if (description.length() <= 0) {
        description = "No description";
    }

    status = H5LTmake_dataset_string(file_id, "/comment/description", description.c_str());
    if (status < 0) {
        std::string errorMsg("Exception while writing description in stfio::exportHDF5File");
        H5Fclose(file_id);
        H5close();
        throw std::runtime_error(errorMsg);
    }

    std::string comment(WData.GetComment());
    if (comment.length() <= 0) {
        comment = "No comment";
    }

    status = H5LTmake_dataset_string(file_id, "/comment/comment", comment.c_str());
    if (status < 0) {
        std::string errorMsg("Exception while writing comment in stfio::exportHDF5File");
        H5Fclose(file_id);
        H5close();
        throw std::runtime_error(errorMsg);
    }

    H5Gclose(comment_group);

    std::vector<std::string> channel_name(WData.size());

    hid_t channels_group = H5Gcreate2( file_id, "/channels", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

    for ( std::size_t n_c=0; n_c < WData.size(); ++n_c) {
        /* Channel descriptions. */
        std::ostringstream ossname;
        ossname << WData[n_c].GetChannelName();
        if ( ossname.str() == "" ) {
            ossname << "ch" << (n_c);
        }
        channel_name[n_c] = ossname.str();
        hsize_t dimsc[1] = { 1 };
        hid_t string_typec = H5Tcopy( H5T_C_S1 );
        std::size_t cn_length = channel_name[n_c].length();
        if (cn_length <= 0) cn_length = 1;
        H5Tset_size( string_typec, cn_length );
        std::vector<char> datac(channel_name[n_c].length());
        std::copy(channel_name[n_c].begin(), channel_name[n_c].end(), datac.begin());
        std::ostringstream desc_path;
        desc_path << "/channels/ch" << (n_c);
        status = H5LTmake_dataset(file_id, desc_path.str().c_str(), 1, dimsc, string_typec, &datac[0]);
// ... (part of the code is omitted here in the original) ...
Developer: 410pfeliciano, Project: stimfit, Lines: 101
Example 9: H5Tcopy

DataType DataType::copy(hid_t source) {
    DataType hi_copy = H5Tcopy(source);
    hi_copy.check("Could not copy type");
    return hi_copy;
}
Developer: Prabhnith, Project: nix, Lines: 5
Example 10: nh5init_types_c

/****if* H5_f/h5init_types_c
 * NAME
 *  h5init_types_c
 * PURPOSE
 *  Initialize predefined datatypes in Fortran
 * INPUTS
 *  types - array with the predefined Native Fortran
 *          type, its element and length must be the
 *          same as the types array defined in the
 *          H5f90global.f90
 *  floatingtypes - array with the predefined Floating Fortran
 *                  type, its element and length must be the
 *                  same as the floatingtypes array defined in the
 *                  H5f90global.f90
 *  integertypes - array with the predefined Integer Fortran
 *                 type, its element and length must be the
 *                 same as the integertypes array defined in the
 *                 H5f90global.f90
 * RETURNS
 *  0 on success, -1 on failure
 * AUTHOR
 *  Elena Pourmal
 *  Tuesday, August 3, 1999
 * SOURCE
*/
int_f
nh5init_types_c( hid_t_f * types, hid_t_f * floatingtypes, hid_t_f * integertypes )
/******/
{
    int ret_value = -1;
    hid_t c_type_id;
    size_t tmp_val;

    /* Fortran INTEGER may not be the same as C int; do all checking to find
       an appropriate size
    */
    if (sizeof(int_f) == sizeof(int)) {
        if ((types[0] = (hid_t_f)H5Tcopy(H5T_NATIVE_INT)) < 0) return ret_value;
    } /*end if */
    else if (sizeof(int_f) == sizeof(long)) {
        if ((types[0] = (hid_t_f)H5Tcopy(H5T_NATIVE_LONG)) < 0) return ret_value;
    } /*end if */
    else if (sizeof(int_f) == sizeof(long long)) {
        if ((types[0] = (hid_t_f)H5Tcopy(H5T_NATIVE_LLONG)) < 0) return ret_value;
    } /*end else */

    /* Find appropriate size to store Fortran REAL */
    if(sizeof(real_f)==sizeof(float)) {
        if ((types[1] = (hid_t_f)H5Tcopy(H5T_NATIVE_FLOAT)) < 0) return ret_value;
    } /* end if */
    else if(sizeof(real_f)==sizeof(double)){
        if ((types[1] = (hid_t_f)H5Tcopy(H5T_NATIVE_DOUBLE)) < 0) return ret_value;
    } /* end if */
#if H5_SIZEOF_LONG_DOUBLE!=0
    else if (sizeof(real_f) == sizeof(long double)) {
        if ((types[1] = (hid_t_f)H5Tcopy(H5T_NATIVE_LDOUBLE)) < 0) return ret_value;
    } /* end else */
#endif

    /* Find appropriate size to store Fortran DOUBLE */
    if(sizeof(double_f)==sizeof(double)) {
        if ((types[2] = (hid_t_f)H5Tcopy(H5T_NATIVE_DOUBLE)) < 0) return ret_value;
    }/*end if */
#if H5_SIZEOF_LONG_DOUBLE!=0
    else if(sizeof(double_f)==sizeof(long double)) {
        if ((types[2] = (hid_t_f)H5Tcopy(H5T_NATIVE_LDOUBLE)) < 0) return ret_value;
    }/*end else */
#endif

/*
    if ((types[3] = H5Tcopy(H5T_NATIVE_UINT8)) < 0) return ret_value;
*/
    if ((c_type_id = H5Tcopy(H5T_FORTRAN_S1)) < 0) return ret_value;
    tmp_val = 1;
    if(H5Tset_size(c_type_id, tmp_val) < 0) return ret_value;
    if(H5Tset_strpad(c_type_id, H5T_STR_SPACEPAD) < 0) return ret_value;
    types[3] = (hid_t_f)c_type_id;
/*
    if ((types[3] = H5Tcopy(H5T_C_S1)) < 0) return ret_value;
    if(H5Tset_strpad(types[3],H5T_STR_NULLTERM) < 0) return ret_value;
    if(H5Tset_size(types[3],1) < 0) return ret_value;
*/

/*
    if ((types[3] = H5Tcopy(H5T_STD_I8BE)) < 0) return ret_value;
*/
    if ((types[4] = (hid_t_f)H5Tcopy(H5T_STD_REF_OBJ)) < 0) return ret_value;
    if ((types[5] = (hid_t_f)H5Tcopy(H5T_STD_REF_DSETREG)) < 0) return ret_value;

    /*
     * FIND H5T_NATIVE_INTEGER_1
     */
    if (sizeof(int_1_f) == sizeof(char)) {
        if ((types[6] = (hid_t_f)H5Tcopy(H5T_NATIVE_CHAR)) < 0) return ret_value;
    } /*end if */
    else if (sizeof(int_1_f) == sizeof(short)) {
        if ((types[6] = (hid_t_f)H5Tcopy(H5T_NATIVE_SHORT)) < 0) return ret_value;
    } /*end if */
    else if (sizeof(int_1_f) == sizeof(int)) {
// ... (part of the code is omitted here in the original) ...
Developer: AndyHuang7601, Project: EpicGames-UnrealEngine, Lines: 101
Example 11: H5Fopen

void stfio::importHDF5File(const std::string& fName, Recording& ReturnData, ProgressInfo& progDlg) {
    /* Create a new file using default properties. */
    hid_t file_id = H5Fopen(fName.c_str(), H5F_ACC_RDONLY, H5P_DEFAULT);

    /* H5TBread_table
       const int NRECORDS = 1;*/
    const int NFIELDS = 3;

    /* Calculate the size and the offsets of our struct members in memory */
    size_t rt_offset[NFIELDS] = { HOFFSET( rt, channels ),
                                  HOFFSET( rt, date ),
                                  HOFFSET( rt, time )};

    rt rt_buf[1];
    size_t rt_sizes[NFIELDS] = { sizeof( rt_buf[0].channels),
                                 sizeof( rt_buf[0].date),
                                 sizeof( rt_buf[0].time)};
    herr_t status=H5TBread_table( file_id, "description", sizeof(rt), rt_offset, rt_sizes, rt_buf );
    if (status < 0) {
        std::string errorMsg("Exception while reading description in stfio::importHDF5File");
        throw std::runtime_error(errorMsg);
    }
    int numberChannels = rt_buf[0].channels;
    if ( ReturnData.SetDate(rt_buf[0].date)
      || ReturnData.SetTime(rt_buf[0].time) ) {
        std::cout << "Warning HDF5: could not decode date/time "
                  << rt_buf[0].date << " " << rt_buf[0].time << std::endl;
    }

    /* Create the data space for the dataset. */
    hsize_t dims;
    H5T_class_t class_id;
    size_t type_size;

    std::string description, comment;
    hid_t group_id = H5Gopen2(file_id, "/comment", H5P_DEFAULT);
    status = H5Lexists(group_id, "/comment/description", 0);
    if (status==1) {
        status = H5LTget_dataset_info( file_id, "/comment/description", &dims, &class_id, &type_size );
        if (status >= 0) {
            description.resize( type_size );
            status = H5LTread_dataset_string (file_id, "/comment/description", &description[0]);
            if (status < 0) {
                std::string errorMsg("Exception while reading description in stfio::importHDF5File");
                throw std::runtime_error(errorMsg);
            }
        }
    }
    ReturnData.SetFileDescription(description);

    status = H5Lexists(group_id, "/comment/comment", 0);
    if (status==1) {
        status = H5LTget_dataset_info( file_id, "/comment/comment", &dims, &class_id, &type_size );
        if (status >= 0) {
            comment.resize( type_size );
            status = H5LTread_dataset_string (file_id, "/comment/comment", &comment[0]);
            if (status < 0) {
                std::string errorMsg("Exception while reading comment in stfio::importHDF5File");
                throw std::runtime_error(errorMsg);
            }
        }
    }
    ReturnData.SetComment(comment);

    double dt = 1.0;
    std::string yunits = "";
    for (int n_c=0; n_c<numberChannels; ++n_c) {
        /* Calculate the size and the offsets of our struct members in memory */
        size_t ct_offset[NFIELDS] = { HOFFSET( ct, n_sections ) };
        ct ct_buf[1];
        size_t ct_sizes[NFIELDS] = { sizeof( ct_buf[0].n_sections) };

        /* Read channel name */
        hsize_t cdims;
        H5T_class_t cclass_id;
        size_t ctype_size;
        std::ostringstream desc_path;
        desc_path << "/channels/ch" << (n_c);
        status = H5LTget_dataset_info( file_id, desc_path.str().c_str(), &cdims, &cclass_id, &ctype_size );
        if (status < 0) {
            std::string errorMsg("Exception while reading channel in stfio::importHDF5File");
            throw std::runtime_error(errorMsg);
        }
        hid_t string_typec = H5Tcopy( H5T_C_S1 );
        H5Tset_size( string_typec, ctype_size );
        std::vector<char> szchannel_name(ctype_size);
        // szchannel_name.reset( new char[ctype_size] );
        status = H5LTread_dataset(file_id, desc_path.str().c_str(), string_typec, &szchannel_name[0] );
        if (status < 0) {
            std::string errorMsg("Exception while reading channel name in stfio::importHDF5File");
            throw std::runtime_error(errorMsg);
        }
        std::ostringstream channel_name;
        for (std::size_t c=0; c<ctype_size; ++c) {
            channel_name << szchannel_name[c];
        }

        std::ostringstream channel_path;
        channel_path << "/" << channel_name.str();
// ... (part of the code is omitted here in the original) ...
Developer: 410pfeliciano, Project: stimfit, Lines: 101
Example 12: main

int main(int argc, char *argv[]){

    if(argc!=8) usage(argv[0]);

    char twop_type[Ntype][256];

    strcpy(twop_type[0],"pseudoscalar");
    strcpy(twop_type[1],"scalar");
    strcpy(twop_type[2],"g5g1");
    strcpy(twop_type[3],"g5g2");
    strcpy(twop_type[4],"g5g3");
    strcpy(twop_type[5],"g5g4");
    strcpy(twop_type[6],"g1");
    strcpy(twop_type[7],"g2");
    strcpy(twop_type[8],"g3");
    strcpy(twop_type[9],"g4");

    char *h5_file, *out_file, *twop, *conf, *src_pos;
    asprintf(&h5_file ,"%s",argv[1]);
    asprintf(&out_file,"%s",argv[2]);
    asprintf(&twop    ,"%s",argv[3]);
    asprintf(&conf    ,"%s",argv[4]);
    asprintf(&src_pos ,"%s",argv[5]);

    int T = atoi(argv[6]);
    int xtype = atoi(argv[7]);

    bool twopOK = false;
    int dt;
    for(int i=0;(i<Ntype && !twopOK);i++)
        if(strcmp(twop,twop_type[i])==0){
            twopOK = true;
            dt = i;
        }

    if(!twopOK){
        printf("Error: Twop must be one of:\n");
        for(int i=0;i<Ntype;i++) printf(" %s\n",twop_type[i]);
        exit(-1);
    }

    printf("Got the following input:\n");
    printf("h5_file: %s\n",h5_file);
    printf("out_file: %s\n",out_file);
    printf("twop: %d - %s\n",dt,twop);
    printf("conf traj: %s\n",conf);
    printf("src pos: %s\n",src_pos);
    printf("T = %02d\n",T);
    //-----------------------------------------

    //-Open the h5 file
    hid_t file_id = H5Fopen(h5_file, H5F_ACC_RDONLY, H5P_DEFAULT);

    //-Get the momenta-related attributes
    hid_t Qattr = H5Aopen (file_id, QSQ_STR , H5P_DEFAULT);
    hid_t Mattr = H5Aopen (file_id, NMOM_STR, H5P_DEFAULT);

    hid_t Qattr_type = H5Aget_type(Qattr);
    hid_t Mattr_type = H5Aget_type(Mattr);

    size_t Qattr_dim = H5Tget_size(Qattr_type);
    size_t Mattr_dim = H5Tget_size(Mattr_type);

    hid_t type_id = H5Tcopy(H5T_C_S1);
    herr_t Qstatus = H5Tset_size (type_id, Qattr_dim);
    herr_t Mstatus = H5Tset_size (type_id, Mattr_dim);

    char *cQsq = (char*) malloc(Qattr_dim*sizeof(char));
    char *cNmoms = (char*) malloc(Mattr_dim*sizeof(char));

    Qstatus = H5Aread (Qattr, type_id, &(cQsq[0]));
    Mstatus = H5Aread (Mattr, type_id, &(cNmoms[0]));

    if (Mstatus<0 || Qstatus<0){
        fprintf (stderr, "Momenta attributes read failed!\n");
        Qstatus = H5Aclose(Qattr);
        Mstatus = H5Aclose(Mattr);
        Qstatus = H5Tclose(Qattr_type);
        Mstatus = H5Tclose(Mattr_type);
        Mstatus = H5Fclose(file_id);
        exit(-1);
    }

    int Qsq = atoi(cQsq);
    int Nmoms = atoi(cNmoms);

    printf("Momenta attributes: Qsq = %d , Nmoms = %d\n",Qsq,Nmoms);

    Qstatus = H5Aclose(Qattr);
    Mstatus = H5Aclose(Mattr);
    Qstatus = H5Tclose(Qattr_type);
    Mstatus = H5Tclose(Mattr_type);
    //------------------------------------------------------------------

    //-Open the momenta dataset from the file and read the momenta
    int *moms = (int*) malloc(Nmoms*3*sizeof(int));

    hid_t Mdset_id = H5Dopen(file_id, MOM_DSET, H5P_DEFAULT);

    Mstatus = H5Dread(Mdset_id, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, moms);

    if (Mstatus<0){
// ... (part of the code is omitted here in the original) ...
Developer: ckallidonis, Project: file_util, Lines: 101
Example 13: main

int main( void )
{
    typedef struct Particle
    {
        char   name[16];
        int    lati;
        int    longi;
        float  pressure;
        double temperature;
    } Particle;

    Particle dst_buf[ NRECORDS + NRECORDS_INS ];

    /* Calculate the size and the offsets of our struct members in memory */
    size_t dst_size = sizeof( Particle );
    size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),
                                   HOFFSET( Particle, lati ),
                                   HOFFSET( Particle, longi ),
                                   HOFFSET( Particle, pressure ),
                                   HOFFSET( Particle, temperature )};

    size_t dst_sizes[NFIELDS] = { sizeof( dst_buf[0].name),
                                  sizeof( dst_buf[0].lati),
                                  sizeof( dst_buf[0].longi),
                                  sizeof( dst_buf[0].pressure),
                                  sizeof( dst_buf[0].temperature)};

    /* Define an array of Particles */
    Particle p_data[NRECORDS] = {
        {"zero", 0, 0, 0.0f, 0.0},
        {"one", 10, 10, 1.0f, 10.0},
        {"two", 20, 20, 2.0f, 20.0},
        {"three", 30, 30, 3.0f, 30.0},
        {"four", 40, 40, 4.0f, 40.0},
        {"five", 50, 50, 5.0f, 50.0},
        {"six", 60, 60, 6.0f, 60.0},
        {"seven", 70, 70, 7.0f, 70.0}
    };

    /* Define field information */
    const char *field_names[NFIELDS] = { "Name", "Latitude", "Longitude", "Pressure", "Temperature" };
    hid_t    field_type[NFIELDS];
    hid_t    string_type;
    hid_t    file_id;
    hsize_t  chunk_size = 10;
    int      compress = 0;
    Particle fill_data[1] = { {"no data", -1, -1, -99.0f, -99.0} };   /* Fill value particle */
    hsize_t  start1;      /* Record to start reading from 1st table */
    hsize_t  nrecords;    /* Number of records to insert */
    hsize_t  start2;      /* Record to start writing in 2nd table */
    herr_t   status;
    int      i;
    hsize_t  nfields_out;
    hsize_t  nrecords_out;

    /* Initialize the field field_type */
    string_type = H5Tcopy( H5T_C_S1 );
    H5Tset_size( string_type, 16 );
    field_type[0] = string_type;
    field_type[1] = H5T_NATIVE_INT;
    field_type[2] = H5T_NATIVE_INT;
    field_type[3] = H5T_NATIVE_FLOAT;
    field_type[4] = H5T_NATIVE_DOUBLE;

    /* Create a new file using default properties. */
    file_id = H5Fcreate( "ex_table_09.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );

    /* Make 2 tables: TABLE2_NAME is empty */
    status=H5TBmake_table( "Table Title", file_id, TABLE1_NAME, NFIELDS, NRECORDS,
                           dst_size, field_names, dst_offset, field_type,
                           chunk_size, fill_data, compress, p_data );

    status=H5TBmake_table( "Table Title", file_id, TABLE2_NAME, NFIELDS, NRECORDS,
                           dst_size, field_names, dst_offset, field_type,
                           chunk_size, fill_data, compress, NULL );

    /* Add 2 records from TABLE1_NAME to TABLE2_NAME */
    start1   = 3;
    nrecords = NRECORDS_INS;
    start2   = 6;
    status=H5TBadd_records_from( file_id, TABLE1_NAME, start1, nrecords, TABLE2_NAME, start2 );

    /* read TABLE2_NAME: it should have 2 more records now */
    status=H5TBread_table( file_id, TABLE2_NAME, dst_size, dst_offset, dst_sizes, dst_buf );

    /* Get table info */
    status=H5TBget_table_info (file_id, TABLE2_NAME, &nfields_out, &nrecords_out );

    /* print */
    printf ("Table has %d fields and %d records\n", (int)nfields_out, (int)nrecords_out);

    /* print it by rows */
    for (i=0; i<nrecords_out; i++) {
        printf ("%-5s %-5d %-5d %-5f %-5f",
                dst_buf[i].name,
                dst_buf[i].lati,
                dst_buf[i].longi,
                dst_buf[i].pressure,
// ... (part of the code is omitted here in the original) ...
Developer: einon, Project: affymetrix-power-tools, Lines: 101
Example 14: main

int
main(int argc, char **argv)
{
    printf("\n*** Testing HDF5/NetCDF-4 interoperability...\n");
    printf("*** testing HDF5 compatibility...");
    {
#define GRPA_NAME "grpa"
#define VAR_NAME "vara"
#define NDIMS 2
        int nrowCur = 7;               /* current size */
        int ncolCur = 3;
        int nrowMax = nrowCur + 0;     /* maximum size */
        int ncolMax = ncolCur + 0;

        hid_t xdimId;
        hid_t ydimId;
        hsize_t xscaleDims[1];
        hsize_t yscaleDims[1];
        hid_t xdimSpaceId, spaceId;
        hid_t fileId;
        hid_t fapl;
        hsize_t curDims[2];
        hsize_t maxDims[2];
        hid_t dataTypeId, dsPropertyId, grpaId, grpaPropId, dsId;
        hid_t ydimSpaceId;
        const char * dimNameBase
            = "This is a netCDF dimension but not a netCDF variable.";
        char dimNameBuf[1000];
        char *varaName = "/grpa/vara";
        short amat[nrowCur][ncolCur];
        int ii, jj;

        xscaleDims[0] = nrowCur;
        yscaleDims[0] = ncolCur;
        if ((xdimSpaceId = H5Screate_simple(1, xscaleDims, NULL)) < 0) ERR;

        /* With the SEMI close degree, the HDF5 file close will fail if
         * anything is left open. */
        if ((fapl = H5Pcreate(H5P_FILE_ACCESS)) < 0) ERR;
        if (H5Pset_fclose_degree(fapl, H5F_CLOSE_SEMI)) ERR;

        /* Create file */
        if((fileId = H5Fcreate(FILE_NAME, H5F_ACC_TRUNC,
                               H5Pcreate(H5P_FILE_CREATE), fapl)) < 0) ERR;
        if (H5Pclose(fapl) < 0) ERR;

        /* Create data space */
        curDims[0] = nrowCur;
        curDims[1] = ncolCur;
        maxDims[0] = nrowMax;
        maxDims[1] = ncolMax;
        if ((spaceId = H5Screate_simple(2, curDims, maxDims)) < 0) ERR;

        if ((dataTypeId = H5Tcopy(H5T_NATIVE_SHORT)) < 0) ERR;

        if ((dsPropertyId = H5Pcreate(H5P_DATASET_CREATE)) < 0) ERR;

        if ((grpaPropId = H5Pcreate(H5P_GROUP_CREATE)) < 0) ERR;
        if ((grpaId = H5Gcreate2(fileId, GRPA_NAME, H5P_DEFAULT,
                                 grpaPropId, H5P_DEFAULT)) < 0) ERR;
        if (H5Pclose(grpaPropId) < 0) ERR;

        /* Create vara dataset */
        if ((dsId = H5Dcreate2(fileId, varaName, dataTypeId, spaceId,
                               H5P_DEFAULT, dsPropertyId,
                               H5P_DEFAULT)) < 0) ERR;

        if (H5Pclose(dsPropertyId) < 0) ERR;
        if (H5Tclose(dataTypeId) < 0) ERR;

        if ((ydimSpaceId = H5Screate_simple(1, yscaleDims, NULL)) < 0) ERR;

        /* Create xdim dimension dataset */
        if ((xdimId = H5Dcreate2(fileId, "/xdim", H5T_IEEE_F32BE,
                                 xdimSpaceId, H5P_DEFAULT, H5P_DEFAULT,
                                 H5P_DEFAULT)) < 0) ERR;

        if (H5Sclose(xdimSpaceId) < 0) ERR;

        /* Create ydim dimension dataset */
        if ((ydimId = H5Dcreate2(fileId, "/ydim", H5T_IEEE_F32BE,
                                 ydimSpaceId, H5P_DEFAULT, H5P_DEFAULT,
                                 H5P_DEFAULT)) < 0) ERR;
        if (H5Sclose(ydimSpaceId) < 0) ERR;

        /* Create xdim scale */
        sprintf(dimNameBuf, "%s%10d", dimNameBase, nrowCur);
        if (H5DSset_scale(xdimId, dimNameBuf) < 0) ERR;

        /* Create ydim scale */
        sprintf(dimNameBuf, "%s%10d", dimNameBase, ncolCur);
        if (H5DSset_scale(ydimId, dimNameBuf) < 0) ERR;

        /* Attach dimension scales to the dataset */
        if (H5DSattach_scale(dsId, xdimId, 0) < 0) ERR;

        if (H5DSattach_scale(dsId, ydimId, 1) < 0) ERR;

        /* Close stuff. */
        if (H5Dclose(xdimId) < 0) ERR;
        if (H5Dclose(ydimId) < 0) ERR;
// ... (part of the code is omitted here in the original) ...
Developer: Unidata, Project: netcdf-c, Lines: 101
Example 15: hdf5read

int hdf5read(char *name, struct descriptor_xd *xd)
{
  hid_t obj, type;
  H5G_stat_t statbuf;
  int item_type;
  int idx = 0;
  int status = FindItem(name, &obj, &item_type);
  if (status & 1) {
    if (item_type == H5G_DATASET) {
      int size;
      char dtype;
      int htype = 42;
      int is_signed;
      hsize_t ds_dims[64];
      hid_t space = H5Dget_space(obj);
      int n_ds_dims = H5Sget_simple_extent_dims(space, ds_dims, 0);
      size_t precision;
      H5Sclose(space);
      type = H5Dget_type(obj);
      switch (H5Tget_class(type)) {
      case H5T_COMPOUND:
        {
          printf("Compound data is not supported, skipping\n");
          break;
        }
      case H5T_INTEGER:
        precision = H5Tget_precision(type);
        is_signed = (H5Tget_sign(type) != H5T_SGN_NONE);
        size = precision / 8;
        switch (precision) {
        case 8:
          dtype = is_signed ? DTYPE_B : DTYPE_BU;
          htype = is_signed ? H5T_NATIVE_CHAR : H5T_NATIVE_UCHAR;
          break;
        case 16:
          dtype = is_signed ? DTYPE_W : DTYPE_WU;
          htype = is_signed ? H5T_NATIVE_SHORT : H5T_NATIVE_USHORT;
          break;
        case 32:
          dtype = is_signed ? DTYPE_L : DTYPE_LU;
          htype = is_signed ? H5T_NATIVE_INT : H5T_NATIVE_UINT;
          break;
        case 64:
          dtype = is_signed ? DTYPE_Q : DTYPE_QU;
          htype = is_signed ? H5T_NATIVE_LLONG : H5T_NATIVE_ULLONG;
          break;
        default:
          dtype = 0;
          break;
        }
        PutData(obj, dtype, htype, size, n_ds_dims, ds_dims, 0, xd);
        break;
      case H5T_FLOAT:
        precision = H5Tget_precision(type);
        size = precision / 8;
        switch (precision) {
        case 32:
          dtype = DTYPE_NATIVE_FLOAT;
          htype = H5T_NATIVE_FLOAT;
          break;
        case 64:
          dtype = DTYPE_NATIVE_DOUBLE;
          htype = H5T_NATIVE_DOUBLE;
          break;
        default:
          dtype = 0;
          break;
        }
        PutData(obj, dtype, htype, size, n_ds_dims, ds_dims, 0, xd);
        break;
      case H5T_TIME:
        printf("dataset is time ---- UNSUPPORTED\n");
        break;
      case H5T_STRING:
        {
          int slen = H5Tget_size(type);
          hid_t st_id;
          if (slen < 0) {
            printf("Badly formed string attribute\n");
            return;
          }
#if H5_VERS_MAJOR>=1&&H5_VERS_MINOR>=6&&H5_VERS_RELEASE>=1
          if(H5Tis_variable_str(type)) {
            st_id = H5Tcopy (H5T_C_S1);
            H5Tset_size(st_id, H5T_VARIABLE);
          } else {
#endif
            st_id = H5Tcopy (type);
            H5Tset_cset(st_id, H5T_CSET_ASCII);
#if H5_VERS_MAJOR>=1&&H5_VERS_MINOR>=6&&H5_VERS_RELEASE>=1
          }
#endif
          if (H5Tget_size(st_id) > slen) {
            slen = H5Tget_size(st_id);
          }
          H5Tset_size (st_id, slen);
          PutData(obj, DTYPE_T, st_id, slen, n_ds_dims, ds_dims, 0, xd);
// ... (part of the code is omitted here in the original) ...
Developer: dgarnier, Project: MDSplus-forked, Lines: 101
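The H5T_STRING branch in Example 15 hinges on a common idiom: copy H5T_C_S1 and mark it variable-length when the stored strings are variable-length, otherwise copy the on-disk type (or keep its fixed size) for reading into a flat buffer. A stripped-down version of that idiom, free of the MDSplus-specific types, could look like the following sketch; the helper name is illustrative.

#include "hdf5.h"

/* Build a memory datatype matching a string dataset's storage.
 * dtype_id is assumed to come from H5Dget_type() on a string dataset. */
static hid_t make_memory_string_type(hid_t dtype_id)
{
    hid_t mem_type = H5Tcopy(H5T_C_S1);
    if (H5Tis_variable_str(dtype_id) > 0) {
        /* Variable-length strings are read back as char* pointers. */
        H5Tset_size(mem_type, H5T_VARIABLE);
    } else {
        /* Fixed-length strings keep the on-disk element size. */
        H5Tset_size(mem_type, H5Tget_size(dtype_id));
    }
    return mem_type;   /* caller releases with H5Tclose() */
}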
Example 16:

/*****************************************************************************
  This function generates attributes, groups, and datasets of many types.

  Parameters:
            fname:  file_name.
            ngrps:  number of top level groups.
            ndsets: number of datasets.
            attrs:  number of attributes.
            nrow:   number of rows in a dataset.
            chunk:  chunk size (single number).
            vlen:   max vlen size.
            comp:   use latest format.
            latest: use gzip compression.

  Return:  Non-negative on success/Negative on failure

  Programmer:  Peter Cao <[email
// ... (the rest of this example is cut off in the original page) ...