您当前的位置:首页 > IT编程 > C++
| C语言 | Java | VB | VC | python | Android | TensorFlow | C++ | oracle | 学术与代码 | cnn卷积神经网络 | gnn | 图像修复 | Keras | 数据集 | Neo4j | 自然语言处理 | 深度学习 | 医学CAD | 医学影像 | 超参数 | pointnet | pytorch | 异常检测 | Transformers | 情感分类 | 知识图谱 |

自学教程:C++ H5Tclose函数代码示例

51自学网 2021-06-01 21:20:27
  C++
这篇教程C++ H5Tclose函数代码示例写得很实用,希望能帮到您。

本文整理汇总了C++中H5Tclose函数的典型用法代码示例。如果您正苦于以下问题:C++ H5Tclose函数的具体用法?C++ H5Tclose怎么用?C++ H5Tclose使用的例子?那么恭喜您, 这里精选的函数代码示例或许可以为您提供帮助。

在下文中一共展示了H5Tclose函数的30个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于我们的系统推荐出更棒的C++代码示例。

示例1: H5Dopen2

void pyne::Material::_load_comp_protocol1(hid_t db, std::string datapath, int row) {
  // Reads one row of a protocol-1 material table at `datapath` into this
  // Material instance: mass, density, atoms_per_molecule, the composition
  // map, and (when present) the per-row JSON metadata stored in the sibling
  // dataset datapath + "_metadata".
  // A negative `row` indexes from the end of the table, Python-style.
  std::string nucpath;
  hid_t data_set = H5Dopen2(db, datapath.c_str(), H5P_DEFAULT);
  hsize_t data_offset[1] = {static_cast<hsize_t>(row)};
  if (row < 0) {
    // Handle negative row indices: the unsigned wraparound of the negative
    // offset plus the table length yields the correct in-range row.
    hid_t data_space = H5Dget_space(data_set);
    hsize_t data_dims[1];
    H5Sget_simple_extent_dims(data_space, data_dims, NULL);
    data_offset[0] += data_dims[0];
    H5Sclose(data_space);  // was leaked
  }

  // Grab the "nucpath" attribute, which names the dataset of nuclide ids
  // that defines the column order of the comp array.
  hid_t nuc_attr = H5Aopen(data_set, "nucpath", H5P_DEFAULT);
  H5A_info_t nuc_info;
  H5Aget_info(nuc_attr, &nuc_info);
  hsize_t nuc_attr_len = nuc_info.data_size;
  hid_t str_attr = H5Tcopy(H5T_C_S1);
  H5Tset_size(str_attr, nuc_attr_len);
  char* nucpathbuf = new char[nuc_attr_len];
  H5Aread(nuc_attr, str_attr, nucpathbuf);
  nucpath = std::string(nucpathbuf, nuc_attr_len);
  delete[] nucpathbuf;
  H5Aclose(nuc_attr);  // was leaked

  // Grab the nuclides covered by this material table.
  std::vector<int> nuclides =
      h5wrap::h5_array_to_cpp_vector_1d<int>(db, nucpath, H5T_NATIVE_INT);
  int nuc_size = nuclides.size();
  hsize_t nuc_dims[1] = {static_cast<hsize_t>(nuc_size)};

  // Select the single requested row of the table.
  hid_t data_hyperslab = H5Dget_space(data_set);
  hsize_t data_count[1] = {1};
  H5Sselect_hyperslab(data_hyperslab, H5S_SELECT_SET, data_offset, NULL,
                      data_count, NULL);

  // Memory space for the one-row read.
  hid_t mem_space = H5Screate_simple(1, data_count, NULL);

  // Build the in-memory compound type matching an on-disk material row.
  // The comp member is a flexible tail, so the type is nuc_size doubles
  // larger than the nominal struct.
  size_t material_struct_size =
      sizeof(pyne::material_struct) + sizeof(double) * nuc_size;
  hid_t desc = H5Tcreate(H5T_COMPOUND, material_struct_size);
  hid_t comp_values_array_type = H5Tarray_create2(H5T_NATIVE_DOUBLE, 1, nuc_dims);
  H5Tinsert(desc, "mass", HOFFSET(pyne::material_struct, mass),
            H5T_NATIVE_DOUBLE);
  H5Tinsert(desc, "density", HOFFSET(pyne::material_struct, density),
            H5T_NATIVE_DOUBLE);
  H5Tinsert(desc, "atoms_per_molecule", HOFFSET(pyne::material_struct, atoms_per_mol),
            H5T_NATIVE_DOUBLE);
  H5Tinsert(desc, "comp", HOFFSET(pyne::material_struct, comp),
            comp_values_array_type);

  // Allocate exactly material_struct_size bytes for the row buffer.
  // (The previous code allocated material_struct_size *elements* of
  // material_struct, over-allocating by a factor of sizeof(material_struct).)
  material_struct* mat_data =
      reinterpret_cast<material_struct*>(new char[material_struct_size]);

  // Finally, get the data and put it on this instance.
  H5Dread(data_set, desc, mem_space, data_hyperslab, H5P_DEFAULT, mat_data);
  mass = (*mat_data).mass;
  density = (*mat_data).density;
  atoms_per_molecule = (*mat_data).atoms_per_mol;
  for (int i = 0; i < nuc_size; i++)
    comp[nuclides[i]] = (double)(*mat_data).comp[i];
  delete[] reinterpret_cast<char*>(mat_data);

  // Close every object opened for the table read (most were leaked before).
  H5Tclose(str_attr);
  H5Tclose(comp_values_array_type);
  H5Tclose(desc);
  H5Sclose(mem_space);
  H5Sclose(data_hyperslab);
  H5Dclose(data_set);

  //
  // Get metadata from the associated dataset, if available.
  //
  std::string attrpath = datapath + "_metadata";
  if (!h5wrap::path_exists(db, attrpath))
    return;

  hvl_t attrdata[1];
  hid_t attrtype = H5Tvlen_create(H5T_NATIVE_CHAR);

  // Read the single variable-length char element for this row.
  hid_t metadataet = H5Dopen2(db, attrpath.c_str(), H5P_DEFAULT);
  hid_t metadatalab = H5Dget_space(metadataet);
  H5Sselect_hyperslab(metadatalab, H5S_SELECT_SET, data_offset, NULL,
                      data_count, NULL);
  hid_t attrmemspace = H5Screate_simple(1, data_count, NULL);
  H5Dread(metadataet, attrtype, attrmemspace, metadatalab, H5P_DEFAULT, attrdata);

  // Convert to in-memory JSON.
  Json::Reader reader;
  reader.parse((char*)attrdata[0].p,
               (char*)attrdata[0].p + attrdata[0].len, metadata, false);

  // Release the vlen buffer HDF5 allocated during the read (was leaked),
  // then close the metadata objects. The previous code called H5Sclose()
  // on an uninitialized id; close the dataspaces actually opened instead.
  H5Dvlen_reclaim(attrtype, attrmemspace, H5P_DEFAULT, attrdata);
  H5Fflush(db, H5F_SCOPE_GLOBAL);
  H5Dclose(metadataet);
  H5Sclose(metadatalab);
  H5Sclose(attrmemspace);
  H5Tclose(attrtype);
  // NOTE(review): this closes the caller-supplied file handle, and only on
  // the metadata path -- confirm callers expect `db` to be closed here.
  H5Fclose(db);
}
开发者ID:crbates,项目名称:pyne,代码行数:100,


示例2: e5_merge_flash_scalars

//.........这里部分代码省略.........        f5_memspace = H5Screate_simple(1, &dimens_1d, NULL);        hstatus = H5Dread(f5_dataset, f5_real_list_type, f5_memspace, f5_dataspace, H5P_DEFAULT, f5_real_list);        if (hstatus < 0)        {            estatus = E5_READ_FAILURE;            e5_error(f5_file_id, estatus, "Failed to read '%s' from F5 data file/n", f5_list_name);            break;        }        f5_data = f5_real_list;        f5_type = f5_real_list_type;        if(f5_data)        {            // Add the scalar list as attributes for the top-level Emissivity group            e5_scalar_bytes = (dimens_1d + 1) * sizeof(e5_mutable_attr_double);            e5_mutable_attr_double* e5_double_scalars = (e5_mutable_attr_double*) e5_malloc(e5_scalar_bytes);            memset(e5_double_scalars, 0, e5_scalar_bytes);            for(s = 0; s < dimens_1d; s++)            {                e5_double_scalars[s].value = f5_real_list[s].value;                e5_double_scalars[s].name = f5_real_list[s].name;                e5_double_scalars[s].name = e5_trim(f5_real_list[s].name, E5_TRUE, E5_TRUE, F5_MAX_STRING_LENGTH);            }            e5_write_attr_list_double(e5_em_group_id, (e5_attr_double*)e5_double_scalars);            e5_free(e5_double_scalars);        }        break;    }    case F5_SCALAR_LIST_STRING:    {        hid_t f5_str_list_type;        f5_str_list_t *f5_str_list;        f5_scalar_bytes = dimens_1d * sizeof(f5_str_list_t);        f5_str_list = (f5_str_list_t *) e5_malloc(f5_scalar_bytes);        if(!f5_str_list)        {            estatus = E5_OUT_OF_MEMORY;            e5_error(f5_file_id, estatus, "Failed to allocate memory for reading '%s' from F5 data file/n", f5_list_name);            break;        }        memset(f5_str_list, 0, f5_scalar_bytes);        f5_str_list_type = H5Tcreate(H5T_COMPOUND, sizeof(f5_str_list_t));        H5Tinsert(f5_str_list_type, "name",  HOFFSET(f5_str_list_t, name), string_type);        H5Tinsert(f5_str_list_type, "value", 
HOFFSET(f5_str_list_t, value), string_type);        f5_memspace = H5Screate_simple(1, &dimens_1d, NULL);        hstatus = H5Dread(f5_dataset, f5_str_list_type, f5_memspace, f5_dataspace, H5P_DEFAULT, f5_str_list);        if (hstatus < 0)        {            estatus = E5_READ_FAILURE;            e5_error(f5_file_id, estatus, "Failed to read '%s' from F5 data file/n", f5_list_name);            break;        }        f5_data = f5_str_list;        f5_type = f5_str_list_type;        if(f5_data)        {            // Add the scalar list as attributes for the top-level Emissivity group            e5_scalar_bytes = (dimens_1d + 1) * sizeof(e5_mutable_attr_str);            e5_mutable_attr_str* e5_str_scalars = (e5_mutable_attr_str*) e5_malloc(e5_scalar_bytes);            memset(e5_str_scalars, 0, e5_scalar_bytes);            for(s = 0; s < dimens_1d; s++)            {                char* trimmed = e5_trim(f5_str_list[s].value, E5_TRUE, E5_TRUE, E5_MAX_ATTR_STRING_LENGTH);                strncpy( e5_str_scalars[s].value, trimmed, E5_MAX_ATTR_STRING_LENGTH);                e5_str_scalars[s].name = e5_trim(f5_str_list[s].name, E5_TRUE, E5_TRUE, F5_MAX_STRING_LENGTH);            }            e5_write_attr_list_str(e5_em_group_id, (e5_attr_str*)e5_str_scalars);            e5_free(e5_str_scalars);        }        break;    }    default:    {        estatus = E5_INVALID_DATASET;        e5_error(f5_file_id, estatus, "Unknown scalar list requested '%s' from F5 data file/n", f5_list_name);        break;    }    };    H5Tclose(f5_type);    H5Sclose(f5_memspace);    H5Sclose(f5_dataspace);    H5Dclose(f5_dataset);    e5_free(f5_data);    e5_close_group(e5_em_group_id);    return estatus;}
开发者ID:voidcycles,项目名称:void,代码行数:101,


示例3: main

intmain(){   printf("/n*** Checking HDF5 attribute functions some more./n");   printf("*** Creating tst_xplatform2_3.nc with HDF only...");   {      hid_t fapl_id, fcpl_id;      size_t chunk_cache_size = MY_CHUNK_CACHE_SIZE;      size_t chunk_cache_nelems = CHUNK_CACHE_NELEMS;      float chunk_cache_preemption = CHUNK_CACHE_PREEMPTION;      hid_t fileid, grpid, attid, spaceid;      hid_t s1_typeid, vlen_typeid, s3_typeid;      hid_t file_typeid1[NUM_OBJ], native_typeid1[NUM_OBJ];      hid_t file_typeid2, native_typeid2;      hsize_t num_obj;      H5O_info_t obj_info;      char obj_name[STR_LEN + 1];      hsize_t dims[1] = {ATT_LEN}; /* netcdf attributes always 1-D. */      struct s1      {	 float x;	 double y;      };      struct s3      {	 hvl_t data[NUM_VL];      };      /* cvc stands for "Compound with Vlen of Compound." */      struct s3 cvc_out[ATT_LEN];      int i, j, k;      /* Create some output data: a struct s3 array (length ATT_LEN)       * which holds an array of vlen (length NUM_VL) of struct s1. */      for (i = 0; i < ATT_LEN; i++)	 for (j = 0; j < NUM_VL; j++)	 {	    cvc_out[i].data[j].len = i + 1; 	    if (!(cvc_out[i].data[j].p = calloc(sizeof(struct s1), cvc_out[i].data[j].len))) ERR;	    for (k = 0; k < cvc_out[i].data[j].len; k++)	    {	       ((struct s1 *)cvc_out[i].data[j].p)[k].x = 42.42;	       ((struct s1 *)cvc_out[i].data[j].p)[k].y = 2.0;	    }	 }      /* Create the HDF5 file, with cache control, creation order, and       * all the timmings. 
*/      if ((fapl_id = H5Pcreate(H5P_FILE_ACCESS)) < 0) ERR;      if (H5Pset_fclose_degree(fapl_id, H5F_CLOSE_STRONG)) ERR;      if (H5Pset_cache(fapl_id, 0, chunk_cache_nelems, chunk_cache_size, 		       chunk_cache_preemption) < 0) ERR;      if (H5Pset_libver_bounds(fapl_id, H5F_LIBVER_LATEST, 			       H5F_LIBVER_LATEST) < 0) ERR;      if ((fcpl_id = H5Pcreate(H5P_FILE_CREATE)) < 0) ERR;      if (H5Pset_link_creation_order(fcpl_id, (H5P_CRT_ORDER_TRACKED | 					       H5P_CRT_ORDER_INDEXED)) < 0) ERR;      if (H5Pset_attr_creation_order(fcpl_id, (H5P_CRT_ORDER_TRACKED | 					       H5P_CRT_ORDER_INDEXED)) < 0) ERR;      if ((fileid = H5Fcreate(FILE_NAME, H5F_ACC_TRUNC, fcpl_id, fapl_id)) < 0) ERR;      if (H5Pclose(fapl_id) < 0) ERR;      if (H5Pclose(fcpl_id) < 0) ERR;      /* Open the root group. */      if ((grpid = H5Gopen2(fileid, "/", H5P_DEFAULT)) < 0) ERR;      /* Create the compound type for struct s1. */      if ((s1_typeid = H5Tcreate(H5T_COMPOUND, sizeof(struct s1))) < 0) ERR;      if (H5Tinsert(s1_typeid, X_NAME, offsetof(struct s1, x), 		    H5T_NATIVE_FLOAT) < 0) ERR;      if (H5Tinsert(s1_typeid, Y_NAME, offsetof(struct s1, y), 		    H5T_NATIVE_DOUBLE) < 0) ERR;      if (H5Tcommit(grpid, S1_TYPE_NAME, s1_typeid) < 0) ERR;      /* Create a vlen type. Its a vlen of struct s1. */      if ((vlen_typeid = H5Tvlen_create(s1_typeid)) < 0) ERR;      if (H5Tcommit(grpid, VLEN_TYPE_NAME, vlen_typeid) < 0) ERR;      /* Create the struct s3 type, which contains the vlen. */      if ((s3_typeid = H5Tcreate(H5T_COMPOUND, sizeof(struct s3))) < 0) ERR;      if (H5Tinsert(s3_typeid, VL_NAME, offsetof(struct s3, data), 		    vlen_typeid) < 0) ERR;      if (H5Tcommit(grpid, S3_TYPE_NAME, s3_typeid) < 0) ERR;      /* Create an attribute of this new type. 
*/      if ((spaceid = H5Screate_simple(1, dims, NULL)) < 0) ERR;      if ((attid = H5Acreate(grpid, S3_ATT_NAME, s3_typeid, spaceid, 			     H5P_DEFAULT)) < 0) ERR;      if (H5Awrite(attid, s3_typeid, cvc_out) < 0) ERR;      /* Close the types. */      if (H5Tclose(s1_typeid) < 0 ||	  H5Tclose(vlen_typeid) < 0 ||	  H5Tclose(s3_typeid) < 0) ERR;      /* Close the att. */      if (H5Aclose(attid) < 0) ERR;            /* Close the space. */      if (H5Sclose(spaceid) < 0) ERR;//.........这里部分代码省略.........
开发者ID:dschwen,项目名称:libmesh,代码行数:101,


示例4: test_objnames

/* * test_objnames * Tests that UTF-8 can be used for object names in the file. * Tests groups, datasets, named datatypes, and soft links. * Note that this test doesn't actually mark the names as being * in UTF-8.  At the time this test was written, that feature * didn't exist in HDF5, and when the character encoding property * was added to links it didn't change how they were stored in the file, * -JML 2/2/2006 */void test_objnames(hid_t fid, const char* string){  hid_t grp_id, grp1_id, grp2_id, grp3_id;  hid_t type_id, dset_id, space_id;  char read_buf[MAX_STRING_LENGTH];  char path_buf[MAX_PATH_LENGTH];  hsize_t dims=1;  hobj_ref_t obj_ref;  herr_t ret;  /* Create a group with a UTF-8 name */  grp_id = H5Gcreate2(fid, string, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);  CHECK(grp_id, FAIL, "H5Gcreate2");  /* Set a comment on the group to test that we can access the group   * Also test that UTF-8 comments can be read.   */  ret = H5Oset_comment_by_name(fid, string, string, H5P_DEFAULT);  CHECK(ret, FAIL, "H5Oset_comment_by_name");  ret = H5Oget_comment_by_name(fid, string, read_buf, (size_t)MAX_STRING_LENGTH, H5P_DEFAULT);  CHECK(ret, FAIL, "H5Oget_comment_by_name");  ret = H5Gclose(grp_id);  CHECK(ret, FAIL, "H5Gclose");  VERIFY(HDstrcmp(string, read_buf), 0, "strcmp");  /* Create a new dataset with a UTF-8 name */  grp1_id = H5Gcreate2(fid, GROUP1_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);  CHECK(grp1_id, FAIL, "H5Gcreate2");  space_id = H5Screate_simple(RANK, &dims, NULL);  CHECK(space_id, FAIL, "H5Screate_simple");  dset_id = H5Dcreate2(grp1_id, string, H5T_NATIVE_INT, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);  CHECK(dset_id, FAIL, "H5Dcreate2");  /* Make sure that dataset can be opened again */  ret = H5Dclose(dset_id);  CHECK(ret, FAIL, "H5Dclose");  ret = H5Sclose(space_id);  CHECK(ret, FAIL, "H5Sclose");  dset_id = H5Dopen2(grp1_id, string, H5P_DEFAULT);  CHECK(ret, FAIL, "H5Dopen2");  ret = H5Dclose(dset_id);  CHECK(ret, FAIL, "H5Dclose");  ret = 
H5Gclose(grp1_id);  CHECK(ret, FAIL, "H5Gclose");  /* Do the same for a named datatype */  grp2_id = H5Gcreate2(fid, GROUP2_NAME, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);  CHECK(grp2_id, FAIL, "H5Gcreate2");  type_id = H5Tcreate(H5T_OPAQUE, (size_t)1);  CHECK(type_id, FAIL, "H5Tcreate");  ret = H5Tcommit2(grp2_id, string, type_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);  CHECK(type_id, FAIL, "H5Tcommit2");  ret = H5Tclose(type_id);  CHECK(type_id, FAIL, "H5Tclose");  type_id = H5Topen2(grp2_id, string, H5P_DEFAULT);  CHECK(type_id, FAIL, "H5Topen2");  ret = H5Tclose(type_id);  CHECK(type_id, FAIL, "H5Tclose");  /* Don't close the group -- use it to test that object references   * can refer to objects named in UTF-8 */  space_id = H5Screate_simple(RANK, &dims, NULL);  CHECK(space_id, FAIL, "H5Screate_simple");  dset_id = H5Dcreate2(grp2_id, DSET3_NAME, H5T_STD_REF_OBJ, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);  CHECK(ret, FAIL, "H5Dcreate2");  /* Create reference to named datatype */  ret = H5Rcreate(&obj_ref, grp2_id, string, H5R_OBJECT, (hid_t)-1);  CHECK(ret, FAIL, "H5Rcreate");  /* Write selection and read it back*/  ret = H5Dwrite(dset_id, H5T_STD_REF_OBJ, H5S_ALL, H5S_ALL, H5P_DEFAULT, &obj_ref);  CHECK(ret, FAIL, "H5Dwrite");  ret = H5Dread(dset_id, H5T_STD_REF_OBJ, H5S_ALL, H5S_ALL, H5P_DEFAULT, &obj_ref);  CHECK(ret, FAIL, "H5Dread");  /* Ensure that we can open named datatype using object reference */  type_id = H5Rdereference2(dset_id, H5P_DEFAULT, H5R_OBJECT, &obj_ref);  CHECK(type_id, FAIL, "H5Rdereference2");  ret = H5Tcommitted(type_id);  VERIFY(ret, 1, "H5Tcommitted");  ret = H5Tclose(type_id);  CHECK(type_id, FAIL, "H5Tclose");//.........这里部分代码省略.........
开发者ID:ElaraFX,项目名称:hdf5,代码行数:101,


示例5: e5_read_grid_list

//.........这里部分代码省略.........    if(list_name && strlen(list_name))    {        e5_list_group_id = e5_create_group(e5_group_id, list_name);        close_group = 1;    }    else    {        e5_list_group_id = e5_group_id;        close_group = 0;    }    for(i = 0; grid_list && grid_list[i].name != 0; i++)    {        e5_grid_dataset* grid = &grid_list[i];        if(grid->name == 0 || strlen(grid->name) < 1)            continue;        e5_dataset_id = H5Dopen(e5_list_group_id, grid->name);        if (e5_dataset_id < 0)        {            e5_status = E5_INVALID_DATASET;            e5_error(e5_list_group_id, e5_status, "Failed to open grid dataset '%s'/n",  grid->name);            return e5_status;        }        if(!grid->data)        {            e5_status = E5_INVALID_POINTER;            e5_error(e5_dataspace_id, e5_status, "Failed to provide pointer for reading '%s' from E5 data file/n", grid->name);            break;        }        e5_dataspace_id = H5Dget_space(e5_dataset_id);        e5_type_id = H5Dget_type(e5_dataset_id);        H5Sget_simple_extent_dims(e5_dataspace_id, h5_min_dim, h5_max_dim);        for(d = 0; d < 3; d++)        {            grid->dim[d] = h5_min_dim[d] >= h5_max_dim[d] ? h5_min_dim[d] : h5_max_dim[d];            grid->dim[d] = grid->dim[d] < 1 ? 
1 : grid->dim[d];        }        grid->type = e5_convert_hdf_type(e5_type_id);        switch(grid->type)        {        case E5_TYPE_FLOAT:        {            e5_info(e5_group_id, "Reading grid [type='float', name='%s', dim='%u %u %u']/n",                    grid->name, grid->dim[0], grid->dim[1], grid->dim[2]);            e5_memspace_id = H5Screate_simple(3, h5_min_dim, h5_max_dim);            h5_status = H5Dread(e5_dataset_id, H5T_NATIVE_FLOAT, e5_memspace_id, e5_dataspace_id, H5P_DEFAULT, (grid->data));            if (h5_status < 0)            {                e5_status = E5_READ_FAILURE;                e5_error(e5_dataset_id, e5_status, "Failed to read '%s' from F5 data file/n", grid->name);            }            H5Sclose(e5_memspace_id);            break;        }        case E5_TYPE_DOUBLE:        {            e5_info(e5_group_id, "Reading grid [type='double', name='%s', dim='%u %u %u']/n",                    grid->name, grid->dim[0], grid->dim[1], grid->dim[2]);            e5_memspace_id = H5Screate_simple(3, h5_min_dim, h5_max_dim);            h5_status = H5Dread(e5_dataset_id, H5T_NATIVE_DOUBLE, e5_memspace_id, e5_dataspace_id, H5P_DEFAULT, (grid->data));            if (h5_status < 0)            {                e5_status = E5_READ_FAILURE;                e5_error(e5_dataset_id, e5_status, "Failed to read '%s' from F5 data file/n", grid->name);            }            H5Sclose(e5_memspace_id);            break;        }        case E5_TYPE_INVALID:        default:        {            e5_status = E5_INVALID_TYPE;            e5_error(e5_dataset_id, e5_status, "Invalid type for grid '%s' data/n", grid->name);            break;        }        };        log_scale = 0;        if(e5_is_valid_attr(e5_dataset_id, "log10"))            e5_read_attr_int(e5_dataset_id, "log10", &log_scale);        grid->scale = log_scale ? 
E5_VALUE_SCALE_LOG10 : E5_VALUE_SCALE_LINEAR;        H5Sclose(e5_dataspace_id);        H5Dclose(e5_dataset_id);        H5Tclose(e5_type_id);    }    if(list_name)        e5_close_group(e5_list_group_id);    return e5_status;}
开发者ID:voidcycles,项目名称:void,代码行数:101,


示例6: cow_histogram_dumphdf5

void cow_histogram_dumphdf5(cow_histogram *h, char *fn, char *gn)
// -----------------------------------------------------------------------------
// Dumps the histogram to the HDF5 file named `fn`, under the group
// `gn`/h->fullname. The function uses rank 0 to do the write.
// -----------------------------------------------------------------------------
{
#if (COW_HDF5)
  /* Only histograms that have been committed and sealed hold final data. */
  if (!(h->committed && h->sealed)) {
    return;
  }
  char gname[1024];
  int rank = 0;
  snprintf(gname, 1024, "%s/%s", gn, h->nickname);
#if (COW_MPI)
  if (cow_mpirunning()) {
    MPI_Comm_rank(h->comm, &rank);
  }
#endif
  if (rank == 0) {
    // -------------------------------------------------------------------------
    // The write functions assume the file is already created. Have master
    // create the file if it's not there already.
    // -------------------------------------------------------------------------
    FILE *testf = fopen(fn, "r");
    hid_t fid;
    if (testf == NULL) {
      fid = H5Fcreate(fn, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
    }
    else {
      fclose(testf);
      fid = H5Fopen(fn, H5F_ACC_RDWR, H5P_DEFAULT);
    }
    /* Replace an existing group of the same name rather than failing.
       NOTE: was "/n" (scrape-corrupted escape) -- restored to "\n". */
    if (H5Lexists_safe(fid, gname)) {
      printf("[%s] writing histogram as HDF5 to %s/%s (clobber existing)\n",
	     MODULE, fn, gname);
      H5Gunlink(fid, gname);
    }
    else {
      printf("[%s] writing histogram as HDF5 to %s/%s\n", MODULE, fn, gname);
    }
    /* Create intermediate groups along `gname` automatically. */
    hid_t gcpl = H5Pcreate(H5P_LINK_CREATE);
    H5Pset_create_intermediate_group(gcpl, 1);
    hid_t memb = H5Gcreate(fid, gname, gcpl, H5P_DEFAULT, H5P_DEFAULT);
    H5Pclose(gcpl);
    H5Gclose(memb);
    H5Fclose(fid);
  }
  else {
    /* Non-root ranks do not participate in the write. */
    return;
  }
  // Create a group to represent this histogram, and an attribute to name it
  // ---------------------------------------------------------------------------
  hid_t fid = H5Fopen(fn, H5F_ACC_RDWR, H5P_DEFAULT);
  hid_t grp = H5Gopen(fid, gname, H5P_DEFAULT);
  if (h->fullname != NULL) {
    /* Fixed-length string attribute sized to the name (no NUL stored). */
    hid_t aspc = H5Screate(H5S_SCALAR);
    hid_t strn = H5Tcopy(H5T_C_S1);
    H5Tset_size(strn, strlen(h->fullname));
    hid_t attr = H5Acreate(grp, "fullname", strn, aspc, H5P_DEFAULT, H5P_DEFAULT);
    H5Awrite(attr, strn, h->fullname); // write the full name
    H5Aclose(attr);
    H5Tclose(strn);
    H5Sclose(aspc);
  }
  // Create the data sets in the group: binloc (bin centers) and binval (values)
  // ---------------------------------------------------------------------------
  double *binlocX = h->binlocx;
  double *binlocY = h->binlocy;
  double *binvalV = h->binvalv;
  hsize_t sizeX[2] = { h->nbinsx };
  hsize_t sizeY[2] = { h->nbinsy };
  hsize_t sizeZ[2] = { h->nbinsx, h->nbinsy };
  hid_t fspcZ = H5Screate_simple(h->n_dims, sizeZ, NULL);
  if (h->n_dims >= 1) {
    hid_t fspcX = H5Screate_simple(1, sizeX, NULL);
    hid_t dsetbinX = H5Dcreate(grp, "binlocX", H5T_NATIVE_DOUBLE, fspcX,
			       H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    H5Dwrite(dsetbinX, H5T_NATIVE_DOUBLE, fspcX, fspcX, H5P_DEFAULT, binlocX);
    H5Dclose(dsetbinX);
    H5Sclose(fspcX);
  }
  if (h->n_dims >= 2) {
    hid_t fspcY = H5Screate_simple(1, sizeY, NULL);
    hid_t dsetbinY = H5Dcreate(grp, "binlocY", H5T_NATIVE_DOUBLE, fspcY,
			       H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
    H5Dwrite(dsetbinY, H5T_NATIVE_DOUBLE, fspcY, fspcY, H5P_DEFAULT, binlocY);
    H5Dclose(dsetbinY);
    H5Sclose(fspcY);
  }
  /* Bin values are written with the histogram's full rank (1-D or 2-D). */
  hid_t dsetvalV = H5Dcreate(grp, "binval", H5T_NATIVE_DOUBLE, fspcZ,
			     H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
  H5Dwrite(dsetvalV, H5T_NATIVE_DOUBLE, fspcZ, fspcZ, H5P_DEFAULT, binvalV);
  H5Dclose(dsetvalV);
  H5Sclose(fspcZ);
  H5Gclose(grp);
  H5Fclose(fid);
#endif
}
开发者ID:darien0,项目名称:cow,代码行数:98,


示例7: main

intmain(void) {   hid_t fid;                         /* File, group, datasets, datatypes */   hid_t gid_a;                       /* and  dataspaces identifiers   */   hid_t did_b, sid_b, tid_b;   hid_t did_r, tid_r, sid_r;   H5O_type_t obj_type;   herr_t status;   hobj_ref_t *wbuf; /* buffer to write to disk */   hobj_ref_t *rbuf; /* buffer to read from disk */   hsize_t dim_r[1];   hsize_t dim_b[2];   /*    *  Create a file using default properties.    */   fid = H5Fcreate(H5FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);   /*    *  Create  group "A" in the file.    */   gid_a = H5Gcreate2(fid, "A", H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);  /*   *  Create dataset "B" in the file.   */   dim_b[0] = 2;   dim_b[1] = 6;   sid_b = H5Screate_simple(2, dim_b, NULL);   did_b = H5Dcreate2(fid, "B", H5T_NATIVE_FLOAT, sid_b, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);   /*    *  Create dataset "R" to store references to the objects "A" and "B".    */   dim_r[0] = 2;   sid_r = H5Screate_simple(1, dim_r, NULL);   tid_r = H5Tcopy(H5T_STD_REF_OBJ);   did_r = H5Dcreate2(fid, "R", tid_r, sid_r, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);   /*    *  Allocate write and read buffers.    */   wbuf = (hobj_ref_t *)malloc(sizeof(hobj_ref_t) * 2);   rbuf = (hobj_ref_t *)malloc(sizeof(hobj_ref_t) * 2);   /*    *  Create references to the group "A" and dataset "B"    *  and store them in the wbuf.    */   H5Rcreate(&wbuf[0], fid, "A", H5R_OBJECT, (hid_t)-1);   H5Rcreate(&wbuf[1], fid, "B", H5R_OBJECT, (hid_t)-1);   /*    *  Write dataset R using default transfer properties.    */   status = H5Dwrite(did_r, H5T_STD_REF_OBJ, H5S_ALL, H5S_ALL, H5P_DEFAULT, wbuf);   /*    *  Close all objects.    */   H5Gclose(gid_a);   H5Sclose(sid_b);   H5Dclose(did_b);   H5Tclose(tid_r);   H5Sclose(sid_r);   H5Dclose(did_r);   H5Fclose(fid);   /*    * Reopen the file.    */   fid = H5Fopen(H5FILE_NAME, H5F_ACC_RDWR, H5P_DEFAULT);   /*    *  Open and read dataset "R".    
*/   did_r  = H5Dopen2(fid, "R", H5P_DEFAULT);   status = H5Dread(did_r, H5T_STD_REF_OBJ, H5S_ALL, H5S_ALL, H5P_DEFAULT, rbuf);   /*    * Find the type of referenced objects.    */    status = H5Rget_obj_type2(did_r, H5R_OBJECT, &rbuf[0], &obj_type);    if(obj_type == H5O_TYPE_GROUP)        printf("First dereferenced object is a group. /n");    status = H5Rget_obj_type2(did_r, H5R_OBJECT, &rbuf[1], &obj_type);    if(obj_type == H5O_TYPE_DATASET)        printf("Second dereferenced object is a dataset. /n");   /*    *  Get datatype of the dataset "B"    */   did_b = H5Rdereference2(did_r, H5P_DEFAULT, H5R_OBJECT, &rbuf[1]);//.........这里部分代码省略.........
开发者ID:ElaraFX,项目名称:hdf5,代码行数:101,


示例8: diff_datasetid

//.........这里部分代码省略.........                    hs_offset[i - 1] += hs_size[i - 1];                    if(hs_offset[i - 1] == dims1[i - 1])                        hs_offset[i - 1] = 0;                    else                        carry = 0;                } /* i */            } /* elmtno */            H5Sclose(sm_space);        } /* hyperslab read */    } /*can_compare*/    /*-------------------------------------------------------------------------     * close     *-------------------------------------------------------------------------     */    h5difftrace("compare attributes?/n");    /* free */    if(buf1 != NULL) {        HDfree(buf1);        buf1 = NULL;    } /* end if */    if(buf2 != NULL) {        HDfree(buf2);        buf2 = NULL;    } /* end if */    if(sm_buf1 != NULL) {        HDfree(sm_buf1);        sm_buf1 = NULL;    } /* end if */    if(sm_buf2 != NULL) {        HDfree(sm_buf2);        sm_buf2 = NULL;    } /* end if */    H5E_BEGIN_TRY {        H5Sclose(sid1);        H5Sclose(sid2);        H5Tclose(f_tid1);        H5Tclose(f_tid2);        H5Tclose(m_tid1);        H5Tclose(m_tid2);    } H5E_END_TRY;    h5difftrace("diff_datasetid finish/n");    return nfound;error:    options->err_stat=1;    /* free */    if (buf1!=NULL)    {        /* reclaim any VL memory, if necessary */        if(vl_data)            H5Dvlen_reclaim(m_tid1, sid1, H5P_DEFAULT, buf1);        HDfree(buf1);        buf1=NULL;    }    if (buf2!=NULL)    {        /* reclaim any VL memory, if necessary */        if(vl_data)            H5Dvlen_reclaim(m_tid2, sid2, H5P_DEFAULT, buf2);        HDfree(buf2);        buf2=NULL;    }    if (sm_buf1!=NULL)    {        /* reclaim any VL memory, if necessary */        if(vl_data)            H5Dvlen_reclaim(m_tid1, sm_space, H5P_DEFAULT, sm_buf1);        HDfree(sm_buf1);        sm_buf1=NULL;    }    if (sm_buf2!=NULL)    {        /* reclaim any VL memory, if necessary */        if(vl_data)            H5Dvlen_reclaim(m_tid1, sm_space, H5P_DEFAULT, 
sm_buf2);        HDfree(sm_buf2);        sm_buf2=NULL;    }    /* disable error reporting */    H5E_BEGIN_TRY {        H5Sclose(sid1);        H5Sclose(sid2);        H5Tclose(f_tid1);        H5Tclose(f_tid2);        H5Tclose(m_tid1);        H5Tclose(m_tid2);        /* enable error reporting */    } H5E_END_TRY;    h5difftrace("diff_datasetid errored/n");    return nfound;}
开发者ID:MichaelToal,项目名称:hdf5,代码行数:101,


示例9: diff_can_type

//.........这里部分代码省略.........                parallel_print("/n");            }        }        can_compare = 0;        options->not_cmp = 1;        return can_compare;    }    /*-------------------------------------------------------------------------    * maximum dimensions; just give a warning    *-------------------------------------------------------------------------    */    if (maxdim1 && maxdim2 && maxdim_diff==1 && obj1_name )    {        if (options->m_verbose) {            parallel_print( "Warning: different maximum dimensions/n");            parallel_print("<%s> has max dimensions ", obj1_name);            print_dimensions(rank1,maxdim1);            parallel_print("/n");            parallel_print("<%s> has max dimensions ", obj2_name);            print_dimensions(rank2,maxdim2);            parallel_print("/n");        }    }    if ( tclass1 == H5T_COMPOUND )    {        int   nmembs1;        int   nmembs2;        int   j;        hid_t memb_type1;        hid_t memb_type2;        nmembs1 = H5Tget_nmembers(f_tid1);        nmembs2 = H5Tget_nmembers(f_tid2);        if ( nmembs1 != nmembs2 )        {            if ( (options->m_verbose||options->m_list_not_cmp) && obj1_name && obj2_name)            {                parallel_print("Not comparable: <%s> has %d members ", obj1_name, nmembs1);                parallel_print("<%s> has %d members ", obj2_name, nmembs2);                parallel_print("/n");            }            can_compare = 0;            options->not_cmp = 1;            return can_compare;        }        for (j = 0; j < nmembs1; j++)        {            memb_type1 = H5Tget_member_type(f_tid1, (unsigned)j);            memb_type2 = H5Tget_member_type(f_tid2, (unsigned)j);            if (diff_can_type(memb_type1,                memb_type2,                rank1,                rank2,                dims1,                dims2,                maxdim1,                maxdim2,                obj1_name,                obj2_name,                options,  
              1)!=1)            {                can_compare = 0;                options->not_cmp = 1;                H5Tclose(memb_type1);                H5Tclose(memb_type2);                return can_compare;            }            H5Tclose(memb_type1);            H5Tclose(memb_type2);        }    }    return can_compare;}
开发者ID:MichaelToal,项目名称:hdf5,代码行数:101,


示例10: add_attrs

/*
 * Add a standard set of attributes to the HDF5 object `objid`:
 *  - four variable-length string attributes (fill value only, NULL
 *    pointer, empty allocated string, short string)
 *  - one fixed-length (10 byte) string attribute (fill value only)
 *  - one native int attribute (fill value only)
 *
 * Returns 0 on success, -1 on failure; on failure any partially
 * created ids are closed with HDF5 error reporting suppressed.
 */
int
add_attrs(hid_t objid)
{
    hid_t scalar_spaceid = -1;
    hid_t vlstr_typeid = -1, fixstr_typeid = -1;
    char *vlstr;
    hid_t attid = -1;

    /* Create scalar dataspace */
    if ((scalar_spaceid = H5Screate(H5S_SCALAR)) < 0) ERR_GOTO;

    /* Create string datatypes */
    if ((vlstr_typeid = H5Tcreate(H5T_STRING, (size_t)H5T_VARIABLE)) < 0) ERR_GOTO;
    if ((fixstr_typeid = H5Tcreate(H5T_STRING, (size_t)10)) < 0) ERR_GOTO;

    /* VL string attribute #1: no write, value is the fill value */
    if ((attid = H5Acreate2(objid, VSTR_ATT1_NAME, vlstr_typeid, scalar_spaceid, H5P_DEFAULT, H5P_DEFAULT)) < 0) ERR_GOTO;
    if (H5Aclose(attid) < 0) ERR_GOTO;

    /* VL string attribute #2: written from a NULL pointer */
    if ((attid = H5Acreate2(objid, VSTR_ATT2_NAME, vlstr_typeid, scalar_spaceid, H5P_DEFAULT, H5P_DEFAULT)) < 0) ERR_GOTO;
    vlstr = NULL;
    if (H5Awrite(attid, vlstr_typeid, &vlstr) < 0) ERR_GOTO;
    if (H5Aclose(attid) < 0) ERR_GOTO;

    /* VL string attribute #3: written from an empty, allocated string.
     * Bug fixes vs. original: check the allocation, and terminate with
     * a real NUL ('\0'; the original had the mangled escape '/0'). */
    if ((attid = H5Acreate2(objid, VSTR_ATT3_NAME, vlstr_typeid, scalar_spaceid, H5P_DEFAULT, H5P_DEFAULT)) < 0) ERR_GOTO;
    if ((vlstr = malloc(10)) == NULL) ERR_GOTO;
    *vlstr = '\0';
    if (H5Awrite(attid, vlstr_typeid, &vlstr) < 0) ERR_GOTO;
    if (H5Aclose(attid) < 0) ERR_GOTO;

    /* VL string attribute #4: written from a short string, reusing the buffer */
    if ((attid = H5Acreate2(objid, VSTR_ATT4_NAME, vlstr_typeid, scalar_spaceid, H5P_DEFAULT, H5P_DEFAULT)) < 0) ERR_GOTO;
    strcpy(vlstr, "foo");
    if (H5Awrite(attid, vlstr_typeid, &vlstr) < 0) ERR_GOTO;
    free(vlstr);
    if (H5Aclose(attid) < 0) ERR_GOTO;

    /* Fixed-length string attribute: no write, value is the fill value */
    if ((attid = H5Acreate2(objid, FSTR_ATT_NAME, fixstr_typeid, scalar_spaceid, H5P_DEFAULT, H5P_DEFAULT)) < 0) ERR_GOTO;
    if (H5Aclose(attid) < 0) ERR_GOTO;

    /* Native integer attribute: no write, value is the fill value */
    if ((attid = H5Acreate2(objid, INT_ATT_NAME, H5T_NATIVE_INT, scalar_spaceid, H5P_DEFAULT, H5P_DEFAULT)) < 0) ERR_GOTO;
    if (H5Aclose(attid) < 0) ERR_GOTO;

    /* Clean up objects created */
    if (H5Sclose(scalar_spaceid) < 0) ERR_GOTO;
    if (H5Tclose(vlstr_typeid) < 0) ERR_GOTO;
    if (H5Tclose(fixstr_typeid) < 0) ERR_GOTO;

    return(0);

error:
    /* Best-effort cleanup; suppress HDF5 error stack while closing */
    H5E_BEGIN_TRY {
        H5Aclose(attid);
        H5Sclose(scalar_spaceid);
        H5Tclose(vlstr_typeid);
        H5Tclose(fixstr_typeid);
    } H5E_END_TRY;

    return(-1);
}
开发者ID:BJangeofan,项目名称:netcdf-c,代码行数:66,


示例11: process_cmpd_fields

/*------------------------------------------------------------------------- * Function:  process_cmpd_fields * * Purpose: To check whether the fields selected in "g_list_of_fields" *	    are valid fields associated with the dataset. * * Return: 0 on success; negative on failure * * Programmer:  Vailin Choi; August 2010 * *------------------------------------------------------------------------- */static herr_tprocess_cmpd_fields(hid_t fid, char *dsetname){    hid_t did=-1;			/* dataset id */    hid_t dtid=-1, tid=-1;	/* dataset's data type id */    size_t len;		/* number of comma-separated fields in "g_list_of_fields" */    herr_t ret_value = SUCCEED;	/* Return value */    HDassert(g_list_of_fields && *g_list_of_fields);        /* Open the dataset */    if((did = H5Dopen2(fid, dsetname, H5P_DEFAULT)) < 0) {	error_msg("error in opening dataset /"%s/"/n", dsetname);	ret_value = FAIL;	goto done;    }    /* Get the dataset's datatype  */    if(((dtid = H5Dget_type(did)) < 0) || (tid = H5Tget_native_type(dtid, H5T_DIR_DEFAULT)) < 0) {	error_msg("error in getting dataset's datatype/n");        ret_value = FAIL;        goto done;    }    /* Check to make sure that the dataset's datatype is compound type */    if(H5Tget_class(dtid) != H5T_COMPOUND) {	error_msg("dataset should be compound type for <list_of_fields>/n");	ret_value = FAIL;	goto done;		    }    /* Make a copy of "g_list_of_fields" */    if((g_dup_fields = HDstrdup(g_list_of_fields)) == NULL) {	error_msg("error in duplicating g_list_of_fields/n");        ret_value = FAIL;	goto done;		    }    /* Estimate the number of comma-separated fields in "g_list of_fields" */    len = HDstrlen(g_list_of_fields)/2 + 2;    /* Allocate memory for a list vector of H5LD_memb_t structures to store "g_list_of_fields" info */    if((g_listv = (H5LD_memb_t **)HDcalloc(len, sizeof(H5LD_memb_t *))) == NULL) {	error_msg("error in allocating memory for H5LD_memb_t/n");        ret_value = FAIL;	goto done;		    }    /* Process and store 
info for "g_listv" */    if(H5LD_construct_vector(g_dup_fields, g_listv, tid) < 0) {	error_msg("error in processing <list_of_fields>/n");	ret_value = FAIL;        goto done;    }    /* Will free memory for g_listv and g_dup_fields when exiting from h5watch */done:    /* Closing */    H5E_BEGIN_TRY	H5Tclose(dtid);	H5Tclose(tid);	H5Dclose(did);    H5E_END_TRY    return(ret_value);} /* process_cmpd_fields() */
开发者ID:FilipeMaia,项目名称:hdf5,代码行数:77,


示例12: main

intmain(){    printf("/n*** Creating file with datasets & attributes that have scalar dataspaces...");    {	hid_t fileid;        hid_t fcplid;	hid_t dsetid;        hid_t dcplid;	hid_t scalar_spaceid;        hid_t vlstr_typeid, fixstr_typeid;	hid_t attid;        /* Create scalar dataspace */	if ((scalar_spaceid = H5Screate(H5S_SCALAR)) < 0) ERR;        /* Set creation ordering for file, so we can revise its contents later */        if ((fcplid = H5Pcreate(H5P_FILE_CREATE)) < 0) ERR;        if (H5Pset_link_creation_order(fcplid, H5P_CRT_ORDER_TRACKED) < 0) ERR;        if (H5Pset_attr_creation_order(fcplid, H5P_CRT_ORDER_TRACKED) < 0) ERR;		/* Create new file, using default properties */	if ((fileid = H5Fcreate(FILE_NAME, H5F_ACC_TRUNC, fcplid, H5P_DEFAULT)) < 0) ERR;	        /* Close file creation property list */        if (H5Pclose(fcplid) < 0) ERR;        /* Create variable-length string datatype */        if ((vlstr_typeid = H5Tcreate(H5T_STRING, (size_t)H5T_VARIABLE)) < 0) ERR;        /* Create fixed-length string datatype */        if ((fixstr_typeid = H5Tcreate(H5T_STRING, (size_t)10)) < 0) ERR;        /* Set creation ordering for dataset, so we can revise its contents later */        if ((dcplid = H5Pcreate(H5P_DATASET_CREATE)) < 0) ERR;        if (H5Pset_attr_creation_order(dcplid, H5P_CRT_ORDER_TRACKED) < 0) ERR;	        /* Create scalar dataset with VL string datatype */        if ((dsetid = H5Dcreate2(fileid, VSTR_VAR1_NAME, vlstr_typeid, scalar_spaceid, H5P_DEFAULT, dcplid, H5P_DEFAULT)) < 0) ERR;        /* Add attributes to dataset */        if (add_attrs(dsetid) < 0) ERR;        /* Close VL string dataset */        if (H5Dclose(dsetid) < 0) ERR;        /* Create scalar dataset with fixed-length string datatype */        if ((dsetid = H5Dcreate2(fileid, FSTR_VAR_NAME, fixstr_typeid, scalar_spaceid, H5P_DEFAULT, dcplid, H5P_DEFAULT)) < 0) ERR;        /* Add attributes to dataset */        if (add_attrs(dsetid) < 0) ERR;        /* Close fixed-length 
string dataset */        if (H5Dclose(dsetid) < 0) ERR;        /* Create scalar dataset with native integer datatype */        if ((dsetid = H5Dcreate2(fileid, INT_VAR_NAME, H5T_NATIVE_INT, scalar_spaceid, H5P_DEFAULT, dcplid, H5P_DEFAULT)) < 0) ERR;        /* Add attributes to dataset */        if (add_attrs(dsetid) < 0) ERR;        /* Close native integer dataset */        if (H5Dclose(dsetid) < 0) ERR;        /* Add attributes to root group */        if (add_attrs(fileid) < 0) ERR;        /* Close dataset creation property list */        if (H5Pclose(dcplid) < 0) ERR;        /* Close string datatypes */        if (H5Tclose(vlstr_typeid) < 0) ERR;        if (H5Tclose(fixstr_typeid) < 0) ERR;        /* Close rest */	if (H5Sclose(scalar_spaceid) < 0) ERR;	if (H5Fclose(fileid) < 0) ERR;    }    SUMMARIZE_ERR;    printf("*** Checking accessing file through netCDF-4 API...");    {	int ncid, varid;        size_t len;        nc_type type;        int ndims;        char *vlstr;        int x;	if (nc_open(FILE_NAME, NC_NOWRITE, &ncid)) ERR;        /* Check the global attributes are OK *///.........这里部分代码省略.........
开发者ID:BJangeofan,项目名称:netcdf-c,代码行数:101,


示例13: _get_all_samples

/*
 * Gather all samples of one profile series into a single contiguous
 * buffer of per-sample records.
 *
 * gid_series - open HDF5 group holding one sub-group per sample
 * nam_series - series name (not used by this routine)
 * type       - profile type; selects the hdf5_api_ops_t operations
 * nsamples   - number of sample sub-groups to read
 *
 * Returns an xmalloc'ed buffer with nsamples merged records (caller
 * must free it), or NULL on failure.
 */
static void* _get_all_samples(hid_t gid_series, char* nam_series, uint32_t type,
			      int nsamples)
{
	void*   data = NULL;
	hid_t   id_data_set, dtyp_memory, g_sample, sz_dest;
	herr_t  ec;
	int     smpx, len;
	void    *data_prior = NULL, *data_cur = NULL;
	char 	name_sample[MAX_GROUP_NAME+1];
	hdf5_api_ops_t* ops;

	/* Operations table for this profile type */
	ops = profile_factory(type);
	if (ops == NULL) {
		error("Failed to create operations for %s",
		      acct_gather_profile_type_to_string(type));
		return NULL;
	}

	/* Combined output buffer, sized for all samples */
	data = (*(ops->init_job_series))(nsamples);
	if (data == NULL) {
		xfree(ops);
		error("Failed to get memory for combined data");
		return NULL;
	}

	/* Memory datatype used to read each sample dataset */
	dtyp_memory = (*(ops->create_memory_datatype))();
	if (dtyp_memory < 0) {
		xfree(ops);
		xfree(data);
		error("Failed to create %s memory datatype",
		     acct_gather_profile_type_to_string(type));
		return NULL;
	}

	for (smpx = 0; smpx < nsamples; smpx++) {
		/* Sample groups are enumerated in increasing name order */
		len = H5Lget_name_by_idx(gid_series, ".", H5_INDEX_NAME,
					 H5_ITER_INC, smpx, name_sample,
					 MAX_GROUP_NAME, H5P_DEFAULT);
		if (len < 1 || len > MAX_GROUP_NAME) {
			error("Invalid group name %s", name_sample);
			continue;
		}
		g_sample = H5Gopen(gid_series, name_sample, H5P_DEFAULT);
		if (g_sample < 0) {
			info("Failed to open %s", name_sample);
			/* Bug fix: skip this sample instead of passing an
			 * invalid group id to the calls below */
			continue;
		}
		id_data_set = H5Dopen(g_sample, get_data_set_name(name_sample),
				      H5P_DEFAULT);
		if (id_data_set < 0) {
			H5Gclose(g_sample);
			error("Failed to open %s dataset",
			     acct_gather_profile_type_to_string(type));
			continue;
		}
		sz_dest = (*(ops->dataset_size))();
		data_cur = xmalloc(sz_dest);
		if (data_cur == NULL) {
			H5Dclose(id_data_set);
			H5Gclose(g_sample);
			error("Failed to get memory for prior data");
			continue;
		}
		ec = H5Dread(id_data_set, dtyp_memory, H5S_ALL, H5S_ALL,
			     H5P_DEFAULT, data_cur);
		if (ec < 0) {
			xfree(data_cur);
			H5Dclose(id_data_set);
			H5Gclose(g_sample);
			error("Failed to read %s data",
			      acct_gather_profile_type_to_string(type));
			continue;
		}
		/* Merge this sample into its slot of the combined buffer,
		 * passing the previous sample (data_prior) for deltas */
		(*(ops->merge_step_series))(g_sample, data_prior, data_cur,
					    data+(smpx)*sz_dest);

		xfree(data_prior);
		data_prior = data_cur;
		H5Dclose(id_data_set);
		H5Gclose(g_sample);
	}
	/* Free the final sample buffer (data_prior aliases it here) */
	xfree(data_cur);
	H5Tclose(dtyp_memory);
	xfree(ops);

	return data;
}
开发者ID:jsollom,项目名称:slurm,代码行数:84,


示例14: H5Aget_space

void H5Attribute::copy(const hid_t src, const hid_t dest, const std::string & name){    hid_t type, stype;    hid_t space, sspace;    char * data = 0;    hsize_t size;    hsize_t * dims = 0;    hsize_t ndims;    sspace = H5Aget_space(src);    if (sspace < 0)    {        throw H5Exception(__LINE__, __FILE__, _("Cannot copy the attribute"));    }    space = H5Scopy(sspace);    H5Sclose(sspace);    stype = H5Aget_type(src);    if (stype < 0)    {        H5Sclose(space);        throw H5Exception(__LINE__, __FILE__, _("Cannot copy the attribute"));    }    type = H5Tcopy(stype);    H5Tclose(stype);    size = H5Tget_size(type);    dims = new hsize_t[__SCILAB_HDF5_MAX_DIMS__];    ndims = H5Sget_simple_extent_dims(space, dims, 0);    for (unsigned int i = 0; i < ndims; i++)    {        size *= dims[i];    }    data = new char[size];    if (H5Aread(src, type, data) < 0)    {        H5Sclose(space);        H5Tclose(type);        delete[] dims;        delete[] data;        throw H5Exception(__LINE__, __FILE__, _("Cannot read attribute data."));    }    try    {        hid_t attr = create(dest, name, type, type, space, space, data);        H5Aclose(attr);        H5Sclose(space);        H5Tclose(type);        delete[] dims;        delete[] data;    }    catch (const H5Exception & /*e*/)    {        H5Sclose(space);        H5Tclose(type);        delete[] dims;        delete[] data;        throw;    }}
开发者ID:FOSSEE-Internship,项目名称:scilab,代码行数:63,


示例15: Py_INCREF

/*
 * Return a Python tuple with the dimensions of dataset `dset_name`
 * under `loc_id`, storing the datatype's byte order ("little", "big"
 * or "irrelevant") in `byteorder`.
 *
 * On any failure Py_None is returned (with its refcount incremented),
 * preserving this helper's original best-effort contract.
 */
PyObject *H5UIget_info( hid_t loc_id,
                        const char *dset_name,
                        char *byteorder)
{
  hid_t       dataset_id;
  int         rank;
  hsize_t     *dims = NULL;
  hid_t       space_id = -1;
  H5T_class_t class_id;
  H5T_order_t order;
  hid_t       type_id;
  PyObject    *t = NULL;
  int         i;

  /* Open the dataset. */
  if ( (dataset_id = H5Dopen( loc_id, dset_name, H5P_DEFAULT )) < 0 ) {
    Py_INCREF(Py_None);
    return Py_None;
  }

  /* Get an identifier for the datatype. */
  type_id = H5Dget_type( dataset_id );

  /* Get the class. */
  class_id = H5Tget_class( type_id );

  /* Get the dataspace handle */
  if ( (space_id = H5Dget_space( dataset_id )) < 0 )
    goto out;

  /* Get rank */
  if ( (rank = H5Sget_simple_extent_ndims( space_id )) < 0 )
    goto out;

  /* Book resources for dims (bug fix: check the allocation) */
  if ( (dims = (hsize_t *)malloc(rank * sizeof(hsize_t))) == NULL )
    goto out;

  /* Get dimensions */
  if ( H5Sget_simple_extent_dims( space_id, dims, NULL) < 0 )
    goto out;

  /* Assign the dimensions to a tuple.  PyTuple_SetItem steals the
     reference to each new PyLong, so no extra INCREF is needed. */
  t = PyTuple_New(rank);
  for (i = 0; i < rank; i++) {
    PyTuple_SetItem(t, i, PyLong_FromLong((long)dims[i]));
  }

  /* Release resources; mark them released so the error path cannot
     free/close them a second time (bug fix: they used to leak) */
  free(dims);
  dims = NULL;

  /* Terminate access to the dataspace */
  if ( H5Sclose( space_id ) < 0 ) {
    space_id = -1;
    goto out;
  }
  space_id = -1;

  /* Get the byteorder */
  /* Only integer, float, time, bitfield and enum classes carry one */
  if ((class_id == H5T_INTEGER) || (class_id == H5T_FLOAT)
      || (class_id == H5T_BITFIELD) || (class_id == H5T_TIME)
      ||  (class_id == H5T_ENUM)) {
    order = H5Tget_order( type_id );
    if (order == H5T_ORDER_LE)
      strcpy(byteorder, "little");
    else if (order == H5T_ORDER_BE)
      strcpy(byteorder, "big");
    else {
      fprintf(stderr, "Error: unsupported byteorder: %d\n", order);
      goto out;
    }
  }
  else {
    strcpy(byteorder, "irrelevant");
  }

  /* End access to the datatype and the dataset
     (bug fix: type_id used to leak on the success path) */
  H5Tclose( type_id );
  H5Dclose( dataset_id );

  /* Return the dimensions tuple */
  return t;

out:
  /* Bug fix: previously leaked dims, space_id and the partial tuple */
  if ( dims != NULL )
    free( dims );
  if ( space_id >= 0 )
    H5Sclose( space_id );
  Py_XDECREF( t );
  H5Tclose( type_id );
  H5Dclose( dataset_id );
  Py_INCREF(Py_None);
  return Py_None;
}
开发者ID:bbudescu,项目名称:PyTables,代码行数:87,


示例16: GOME_LV1_WR_H5_PCD

/*+++++++++++++++++++++++++ Main Program or Function +++++++++++++++*/void GOME_LV1_WR_H5_PCD( struct param_record param, short nr_pcd, 			 const short *indx_pcd, const struct pcd_gome *pcd ){     register hsize_t ni, nr, nx, ny;     unsigned short *usbuff;     short          *sbuff;     float          *rbuff;     hid_t    type_id;     hid_t    grp_id;     hbool_t  compress;     hsize_t  nrpix, dims[2];     struct   glr1_gome *glr;     struct   cr1_gome  *cld;/* * check number of PCD records */     if ( nr_pcd == 0 || indx_pcd == NULL || pcd == NULL ) return;/* * set HDF5 boolean variable for compression */     if ( param.flag_deflate == PARAM_SET )          compress = TRUE;     else          compress = FALSE;/* * open or create group Earth */     grp_id = NADC_OPEN_HDF5_Group( param.hdf_file_id, "/EARTH" );     if ( grp_id < 0 ) NADC_RETURN_ERROR( NADC_ERR_HDF_GRP, "/EARTH" );     (void) H5Gclose( grp_id );/* * create group /EARTH/PCD */     grp_id = H5Gcreate( param.hdf_file_id, "/EARTH/PCD", 			 H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT );     if ( grp_id < 0 ) 	  NADC_RETURN_ERROR( NADC_ERR_HDF_GRP, "/EARTH/PCD" );/* * +++++ create datasets in the /EARTH/PCD group */     dims[0] = (size_t) nr_pcd;/* * Write geolocation information */     type_id = H5Topen( param.hdf_file_id, "glr", H5P_DEFAULT );     glr = (struct glr1_gome *)	  malloc( dims[0] * sizeof( struct glr1_gome ));     if ( glr == NULL ) NADC_RETURN_ERROR( NADC_ERR_ALLOC, "glr" );     for ( nr = 0; nr < dims[0]; nr++ )	  (void) memcpy( glr+nr, &pcd[nr].glr, sizeof( struct glr1_gome ) );     NADC_WR_HDF5_Dataset( compress, grp_id, "glr", 			  type_id, 1, dims, glr );     free( glr );     (void) H5Tclose( type_id );     if ( IS_ERR_STAT_FATAL )	  NADC_RETURN_ERROR( NADC_ERR_HDF_WR, "glr" );/* * Write geolocation information */     type_id = H5Topen( param.hdf_file_id, "cld", H5P_DEFAULT );     cld = (struct cr1_gome *)	  malloc( dims[0] * sizeof( struct cr1_gome ));     if ( cld == NULL ) NADC_RETURN_ERROR( 
NADC_ERR_ALLOC, "cld" );     for ( nr = 0; nr < dims[0]; nr++ )	  (void) memcpy( cld+nr, &pcd[nr].cld, sizeof( struct cr1_gome ) );     NADC_WR_HDF5_Dataset( compress, grp_id, "cld", 			   type_id, 1, dims, cld );     free( cld );     (void) H5Tclose( type_id );     if ( IS_ERR_STAT_FATAL )	  NADC_RETURN_ERROR( NADC_ERR_HDF_WR, "cld" );/* * Dark current and Noise Correction Factor */     rbuff = (float *) malloc( dims[0] * sizeof( float ));     if ( rbuff == NULL ) NADC_RETURN_ERROR( NADC_ERR_ALLOC, "rbuff" );     for ( ny = 0; ny < dims[0]; ny++ )	  rbuff[ny] = pcd[indx_pcd[ny]].dark_current;     NADC_WR_HDF5_Dataset( compress, grp_id, "DarkCurrent", 			  H5T_NATIVE_FLOAT, 1, dims, rbuff );     for ( ny = 0; ny < dims[0]; ny++ )	  rbuff[ny] = pcd[indx_pcd[ny]].noise_factor;     NADC_WR_HDF5_Dataset( compress, grp_id, "NoiseCorrection", 			  H5T_NATIVE_FLOAT, 1, dims, rbuff );/* * Plane of the Polarisation */     for ( ny = 0; ny < dims[0]; ny++ )	  rbuff[ny] = pcd[indx_pcd[ny]].polar.chi;     NADC_WR_HDF5_Dataset( compress, grp_id, "PolarisationPlaneAngle",			  H5T_NATIVE_FLOAT, 1, dims, rbuff );     free( rbuff );/* * Indices */     sbuff = (short *) malloc( dims[0] * sizeof( short ));//.........这里部分代码省略.........
开发者ID:rmvanhees,项目名称:nadc_tools,代码行数:101,


示例17: main

//.........这里部分代码省略.........  /* Define an array of Particles */ Particle  p_data[NRECORDS] = {  {"zero",0,0, 0.0f, 0.0}, {"one",10,10, 1.0f, 10.0}, {"two",  20,20, 2.0f, 20.0}, {"three",30,30, 3.0f, 30.0}, {"four", 40,40, 4.0f, 40.0}, {"five", 50,50, 5.0f, 50.0}, {"six",  60,60, 6.0f, 60.0}, {"seven",70,70, 7.0f, 70.0}  };  /* Calculate the size and the offsets of our struct members in memory */ size_t dst_size =  sizeof( Particle ); size_t dst_offset[NFIELDS] = { HOFFSET( Particle, name ),                                HOFFSET( Particle, lati ),                                HOFFSET( Particle, longi ),                                HOFFSET( Particle, pressure ),                                HOFFSET( Particle, temperature )}; size_t dst_sizes[NFIELDS] = { sizeof( p_data[0].name),                               sizeof( p_data[0].lati),                               sizeof( p_data[0].longi),                               sizeof( p_data[0].pressure),                               sizeof( p_data[0].temperature)};  /* Define an array of Particles to insert */ Particle  p_data_insert[NRECORDS_INS] =  { {"new",30,30, 3.0f, 30.0}, {"new",40,40, 4.0f, 40.0} };  /* Define field information */ const char *field_names[NFIELDS]  =  { "Name","Latitude", "Longitude", "Pressure", "Temperature" }; hid_t      field_type[NFIELDS]; hid_t      string_type; hid_t      file_id; hsize_t    chunk_size = 10; int        compress  = 0; int        *fill_data = NULL; hsize_t    start;      /* Record to start reading */ hsize_t    nrecords;   /* Number of records to insert/delete */ herr_t     status; hsize_t    nfields_out; hsize_t    nrecords_out; int        i;  /* Initialize the field field_type */ string_type = H5Tcopy( H5T_C_S1 ); H5Tset_size( string_type, 16 ); field_type[0] = string_type; field_type[1] = H5T_NATIVE_INT; field_type[2] = H5T_NATIVE_INT; field_type[3] = H5T_NATIVE_FLOAT; field_type[4] = H5T_NATIVE_DOUBLE;  /* Create a new file using default properties. 
*/ file_id = H5Fcreate( "ex_table_08.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT );  /* Make the table */ status=H5TBmake_table( "Table Title",file_id,TABLE_NAME,NFIELDS,NRECORDS,                          dst_size,field_names, dst_offset, field_type,                          chunk_size, fill_data, compress, p_data  ); /* Insert records */ start    = 3;       nrecords = NRECORDS_INS;  status=H5TBinsert_record( file_id, TABLE_NAME, start, nrecords, dst_size, dst_offset,   dst_sizes, p_data_insert ); /* read the table */ status=H5TBread_table( file_id, TABLE_NAME, dst_size, dst_offset, dst_sizes, dst_buf ); /* get table info  */ status=H5TBget_table_info(file_id,TABLE_NAME, &nfields_out, &nrecords_out ); /* print */ printf ("Table has %d fields and %d records/n",(int)nfields_out,(int)nrecords_out);   /* print it by rows */ for (i=0; i<nrecords_out; i++) {  printf ("%-5s %-5d %-5d %-5f %-5f",    dst_buf[i].name,   dst_buf[i].lati,   dst_buf[i].longi,   dst_buf[i].pressure,   dst_buf[i].temperature);  printf ("/n"); }  /* close type */ H5Tclose( string_type );  /* close the file */ H5Fclose( file_id );  return 0; }
开发者ID:MattNapsAlot,项目名称:rHDF5,代码行数:101,


示例18: dataset_stats

//.........这里部分代码省略.........    ndims = H5Sget_simple_extent_dims(sid, dims, NULL);    HDassert(ndims >= 0);    /* Check for larger rank of dataset */    if((unsigned)ndims > iter->max_dset_rank)        iter->max_dset_rank = (unsigned)ndims;    /* Track the number of datasets with each rank */    (iter->dset_rank_count[ndims])++;    /* Only gather dim size statistics on 1-D datasets */    if(ndims == 1) {       iter->max_dset_dims = dims[0];       if(dims[0] < SIZE_SMALL_DSETS)           (iter->small_dset_dims[(size_t)dims[0]])++;       /* Add dim count to proper bin */       bin = ceil_log10((unsigned long)dims[0]);       if((bin + 1) > iter->dset_dim_nbins) {          /* Allocate more storage for info about dataset's datatype */          iter->dset_dim_bins = (unsigned long *)HDrealloc(iter->dset_dim_bins, (bin + 1) * sizeof(unsigned long));          HDassert(iter->dset_dim_bins);          /* Initialize counts for intermediate bins */          while(iter->dset_dim_nbins < bin)              iter->dset_dim_bins[iter->dset_dim_nbins++] = 0;          iter->dset_dim_nbins++;          /* Initialize count for this bin */          iter->dset_dim_bins[bin] = 1;        } /* end if */        else            (iter->dset_dim_bins[bin])++;    } /* end if */    ret = H5Sclose(sid);    HDassert(ret >= 0);    /* Gather datatype statistics */    tid = H5Dget_type(did);    HDassert(tid > 0);    type_found = FALSE;    for(u = 0; u < iter->dset_ntypes; u++)        if(H5Tequal(iter->dset_type_info[u].tid, tid) > 0) {            type_found = TRUE;            break;        } /* end for */    if(type_found)         (iter->dset_type_info[u].count)++;    else {        unsigned curr_ntype = iter->dset_ntypes;        /* Increment # of datatypes seen for datasets */        iter->dset_ntypes++;        /* Allocate more storage for info about dataset's datatype */        iter->dset_type_info = (dtype_info_t *)HDrealloc(iter->dset_type_info, iter->dset_ntypes * sizeof(dtype_info_t));        
HDassert(iter->dset_type_info);        /* Initialize information about datatype */        iter->dset_type_info[curr_ntype].tid = H5Tcopy(tid);        HDassert(iter->dset_type_info[curr_ntype].tid > 0);        iter->dset_type_info[curr_ntype].count = 1;        iter->dset_type_info[curr_ntype].named = 0;        /* Set index for later */        u = curr_ntype;    } /* end else */    /* Check if the datatype is a named datatype */    if(H5Tcommitted(tid) > 0)        (iter->dset_type_info[u].named)++;    ret = H5Tclose(tid);    HDassert(ret >= 0);    /* Track different filters */    if((nfltr = H5Pget_nfilters(dcpl)) >= 0) {       if(nfltr == 0)           iter->dset_comptype[0]++;        for(u = 0; u < (unsigned)nfltr; u++) {            fltr = H5Pget_filter2(dcpl, u, 0, 0, 0, 0, 0, NULL);            if(fltr >= 0) {                if(fltr < (H5_NFILTERS_IMPL - 1))                    iter->dset_comptype[fltr]++;                else                    iter->dset_comptype[H5_NFILTERS_IMPL - 1]++; /*other filters*/            } /* end if */        } /* end for */    } /* endif nfltr */     ret = H5Pclose(dcpl);     HDassert(ret >= 0);     ret = H5Dclose(did);     HDassert(ret >= 0);     return 0;}  /* end dataset_stats() */
开发者ID:ArielleBassanelli,项目名称:gempak,代码行数:101,


示例19: PYTABLE_append_array

/*+++++++++++++++++++++++++
.IDENTifer   PYTABLE_append_array
.PURPOSE     append data to HDF5 dataset, extending the dimension ''extdim''
.INPUT/OUTPUT
  call as    stat = PYTABLE_append_array( locID, dset_name,
                                          extdim, count, buffer );
     input:
            hid_t locID      :   HDF5 identifier of file or group
	    char  *dset_name :   name of dataset
	    int   extdim     :   dimension to extend
            int   count      :   number of arrays to write
	    void  *buffer    :   data to write
	    
.RETURNS     A negative value is returned on failure.
.COMMENTS    none
-------------------------*/
herr_t PYTABLE_append_array( hid_t locID, const char *dset_name,
			     int extdim, int count, const void *buffer )
{
     int      rank;
     hid_t    dataID;
     hid_t    spaceID = -1;
     hid_t    mem_spaceID = -1;
     hid_t    typeID = -1;
     hsize_t  *dims = NULL;
     hsize_t  *dims_ext = NULL;
     hsize_t  *offset = NULL;
     herr_t   stat;

/* open the dataset */
     if ( (dataID = H5Dopen( locID, dset_name, H5P_DEFAULT )) < 0 ) return -1;

/* get the dataspace handle */
     if ( (spaceID = H5Dget_space( dataID )) < 0 ) goto done;

/* get rank */
     if ( (rank = H5Sget_simple_extent_ndims( spaceID )) < 0 ) goto done;

/* get dimensions (bug fix: check the allocations) */
     dims = (hsize_t *) malloc( rank * sizeof(hsize_t) );
     dims_ext = (hsize_t *) malloc( rank * sizeof(hsize_t) );
     offset = (hsize_t *) calloc( rank, sizeof(hsize_t) );
     if ( dims == NULL || dims_ext == NULL || offset == NULL ) goto done;
     if ( H5Sget_simple_extent_dims( spaceID, dims, NULL ) < 0 )
	  goto done;

/* the write starts at the current end of dimension ''extdim'' */
     offset[extdim] = dims[extdim];
     (void) memcpy( dims_ext, dims, rank * sizeof(hsize_t) );
     dims_ext[extdim] = count;
     dims[extdim] += count;

/* terminate access to the dataspace */
     if ( H5Sclose( spaceID ) < 0 ) goto done;
     spaceID = -1;

/* extend the dataset */
     if ( H5Dset_extent( dataID, dims ) < 0 ) goto done;

/* select a hyperslab covering the newly added region */
     if ( (spaceID = H5Dget_space( dataID )) < 0 ) goto done;
     stat = H5Sselect_hyperslab( spaceID, H5S_SELECT_SET, offset, NULL,
				 dims_ext, NULL );
     free( dims );
     dims = NULL;	/* bug fix: prevent double free in "done" */
     free( offset );
     offset = NULL;
     if ( stat < 0 ) goto done;

/* define memory space */
     if ( (mem_spaceID = H5Screate_simple( rank, dims_ext, NULL )) < 0 )
       goto done;
     free( dims_ext );
     dims_ext = NULL;	/* bug fix: prevent double free in "done" */

/* get an identifier for the datatype */
     if ( (typeID = H5Dget_type( dataID )) < 0 ) goto done;

/* write the data to the hyperslab */
     stat = H5Dwrite( dataID, typeID, mem_spaceID, spaceID, H5P_DEFAULT,
		      buffer );
     if ( stat < 0 ) goto done;

/* end access to the dataset */
     if ( H5Dclose( dataID ) ) goto done;
     dataID = -1;	/* closed; "done" must not close it again */

/* terminate access to the datatype */
     if ( H5Tclose( typeID ) < 0 ) goto done;
     typeID = -1;

/* terminate access to the dataspaces */
     if ( H5Sclose( mem_spaceID ) < 0 ) goto done;
     mem_spaceID = -1;
     if ( H5Sclose( spaceID ) < 0 ) goto done;

     return 0;
 done:
     if ( dims != NULL ) free( dims );
     if ( dims_ext != NULL ) free( dims_ext );
     if ( offset != NULL ) free( offset );
     if ( typeID > 0 ) (void) H5Tclose( typeID );
     if ( spaceID > 0 ) (void) H5Sclose( spaceID );
     if ( mem_spaceID > 0 ) (void) H5Sclose( mem_spaceID );
     if ( dataID > 0 ) (void) H5Dclose( dataID );
     return -1;
}
开发者ID:rmvanhees,项目名称:nadc_tools,代码行数:98,


示例20: H5Dopen

/**
 * Load the ISO 19115 XML metadata stored in the BAG file and derive the
 * dataset's geotransform, spatial reference and acquisition date from it.
 *
 * Side effects: sets pszXMLMetadata (owned by the dataset, CPLCalloc'ed),
 * adfGeoTransform[], pszProjection, and the "BAG_DATETIME" metadata item.
 * Returns silently if the metadata dataset is empty or unparsable.
 */
void BAGDataset::LoadMetadata()

{
/* -------------------------------------------------------------------- */
/*      Load the metadata from the file.                                */
/* -------------------------------------------------------------------- */
    /* The BAG spec stores the XML metadata blob in this fixed dataset. */
    hid_t hMDDS = H5Dopen( hHDF5, "/BAG_root/metadata" );
    hid_t datatype     = H5Dget_type( hMDDS );
    hid_t dataspace    = H5Dget_space( hMDDS );
    hid_t native       = H5Tget_native_type( datatype, H5T_DIR_ASCEND );
    hsize_t dims[3], maxdims[3];

    H5Sget_simple_extent_dims( dataspace, dims, maxdims );

    /* +1 so the buffer is always NUL terminated for the string APIs below. */
    pszXMLMetadata = (char *) CPLCalloc((int) (dims[0]+1),1);

    H5Dread( hMDDS, native, H5S_ALL, dataspace, H5P_DEFAULT, pszXMLMetadata );

    /* Release all HDF5 handles before parsing; the XML copy is ours now. */
    H5Tclose( native );
    H5Sclose( dataspace );
    H5Tclose( datatype );
    H5Dclose( hMDDS );

    if( strlen(pszXMLMetadata) == 0 )
        return;

/* -------------------------------------------------------------------- */
/*      Try to get the geotransform.                                    */
/* -------------------------------------------------------------------- */
    CPLXMLNode *psRoot = CPLParseXMLString( pszXMLMetadata );

    if( psRoot == NULL )
        return;

    CPLStripXMLNamespace( psRoot, NULL, TRUE );

    CPLXMLNode *psGeo = CPLSearchXMLNode( psRoot, "=MD_Georectified" );

    if( psGeo != NULL )
    {
        /* Corner points come as "llx,lly urx,ury"; split on space/comma. */
        char **papszCornerTokens =
            CSLTokenizeStringComplex(
                CPLGetXMLValue( psGeo, "cornerPoints.Point.coordinates", "" ),
                " ,", FALSE, FALSE );

        if( CSLCount(papszCornerTokens ) == 4 )
        {
            /* NOTE(review): atof() is locale-dependent; GDAL convention is
             * CPLAtof() for metadata values — confirm intended here. */
            double dfLLX = atof( papszCornerTokens[0] );
            double dfLLY = atof( papszCornerTokens[1] );
            double dfURX = atof( papszCornerTokens[2] );
            double dfURY = atof( papszCornerTokens[3] );

            /* Corner points are pixel centers; derive pixel size from the
             * (size-1) spans.  Assumes raster is at least 2x2 — TODO confirm. */
            adfGeoTransform[0] = dfLLX;
            adfGeoTransform[1] = (dfURX - dfLLX) / (GetRasterXSize()-1);
            adfGeoTransform[3] = dfURY;
            adfGeoTransform[5] = (dfLLY - dfURY) / (GetRasterYSize()-1);

            /* Shift by half a pixel: GDAL geotransforms reference the
             * outer edge of the top-left pixel, not its center. */
            adfGeoTransform[0] -= adfGeoTransform[1] * 0.5;
            adfGeoTransform[3] -= adfGeoTransform[5] * 0.5;
        }

        CSLDestroy( papszCornerTokens );
    }

/* -------------------------------------------------------------------- */
/*      Try to get the coordinate system.                               */
/* -------------------------------------------------------------------- */
    OGRSpatialReference oSRS;

    /* Prefer the ISO 19115 importer; fall back to scanning the raw XML. */
    if( OGR_SRS_ImportFromISO19115( &oSRS, pszXMLMetadata )
        == OGRERR_NONE )
    {
        oSRS.exportToWkt( &pszProjection );
    }
    else
    {
        ParseWKTFromXML( pszXMLMetadata );
    }

/* -------------------------------------------------------------------- */
/*      Fetch acquisition date.                                         */
/* -------------------------------------------------------------------- */
    CPLXMLNode *psDateTime = CPLSearchXMLNode( psRoot, "=dateTime" );
    if( psDateTime != NULL )
    {
        const char *pszDateTimeValue = CPLGetXMLValue( psDateTime, NULL, "" );
        if( pszDateTimeValue )
            SetMetadataItem( "BAG_DATETIME", pszDateTimeValue );
    }

    CPLDestroyXMLNode( psRoot );
}
开发者ID:TUW-GEO,项目名称:OGRSpatialRef3D,代码行数:91,


示例21: test_strpad

/* * test_strpad * Tests string padding for a UTF-8 string. * Converts strings to shorter and then longer strings. * Borrows heavily from dtypes.c, but is more complicated because * the string is randomly generated. */void test_strpad(hid_t H5_ATTR_UNUSED fid, const char *string){    /* buf is used to hold the data that H5Tconvert operates on. */    char     buf[LONG_BUF_SIZE];    /* cmpbuf holds the output that H5Tconvert should produce,     * to compare against the actual output. */    char     cmpbuf[LONG_BUF_SIZE];    /* new_string is a slightly modified version of the UTF-8     * string to make the tests run more smoothly. */    char     new_string[MAX_STRING_LENGTH + 2];    size_t   length;  /* Length of new_string in bytes */    size_t   small_len;  /* Size of the small datatype */    size_t   big_len;   /* Size of the larger datatype */    hid_t    src_type, dst_type;    herr_t   ret;    /* The following tests are simpler if the UTF-8 string contains     * the right number of bytes (even or odd, depending on the test).     * We create a 'new_string' whose length is convenient by prepending     * an 'x' to 'string' when necessary. */    length = HDstrlen(string);    if(length % 2 != 1)    {      HDstrcpy(new_string, "x");      HDstrcat(new_string, string);      length++;    } else {      HDstrcpy(new_string, string);    }    /* Convert a null-terminated string to a shorter and longer null     * terminated string. 
*/    /* Create a src_type that holds the UTF-8 string and its final NULL */    big_len = length + 1;                     /* +1 byte for final NULL */    HDassert((2*big_len)<=sizeof(cmpbuf));    src_type = mkstr(big_len, H5T_STR_NULLTERM);    CHECK(src_type, FAIL, "mkstr");    /* Create a dst_type that holds half of the UTF-8 string and a final     * NULL */    small_len = (length + 1) / 2;    dst_type = mkstr(small_len, H5T_STR_NULLTERM);    CHECK(dst_type, FAIL, "mkstr");    /* Fill the buffer with two copies of the UTF-8 string, each with a     * terminating NULL.  It will look like "abcdefg/0abcdefg/0". */    strncpy(buf, new_string, big_len);    strncpy(&buf[big_len], new_string, big_len);    ret = H5Tconvert(src_type, dst_type, (size_t)2, buf, NULL, H5P_DEFAULT);    CHECK(ret, FAIL, "H5Tconvert");    /* After conversion, the buffer should look like     * "abc/0abc/0abcdefg/0".  Note that this is just what the bytes look     * like; UTF-8 characters may well have been truncated.     * To check that the conversion worked properly, we'll build this     * string manually. */    HDstrncpy(cmpbuf, new_string, small_len - 1);    cmpbuf[small_len - 1] = '/0';    HDstrncpy(&cmpbuf[small_len], new_string, small_len -1);    cmpbuf[2 * small_len - 1] = '/0';    HDstrcpy(&cmpbuf[2 * small_len], new_string);    VERIFY(HDmemcmp(buf, cmpbuf, 2*big_len), 0, "HDmemcmp");    /* Now convert from smaller datatype to bigger datatype.  
This should     * leave our buffer looking like: "abc/0/0/0/0/0abc/0/0/0/0/0" */    ret = H5Tconvert(dst_type, src_type, (size_t)2, buf, NULL, H5P_DEFAULT);    CHECK(ret, FAIL, "H5Tconvert");    /* First fill the buffer with NULLs */    HDmemset(cmpbuf, '/0', (size_t)LONG_BUF_SIZE);    /* Copy in the characters */    HDstrncpy(cmpbuf, new_string, small_len -1);    HDstrncpy(&cmpbuf[big_len], new_string, small_len -1);    VERIFY(HDmemcmp(buf, cmpbuf, 2*big_len), 0, "HDmemcmp");    ret = H5Tclose(src_type);    CHECK(ret, FAIL, "H5Tclose");    ret = H5Tclose(dst_type);    CHECK(ret, FAIL, "H5Tclose");    /* Now test null padding.  Null-padded strings do *not* need     * terminating NULLs, so the sizes of the datatypes are slightly     * different and we want a string with an even number of characters. */    length = HDstrlen(string);    if(length % 2 != 0)//.........这里部分代码省略.........
开发者ID:ElaraFX,项目名称:hdf5,代码行数:101,


示例22: test_big_table

/*-------------------------------------------------------------------------
 * test_big_table
 *
 * Stress test for fixed-length packet tables: appends BIG_TABLE_SIZE
 * particle records (eight at a time) and reads every one back, verifying
 * that H5PTcreate_fl left the read cursor on the first packet.
 *
 * Returns 0 on success, -1 on failure.
 *-------------------------------------------------------------------------
 */
static int
test_big_table(hid_t fid)
{
    herr_t     ret;
    hid_t      table;
    hid_t      particle_tid;
    size_t     pkt;
    particle_t recv;
    hsize_t    n_packets;

    TESTING("large packet table");

    /* Build the compound datatype describing one particle record. */
    particle_tid = make_particle_type();
    HDassert(particle_tid != -1);

    /* Create the packet table; the datatype handle is no longer needed
     * once the table owns a copy of it. */
    table = H5PTcreate_fl(fid, "Packet Test Dataset2", particle_tid, (hsize_t)33, -1);
    H5Tclose(particle_tid);
    if( H5PTis_valid(table) < 0)
        goto fail;

    /* Fill the table, appending eight sample particles per call. */
    pkt = 0;
    while(pkt < BIG_TABLE_SIZE)
    {
        ret = H5PTappend(table, (size_t)8, &(testPart[0]));
        if( ret < 0)
            goto fail;
        pkt += 8;
    }

    /* The packet count must match exactly what was appended. */
    ret = H5PTget_num_packets(table, &n_packets);
    if( ret < 0)
        goto fail;
    if( n_packets != BIG_TABLE_SIZE )
        goto fail;

    /* Walk the table from the start; each record must equal the sample
     * particle it was copied from (testPart repeats every 8 packets). */
    pkt = 0;
    while(pkt < BIG_TABLE_SIZE)
    {
        ret = H5PTget_next(table, 1, &recv);
        if(ret < 0)
            goto fail;
        if( cmp_par(pkt % 8, 0, testPart, &recv) != 0)
            goto fail;
        ++pkt;
    }

    ret = H5PTclose(table);
    if( ret < 0)
        goto fail;

    PASSED();
    return 0;

fail:
    H5_FAILED();
    /* Original cleanup semantics preserved: close is attempted only when
     * the handle no longer validates. */
    if( H5PTis_valid(table) < 0)
        H5PTclose(table);
    return -1;
}
开发者ID:quinoacomputing,项目名称:HDF5,代码行数:74,


示例23: test_compound

/*
 * test_compound
 *
 * Verifies that a compound datatype may carry a UTF-8 member name: the
 * name round-trips through H5Tget_member_name, and data written with the
 * full type can be read back field-by-field through a partial type.
 */
void test_compound(hid_t fid, const char * string)
{
  /* Full on-disk record and a two-field subset used for partial reads
   * (layout mirrors the h5_compound example). */
  typedef struct full_rec_t {
      int    a;
      double c;
      float b;
  } full_rec_t;

  typedef struct part_rec_t {
      double c;
      int    a;
  } part_rec_t;

  full_rec_t full_rec;
  part_rec_t part_rec;
  hid_t      full_tid;
  hid_t      part_tid;
  hid_t      space_id;
  hid_t      dset_id;
  hsize_t    dim = 1;
  char      *member_name;
  herr_t     ret;

  /* Zero first so padding bytes are deterministic, then set the fields. */
  HDmemset(&full_rec, 0, sizeof(full_rec_t));
  full_rec.a = COMP_INT_VAL;
  full_rec.c = COMP_DOUBLE_VAL;
  full_rec.b = COMP_FLOAT_VAL;

  /* Build the full compound type; the first member gets the UTF-8 name.
   * (It must be inserted first: the round-trip check below reads member 0.) */
  full_tid = H5Tcreate (H5T_COMPOUND, sizeof(full_rec_t));
  CHECK(full_tid, FAIL, "H5Tcreate");
  ret = H5Tinsert(full_tid, string, HOFFSET(full_rec_t, a), H5T_NATIVE_INT);
  CHECK(ret, FAIL, "H5Tinsert");

  /* The stored member name must compare equal to the original UTF-8 string. */
  member_name = H5Tget_member_name(full_tid, 0);
  ret = HDstrcmp(member_name, string);
  VERIFY(ret, 0, "strcmp");
  H5free_memory(member_name);

  /* Remaining (ASCII-named) members of the full type. */
  ret = H5Tinsert(full_tid, "c_name", HOFFSET(full_rec_t, c), H5T_NATIVE_DOUBLE);
  CHECK(ret, FAIL, "H5Tinsert");
  ret = H5Tinsert(full_tid, "b_name", HOFFSET(full_rec_t, b), H5T_NATIVE_FLOAT);
  CHECK(ret, FAIL, "H5Tinsert");

  /* Subset type: only two of the three fields, in a different order. */
  part_tid = H5Tcreate (H5T_COMPOUND, sizeof(part_rec_t));
  CHECK(part_tid, FAIL, "H5Tcreate");
  ret = H5Tinsert(part_tid, "c_name", HOFFSET(part_rec_t, c), H5T_NATIVE_DOUBLE);
  CHECK(ret, FAIL, "H5Tinsert");
  ret = H5Tinsert(part_tid, string, HOFFSET(part_rec_t, a), H5T_NATIVE_INT);
  CHECK(ret, FAIL, "H5Tinsert");

  /* One-element dataset using the full type. */
  space_id = H5Screate_simple(1, &dim, NULL);
  CHECK(space_id, FAIL, "H5Screate_simple");
  dset_id = H5Dcreate2(fid, DSET4_NAME, full_tid, space_id, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
  CHECK(dset_id, FAIL, "H5Dcreate2");

  ret = H5Dwrite(dset_id, full_tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, &full_rec);
  CHECK(ret, FAIL, "H5Dwrite");

  /* Read back through the subset type: fields are matched by name,
   * including the UTF-8 one. */
  ret = H5Dread(dset_id, part_tid, H5S_ALL, H5S_ALL, H5P_DEFAULT, &part_rec);
  CHECK(ret, FAIL, "H5Dread");
  VERIFY(part_rec.a, COMP_INT_VAL, "H5Dread");
  VERIFY(part_rec.c, COMP_DOUBLE_VAL, "H5Dread");

  /* Release all handles. */
  ret = H5Tclose(full_tid);
  CHECK(ret, FAIL, "H5Tclose");
  ret = H5Tclose(part_tid);
  CHECK(ret, FAIL, "H5Tclose");
  ret = H5Sclose(space_id);
  CHECK(ret, FAIL, "H5Sclose");
  ret = H5Dclose(dset_id);
  CHECK(ret, FAIL, "H5Dclose");
}
开发者ID:ElaraFX,项目名称:hdf5,代码行数:88,


示例24: test_compress

//.........这里部分代码省略.........     * packet table, and it should pass the same tests. */    /* Add many particles */    for(c = 0; c < BIG_TABLE_SIZE ; c+=8)    {        /* Append eight particles at once*/        err = H5PTappend(table, (size_t)8, &(testPart[0]));        if( err < 0) TEST_ERROR;    }    /* Count the number of packets in the table  */    err = H5PTget_num_packets(table, &count);    if( err < 0) TEST_ERROR;    if( count != BIG_TABLE_SIZE ) TEST_ERROR;    /* Read particles to ensure that all of them were written correctly  */    HDmemset(readPart, 0, sizeof(readPart));    for(c = 0; c < BIG_TABLE_SIZE; c++)    {        err = H5PTget_next(table, 1, readPart);        if(err < 0) TEST_ERROR;        /* Ensure that particles were read correctly */        if( cmp_par(c % 8, 0, testPart, readPart) != 0) TEST_ERROR;    }    /* Close the table */    err = H5PTclose(table);    if( err < 0) TEST_ERROR;    /* Open the packet table as a regular dataset and make sure that the     * compression filter is set.     */    dset_id = H5Dopen2(fid1, "Compressed Test Dataset", H5P_DEFAULT);    if( dset_id < 0) TEST_ERROR;    plist_id = H5Dget_create_plist(dset_id);    if( plist_id < 0) TEST_ERROR;    err = H5Pget_filter_by_id2(plist_id, H5Z_FILTER_DEFLATE, NULL, &num_elems,                               filter_vals, 0, NULL, NULL);    if( err < 0) TEST_ERROR;    /* The compression level should be 8, the value we passed in */    if(filter_vals[0] != 8) TEST_ERROR;    /* Clean up */    err = H5Pclose(plist_id);    if( err < 0) TEST_ERROR;    err = H5Dclose(dset_id);    if( err < 0) TEST_ERROR;    /* Create a new table without compression. */    table = H5PTcreate_fl(fid1, "Uncompressed Dataset", part_t, (hsize_t)80, -1);    if(table < 0) TEST_ERROR;    /* Close the packet table */    err = H5PTclose(table);    if( err < 0) TEST_ERROR;    /* Open the packet table as a regular dataset and make sure that the     * compression filter is not set.     
*/    dset_id = H5Dopen2(fid1, "Uncompressed Dataset", H5P_DEFAULT);    if( dset_id < 0) TEST_ERROR;    plist_id = H5Dget_create_plist(dset_id);    if( plist_id < 0) TEST_ERROR;    H5E_BEGIN_TRY {        err = H5Pget_filter_by_id2(plist_id, H5Z_FILTER_DEFLATE, NULL, &num_elems,        filter_vals, 0, NULL, NULL);        if( err >= 0) TEST_ERROR;    } H5E_END_TRY    /* Clean up */    err = H5Pclose(plist_id);    if( err < 0) TEST_ERROR;    err = H5Dclose(dset_id);    if( err < 0) TEST_ERROR;    /* Close the datatype and the file */    err = H5Tclose(part_t);    if( err < 0) TEST_ERROR;    err = H5Fclose(fid1);    if( err < 0) TEST_ERROR;    PASSED();    return 0;error:    H5E_BEGIN_TRY {        H5Pclose(plist_id);        H5Dclose(dset_id);        H5Tclose(part_t);        H5PTclose(table);        H5Fclose(fid1);    } H5E_END_TRY    H5_FAILED();    return -1;}
开发者ID:quinoacomputing,项目名称:HDF5,代码行数:101,


示例25: e5_read_data_info_list

estatus_te5_read_data_info_list(    eid_t e5_group_id, const char* list_name, e5_data_info* info_list){    int i;    int d;    int log_scale;    int close_group;    estatus_t status;    hsize_t h5_min_dim[3];    hsize_t h5_max_dim[3];    eid_t e5_list_group_id;    eid_t e5_type_id;    eid_t e5_dataset_id;    eid_t e5_dataspace_id;    status = E5_SUCCESS;    if(list_name && strlen(list_name))    {        e5_list_group_id = e5_create_group(e5_group_id, list_name);        close_group = 1;    }    else    {        e5_list_group_id = e5_group_id;        close_group = 0;    }    for(i = 0; info_list && info_list[i].name != 0; i++)    {        e5_data_info* info = &info_list[i];        e5_dataset_id = H5Dopen(e5_list_group_id, info->name);        if (e5_dataset_id < 0)        {            status = E5_INVALID_DATASET;            e5_error(e5_list_group_id, status, "Failed to open info for dataset '%s'/n",  info->name);            return status;        }        e5_dataspace_id = H5Dget_space(e5_dataset_id);        e5_type_id = H5Dget_type(e5_dataset_id);        H5Sget_simple_extent_dims(e5_dataspace_id, h5_min_dim, h5_max_dim);        info->type = e5_convert_hdf_type(e5_type_id);        for(d = 0; d < 3; d++)        {            info->dim[d] = h5_min_dim[d] >= h5_max_dim[d] ? h5_min_dim[d] : h5_max_dim[d];            info->dim[d] = info->dim[d] < 1 ? 1 : info->dim[d];        }        log_scale = 0;        if(e5_is_valid_attr(e5_group_id, "log10"))            e5_read_attr_int(e5_dataset_id, "log10", &log_scale);        info->scale = log_scale ? E5_VALUE_SCALE_LOG10 : E5_VALUE_SCALE_LINEAR;        e5_info(e5_group_id, "Read data info [type='%s', name='%s', dim='%u %u %u']/n",                e5_typename(info->type), info->name, info->dim[0], info->dim[1], info->dim[2]);        H5Sclose(e5_dataspace_id);        H5Dclose(e5_dataset_id);        H5Tclose(e5_type_id);    }    if(close_group)        e5_close_group(e5_list_group_id);    return E5_SUCCESS;}
开发者ID:voidcycles,项目名称:void,代码行数:73,


示例26: HDF5AttrIterate

//.........这里部分代码省略.........	    szValue[0] ='/0';	    H5Aread( hAttrID, hAttrNativeType, buf );	}	if( H5Tequal( H5T_NATIVE_CHAR, hAttrNativeType ) ){	    for( i=0; i < nAttrElmts; i++ ) {		sprintf( szData, "%c ", ((char *) buf)[i]);		if( CPLStrlcat(szValue,szData,MAX_METADATA_LEN) >= MAX_METADATA_LEN )		    CPLError( CE_Warning, CPLE_OutOfMemory,			      "Header data too long. Truncated/n");	    }	}	else if( H5Tequal( H5T_NATIVE_UCHAR,  hAttrNativeType ) ) {	    for( i=0; i < nAttrElmts; i++ ) {		sprintf( szData, "%c", ((char *) buf)[i] );		if( CPLStrlcat(szValue,szData,MAX_METADATA_LEN) >= MAX_METADATA_LEN )		    CPLError( CE_Warning, CPLE_OutOfMemory,			      "Header data too long. Truncated/n");	    }	}	else if( H5Tequal( H5T_NATIVE_SHORT,  hAttrNativeType ) ) {	    for( i=0; i < nAttrElmts; i++ ) {		sprintf( szData, "%d ", ((short *) buf)[i] );		if( CPLStrlcat(szValue,szData,MAX_METADATA_LEN) >= MAX_METADATA_LEN )		    CPLError( CE_Warning, CPLE_OutOfMemory,			      "Header data too long. Truncated/n");	    }	}	else if( H5Tequal( H5T_NATIVE_USHORT, hAttrNativeType ) ) {	    for( i=0; i < nAttrElmts; i++ ) {		sprintf( szData, "%ud ", ((unsigned short *) buf)[i] );		if( CPLStrlcat(szValue,szData,MAX_METADATA_LEN) >= MAX_METADATA_LEN )		    CPLError( CE_Warning, CPLE_OutOfMemory,			      "Header data too long. Truncated/n");	    }	}	else if( H5Tequal( H5T_NATIVE_INT,    hAttrNativeType ) ) {	    for( i=0; i < nAttrElmts; i++ ) {		sprintf( szData, "%d ", ((int *) buf)[i] );		if( CPLStrlcat(szValue,szData,MAX_METADATA_LEN) >= MAX_METADATA_LEN )		    CPLError( CE_Warning, CPLE_OutOfMemory,			      "Header data too long. Truncated/n");	    }	}	else if( H5Tequal( H5T_NATIVE_UINT,   hAttrNativeType ) ) {	    for( i=0; i < nAttrElmts; i++ ) {		sprintf( szData, "%ud ", ((unsigned int *) buf)[i] );		if( CPLStrlcat(szValue,szData,MAX_METADATA_LEN) >= MAX_METADATA_LEN )		    CPLError( CE_Warning, CPLE_OutOfMemory,			      "Header data too long. 
Truncated/n");	    }	}	else if( H5Tequal( H5T_NATIVE_LONG,   hAttrNativeType ) ) {	    for( i=0; i < nAttrElmts; i++ ) {		sprintf( szData, "%ld ", ((long *)buf)[i] );		if( CPLStrlcat(szValue,szData,MAX_METADATA_LEN) >= MAX_METADATA_LEN )		    CPLError( CE_Warning, CPLE_OutOfMemory,			      "Header data too long. Truncated/n");	    }	}	else if( H5Tequal( H5T_NATIVE_ULONG,  hAttrNativeType ) ) {	    for( i=0; i < nAttrElmts; i++ ) {		sprintf( szData, "%ld ", ((unsigned long *)buf)[i] );		if( CPLStrlcat(szValue,szData,MAX_METADATA_LEN) >= MAX_METADATA_LEN )		    CPLError( CE_Warning, CPLE_OutOfMemory,			      "Header data too long. Truncated/n");	    }	}	else if( H5Tequal( H5T_NATIVE_FLOAT,  hAttrNativeType ) ) {	    for( i=0; i < nAttrElmts; i++ ) {		sprintf( szData, "%f ",  ((float *)buf)[i] );		if( CPLStrlcat(szValue,szData,MAX_METADATA_LEN) >= MAX_METADATA_LEN )		    CPLError( CE_Warning, CPLE_OutOfMemory,			      "Header data too long. Truncated/n");	    }	}	else if( H5Tequal( H5T_NATIVE_DOUBLE, hAttrNativeType ) ) {	    for( i=0; i < nAttrElmts; i++ ) {		sprintf( szData, "%g ",  ((double *)buf)[i] );		if( CPLStrlcat(szValue,szData,MAX_METADATA_LEN) >= MAX_METADATA_LEN )		    CPLError( CE_Warning, CPLE_OutOfMemory,			      "Header data too long. Truncated/n");	    }	}	CPLFree( buf );    }    H5Sclose(hAttrSpace);    H5Tclose(hAttrNativeType);    H5Tclose(hAttrTypeID);    H5Aclose( hAttrID );    //printf( "%s = %s/n",szTemp, szValue );    poDS->papszMetadata =	CSLSetNameValue( poDS->papszMetadata, szTemp,  			 CPLSPrintf( "%s", szValue ) );    CPLFree( szTemp );    CPLFree( szData );    CPLFree( szValue );    return 0;}
开发者ID:dlsyaim,项目名称:osgEarthX,代码行数:101,


示例27: main

//.........这里部分代码省略.........     * dataset class, order, size, rank and dimensions.     */    datatype = H5Dget_type(dataset); /* datatype handle */        t_class = H5Tget_class(datatype);    if (t_class == H5T_INTEGER)        printf("Dataset has INTEGER type/n");    order = H5Tget_order(datatype);    if (order == H5T_ORDER_LE)        printf("Little endian order/n");    size = H5Tget_size(datatype);    printf("Data size is %d/n", (int)size);    dataspace = H5Dget_space(dataset); /* dataspace handle */    rank      = H5Sget_simple_extent_ndims(dataspace);    status_n  = H5Sget_simple_extent_dims(dataspace, dims_out, NULL);    printf("rank %d, dimensions %lu x %lu/n", rank,        (unsigned long)(dims_out[0]),        (unsigned long)(dims_out[1]));    /*     * Define hyperslab in the dataset.     */    offset[0] = 1;    offset[1] = 2;    count[0] = NX_SUB;    count[1] = NY_SUB;    status = H5Sselect_hyperslab(dataspace, H5S_SELECT_SET, offset, NULL, count, NULL);    /*     * Define the memory dataspace.     */    dimsm[0] = NX;    dimsm[1] = NY;    dimsm[2] = NZ;    memspace = H5Screate_simple(RANK_OUT, dimsm, NULL);    /*     * Define memory hyperslab     */    offset_out[0] = 3;    offset_out[1] = 0;    offset_out[2] = 0;    count_out[0] = NX_SUB;    count_out[1] = NY_SUB;    count_out[2] = 1;    status = H5Sselect_hyperslab(memspace, H5S_SELECT_SET, offset_out, NULL, count_out, NULL);    /*     * Read data from hyperslab in the file into the hyperslab     * in memory and display it.     */    status = H5Dread(dataset, H5T_NATIVE_INT, memspace, dataspace, H5P_DEFAULT, data_out);    for (j = 0; j < NX; j++)    {        for (i = 0; i < NY; i++)        {            printf("%d ", data_out[j][i][0]);        }        printf("/n");    }    /* Result:     * 0 0 0 0 0 0 0     * 0 0 0 0 0 0 0     * 0 0 0 0 0 0 0     * 3 4 5 6 0 0 0     * 4 5 6 7 0 0 0     * 5 6 7 8 0 0 0     * 0 0 0 0 0 0 0     */    /*     * Close/release resources.     
*/    H5Tclose(datatype);    H5Dclose(dataset);    H5Sclose(dataspace);    H5Sclose(memspace);    H5Fclose(file);    return 0;}
开发者ID:ludwig,项目名称:examples,代码行数:101,


示例28: main

intmain (void){    hid_t       file,  dataset;		/* file and dataset handles */    hid_t 	dataspace, datatype;	/* handles */    hid_t       attr; 			/* attribute identifiers */    herr_t      status;    int wdata = 45;    /*     * Create a new file using H5F_ACC_TRUNC access,     * default file creation properties, and default file     * access properties.     */    file = H5Fcreate(H5FILE_NAME, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);    /*     * Describe the size of the array and create the data space for fixed     * size dataset.     */    dataspace = H5Screate(H5S_SCALAR);    /*     * Define datatype for the data in the file.     */    datatype = H5Tcopy(H5T_NATIVE_INT);    /*     * Create a new dataset within the file using defined dataspace and     * datatype and default dataset creation properties.     */    dataset = H5Dcreate2(file, "scalar", datatype, dataspace, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);    /*     * Write the data to the dataset using default transfer properties.     */    status = H5Dwrite(dataset, H5T_NATIVE_INT, H5S_ALL, H5S_ALL, H5P_DEFAULT, &wdata);    /*     * Create scalar attributes.     */    attr = H5Acreate2(dataset, "value", H5T_NATIVE_INT, dataspace, H5P_DEFAULT, H5P_DEFAULT);    status  = H5Awrite(attr, H5T_NATIVE_INT, &wdata);    /*     * Close attributes.     */    H5Aclose(attr);    /*     * Close/release resources.     */    H5Sclose(dataspace);    H5Tclose(datatype);    H5Dclose(dataset);    H5Fclose(file);    return 0;}
开发者ID:OPENDAP,项目名称:hdf5_handler,代码行数:66,


示例29: H5Fcreate

void hdfFileSaver::saveNow(void){    QDir prnt;    herr_t status;    hsize_t maxdims[1];    hsize_t d_dims[1];    int rank = 1;    hsize_t datasetlength;    size_t      memdims[1];    hsize_t     count[1];              /* size of subset in the file */      hsize_t     offset[1];             /* subset offset in the file */      hsize_t     mcount[1];      hsize_t     moffset[1];hid_t prop;    if (events==0)        return;        if (access(fname.toStdString().c_str(), F_OK) ==-1)        {            hfile = H5Fcreate(                        fname.toStdString().c_str(),                        H5F_ACC_EXCL ,                        H5P_DEFAULT,                        H5P_DEFAULT ) ;            topgroup = H5Gcreate(                        hfile,                        "iqdata_raw",                        H5P_DEFAULT,                        H5P_DEFAULT,                        H5P_DEFAULT);            hid_t aid3  = H5Screate(H5S_SCALAR);            hid_t  atype = H5Tcopy(H5T_C_S1);            H5Tset_size(atype, 4);            hid_t  attr3 = H5Acreate1(topgroup, "type", atype, aid3, H5P_DEFAULT);            status = H5Awrite(attr3, atype,"dict");            H5Aclose(attr3);            H5Tclose(atype);            H5Sclose(aid3);        }        else        {           //printf("File already exists, will append/n");            hfile = H5Fopen(                fname.toStdString().c_str(),                H5F_ACC_RDWR,                H5P_DEFAULT );            topgroup = H5Gopen(hfile, "iqdata_raw", H5P_DEFAULT);            //fprintf(stderr,"Bad Hdf file already existant, cannot open/n");         }        if (hfile <=0 || topgroup <=0)        {           fprintf(stderr,"Bad Hdf file, cannot open/n");           return;        }        if (true)        {            //QHash<int,QHash<QString,QList<float> > > events;            QList<int> keys1=(*events).keys();            for (int i=0;i<keys1.length();i++)            {                int chan = keys1[i];                if 
((*events)[chan]["bin"].length() > 0)                {                    int bin = (int)((*events)[chan]["bin"][0]);                    QString dirname = QString("keyint_%1").arg(chan);                    //turn off errors when we query the group, using open                    hid_t error_stack = H5Eget_current_stack();                    H5E_auto2_t  oldfunc;                    void *old_client_data;                    H5Eget_auto(error_stack, &oldfunc, &old_client_data);                    H5Eset_auto(error_stack, NULL, NULL);                    channelgroup = H5Gopen(                                topgroup,                                dirname.toStdString().c_str(),                                H5P_DEFAULT);//.........这里部分代码省略.........
开发者ID:argonnexraydetector,项目名称:RoachFirmPy,代码行数:101,


示例30: H5Eset_auto2

//.........这里部分代码省略.........    // Create the data set    data_set = H5Dcreate2(db, datapath.c_str(), desc, data_space, H5P_DEFAULT,                             data_set_params, H5P_DEFAULT);    H5Dset_extent(data_set, data_dims);    // Add attribute pointing to nuc path    hid_t nuc_attr_type = H5Tcopy(H5T_C_S1);    H5Tset_size(nuc_attr_type, nucpath.length());    hid_t nuc_attr_space = H5Screate(H5S_SCALAR);    hid_t nuc_attr = H5Acreate2(data_set, "nucpath", nuc_attr_type, nuc_attr_space,                                 H5P_DEFAULT, H5P_DEFAULT);    H5Awrite(nuc_attr, nuc_attr_type, nucpath.c_str());    H5Fflush(db, H5F_SCOPE_GLOBAL);    // Remember to de-allocate    delete[] data_fill_value;  };  // Get the data hyperslab  data_hyperslab = H5Dget_space(data_set);  hsize_t data_count[1] = {1};  H5Sselect_hyperslab(data_hyperslab, H5S_SELECT_SET, data_offset, NULL, data_count, NULL);  // Get a memory space for writing  hid_t mem_space = H5Screate_simple(1, data_count, data_max_dims);  // Write the row...  H5Dwrite(data_set, desc, mem_space, data_hyperslab, H5P_DEFAULT, mat_data);  // Close out the Dataset  H5Fflush(db, H5F_SCOPE_GLOBAL);  H5Dclose(data_set);  H5Sclose(data_space);  H5Tclose(desc);  //  // Write out the metadata to the file  //  std::string attrpath = datapath + "_metadata";  hid_t metadatapace, attrtype, metadataet, metadatalab, attrmemspace;  int attrrank;   attrtype = H5Tvlen_create(H5T_NATIVE_CHAR);  // get / make the data set  bool attrpath_exists = h5wrap::path_exists(db, attrpath);  if (attrpath_exists) {    metadataet = H5Dopen2(db, attrpath.c_str(), H5P_DEFAULT);    metadatapace = H5Dget_space(metadataet);    attrrank = H5Sget_simple_extent_dims(metadatapace, data_dims, data_max_dims);    if (data_dims[0] <= row_num) {      // row == -0, extend to data set so that we can append, or      // row_num is larger than current dimension, resize to accomodate.      
data_dims[0] = row_num + 1;      H5Dset_extent(metadataet, data_dims);    }    data_offset[0] = row_num;  } else {    hid_t metadataetparams;    hsize_t attrchunkdims [1];    // Make data set properties to enable chunking    metadataetparams = H5Pcreate(H5P_DATASET_CREATE);    attrchunkdims[0] = chunksize; 
开发者ID:crbates,项目名称:pyne,代码行数:67,



注:本文中的H5Tclose函数示例整理自Github/MSDocs等源码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。


C++ H5Tcopy函数代码示例
C++ H5Sselect_hyperslab函数代码示例
万事OK自学网:51自学网_软件自学网_CAD自学网自学excel、自学PS、自学CAD、自学C语言、自学css3实例,是一个通过网络自主学习工作技能的自学平台,网友喜欢的软件自学网站。