
Tutorial: C++ HOFFSET Code Examples

51自学网 2021-06-01 21:23:39
This tutorial on C++ HOFFSET code examples is quite practical; we hope it helps you.

This article collects typical usage examples of HOFFSET in C++. HOFFSET is a macro provided by the HDF5 library that yields the byte offset of a struct member within its struct (essentially offsetof), which is the offset H5Tinsert expects when registering the fields of a compound datatype. If you are wondering what HOFFSET does, how it is called in practice, or what real code that uses it looks like, the selected examples below may help.

Thirty HOFFSET code examples are shown below, ordered roughly by popularity.
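Before the project examples, here is a minimal sketch of the pattern they all share. It is not taken from any project below; the struct point_t and the function make_point_type are hypothetical names used only for illustration.

#include <hdf5.h>

/* Hypothetical in-memory record, used only to illustrate HOFFSET. */
typedef struct {
    int    id;
    double x;
} point_t;

/* Build a compound datatype; HOFFSET(type, member) supplies the byte
 * offset of each member that H5Tinsert needs. */
static hid_t make_point_type(void)
{
    hid_t tid = H5Tcreate(H5T_COMPOUND, sizeof(point_t));
    if (tid < 0)
        return -1;

    if (H5Tinsert(tid, "id", HOFFSET(point_t, id), H5T_NATIVE_INT) < 0 ||
        H5Tinsert(tid, "x",  HOFFSET(point_t, x),  H5T_NATIVE_DOUBLE) < 0) {
        H5Tclose(tid);
        return -1;
    }
    return tid;   /* caller closes the type with H5Tclose() */
}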

Example 1: create_symbol_datatype

/*-------------------------------------------------------------------------
 * Function:    create_symbol_datatype
 *
 * Purpose:     Create's the HDF5 datatype used for elements in the SWMR
 *              testing datasets.
 *
 * Parameters:  N/A
 *
 * Return:      Success:    An HDF5 type ID
 *              Failure:    -1
 *
 *-------------------------------------------------------------------------
 */
hid_t
create_symbol_datatype(void)
{
    hid_t sym_type_id;          /* Datatype ID for symbol */
    hid_t opaq_type_id;         /* Datatype ID for opaque part of record */

    /* Create opaque datatype to represent other information for this record */
    if((opaq_type_id = H5Tcreate(H5T_OPAQUE, (size_t)DTYPE_SIZE)) < 0)
        return -1;

    /* Create compound datatype for symbol */
    if((sym_type_id = H5Tcreate(H5T_COMPOUND, sizeof(symbol_t))) < 0)
        return -1;

    /* Insert fields in symbol datatype */
    if(H5Tinsert(sym_type_id, "rec_id", HOFFSET(symbol_t, rec_id), H5T_NATIVE_UINT64) < 0)
        return -1;
    if(H5Tinsert(sym_type_id, "info", HOFFSET(symbol_t, info), opaq_type_id) < 0)
        return -1;

    /* Close opaque datatype */
    if(H5Tclose(opaq_type_id) < 0)
        return -1;

    return sym_type_id;
} /* end create_symbol_datatype() */
Developer: FilipeMaia, project: hdf5, lines: 39


Example 2: create_ieee_complex256

/* Counterpart for complex256 */
hid_t create_ieee_complex256(const char *byteorder) {
  herr_t err = 0;
  hid_t float_id, complex_id;
  H5T_order_t h5order = H5Tget_order(H5T_NATIVE_LDOUBLE);

  complex_id = H5Tcreate(H5T_COMPOUND, sizeof(npy_complex256));
  float_id = H5Tcopy(H5T_NATIVE_LDOUBLE);
  if (float_id < 0)
  {
    H5Tclose(complex_id);
    return float_id;
  }

  if ((strcmp(byteorder, "little") == 0) && (h5order != H5T_ORDER_LE))
    err = H5Tset_order(float_id, H5T_ORDER_LE);
  else if ((strcmp(byteorder, "big") == 0) && (h5order != H5T_ORDER_BE))
    err = H5Tset_order(float_id, H5T_ORDER_BE);
  if (err < 0)
  {
    H5Tclose(complex_id);
    return err;
  }

  H5Tinsert(complex_id, "r", HOFFSET(npy_complex256, real), float_id);
  H5Tinsert(complex_id, "i", HOFFSET(npy_complex256, imag), float_id);
  H5Tclose(float_id);
  return complex_id;
}
Developer: bbudescu, project: PyTables, lines: 30


Example 3: make_particle_type

/*-------------------------------------------------------------------------
 * function to create a datatype representing the particle struct
 *-------------------------------------------------------------------------
 */
static hid_t
make_particle_type(void)
{
    hid_t type_id;
    hid_t string_type;
    size_t type_size = sizeof(particle_t);

    /* Create the memory data type. */
    if ((type_id = H5Tcreate (H5T_COMPOUND, type_size )) < 0 )
        return -1;

    /* Insert fields. */
    string_type = H5Tcopy( H5T_C_S1 );
    H5Tset_size( string_type, (size_t)16 );

    if ( H5Tinsert(type_id, "Name", HOFFSET(particle_t, name) , string_type ) < 0 )
        return -1;
    if ( H5Tinsert(type_id, "Lat", HOFFSET(particle_t, lati) , H5T_NATIVE_INT ) < 0 )
        return -1;
    if ( H5Tinsert(type_id, "Long", HOFFSET(particle_t, longi) , H5T_NATIVE_INT ) < 0 )
        return -1;
    if ( H5Tinsert(type_id, "Pressure", HOFFSET(particle_t, pressure) , H5T_NATIVE_FLOAT ) < 0 )
        return -1;
    if ( H5Tinsert(type_id, "Temperature", HOFFSET(particle_t, temperature) , H5T_NATIVE_DOUBLE ) < 0 )
        return -1;

    return type_id;
}
Developer: quinoacomputing, project: HDF5, lines: 32


Example 4: hdf5

    /** Creates a HDF5 type identifier for the [kmer,abundance] structure. This type will be used
     * for dumping Count instances in a HDF5 file (like SortingCount algorithm does).
     * \param[in] isCompound : tells whether the structure is compound (SHOULD BE OBSOLETE IN THE FUTURE)
     * \return the HDF5 identifier for the type. */
    static hid_t hdf5 (bool& isCompound)
    {
        hid_t abundanceType = H5T_NATIVE_UINT16;

        if (sizeof(Number)==1) {
            abundanceType = H5T_NATIVE_UINT8;
        }
        else if (sizeof(Number)==2) {
            abundanceType = H5T_NATIVE_UINT16;
        }
        else if (sizeof(Number)==4) {
            abundanceType = H5T_NATIVE_UINT32;
        }
        else if (sizeof(Number)==8) {
            abundanceType = H5T_NATIVE_UINT64;
        }
        else {
            throw "Bad type size for Abundance HDF5 serialization";
        }

        hid_t result = H5Tcreate (H5T_COMPOUND, sizeof(Abundance));

        H5Tinsert (result, "value",      HOFFSET(Abundance, value),     Type::hdf5(isCompound));
        H5Tinsert (result, "abundance",  HOFFSET(Abundance, abundance), abundanceType);

        isCompound = true;

        return result;
    }
Developer: cdeltel, project: gatb-core-mirrored, lines: 32


Example 5: gent_compound

/*-------------------------------------------------------------------------
 * Function:    gent_compound
 *
 * Purpose:     Generate a compound dataset in LOC_ID
 *
 *-------------------------------------------------------------------------
 */
static void gent_compound(hid_t loc_id)
{
    typedef struct s_t
    {
        char str1[20];
        char str2[20];
    } s_t;

    hid_t   sid, did, tid_c, tid_s;
    hsize_t dims[1] = {2};
    s_t     buf[2]  = {{"str1", "str2"}, {"str3", "str4"}};

    /* create dataspace */
    sid = H5Screate_simple(1, dims, NULL);

    /* create a compound type */
    tid_c = H5Tcreate(H5T_COMPOUND, sizeof(s_t));
    tid_s = H5Tcopy(H5T_C_S1);
    H5Tset_size(tid_s, 20);

    H5Tinsert(tid_c, "str1", HOFFSET(s_t,str1), tid_s);
    H5Tinsert(tid_c, "str2", HOFFSET(s_t,str2), tid_s);

    /* create dataset */
    did = H5Dcreate2(loc_id, DATASET_COMPOUND, tid_c, sid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

    /* write */
    H5Dwrite(did, tid_c, H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);

    /* close */
    H5Sclose(sid);
    H5Dclose(did);
    H5Tclose(tid_c);
    H5Tclose(tid_s);
}
Developer: Hulalazz, project: rnnlib, lines: 41


Example 6: main

int main(int argc, char *argv[])
{
    hdf_sa_t arr[LEN];
    initArr(arr, LEN);

    // create data type corresponding to compound struct
    hid_t cid = H5Tcreate(H5T_COMPOUND, sizeof(hdf_sa_t));
    H5Tinsert(cid, "a", HOFFSET(hdf_sa_t, a), H5T_NATIVE_INT);
    H5Tinsert(cid, "b", HOFFSET(hdf_sa_t, b), H5T_NATIVE_FLOAT);
    H5Tinsert(cid, "c", HOFFSET(hdf_sa_t, c), H5T_NATIVE_DOUBLE);

    // write data to file
    hid_t fid = H5Fcreate("compound.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);

    // create data space
    hsize_t dim[1] = {LEN};
    hid_t space = H5Screate_simple(1, dim, NULL);
    hid_t dataset = H5Dcreate(fid, "compound", cid, space, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);

    // write data
    H5Dwrite(dataset, cid, H5S_ALL, H5S_ALL, H5P_DEFAULT, arr);

    H5Sclose(space);
    H5Dclose(dataset);
    H5Fclose(fid);

    return 0;
}
Developer: wxm71, project: todo, lines: 28


Example 7: sizeof

void pyne::Material::_load_comp_protocol1(H5::H5File * db, std::string datapath, int row)
{
  H5::DataSet data_set = (*db).openDataSet(datapath);

  hsize_t data_offset[1] = {row};
  if (row < 0)
  {
    // Handle negative row indecies
    H5::DataSpace data_space = data_set.getSpace();
    hsize_t data_dims[1];
    int data_rank = data_space.getSimpleExtentDims(data_dims);
    data_offset[0] += data_dims[0];
  };

  // Grab the nucpath
  std::string nucpath;
  H5::Attribute nuc_attr = data_set.openAttribute("nucpath");
  hsize_t nuc_attr_len = nuc_attr.getStorageSize() / sizeof(char);
  H5::StrType nuc_attr_type(0, nuc_attr_len);
  nuc_attr.read(nuc_attr_type, nucpath);

  // Grab the nuclides
  std::vector<int> nuclides = h5wrap::h5_array_to_cpp_vector_1d<int>(db, nucpath, H5::PredType::NATIVE_INT);
  int nuc_size = nuclides.size();
  hsize_t nuc_dims[1] = {nuc_size};

  // Get the data hyperslab
  H5::DataSpace data_hyperslab = data_set.getSpace();
  hsize_t data_count[1] = {1};
  data_hyperslab.selectHyperslab(H5S_SELECT_SET, data_count, data_offset);

  // Get memory space for writing
  H5::DataSpace mem_space (1, data_count);

  // Get material type
  size_t material_struct_size = sizeof(pyne::material_struct) + sizeof(double)*(nuc_size);
  H5::CompType data_desc(material_struct_size);
  H5::ArrayType comp_values_array_type (H5::PredType::NATIVE_DOUBLE, 1, nuc_dims);

  // make the data table type
  data_desc.insertMember("name", HOFFSET(pyne::material_struct, name), H5::StrType(0, 20));
  data_desc.insertMember("mass", HOFFSET(pyne::material_struct, mass), H5::PredType::NATIVE_DOUBLE);
  data_desc.insertMember("atoms_per_mol", HOFFSET(pyne::material_struct, atoms_per_mol), H5::PredType::NATIVE_DOUBLE);
  data_desc.insertMember("comp", HOFFSET(pyne::material_struct, comp), comp_values_array_type);

  // make the data array, have to over-allocate
  material_struct * mat_data = (material_struct *) malloc(material_struct_size);

  // Finally, get data and put in on this instance
  data_set.read(mat_data, data_desc, mem_space, data_hyperslab);

  name = std::string((*mat_data).name);
  mass = (*mat_data).mass;
  atoms_per_mol = (*mat_data).atoms_per_mol;

  for (int i = 0; i < nuc_size; i++)
    comp[nuclides[i]] = (double) (*mat_data).comp[i];

  free(mat_data);
};
Developer: chrisdembia, project: pyne, lines: 59


Example 8: hdf5

inline static hid_t hdf5 (bool& compound)
{
    hid_t result = H5Tcreate (H5T_COMPOUND, sizeof(Entry));
    H5Tinsert (result, "index",      HOFFSET(Entry, index),     H5T_NATIVE_UINT16);
    H5Tinsert (result, "abundance",  HOFFSET(Entry, abundance), H5T_NATIVE_UINT64);
    compound = true;
    return result;
}
Developer: zy26, project: gatb-core, lines: 8


Example 9: require_group

void NSDFWriter::createEventMap()
{
    herr_t status;
    hid_t eventMapContainer = require_group(filehandle_, MAPEVENTSRC);
    // Open the container for the event maps
    // Create the Datasets themselves (one for each field - each row
    // for one object).
    for (map< string, vector < string > >::iterator ii = classFieldToEventSrc_.begin();
         ii != classFieldToEventSrc_.end();
         ++ii){
        vector < string > pathTokens;
        tokenize(ii->first, "/", pathTokens);
        string className = pathTokens[0];
        string fieldName = pathTokens[1];
        hid_t classGroup = require_group(eventMapContainer, className);
        hid_t strtype = H5Tcopy(H5T_C_S1);
        status = H5Tset_size(strtype, H5T_VARIABLE);
        // create file space
        hid_t ftype = H5Tcreate(H5T_COMPOUND, sizeof(hvl_t) + sizeof(hobj_ref_t));
        status = H5Tinsert(ftype, "source", 0, strtype);
        status = H5Tinsert(ftype, "data", sizeof(hvl_t), H5T_STD_REF_OBJ);
        hsize_t dims[1] = {ii->second.size()};
        hid_t space = H5Screate_simple(1, dims, NULL);
        // The dataset for mapping is named after the field
        hid_t ds = H5Dcreate2(classGroup, fieldName.c_str(), ftype, space,
                              H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT);
        status = H5Sclose(space);
        map_type * buf = (map_type*)calloc(ii->second.size(), sizeof(map_type));
        // Populate the buffer entries with source uid and data
        // reference
        for (unsigned int jj = 0; jj < ii->second.size(); ++jj){
            buf->source = ii->second[jj].c_str();
            char * dsname = (char*)calloc(256, sizeof(char));
            ssize_t size = H5Iget_name(classFieldToEvent_[ii->first][jj], dsname, 255);
            if (size > 255){
                free(dsname);
                dsname = (char*)calloc(size, sizeof(char));
                size = H5Iget_name(classFieldToEvent_[ii->first][jj], dsname, 255);
            }
            status = H5Rcreate(&(buf->data), filehandle_, dsname, H5R_OBJECT, -1);
            free(dsname);
            assert(status >= 0);
        }
        // create memory space
        hid_t memtype = H5Tcreate(H5T_COMPOUND, sizeof(map_type));
        status = H5Tinsert(memtype, "source",
                           HOFFSET(map_type, source), strtype);
        status = H5Tinsert(memtype, "data",
                           HOFFSET(map_type, data), H5T_STD_REF_OBJ);
        status = H5Dwrite(ds, memtype,  H5S_ALL, H5S_ALL, H5P_DEFAULT, buf);
        free(buf);
        status = H5Tclose(strtype);
        status = H5Tclose(ftype);
        status = H5Tclose(memtype);
        status = H5Dclose(ds);
    }
}
Developer: asiaszmek, project: moose-core, lines: 57


Example 10: arma_H5Tcreate

inline
hid_t
get_hdf5_type< std::complex<double> >()
  {
  hid_t type = arma_H5Tcreate(H5T_COMPOUND, sizeof(hdf5_complex_t<double>));

  arma_H5Tinsert(type, "real", HOFFSET(hdf5_complex_t<double>, real), arma_H5T_NATIVE_DOUBLE);
  arma_H5Tinsert(type, "imag", HOFFSET(hdf5_complex_t<double>, imag), arma_H5T_NATIVE_DOUBLE);

  return type;
  }
Developer: RcppCore, project: RcppArmadillo, lines: 11


Example 11: H5Tcreate

inline
hid_t
get_hdf5_type< std::complex<float> >()
  {
  hid_t type = H5Tcreate(H5T_COMPOUND, sizeof(hdf5_complex_t<float>));

  H5Tinsert(type, "real", HOFFSET(hdf5_complex_t<float>, real), H5T_NATIVE_FLOAT);
  H5Tinsert(type, "imag", HOFFSET(hdf5_complex_t<float>, imag), H5T_NATIVE_FLOAT);

  return type;
  }
Developer: ELEN4002-Lab-Project-2012, project: ELEN4002-Lab-Project, lines: 11


Example 12: get_species_comp_type

static H5::CompType get_species_comp_type()
{
    H5::CompType h5_species_comp_type(sizeof(h5_species_struct));
    h5_species_comp_type.insertMember(
        std::string("id"), HOFFSET(h5_species_struct, id),
        H5::PredType::STD_I32LE);
    h5_species_comp_type.insertMember(
        std::string("serial"), HOFFSET(h5_species_struct, serial),
        H5::StrType(H5::PredType::C_S1, 32));
    return h5_species_comp_type;
}
Developer: greatlse, project: ecell4, lines: 11


Example 13: get_particle_comp_type

static H5::CompType get_particle_comp_type()
{
    H5::CompType h5_particle_comp_type(sizeof(h5_particle_struct));
    h5_particle_comp_type.insertMember(
        std::string("lot"), HOFFSET(h5_particle_struct, lot),
        H5::PredType::NATIVE_INT);
    h5_particle_comp_type.insertMember(
        std::string("serial"), HOFFSET(h5_particle_struct, serial),
        H5::PredType::NATIVE_INT);
    h5_particle_comp_type.insertMember(
        std::string("sid"), HOFFSET(h5_particle_struct, sid),
        H5::PredType::STD_I32LE);
    h5_particle_comp_type.insertMember(
        std::string("posx"), HOFFSET(h5_particle_struct, posx),
        H5::PredType::NATIVE_DOUBLE);
    h5_particle_comp_type.insertMember(
        std::string("posy"), HOFFSET(h5_particle_struct, posy),
        H5::PredType::NATIVE_DOUBLE);
    h5_particle_comp_type.insertMember(
        std::string("posz"), HOFFSET(h5_particle_struct, posz),
        H5::PredType::NATIVE_DOUBLE);
    h5_particle_comp_type.insertMember(
        std::string("radius"), HOFFSET(h5_particle_struct, radius),
        H5::PredType::NATIVE_DOUBLE);
    h5_particle_comp_type.insertMember(
        std::string("D"), HOFFSET(h5_particle_struct, D),
        H5::PredType::NATIVE_DOUBLE);
    return h5_particle_comp_type;
}
Developer: greatlse, project: ecell4, lines: 29


Example 14: write

void write(hid_t& file, std::vector<Region>& regions)
{
  const size_t record_size = sizeof(Region);

  size_t record_offset[] = { HOFFSET(Region, bam_file_key),
                             HOFFSET(Region, chromosome),
                             HOFFSET(Region, region_name),
                             HOFFSET(Region, start),
                             HOFFSET(Region, stop),
                             HOFFSET(Region, strand),
                             HOFFSET(Region, count),
                             HOFFSET(Region, normalized_count) };

  size_t field_sizes[] = { sizeof(Region::bam_file_key),
                           sizeof(Region::chromosome),
                           sizeof(Region::region_name),
                           sizeof(Region::start),
                           sizeof(Region::stop),
                           sizeof(Region::strand),
                           sizeof(Region::count),
                           sizeof(Region::normalized_count) };

  herr_t status = H5TBappend_records(file, "region_counts", regions.size(), record_size, record_offset,
                                     field_sizes, regions.data());
  if (status != 0)
  {
    std::stringstream ss;
    ss << "Error appending record, status = " << status;
    throw std::runtime_error(ss.str());
  }
}
Developer: BoulderLabs, project: pipeline, lines: 31


Example 15: linkDatatype

hid_t linkDatatype()
{
    hid_t tLink;
    hid_t tPosition;
    hid_t tNumber;

    tLink = H5Tcreate(H5T_COMPOUND, sizeof(link_t));
    tPosition = H5Tcopy(H5T_NATIVE_INT32);
    tNumber = H5Tcopy(H5T_NATIVE_INT32);
    H5Tinsert(tLink, "position", HOFFSET(link_t, position), tPosition);
    H5Tinsert(tLink, "number", HOFFSET(link_t, number), tNumber);
    H5Tclose(tPosition);
    H5Tclose(tNumber);
    return tLink;
}
Developer: hbredin, project: pinocchIO, lines: 15


Example 16: create_ieee_complex128

/* Counterpart for complex128 */
hid_t create_ieee_complex128(const char *byteorder) {
  hid_t float_id, complex_id;

  complex_id = H5Tcreate(H5T_COMPOUND, sizeof(npy_complex128));
  if (byteorder == NULL)
    float_id = H5Tcopy(H5T_NATIVE_DOUBLE);
  else if (strcmp(byteorder, "little") == 0)
    float_id = H5Tcopy(H5T_IEEE_F64LE);
  else
    float_id = H5Tcopy(H5T_IEEE_F64BE);

  H5Tinsert(complex_id, "r", HOFFSET(npy_complex128, real), float_id);
  H5Tinsert(complex_id, "i", HOFFSET(npy_complex128, imag), float_id);
  H5Tclose(float_id);
  return complex_id;
}
Developer: andreas-h, project: PyTables, lines: 16


Example 17: readDoubleComplexMatrix

int readDoubleComplexMatrix(int _iDatasetId, double *_pdblReal, double *_pdblImg)
{
    hid_t compoundId;
    herr_t status;
    int iDims = 0;
    int* piDims = NULL;
    int iComplex = 0;
    int iSize = 1;
    doublecomplex* pData = NULL;
    int i = 0;

    /*define compound dataset*/
    compoundId = H5Tcreate(H5T_COMPOUND, sizeof(doublecomplex));
    H5Tinsert(compoundId, "real", HOFFSET(doublecomplex, r), H5T_NATIVE_DOUBLE);
    H5Tinsert(compoundId, "imag", HOFFSET(doublecomplex, i), H5T_NATIVE_DOUBLE);

    //get dimension from dataset
    getDatasetInfo(_iDatasetId, &iComplex, &iDims, NULL);
    piDims = (int*)MALLOC(sizeof(int) * iDims);
    iSize = getDatasetInfo(_iDatasetId, &iComplex, &iDims, piDims);
    if (iSize < 0)
    {
        FREE(piDims);
        return -1;
    }
    FREE(piDims);

    //alloc temp array
    pData = (doublecomplex*)MALLOC(sizeof(doublecomplex) * iSize);
    //Read the data.
    status = H5Dread(_iDatasetId, compoundId, H5S_ALL, H5S_ALL, H5P_DEFAULT, pData);
    if (status < 0)
    {
        FREE(pData);
        return -1;
    }

    vGetPointerFromDoubleComplex(pData, iSize, _pdblReal, _pdblImg);
    FREE(pData);
    status = H5Dclose(_iDatasetId);
    if (status < 0)
    {
        return -1;
    }

    return 0;
}
Developer: ScilabOrg, project: scilab, lines: 48


Example 18: main

int main()
{
  hid_t fprop;
  hid_t fid;
  hid_t vol_id = H5VL_memvol_init();
  char name[1024];

  // create some datatypes
  hid_t tid = H5Tcreate (H5T_COMPOUND, sizeof(complex_type));
  H5Tinsert(tid, "re", HOFFSET(complex_type,re), H5T_NATIVE_DOUBLE);
  H5Tinsert(tid, "im", HOFFSET(complex_type,im), H5T_NATIVE_DOUBLE);
  hid_t s10 = H5Tcopy(H5T_C_S1);
  H5Tset_size(s10, 10);
  H5Tinsert(tid, "name", HOFFSET(complex_type,name), s10);
  H5Tinsert(tid, "val", HOFFSET(complex_type,val), H5T_NATIVE_INT);

  // packed version of the datatype
  hid_t disk_tid = H5Tcopy (tid);
  H5Tpack(disk_tid);

  fprop = H5Pcreate(H5P_FILE_ACCESS);
  H5Pset_vol(fprop, vol_id, &fprop);

  fid = H5Fcreate("test", H5F_ACC_TRUNC, H5P_DEFAULT, fprop);
  H5VLget_plugin_name(fid, name, 1024);
  printf ("%s using VOL %s\n", __FILE__ , name);

  assert(H5Tcommit(fid, "t_complex", tid, H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT) >= 0);
  assert(H5Tcommit(fid, "t_complex_p", disk_tid,  H5P_DEFAULT, H5P_DEFAULT, H5P_DEFAULT) >= 0);

  hid_t tid_stored1 = H5Topen(fid, "t_complex", H5P_DEFAULT);
  hid_t tid_stored2 = H5Topen(fid, "t_complex_p", H5P_DEFAULT);

  // hid_t tid_stored3 = H5Topen(fid, "NotExisting", H5P_DEFAULT);
  // assert(tid_stored3 < 0);

  assert(H5Tequal(tid_stored1, tid));
  assert(H5Tequal(tid_stored2, disk_tid));

  H5Fclose(fid);
  H5Tclose(tid);
  H5Tclose(disk_tid);

  H5VL_memvol_finalize();

  return 0;
}
Developer: ESiWACE, project: ESD-Middleware, lines: 47


Example 19: atom_dec_desc

void pyne::_load_atomic_decay()
{
  // Loads the important parts of atomic_decay table into memory

  //Check to see if the file is in HDF5 format.
  if (!pyne::file_exists(pyne::NUC_DATA_PATH))
    throw pyne::FileNotFound(pyne::NUC_DATA_PATH);

  bool isH5 = H5::H5File::isHdf5(pyne::NUC_DATA_PATH);
  if (!isH5)
    throw h5wrap::FileNotHDF5(pyne::NUC_DATA_PATH);

  // Get the HDF5 compound type (table) description
  H5::CompType atom_dec_desc(sizeof(atomic_decay_struct));
  atom_dec_desc.insertMember("from_nuc_name", HOFFSET(atomic_decay_struct, from_nuc_name), H5::StrType(0, 6));
  atom_dec_desc.insertMember("from_nuc_zz",   HOFFSET(atomic_decay_struct, from_nuc_zz),   H5::PredType::NATIVE_INT);
  atom_dec_desc.insertMember("level", HOFFSET(atomic_decay_struct, level), H5::PredType::NATIVE_DOUBLE);
  atom_dec_desc.insertMember("to_nuc_name", HOFFSET(atomic_decay_struct, to_nuc_name), H5::StrType(0, 6));
  atom_dec_desc.insertMember("to_nuc_zz",   HOFFSET(atomic_decay_struct, to_nuc_zz),   H5::PredType::NATIVE_INT);
  atom_dec_desc.insertMember("half_life", HOFFSET(atomic_decay_struct, half_life), H5::PredType::NATIVE_DOUBLE);
  atom_dec_desc.insertMember("decay_const", HOFFSET(atomic_decay_struct, decay_const), H5::PredType::NATIVE_DOUBLE);
  atom_dec_desc.insertMember("branch_ratio", HOFFSET(atomic_decay_struct, branch_ratio), H5::PredType::NATIVE_DOUBLE);

  // Open the HDF5 file
  H5::H5File nuc_data_h5 (pyne::NUC_DATA_PATH.c_str(), H5F_ACC_RDONLY);

  // Open the data set
  H5::DataSet atom_dec_set = nuc_data_h5.openDataSet("/atomic_decay");
  H5::DataSpace atom_dec_space = atom_dec_set.getSpace();
  int atom_dec_length = atom_dec_space.getSimpleExtentNpoints();

  // Read in the data
  atomic_decay_struct * atom_dec_array = new atomic_decay_struct[atom_dec_length];
  atom_dec_set.read(atom_dec_array, atom_dec_desc);

  // close the nuc_data library, before doing anythng stupid
  nuc_data_h5.close();

  // Ok now that we have the array of stucts, put it in the maps
  // giving precednece to ground state values or those seen first.
  int from_nuc;
  double level;
  for(int n = 0; n < atom_dec_length; n++)
  {
    from_nuc = atom_dec_array[n].from_nuc_zz;
    level = atom_dec_array[n].level;

    if (0 == half_life_map.count(from_nuc) || 0.0 == level)
      half_life_map[from_nuc] = atom_dec_array[n].half_life;

    if (0 == decay_const_map.count(from_nuc) || 0.0 == level)
      decay_const_map[from_nuc] = atom_dec_array[n].decay_const;
  };
};
Developer: chrisdembia, project: pyne, lines: 55


Example 20: create_hl_table

/* Create a normal HL table just like the HL examples do */
static int create_hl_table(hid_t fid)
{
    /* Calculate the offsets of the particle struct members in memory */
    size_t part_offset[NFIELDS] = { HOFFSET( particle_t, name ),
                                    HOFFSET( particle_t, lati ),
                                    HOFFSET( particle_t, longi ),
                                    HOFFSET( particle_t, pressure ),
                                    HOFFSET( particle_t, temperature )
                                  };

    /* Define field information */
    const char *field_names[NFIELDS]  =
    { "Name","Latitude", "Longitude", "Pressure", "Temperature" };
    hid_t      field_type[NFIELDS];
    hid_t      string_type;
    hsize_t    chunk_size = 10;
    int        *fill_data = NULL;
    int        compress  = 0;
    herr_t     status;

    /* Initialize the field field_type */
    string_type = H5Tcopy( H5T_C_S1 );
    H5Tset_size( string_type, (size_t)16 );
    field_type[0] = string_type;
    field_type[1] = H5T_NATIVE_INT;
    field_type[2] = H5T_NATIVE_INT;
    field_type[3] = H5T_NATIVE_FLOAT;
    field_type[4] = H5T_NATIVE_DOUBLE;

    /*------------------------------------------------------------------------
    * H5TBmake_table
    *-------------------------------------------------------------------------
    */
    status=H5TBmake_table( "Table Title", fid, H5TB_TABLE_NAME, (hsize_t)NFIELDS,
                           (hsize_t)NRECORDS, sizeof(particle_t),
                           field_names, part_offset, field_type,
                           chunk_size, fill_data, compress, testPart  );

    if(status<0)
        return -1;
    else
        return 0;
}
Developer: quinoacomputing, project: HDF5, lines: 46


Example 21: atomic_weight_desc

void pyne::_load_atomic_mass_map()
{
  // Loads the important parts of atomic_wight table into atomic_mass_map

  //Check to see if the file is in HDF5 format.
  if (!pyne::file_exists(pyne::NUC_DATA_PATH))
    throw pyne::FileNotFound(pyne::NUC_DATA_PATH);

  bool isH5 = H5::H5File::isHdf5(pyne::NUC_DATA_PATH);
  if (!isH5)
    throw h5wrap::FileNotHDF5(pyne::NUC_DATA_PATH);

  // Get the HDF5 compound type (table) description
  H5::CompType atomic_weight_desc(sizeof(atomic_weight_struct));
  atomic_weight_desc.insertMember("nuc_name", HOFFSET(atomic_weight_struct, nuc_name), H5::StrType(0, 6));
  atomic_weight_desc.insertMember("nuc_zz",   HOFFSET(atomic_weight_struct, nuc_zz),   H5::PredType::NATIVE_INT);
  atomic_weight_desc.insertMember("mass",     HOFFSET(atomic_weight_struct, mass),     H5::PredType::NATIVE_DOUBLE);
  atomic_weight_desc.insertMember("error",    HOFFSET(atomic_weight_struct, error),    H5::PredType::NATIVE_DOUBLE);
  atomic_weight_desc.insertMember("abund",    HOFFSET(atomic_weight_struct, abund),    H5::PredType::NATIVE_DOUBLE);

  // Open the HDF5 file
  H5::H5File nuc_data_h5 (pyne::NUC_DATA_PATH.c_str(), H5F_ACC_RDONLY);

  // Open the data set
  H5::DataSet atomic_weight_set = nuc_data_h5.openDataSet("/atomic_weight");
  H5::DataSpace atomic_weight_space = atomic_weight_set.getSpace();
  int atomic_weight_length = atomic_weight_space.getSimpleExtentNpoints();

  // Read in the data
  atomic_weight_struct * atomic_weight_array = new atomic_weight_struct[atomic_weight_length];
  atomic_weight_set.read(atomic_weight_array, atomic_weight_desc);

  // close the nuc_data library, before doing anythng stupid
  nuc_data_h5.close();

  // Ok now that we have the array of stucts, put it in the map
  for(int n = 0; n < atomic_weight_length; n++)
    atomic_mass_map[atomic_weight_array[n].nuc_zz] = atomic_weight_array[n].mass;
};
Developer: chrisdembia, project: pyne, lines: 39


Example 22: create_ieee_complex64

/* Create a HDF5 compound datatype that represents complex numbers
   defined by numpy as complex64. */
hid_t create_ieee_complex64(const char *byteorder) {
  hid_t float_id, complex_id;

  complex_id = H5Tcreate(H5T_COMPOUND, sizeof(npy_complex64));
  if (byteorder == NULL)
    float_id = H5Tcopy(H5T_NATIVE_FLOAT);
  else if (strcmp(byteorder, "little") == 0)
    float_id = H5Tcopy(H5T_IEEE_F32LE);
  else
    float_id = H5Tcopy(H5T_IEEE_F32BE);

  if (float_id < 0)
  {
    H5Tclose(complex_id);
    return float_id;
  }

  H5Tinsert(complex_id, "r", HOFFSET(npy_complex64, real), float_id);
  H5Tinsert(complex_id, "i", HOFFSET(npy_complex64, imag), float_id);
  H5Tclose(float_id);
  return complex_id;
}
Developer: bbudescu, project: PyTables, lines: 24


Example 23: write

void write(hid_t& file,
           const std::vector<CountH5Record>& records)
{
  const size_t record_size = sizeof(CountH5Record);

  size_t record_offset[] = { HOFFSET(CountH5Record, bin_number),
                             HOFFSET(CountH5Record, cell_type),
                             HOFFSET(CountH5Record, chromosome),
                             HOFFSET(CountH5Record, count),
                             HOFFSET(CountH5Record, file_name) };

  size_t field_sizes[] = { sizeof(CountH5Record::bin_number),
                           sizeof(CountH5Record::cell_type),
                           sizeof(CountH5Record::chromosome),
                           sizeof(CountH5Record::count),
                           sizeof(CountH5Record::file_name) };

  herr_t status = H5TBappend_records(file, "bin_counts", records.size(), record_size,
                                     record_offset, field_sizes, records.data());
  if (status != 0)
  {
    std::cerr << "Error appending record, status = " << status << std::endl;
  }
}
Developer: afederation, project: pipeline, lines: 24


Example 24: parallel

  // Constructor
  FileIO::FileIO(Parallel *_parallel, Setup *setup) : parallel(_parallel)
  {
    // Set Initial values
    inputFileName         = setup->get("DataOutput.InputFileName", "--- None ---");
    outputFileName        = setup->get("DataOutput.OutputFileName", "default.h5");
    info                  = setup->get("DataOutput.Info", "No information provided");
    resumeFile            = setup->get("DataOutput.Resume", 0);
    overwriteFile         = setup->get("DataOutput.Overwrite", 0) || (setup->flags & HELIOS_OVERWRITE);

    dataFileFlushTiming  = Timing(setup->get("DataOutput.Flush.Step", -1), setup->get("DataOutput.Flush.Time", 100.));

    ///////// Define Timeing Datatype
    timing_tid = H5Tcreate(H5T_COMPOUND, sizeof(Timing));
    H5Tinsert(timing_tid, "Timestep", HOFFSET(Timing, step), H5T_NATIVE_INT   );
    H5Tinsert(timing_tid, "Time"    , HOFFSET(Timing, time), H5T_NATIVE_DOUBLE);

    // don't changes r and i name otherwise it will break compatibiltiy with pyTables
    complex_tid = H5Tcreate(H5T_COMPOUND, sizeof (Complex));
    H5Tinsert(complex_tid, "r", HOFFSET(Complex,r), H5T_NATIVE_DOUBLE);
    H5Tinsert(complex_tid, "i", HOFFSET(Complex,i), H5T_NATIVE_DOUBLE);

    vector3D_tid = H5Tcreate(H5T_COMPOUND, sizeof (Vector3D));
    H5Tinsert(vector3D_tid, "x", HOFFSET(Vector3D,x), H5T_NATIVE_DOUBLE);
    H5Tinsert(vector3D_tid, "y", HOFFSET(Vector3D,y), H5T_NATIVE_DOUBLE);
    H5Tinsert(vector3D_tid, "z", HOFFSET(Vector3D,z), H5T_NATIVE_DOUBLE);

    hsize_t species_dim[] = { setup->get("Grid.Ns", 1)};
    species_tid = H5Tarray_create(H5T_NATIVE_DOUBLE, 1, species_dim);

    // used for species name
    // BUG : Somehow HDF-8 stores up to 8 chars fro 64 possible. Rest are truncated ! Why ?
    s256_tid = H5Tcopy(H5T_C_S1); H5Tset_size(s256_tid, 64); H5Tset_strpad(s256_tid, H5T_STR_NULLTERM);

    offset0[0] = 0;
    offset0[1] = 0;
    offset0[2] = 0;
    offset0[3] = 0;
    offset0[4] = 0;
    offset0[5] = 0;
    offset0[6] = 0;
    offset0[7] = 0;

    // Create/Load HDF5 file
    if(resumeFile == false || (inputFileName != outputFileName)) create(setup);
  }
Developer: xyuan, project: gkc, lines: 51


Example 25: scat_len_desc

void pyne::_load_scattering_lengths()
{
  // Loads the important parts of atomic_wight table into atomic_mass_map

  //Check to see if the file is in HDF5 format.
  if (!pyne::file_exists(pyne::NUC_DATA_PATH))
    throw pyne::FileNotFound(pyne::NUC_DATA_PATH);

  bool isH5 = H5::H5File::isHdf5(pyne::NUC_DATA_PATH);
  if (!isH5)
    throw h5wrap::FileNotHDF5(pyne::NUC_DATA_PATH);

  // Get the HDF5 compound type (table) description
  H5::CompType scat_len_desc(sizeof(scattering_lengths_struct));
  scat_len_desc.insertMember("nuc_name", HOFFSET(scattering_lengths_struct, nuc_name), H5::StrType(0, 6));
  scat_len_desc.insertMember("nuc_zz",   HOFFSET(scattering_lengths_struct, nuc_zz),   H5::PredType::NATIVE_INT);
  scat_len_desc.insertMember("b_coherent", HOFFSET(scattering_lengths_struct, b_coherent), h5wrap::PYTABLES_COMPLEX128);
  scat_len_desc.insertMember("b_incoherent", HOFFSET(scattering_lengths_struct, b_incoherent), h5wrap::PYTABLES_COMPLEX128);
  scat_len_desc.insertMember("xs_coherent", HOFFSET(scattering_lengths_struct, xs_coherent), H5::PredType::NATIVE_DOUBLE);
  scat_len_desc.insertMember("xs_incoherent", HOFFSET(scattering_lengths_struct, xs_incoherent), H5::PredType::NATIVE_DOUBLE);
  scat_len_desc.insertMember("xs", HOFFSET(scattering_lengths_struct, xs), H5::PredType::NATIVE_DOUBLE);

  // Open the HDF5 file
  H5::H5File nuc_data_h5 (pyne::NUC_DATA_PATH.c_str(), H5F_ACC_RDONLY);

  // Open the data set
  H5::DataSet scat_len_set = nuc_data_h5.openDataSet("/neutron/scattering_lengths");
  H5::DataSpace scat_len_space = scat_len_set.getSpace();
  int scat_len_length = scat_len_space.getSimpleExtentNpoints();

  // Read in the data
  scattering_lengths_struct * scat_len_array = new scattering_lengths_struct[scat_len_length];
  scat_len_set.read(scat_len_array, scat_len_desc);

  // close the nuc_data library, before doing anythng stupid
  nuc_data_h5.close();

  // Ok now that we have the array of stucts, put it in the maps
  for(int n = 0; n < scat_len_length; n++)
  {
    b_coherent_map[scat_len_array[n].nuc_zz] = scat_len_array[n].b_coherent;
    b_incoherent_map[scat_len_array[n].nuc_zz] = scat_len_array[n].b_incoherent;
  };
};
Developer: chrisdembia, project: pyne, lines: 45


Example 26: test_data_conv

/*-------------------------------------------------------------------------
 * Function:    test_data_conv
 *
 * Purpose:     Test data conversion
 *
 * Return:      Success:        0
 *
 *              Failure:        1
 *
 * Programmer:  Raymond Lu
 *              30 November 2012
 *
 *-------------------------------------------------------------------------
 */
static int
test_data_conv(hid_t file)
{
    typedef struct {
        int a, b, c[4], d, e;
    } src_type_t;
    typedef struct {
        int a,    c[4],    e;
    } dst_type_t;

    hid_t       dataspace = -1, dataset = -1;
    hid_t       mem_space = -1;
    hid_t       cparms = -1, dxpl = -1;
    hsize_t     dims[2]  = {NX, NY};
    hsize_t     maxdims[2] = {H5S_UNLIMITED, H5S_UNLIMITED};
    hsize_t     chunk_dims[2] ={CHUNK_NX, CHUNK_NY};
    herr_t      status;
    int         i, j, n;
    const hsize_t four = 4;
    hid_t       st=-1, dt=-1;
    hid_t       array_dt;

    unsigned    filter_mask = 0;
    src_type_t  direct_buf[CHUNK_NX][CHUNK_NY];
    dst_type_t  check_chunk[CHUNK_NX][CHUNK_NY];
    hsize_t     offset[2] = {0, 0};
    size_t      buf_size = CHUNK_NX*CHUNK_NY*sizeof(src_type_t);

    hsize_t start[2];  /* Start of hyperslab */
    hsize_t stride[2]; /* Stride of hyperslab */
    hsize_t count[2];  /* Block count */
    hsize_t block[2];  /* Block sizes */

    TESTING("data conversion for H5DOwrite_chunk");

    /*
     * Create the data space with unlimited dimensions.
     */
    if((dataspace = H5Screate_simple(RANK, dims, maxdims)) < 0)
        goto error;

    if((mem_space = H5Screate_simple(RANK, chunk_dims, NULL)) < 0)
        goto error;

    /*
     * Modify dataset creation properties, i.e. enable chunking
     */
    if((cparms = H5Pcreate(H5P_DATASET_CREATE)) < 0)
        goto error;

    if((status = H5Pset_chunk( cparms, RANK, chunk_dims)) < 0)
        goto error;

    /* Build hdf5 datatypes */
    array_dt = H5Tarray_create2(H5T_NATIVE_INT, 1, &four);
    if((st = H5Tcreate(H5T_COMPOUND, sizeof(src_type_t))) < 0 ||
            H5Tinsert(st, "a", HOFFSET(src_type_t, a), H5T_NATIVE_INT) < 0 ||
            H5Tinsert(st, "b", HOFFSET(src_type_t, b), H5T_NATIVE_INT) < 0 ||
            H5Tinsert(st, "c", HOFFSET(src_type_t, c), array_dt) < 0 ||
            H5Tinsert(st, "d", HOFFSET(src_type_t, d), H5T_NATIVE_INT) < 0 ||
            H5Tinsert(st, "e", HOFFSET(src_type_t, e), H5T_NATIVE_INT) < 0)
        goto error;

    if(H5Tclose(array_dt) < 0)
        goto error;

    array_dt = H5Tarray_create2(H5T_NATIVE_INT, 1, &four);
    if((dt = H5Tcreate(H5T_COMPOUND, sizeof(dst_type_t))) < 0 ||
            H5Tinsert(dt, "a", HOFFSET(dst_type_t, a), H5T_NATIVE_INT) < 0 ||
            H5Tinsert(dt, "c", HOFFSET(dst_type_t, c), array_dt) < 0 ||
            H5Tinsert(dt, "e", HOFFSET(dst_type_t, e), H5T_NATIVE_INT) < 0)
        goto error;

    if(H5Tclose(array_dt) < 0)
        goto error;

    /*
     * Create a new dataset within the file using cparms
     * creation properties.
     */
    if((dataset = H5Dcreate2(file, DATASETNAME4, st, dataspace, H5P_DEFAULT,
                        cparms, H5P_DEFAULT)) < 0)
        goto error;

    if((dxpl = H5Pcreate(H5P_DATASET_XFER)) < 0)
        goto error;
//......... (part of the code omitted here) .........
Developer: CommonLibrary, project: hdf5, lines: 101


Example 27: dspace

void TChainWriteBuffer::write(const std::string& fname, const std::string& group, const std::string& chain, const std::string& meta) {
    H5::H5File* h5file = H5Utils::openFile(fname);
    H5::Group* h5group = H5Utils::openGroup(h5file, group);

    // Dataset properties: optimized for reading/writing entire buffer at once
    int rank = 3;
    hsize_t dim[3] = {length_, nSamples_+2, nDim_};
    H5::DataSpace dspace(rank, &(dim[0]));

    H5::DSetCreatPropList plist;
    plist.setDeflate(9);    // gzip compression level
    plist.setChunk(rank, &(dim[0]));
    float fillvalue = 0;
    plist.setFillValue(H5::PredType::NATIVE_FLOAT, &fillvalue);

    H5::DataSet* dataset = NULL;
    try {
        dataset = new H5::DataSet(h5group->createDataSet(chain, H5::PredType::NATIVE_FLOAT, dspace, plist));
    } catch(H5::GroupIException &group_exception) {
        std::cerr << "Could not create dataset for chain." << std::endl;
        std::cerr << "Dataset '" << group << "/" << chain << "' most likely already exists." << std::endl;
        throw;
    }

    dataset->write(buf, H5::PredType::NATIVE_FLOAT);

    if(meta == "") {    // Store metadata as attributes
        bool *converged = new bool[length_];
        float *lnZ = new float[length_];
        for(unsigned int i=0; i<length_; i++) {
            converged[i] = metadata[i].converged;
            lnZ[i] = metadata[i].lnZ;
        }

        // Allow large attributes to be stored in dense storage, versus compact (which has 64 kB limit)
        //if(length_ > 5) {
        //    hid_t dID = dataset->getCreatePlist().getId();
        //    herr_t res = H5Pset_attr_phase_change(dID, 0, 0);
        //    std::cerr << res << std::endl;
        //    if(res < 0) {
        //        std::cerr << "Failed to specify dense storage." << std::endl;
        //    }
        //}

        H5::DataSpace convSpace(1, &(dim[0]));
        H5::Attribute convAtt = dataset->createAttribute("converged", H5::PredType::NATIVE_CHAR, convSpace);
        convAtt.write(H5::PredType::NATIVE_CHAR, reinterpret_cast<char*>(converged));

        H5::DataSpace lnZSpace(1, &(dim[0]));
        H5::Attribute lnZAtt = dataset->createAttribute("ln(Z)", H5::PredType::NATIVE_FLOAT, lnZSpace);
        lnZAtt.write(H5::PredType::NATIVE_FLOAT, lnZ);

        delete[] converged;
        delete[] lnZ;
    } else {    // Store metadata as separate dataset
        H5::CompType metaType(sizeof(TChainMetadata));
        metaType.insertMember("converged", HOFFSET(TChainMetadata, converged), H5::PredType::NATIVE_CHAR);
        metaType.insertMember("ln(Z)", HOFFSET(TChainMetadata, lnZ), H5::PredType::NATIVE_FLOAT);

        rank = 1;
        H5::DataSpace metaSpace(rank, &(dim[0]));
        H5::DSetCreatPropList metaProp;
        TChainMetadata emptyMetadata = {0, 0};
        metaProp.setFillValue(metaType, &emptyMetadata);
        metaProp.setDeflate(9);
        metaProp.setChunk(rank, &(dim[0]));

        H5::DataSet* metaDataset = new H5::DataSet(h5group->createDataSet(meta, metaType, metaSpace, metaProp));
        metaDataset->write(metadata.data(), metaType);

        delete metaDataset;
        metaDataset = NULL;
    }

    delete dataset;
    delete h5group;
    delete h5file;

    //std::cerr << "Cleaned up." << std::endl;
}
Developer: gregreen, project: bayestar, lines: 79


Example 28: catch

bool TChain::save(std::string fname, std::string group_name, size_t index,
                  std::string dim_name, int compression, int subsample,
                  bool converged, float lnZ) const {
    if((compression<0) || (compression > 9)) {
        std::cerr << "! Invalid gzip compression level: " << compression << std::endl;
        return false;
    }

    H5::Exception::dontPrint();

    H5::H5File *file = H5Utils::openFile(fname);
    if(file == NULL) { return false; }

    /*
    try {
        file->unlink(group_name);
    } catch(...) {
        // pass
    }
    */

    H5::Group *group = H5Utils::openGroup(file, group_name);
    if(group == NULL) {
        delete file;
        return false;
    }

    /*
     *  Attributes
     */

    // Datatype
    H5::CompType att_type(sizeof(TChainAttribute));
    hid_t tid = H5Tcopy(H5T_C_S1);
    H5Tset_size(tid, H5T_VARIABLE);
    att_type.insertMember("dim_name", HOFFSET(TChainAttribute, dim_name), tid);
    //att_type.insertMember("total_weight", HOFFSET(TChainAttribute, total_weight), H5::PredType::NATIVE_FLOAT);
    //att_type.insertMember("ndim", HOFFSET(TChainAttribute, ndim), H5::PredType::NATIVE_UINT64);
    //att_type.insertMember("length", HOFFSET(TChainAttribute, length), H5::PredType::NATIVE_UINT64);

    // Dataspace
    int att_rank = 1;
    hsize_t att_dim = 1;
    H5::DataSpace att_space(att_rank, &att_dim);

    // Dataset
    //H5::Attribute att = group->createAttribute("parameter names", att_type, att_space);

    TChainAttribute att_data;
    att_data.dim_name = new char[dim_name.size()+1];
    std::strcpy(att_data.dim_name, dim_name.c_str());
    //att_data.total_weight = total_weight;
    //att_data.ndim = N;
    //att_data.length = length;

    //att.write(att_type, &att_data);
    delete[] att_data.dim_name;

    //int att_rank = 1;
    //hsize_t att_dim = 1;

    H5::DataType conv_dtype = H5::PredType::NATIVE_UCHAR;
    H5::DataSpace conv_dspace(att_rank, &att_dim);
    //H5::Attribute conv_att = H5Utils::openAttribute(group, "converged", conv_dtype, conv_dspace);
    //conv_att.write(conv_dtype, &converged);

    H5::DataType lnZ_dtype = H5::PredType::NATIVE_FLOAT;
    H5::DataSpace lnZ_dspace(att_rank, &att_dim);
    //H5::Attribute lnZ_att = H5Utils::openAttribute(group, "ln Z", lnZ_dtype, lnZ_dspace);
    //lnZ_att.write(lnZ_dtype, &lnZ);

    // Creation property list to be used for all three datasets
    H5::DSetCreatPropList plist;
    //plist.setDeflate(compression);    // gzip compression level
    float fillvalue = 0;
    plist.setFillValue(H5::PredType::NATIVE_FLOAT, &fillvalue);

    H5D_layout_t layout = H5D_COMPACT;
    plist.setLayout(layout);

    /*
     *  Choose subsample of points in chain
     */
    size_t *el_idx = NULL;
    size_t *subsample_idx = NULL;
    if(subsample > 0) {
        size_t tot_weight_tmp = (size_t)ceil(total_weight);
        el_idx = new size_t[tot_weight_tmp];
        size_t unrolled_idx = 0;
        size_t chain_idx = 0;
        std::vector<double>::const_iterator it, it_end;
        it_end = w.end();
        for(it = w.begin(); it != it_end; ++it, chain_idx++) {
            for(size_t n = unrolled_idx; n < unrolled_idx + (size_t)(*it); n++) {
                el_idx[n] = chain_idx;
            }
            unrolled_idx += (size_t)(*it);
        }

        assert(chain_idx == length);
//......... (part of the code omitted here) .........
Developer: gregreen, project: bayestar, lines: 101


Example 29: main

int main(int argc, char *argv[])
{
    (void)argc;
    (void)argv;

    typedef struct rt {
        int channels;
        char date[DATELEN];
        char time[TIMELEN];
    } rt;

//    H5Fis_hdf5("/dev/null");

    /*
     * Create a new file using H5ACC_TRUNC access,
     * default file creation properties, and default file
     * access properties.
     * Then close the file.
     */
    const int NRECORDS = 1;
    const int NFIELDS = 3;
    char fName[] = "tmp.h5";

    /* Calculate the size and the offsets of our struct members in memory */
    size_t rt_offset[NFIELDS] = {  HOFFSET( rt, channels ),
                                   HOFFSET( rt, date ),
                                   HOFFSET( rt, time )};

    rt p_data;
    p_data.channels = 1;
    strcpy( p_data.date, "1234-Dec-31");
    strcpy( p_data.time, "12:34:56");

    hid_t file_id = H5Fcreate(fName, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);

    /* Define field information */
    const char *field_names[NFIELDS]  =  { "channels", "date", "time" };
    hid_t      field_type[NFIELDS];

    /* Initialize the field field_type */
    hid_t string_type1 = H5Tcopy( H5T_C_S1 );
    hid_t string_type2 = H5Tcopy( H5T_C_S1 );
    H5Tset_size( string_type1,  strlen(p_data.date));
    H5Tset_size( string_type2,  strlen(p_data.time));
    field_type[0] = H5T_NATIVE_INT;
    field_type[1] = string_type1;
    field_type[2] = string_type2;

    std::ostringstream desc;
    desc << "Description of " << fName;

    herr_t status = H5TBmake_table( desc.str().c_str(), file_id, "description", (hsize_t)NFIELDS, (hsize_t)NRECORDS, sizeof(rt),
                                    field_names, rt_offset, field_type, 10, NULL, 0, &p_data  );
    if (status < 0) {
        perror("Exception while writing description in stfio::exportHDF5File");
        H5Fclose(file_id);
        H5close();
        exit(-1);
    }

    H5Fclose(file_id);

    return(0);
}
Developer: 3togo, project: mxe, lines: 68


Example 30: H5Fcreate

void HDF5Output::open(const std::string& filename) {
    file = H5Fcreate(filename.c_str(), H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);

    sid = H5Tcreate(H5T_COMPOUND, sizeof(OutputRow));
    H5Tinsert(sid, "D", HOFFSET(OutputRow, D), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "z", HOFFSET(OutputRow, z), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "SN", HOFFSET(OutputRow, SN), H5T_NATIVE_UINT64);
    H5Tinsert(sid, "ID", HOFFSET(OutputRow, ID), H5T_NATIVE_INT32);
    H5Tinsert(sid, "E", HOFFSET(OutputRow, E), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "X", HOFFSET(OutputRow, X), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "Y", HOFFSET(OutputRow, Y), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "Z", HOFFSET(OutputRow, Z), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "Px", HOFFSET(OutputRow, Px), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "Py", HOFFSET(OutputRow, Py), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "Pz", HOFFSET(OutputRow, Pz), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "SN0", HOFFSET(OutputRow, SN0), H5T_NATIVE_UINT64);
    H5Tinsert(sid, "ID0", HOFFSET(OutputRow, ID0), H5T_NATIVE_INT32);
    H5Tinsert(sid, "E0", HOFFSET(OutputRow, E0), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "X0", HOFFSET(OutputRow, X0), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "Y0", HOFFSET(OutputRow, Y0), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "Z0", HOFFSET(OutputRow, Z0), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "P0x", HOFFSET(OutputRow, P0x), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "P0y", HOFFSET(OutputRow, P0y), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "P0z", HOFFSET(OutputRow, P0z), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "SN1", HOFFSET(OutputRow, SN1), H5T_NATIVE_UINT64);
    H5Tinsert(sid, "ID1", HOFFSET(OutputRow, ID1), H5T_NATIVE_INT32);
    H5Tinsert(sid, "E1", HOFFSET(OutputRow, E1), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "X1", HOFFSET(OutputRow, X1), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "Y1", HOFFSET(OutputRow, Y1), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "Z1", HOFFSET(OutputRow, Z1), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "P1x", HOFFSET(OutputRow, P1x), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "P1y", HOFFSET(OutputRow, P1y), H5T_NATIVE_DOUBLE);
    H5Tinsert(sid, "P1z", HOFFSET(OutputRow, P1z), H5T_NATIVE_DOUBLE);

    // chunked prop
    hid_t plist = H5Pcreate(H5P_DATASET_CREATE);
    H5Pset_layout(plist, H5D_CHUNKED);
    hsize_t chunk_dims[RANK] = {BUFFER_SIZE};
    H5Pset_chunk(plist, RANK, chunk_dims);
    H5Pset_deflate(plist, 5);

    hsize_t dims[RANK] = {0};
    hsize_t max_dims[RANK] = {H5S_UNLIMITED};
    dataspace = H5Screate_simple(RANK, dims, max_dims);

    dset = H5Dcreate2(file, "CRPROPA3", sid, dataspace, H5P_DEFAULT, plist, H5P_DEFAULT);

    H5Pclose(plist);

    buffer.reserve(BUFFER_SIZE);
}
Developer: DavidWalz, project: CRPropa3, lines: 54



Note: The HOFFSET examples in this article were collected from source code hosted on platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright remains with the original authors, and distribution and use are subject to each project's License. Do not reproduce without permission.

