#ifndef UNET_COMMON_H_
#define UNET_COMMON_H_

#include <fstream>
#include <map>
#include <sstream>
#include <vector>
#include <opencv2/opencv.hpp>
#include <dirent.h>
#include "NvInfer.h"

#define CHECK(status) \
    do\
    {\
        auto ret = (status);\
        if (ret != 0)\
        {\
            std::cerr << "Cuda failure: " << ret << std::endl;\
            abort();\
        }\
    } while (0)
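
// Example usage of CHECK (a sketch; deviceBuffer, hostBuffer, and bufferSize are placeholder names,
// and a CUDA runtime header such as <cuda_runtime_api.h> is assumed to be included by the caller):
//     CHECK(cudaMalloc((void**)&deviceBuffer, bufferSize));
//     CHECK(cudaMemcpy(deviceBuffer, hostBuffer, bufferSize, cudaMemcpyHostToDevice));
// Any non-zero status prints the error code and aborts.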

using namespace nvinfer1;

// TensorRT weight files have a simple space delimited format:
// [type] [size] <data x size in hex>
// std::map<std::string, Weights> loadWeights(const std::string file) {
//     std::cout << "Loading weights: " << file << std::endl;
//     std::map<std::string, Weights> weightMap;

//     // Open weights file
//     std::ifstream input(file);
//     assert(input.is_open() && "Unable to load weight file. please check if the .wts file path is right!!!!!!");

//     // Read number of weight blobs
//     int32_t count;
//     input >> count;
//     assert(count > 0 && "Invalid weight map file.");

//     while (count--)
//     {
//         Weights wt{DataType::kFLOAT, nullptr, 0};
//         uint32_t size;

//         // Read name and type of blob
//         std::string name;
//         input >> name >> std::dec >> size;
//         wt.type = DataType::kFLOAT;

//         // Load blob
//         uint32_t* val = reinterpret_cast<uint32_t*>(malloc(sizeof(val) * size));
//         for (uint32_t x = 0, y = size; x < y; ++x)
//         {
//             input >> std::hex >> val[x];
//         }
//         wt.values = val;

//         wt.count = size;
//         weightMap[name] = wt;
//     }

//     return weightMap;
// }
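
// For illustration (hypothetical values, inferred from the commented-out parser above): the first
// line of a .wts file holds the number of blobs, and each following line is
//     <name> <decimal element count> <that many 32-bit hex words>
// e.g. "conv1.bias 2 3f800000 00000000".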

// IScaleLayer* addBatchNorm2d(INetworkDefinition *network, std::map<std::string, Weights>& weightMap, ITensor& input, std::string lname, float eps) {
//     float *gamma = (float*)weightMap[lname + ".weight"].values;
//     float *beta = (float*)weightMap[lname + ".bias"].values;
//     float *mean = (float*)weightMap[lname + ".running_mean"].values;
//     float *var = (float*)weightMap[lname + ".running_var"].values;
//     int len = weightMap[lname + ".running_var"].count;

//     float *scval = reinterpret_cast<float*>(malloc(sizeof(float) * len));
//     for (int i = 0; i < len; i++) {
//         scval[i] = gamma[i] / sqrt(var[i] + eps);
//     }
//     Weights scale{DataType::kFLOAT, scval, len};

//     float *shval = reinterpret_cast<float*>(malloc(sizeof(float) * len));
//     for (int i = 0; i < len; i++) {
//         shval[i] = beta[i] - mean[i] * gamma[i] / sqrt(var[i] + eps);
//     }
//     Weights shift{DataType::kFLOAT, shval, len};

//     float *pval = reinterpret_cast<float*>(malloc(sizeof(float) * len));
//     for (int i = 0; i < len; i++) {
//         pval[i] = 1.0;
//     }
//     Weights power{DataType::kFLOAT, pval, len};

//     weightMap[lname + ".scale"] = scale;
//     weightMap[lname + ".shift"] = shift;
//     weightMap[lname + ".power"] = power;
//     IScaleLayer* scale_1 = network->addScale(input, ScaleMode::kCHANNEL, shift, scale, power);
//     assert(scale_1);
//     return scale_1;
// }
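
// Note on the folding above: BatchNorm(x) = gamma * (x - mean) / sqrt(var + eps) + beta is mapped
// onto TensorRT's per-channel IScaleLayer form (x * scale + shift) ^ power, with
//     scale = gamma / sqrt(var + eps),  shift = beta - mean * scale,  power = 1,
// which is what the scval, shval, and pval arrays compute element by element.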

// ILayer* convBlock(INetworkDefinition *network, std::map<std::string, Weights>& weightMap, ITensor& input, int outch, int ksize, int s, int g, std::string lname) {
//     Weights emptywts{DataType::kFLOAT, nullptr, 0};
//     int p = ksize / 2;
//     IConvolutionLayer* conv1 = network->addConvolutionNd(input, outch, DimsHW{ksize, ksize}, weightMap[lname + ".conv.weight"], emptywts);
//     assert(conv1);
//     conv1->setStrideNd(DimsHW{s, s});
//     conv1->setPaddingNd(DimsHW{p, p});
//     conv1->setNbGroups(g);
//     IScaleLayer* bn1 = addBatchNorm2d(network, weightMap, *conv1->getOutput(0), lname + ".bn", 1e-3);

//     // hard_swish = x * hard_sigmoid
//     auto hsig = network->addActivation(*bn1->getOutput(0), ActivationType::kHARD_SIGMOID);
//     assert(hsig);
//     hsig->setAlpha(1.0 / 6.0);
//     hsig->setBeta(0.5);
//     auto ew = network->addElementWise(*bn1->getOutput(0), *hsig->getOutput(0), ElementWiseOperation::kPROD);
//     assert(ew);
//     return ew;
// }
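
// The pair of layers above implements hard_swish(x) = x * hard_sigmoid(x): with alpha = 1/6 and
// beta = 0.5, kHARD_SIGMOID evaluates clip(x / 6 + 0.5, 0, 1), and the element-wise product with
// the batch-norm output completes hard_swish.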

// int read_files_in_dir(const char *p_dir_name, std::vector<std::string> &file_names) {
//     DIR *p_dir = opendir(p_dir_name);
//     if (p_dir == nullptr) {
//         return -1;
//     }

//     struct dirent* p_file = nullptr;
//     while ((p_file = readdir(p_dir)) != nullptr) {
//         if (strcmp(p_file->d_name, ".") != 0 &&
//             strcmp(p_file->d_name, "..") != 0) {
//             //std::string cur_file_name(p_dir_name);
//             //cur_file_name += "/";
//             //cur_file_name += p_file->d_name;
//             std::string cur_file_name(p_file->d_name);
//             file_names.push_back(cur_file_name);
//         }
//     }

//     closedir(p_dir);
//     return 0;
// }
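
// Usage sketch (the "./samples" path is a placeholder): collect the bare file names in a directory;
// as the commented-out lines above show, the directory prefix is intentionally not prepended.
//     std::vector<std::string> file_names;
//     if (read_files_in_dir("./samples", file_names) < 0) {
//         std::cerr << "read_files_in_dir failed." << std::endl;
//     }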

#endif  // UNET_COMMON_H_