在 AlexNet 训练完成后,常会把全连接层转换为卷积层,用来替代滑动窗口。下面的函数能够实现这个功能。
注意:使用时请根据你自己网络中的层名字修改对应代码。这里没有写成通用版本,意义不大。
通常别人会提供两个网络定义:一个全连接版、一个卷积版,两者对应层的名字不同。我不推荐这种做法,否则还要去修改上下层的名字,很麻烦;像下面这样不改名字直接转换,更方便。
以上~~~
/**
 * Convert the fully-connected layers of a trained network into
 * equivalent convolution layers (fully connected -> convolution).
 */
int ChangeFCToConv(char *pszSrcSolver, char *pszDstSolver, char *pszSrcCaffeModel, char *pszDstCaffeModel)
{
caffe::NetParameter netparam;
string model = pszSrcCaffeModel;
ReadNetParamsFromBinaryFileOrDie(model, &netparam);string
caffe::LayerParameter *srcFc6 = nullptr, *srcFc7 = nullptr, *srcFc8 = nullptr;
caffe::NetParameter srcNetParam;
ReadNetParamsFromTextFileOrDie(pszSrcSolver, &srcNetParam);
for (int i = 0; i < srcNetParam.layer_size(); i++)
{
caffe::LayerParameter *xxx = srcNetParam.mutable_layer(i);
const string &name = xxx->name();
if (name == "fc7-conv")
{
srcFc6 = srcNetParam.mutable_layer(i);
}
}it
int layer_size = netparam.layer_size();
cout << "layer_size = " << layer_size << endl;
caffe::LayerParameter* layerparam = NULL;io
caffe::Net<float> net(pszSrcSolver, caffe::TEST);
net.CopyTrainedLayersFrom(netparam);
auto netBlob = net.blob_by_name("fc6");
for (int i = 0; i < layer_size; i++) {
layerparam = netparam.mutable_layer(i);
const string& layername = layerparam->name();
const string& layertype = layerparam->type();
//cout << "layertype: " << layertype << endl;
if (layername == "conv2")
{
srcFc6 = layerparam;
}table
if (layername == "fc6") {
const string& layername = layerparam->name();
cout << "layername: " << layername << endl;
const string& topname = layerparam->top(0);
cout << " " << topname << endl;
cout << " " << layerparam->type() << endl;stream
layerparam->release_inner_product_param();
caffe::BlobProto *blob0 = layerparam->mutable_blobs(0);
caffe::Blob<float> x_blob0;
x_blob0.FromProto(*blob0, true);
x_blob0.Reshape(4096, 256, 6, 6);
x_blob0.ToProto(blob0);
layerparam->set_type("Convolution");
caffe::ConvolutionParameter *conv = new caffe::ConvolutionParameter();
conv->add_kernel_size(1); //设置你须要的参数
conv->set_kernel_size(0, 6);
conv->set_num_output(4096);
layerparam->set_allocated_convolution_param(conv);model
}
if (layername == "fc7")
{
const string& layername = layerparam->name();
cout << "layername: " << layername << endl;
const string& topname = layerparam->top(0);
cout << " " << topname << endl;
cout << " " << layerparam->type() << endl;
layerparam->release_inner_product_param();
caffe::BlobProto *blob0 = layerparam->mutable_blobs(0);
caffe::Blob<float> x_blob0;
x_blob0.FromProto(*blob0, true);
x_blob0.Reshape(4096, 4096, 1, 1);
x_blob0.ToProto(blob0);
layerparam->set_type("Convolution");
caffe::ConvolutionParameter *conv = new caffe::ConvolutionParameter();
conv->add_kernel_size(1);
conv->set_kernel_size(0, 1);
conv->set_num_output(4096);
layerparam->set_allocated_convolution_param(conv);
}
if (layername == "fc8")
{
const string& layername = layerparam->name();
cout << "layername: " << layername << endl;
const string& topname = layerparam->top(0);
cout << " " << topname << endl;
cout << " " << layerparam->type() << endl;
layerparam->release_inner_product_param();
caffe::BlobProto *blob0 = layerparam->mutable_blobs(0);
caffe::Blob<float> x_blob0;
x_blob0.FromProto(*blob0, true);
x_blob0.Reshape(2, 4096, 1, 1);
x_blob0.ToProto(blob0);
layerparam->set_type("Convolution");
caffe::ConvolutionParameter *conv = new caffe::ConvolutionParameter();
conv->add_kernel_size(1);
conv->set_kernel_size(0, 1);
conv->set_num_output(1000);
layerparam->set_allocated_convolution_param(conv);
}
if (layername == "data")
{
layerparam->release_data_param();
}
}
//fstream outcaffemodel(pszDstCaffeModel,ios_base::out|ios_base::trunc|ios_base::binary);
//netparam.SerializeToOstream(&outcaffemodel);
WriteProtoToBinaryFile(netparam, pszDstCaffeModel);
//outcaffemodel.close();
google::protobuf::ShutdownProtobufLibrary();
return 0;
}