Apache SINGA
A distributed deep learning platform.
 All Classes Namespaces Files Functions Variables Typedefs Macros
input_layer.h
1 /************************************************************
2 *
3 * Licensed to the Apache Software Foundation (ASF) under one
4 * or more contributor license agreements. See the NOTICE file
5 * distributed with this work for additional information
6 * regarding copyright ownership. The ASF licenses this file
7 * to you under the Apache License, Version 2.0 (the
8 * "License"); you may not use this file except in compliance
9 * with the License. You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing,
14 * software distributed under the License is distributed on an
15 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
16 * KIND, either express or implied. See the License for the
17 * specific language governing permissions and limitations
18 * under the License.
19 *
20 *************************************************************/
21 
22 #ifndef SINGA_NEURALNET_INPUT_LAYER_H_
23 #define SINGA_NEURALNET_INPUT_LAYER_H_
24 
#include <string>
#include <thread>
#include <vector>
#ifdef USE_LMDB
#include <lmdb.h>
#endif  // USE_LMDB
#include "neuralnet/layer.h"
#include "utils/data_shard.h"
42 namespace singa {
/**
 * Base layer for reading records from local Shard, HDFS, lmdb, etc.
 *
 * It exposes the loaded records via records()/sample() instead of a feature
 * blob, so both mutable_data() and mutable_grad() return nullptr.
 */
class DataLayer: virtual public InputLayer {
 public:
  // Input layers have nothing to back-propagate; no-op.
  void ComputeGradient(int flag, Metric* perf) override {}
  // No feature blob is exposed; consumers read records() instead.
  Blob<float>* mutable_data(const Layer* layer) override {
    return nullptr;
  }
  // Data layers have no gradient blob.
  Blob<float>* mutable_grad(const Layer* layer) override {
    return nullptr;
  }
  // One batch of records may feed several destination layers.
  ConnectionType dst_layer_connection() const override {
    return kOneToMany;
  }
  // Number of records loaded per mini-batch.
  inline int batchsize() const { return batchsize_; }
  // A prototype record; presumably used by consumers to infer the record
  // format/shape -- confirm against parser-layer Setup() implementations.
  virtual const Record& sample() const {
    return sample_;
  }
  // Records loaded for the current mini-batch.
  virtual const std::vector<Record>& records() const {
    return records_;
  }

 protected:
  // Number of records to skip before reading starts (likely a randomized
  // start offset for training) -- TODO confirm against Setup() impls.
  int random_skip_;
  int batchsize_;                // records per mini-batch
  Record sample_;                // prototype record
  std::vector<Record> records_;  // records of the current mini-batch
};
/**
 * Layer for loading Record batches from a DataShard
 * (SINGA's on-disk tuple store, see utils/data_shard.h).
 */
class ShardDataLayer : public DataLayer {
 public:
  ~ShardDataLayer();

  void Setup(const LayerProto& proto, int npartitions) override;
  void ComputeFeature(int flag, Metric *perf) override;

 private:
  // Owning raw pointer, presumably released in the destructor.
  // NOTE(review): the class manages a raw resource but does not delete
  // copy/move (Rule of Five) -- confirm layers are never copied.
  DataShard* shard_;
};
90 
#ifdef USE_LMDB
/**
 * Layer for loading Records from an LMDB database, including records
 * converted from Caffe's Datum format.
 *
 * Fix: the original placed `#include <lmdb.h>` here, *inside*
 * `namespace singa`, which wraps LMDB's C declarations in the namespace
 * (wrong qualified types such as singa::MDB_env, and broken redeclarations
 * if lmdb.h is included elsewhere at file scope). The include now lives in
 * the top-of-file include block, outside any namespace.
 */
class LMDBDataLayer : public DataLayer {
 public:
  ~LMDBDataLayer();

  void Setup(const LayerProto& proto, int npartitions) override;
  // Opens the LMDB environment/transaction/cursor at the given path.
  void OpenLMDB(const std::string& path);
  void ComputeFeature(int flag, Metric *perf) override;
  // Converts one Caffe Datum into SINGA's SingleLabelImageRecord.
  void ConvertCaffeDatumToRecord(const CaffeDatum& datum,
                                 SingleLabelImageRecord* record);

 private:
  MDB_env* mdb_env_;        // LMDB environment handle
  MDB_dbi mdb_dbi_;         // database handle within the environment
  MDB_txn* mdb_txn_;        // read transaction
  MDB_cursor* mdb_cursor_;  // cursor for sequential iteration
  MDB_val mdb_key_, mdb_value_;  // current key/value pair
};
#endif  // USE_LMDB
111 
115 class ParserLayer : public InputLayer {
116  public:
117  void ComputeFeature(int flag, Metric* perf) override;
118  void ComputeGradient(int flag, Metric* perf) override {}
119  ConnectionType dst_layer_connection() const override {
120  return kOneToMany;
121  }
125  virtual void ParseRecords(int flag, const std::vector<Record>& records,
126  Blob<float>* blob) = 0;
127  Blob<float>* mutable_grad(const Layer* layer) override {
128  return nullptr;
129  }
130  const Blob<float>& grad(const Layer* from) const override {
131  CHECK(false) << "Parser layer has not gradient blob";
132  return grad_;
133  }
134 };
135 
/**
 * Derived from ParserLayer to parse labels from records
 * (per the generated docs, from SingleLabelImageRecord).
 */
class LabelLayer : public ParserLayer {
 public:
  void Setup(const LayerProto& proto, int npartitions) override;
  void ParseRecords(int flag, const std::vector<Record>& records,
      Blob<float>* blob) override;
};
145 
/**
 * Derived from ParserLayer to parse MNIST features from
 * SingleLabelImageRecord records.
 */
class MnistLayer : public ParserLayer {
 public:
  void Setup(const LayerProto& proto, int npartitions) override;
  void ParseRecords(int flag, const std::vector<Record>& records,
      Blob<float>* blob) override;

 protected:
  // Linear normalization coefficients applied to raw pixel values;
  // presumably something like pixel' = (pixel - norm_a_) / norm_b_ --
  // confirm the exact formula against the ParseRecords implementation.
  float norm_a_, norm_b_;
};
/**
 * Derived from ParserLayer to parse RGB image features from
 * SingleLabelImageRecord records.
 */
class RGBImageLayer : public ParserLayer {
 public:
  void Setup(const LayerProto& proto, int npartitions) override;
  void ParseRecords(int flag, const std::vector<Record>& records,
      Blob<float>* blob) override;

 private:
  // Common image-augmentation parameters; exact semantics (random vs.
  // central crop, train-only mirroring, etc.) live in ParseRecords --
  // TODO confirm against the .cc implementation.
  float scale_;       // multiplicative scaling of pixel values
  int cropsize_;      // side length of the crop window
  bool mirror_;       // whether images may be mirrored
  Blob<float> mean_;  // mean image subtracted from inputs
};
/**
 * Layer for prefetching data records and parsing them, using a background
 * std::thread so loading can overlap with other work.
 */
class PrefetchLayer : public Layer {
 public:
  // NOTE(review): defined elsewhere; it must join or otherwise stop
  // thread_ before the member is destroyed -- confirm in the .cc file.
  ~PrefetchLayer();
  void ComputeFeature(int flag, Metric* perf) override;
  // Prefetch layers have nothing to back-propagate; no-op.
  void ComputeGradient(int flag, Metric* perf) override {}

 protected:
  std::thread thread_;  // background prefetch/parse thread
};
190 
191 } // namespace singa
192 
193 #endif // SINGA_NEURALNET_INPUT_LAYER_H_
Base layer for parsing the input records into Blobs.
Definition: input_layer.h:115
void ComputeFeature(int flag, Metric *perf) override
Compute features of this layer based on connected layers.
Derived from ParserLayer to parse a label from SingleLabelImageRecord.
Definition: input_layer.h:139
ConnectionType dst_layer_connection() const override
Return the connection type of this layer and all dst layers.
Definition: input_layer.h:119
ConnectionType dst_layer_connection() const override
Return the connection type of this layer and all dst layers.
Definition: input_layer.h:55
void Setup(const LayerProto &proto, int npartitions) override
Setup layer properties.
Derived from ParserLayer to parse RGB image features from SingleLabelImageRecord.
Definition: input_layer.h:162
void ComputeGradient(int flag, Metric *perf) override
Compute gradients for parameters and connected layers.
Definition: input_layer.h:48
Layer for loading Record from DataShard.
Definition: input_layer.h:80
Base layer for reading records from local Shard, HDFS, lmdb, etc.
Definition: input_layer.h:46
void Setup(const LayerProto &proto, int npartitions) override
Setup layer properties.
void ComputeGradient(int flag, Metric *perf) override
Compute gradients for parameters and connected layers.
Definition: input_layer.h:185
Data shard stores training/validation/test tuples.
Definition: data_shard.h:51
Blob< float > * mutable_grad(const Layer *layer) override
Definition: input_layer.h:52
void ComputeGradient(int flag, Metric *perf) override
Compute gradients for parameters and connected layers.
Definition: input_layer.h:118
Layer for prefetching data records and parsing them.
Definition: input_layer.h:181
Base layer for getting input data.
Definition: layer.h:201
void ParseRecords(int flag, const std::vector< Record > &records, Blob< float > *blob) override
Parse records from DataLayer into blob.
Base layer class.
Definition: layer.h:44
Derived from ParserLayer to parse MNIST features from SingleLabelImageRecord.
Definition: input_layer.h:149
void Setup(const LayerProto &proto, int npartitions) override
Setup layer properties.
void ComputeFeature(int flag, Metric *perf) override
Compute features of this layer based on connected layers.
void ParseRecords(int flag, const std::vector< Record > &records, Blob< float > *blob) override
Parse records from DataLayer into blob.
Performance metrics.
Definition: common.h:85
Blob< float > * mutable_grad(const Layer *layer) override
Definition: input_layer.h:127
virtual void ParseRecords(int flag, const std::vector< Record > &records, Blob< float > *blob)=0
Parse records from DataLayer into blob.
void Setup(const LayerProto &proto, int npartitions) override
Setup layer properties.
virtual const std::vector< Record > & records() const
Definition: input_layer.h:65
void ParseRecords(int flag, const std::vector< Record > &records, Blob< float > *blob) override
Parse records from DataLayer into blob.
void ComputeFeature(int flag, Metric *perf) override
Compute features of this layer based on connected layers.