Apache SINGA
A distributed deep learning platform.
 All Classes Namespaces Files Functions Variables Typedefs Macros
loss_layer.h
1 /************************************************************
2 *
3 * Licensed to the Apache Software Foundation (ASF) under one
4 * or more contributor license agreements. See the NOTICE file
5 * distributed with this work for additional information
6 * regarding copyright ownership. The ASF licenses this file
7 * to you under the Apache License, Version 2.0 (the
8 * "License"); you may not use this file except in compliance
9 * with the License. You may obtain a copy of the License at
10 *
11 * http://www.apache.org/licenses/LICENSE-2.0
12 *
13 * Unless required by applicable law or agreed to in writing,
14 * software distributed under the License is distributed on an
15 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
16 * KIND, either express or implied. See the License for the
17 * specific language governing permissions and limitations
18 * under the License.
19 *
20 *************************************************************/
21 
22 #ifndef SINGA_NEURALNET_LOSS_LAYER_H_
23 #define SINGA_NEURALNET_LOSS_LAYER_H_
24 
25 #include "neuralnet/layer.h"
26 
31 namespace singa {
35 class EuclideanLossLayer : public LossLayer {
36  public:
37  void ComputeFeature(int flag, Metric* perf) override;
38  void ComputeGradient(int flag, Metric* perf) override;
39 };
40 
44 class SoftmaxLossLayer : public LossLayer {
45  public:
46  void Setup(const LayerProto& proto, int npartitions) override;
47  void ComputeFeature(int flag, Metric* perf) override;
48  void ComputeGradient(int flag, Metric* perf) override;
49 
54  ConnectionType src_neuron_connection(int k) const override {
55  // CHECK_LT(k, srclayers_.size());
56  return kOneToAll;
57  }
58 
59  private:
60  int batchsize_;
61  int dim_;
62  float scale_;
63  int topk_;
64 };
65 
66 } // namespace singa
67 
68 #endif // SINGA_NEURALNET_LOSS_LAYER_H_
void ComputeFeature(int flag, Metric *perf) override
Compute features of this layer based on connected layers.
void ComputeGradient(int flag, Metric *perf) override
Compute gradients for parameters and connected layers.
void ComputeGradient(int flag, Metric *perf) override
Compute gradients for parameters and connected layers.
Squared Euclidean loss as 0.5 ||predict - ground_truth||^2.
Definition: loss_layer.h:35
ConnectionType src_neuron_connection(int k) const override
softmax is not recommended for partition because it requires the whole src layer for normalization...
Definition: loss_layer.h:54
Base layer for calculating loss and other metrics, e.g., precision.
Definition: layer.h:213
void ComputeFeature(int flag, Metric *perf) override
Compute features of this layer based on connected layers.
void Setup(const LayerProto &proto, int npartitions) override
Setup layer properties.
Cross-entropy loss applied to the probabilities after Softmax.
Definition: loss_layer.h:44
Performance metrics.
Definition: common.h:85