Java example source code file (Layer.java)
The Layer.java Java example source code/* * * * Copyright 2015 Skymind,Inc. * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * */ package org.deeplearning4j.nn.conf.layers; import java.io.Serializable; import java.util.HashMap; import java.util.Map; import com.fasterxml.jackson.annotation.JsonSubTypes; import com.fasterxml.jackson.annotation.JsonTypeInfo; import com.fasterxml.jackson.annotation.JsonTypeInfo.As; import com.fasterxml.jackson.annotation.JsonTypeInfo.Id; import lombok.Data; import lombok.NoArgsConstructor; import org.deeplearning4j.nn.conf.GradientNormalization; import org.deeplearning4j.nn.conf.LearningRatePolicy; import org.deeplearning4j.nn.conf.Updater; import org.deeplearning4j.nn.conf.distribution.Distribution; import org.deeplearning4j.nn.weights.WeightInit; /** * A neural network layer. 
*/
// Jackson polymorphic type handling: a concrete Layer subclass is serialized
// as a wrapper object keyed by the type name registered below, so configs can
// be round-tripped to/from JSON without losing the concrete type.
@JsonTypeInfo(use=Id.NAME, include=As.WRAPPER_OBJECT)
@JsonSubTypes(value={
        @JsonSubTypes.Type(value = AutoEncoder.class, name = "autoEncoder"),
        @JsonSubTypes.Type(value = ConvolutionLayer.class, name = "convolution"),
        @JsonSubTypes.Type(value = GravesLSTM.class, name = "gravesLSTM"),
        @JsonSubTypes.Type(value = GravesBidirectionalLSTM.class, name = "gravesBidirectionalLSTM"),
        @JsonSubTypes.Type(value = GRU.class, name = "gru"),
        @JsonSubTypes.Type(value = OutputLayer.class, name = "output"),
        @JsonSubTypes.Type(value = RnnOutputLayer.class, name = "rnnoutput"),
        @JsonSubTypes.Type(value = RBM.class, name = "RBM"),
        @JsonSubTypes.Type(value = DenseLayer.class, name = "dense"),
        @JsonSubTypes.Type(value = SubsamplingLayer.class, name = "subsampling"),
        @JsonSubTypes.Type(value = BatchNormalization.class, name = "batchNormalization"),
        @JsonSubTypes.Type(value = LocalResponseNormalization.class, name = "localResponseNormalization"),
        @JsonSubTypes.Type(value = EmbeddingLayer.class, name = "embedding"),
        @JsonSubTypes.Type(value = ActivationLayer.class, name = "activation")
})
@Data
@NoArgsConstructor
public abstract class Layer implements Serializable, Cloneable {
    // Optional user-assigned name for this layer.
    protected String layerName;
    // Activation function identified by name (e.g. "relu"); resolution happens elsewhere.
    protected String activationFunction;
    // Weight initialization scheme.
    protected WeightInit weightInit;
    protected double biasInit;
    // NOTE(review): presumably the sampling distribution used when weightInit
    // requires one — confirm against WeightInit usage elsewhere in the project.
    protected Distribution dist;
    protected double learningRate;
    protected double biasLearningRate;
    //learning rate after n iterations
    protected Map<Integer,Double> learningRateSchedule;
    protected double momentum;
    //momentum after n iterations
    protected Map<Integer,Double> momentumSchedule;
    // Regularization coefficients; weights and biases are tracked separately.
    protected double l1;
    protected double l2;
    protected double biasL1;
    protected double biasL2;
    protected double dropOut;
    // Gradient updater selection (SGD, ADAM, RMSPROP, ...).
    protected Updater updater;
    //adadelta - weight for how much to consider previous history
    protected double rho;
    protected double rmsDecay;
    protected double adamMeanDecay;
    protected double adamVarDecay;
    // Gradient normalization strategy; defaults to none.
    protected GradientNormalization gradientNormalization = GradientNormalization.None;
//Clipping, rescale based on l2 norm, etc protected double gradientNormalizationThreshold = 1.0; //Threshold for l2 and element-wise gradient clipping public Layer(Builder builder) { this.layerName = builder.layerName; this.activationFunction = builder.activationFunction; this.weightInit = builder.weightInit; this.biasInit = builder.biasInit; this.dist = builder.dist; this.learningRate = builder.learningRate; this.biasLearningRate = builder.biasLearningRate; this.learningRateSchedule = builder.learningRateSchedule; this.momentum = builder.momentum; this.momentumSchedule = builder.momentumAfter; this.l1 = builder.l1; this.l2 = builder.l2; this.dropOut = builder.dropOut; this.updater = builder.updater; this.rho = builder.rho; this.rmsDecay = builder.rmsDecay; this.adamMeanDecay = builder.adamMeanDecay; this.adamVarDecay = builder.adamVarDecay; this.gradientNormalization = builder.gradientNormalization; this.gradientNormalizationThreshold = builder.gradientNormalizationThreshold; } @Override public Layer clone() { try { Layer clone = (Layer) super.clone(); if(clone.dist != null) clone.dist = clone.dist.clone(); if(clone.learningRateSchedule != null) clone.learningRateSchedule = new HashMap<>(clone.learningRateSchedule); if(clone.momentumSchedule != null) clone.momentumSchedule = new HashMap<>(clone.momentumSchedule); return clone; } catch (CloneNotSupportedException e) { throw new RuntimeException(e); } } @SuppressWarnings("unchecked") public abstract static class Builder<T extends Builder Other Java examples (source code examples)Here is a short list of links related to this Java Layer.java source code file: |
... this post is sponsored by my books ...
#1 New Release! |
FP Best Seller |
Copyright 1998-2021 Alvin Alexander, alvinalexander.com
All Rights Reserved.
A percentage of advertising revenue from
pages under the /java/jwarehouse
URI on this website is
paid back to open source projects.