
I made a working script in Python to train a CNTK model with some data samples. Now I'm trying to translate it to C# with CNTK V2.2, but I'm getting different results.

This is what I got in Python to create a model:

def create_model_function(num_hidden_layers, hidden_layers_dim, num_output_classes):
    return Sequential([
        For(range(num_hidden_layers),
            lambda i: Dense(hidden_layers_dim, activation=cntk.tanh)),
        Dense(num_output_classes, init=cntk.glorot_uniform(), activation=cntk.softmax)
    ])

My C# function looks like this:

private Function CreateModel(DeviceDescriptor device, int HiddenLayerCount, int HiddenLayerDimension, int OutputClassesCount, Variable Input)
{
    Function[] HiddenLayers = new Function[HiddenLayerCount];
    for (int i = 1; i < HiddenLayerCount - 1; i++)
    {
        HiddenLayers[i] = Dense(HiddenLayers[i - 1], HiddenLayerDimension, device, Activation.Tanh, "");
    }

    return Dense(HiddenLayers[HiddenLayerCount - 1], OutputClassesCount, device, Activation.Sigmoid, "");
}

I'm just not sure this is the equivalent of the Python Sequential.

Thanks

  • And what is your equivalent C# code? How do the results differ? – Bernard Vander Beken Sep 25 '17 at 11:35
  • I tried this function: private Function CreateModel(DeviceDescriptor device, int HiddenLayerCount, int HiddenLayerDimension,int OutputClassesCount, Variable Input) { Function[] HiddenLayers= new Function[HiddenLayerCount]; for (int i = 1; i < HiddenLayerCount - 1; i++) { HiddenLayers[i] = Dense(HiddenLayers[i - 1], HiddenLayerDimension, device, Activation.Tanh, ""); } return Dense(HiddenLayers[8], OutputClassesCount, device, Activation.Sigmoid, ""); } – Bert Degrave Sep 27 '17 at 08:22

2 Answers


Python's Dense function is not directly supported in C# yet, so the Dense helper you used in C# might differ from the CNTK Python implementation. Could you build a model in both C# and Python using only the operators that are available in C# and check whether the two are the same?
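
For example, a dense layer can be composed from primitives that exist in both sets of bindings. This is only a minimal sketch: the helper name DenseLayer is made up here, and chaining it simply mirrors what the Python Sequential/For combination does.

    // Hypothetical helper, not part of the CNTK C# API: weight * input + bias.
    private static Function DenseLayer(Variable input, int outputDim, DeviceDescriptor device, string name)
    {
        int inputDim = input.Shape[0];
        var weight = new Parameter(new int[] { outputDim, inputDim }, DataType.Float,
            CNTKLib.GlorotUniformInitializer(), device, "W" + name);
        var bias = new Parameter(new int[] { outputDim }, DataType.Float, 0.0, device, "B" + name);
        return CNTKLib.Plus(CNTKLib.Times(weight, input), bias);
    }

    // Chained the way the Python model stacks its layers:
    //   Variable net = Input;
    //   for (int i = 0; i < HiddenLayerCount; i++)
    //       net = CNTKLib.Tanh(DenseLayer(net, HiddenLayerDimension, device, "h" + i));
    //   Function model = CNTKLib.Softmax(DenseLayer(net, OutputClassesCount, device, "out"));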

I am attaching a C# function to help you check the model graph. Load the Python-trained model into C#, print its graph with this function, and compare it with the graph of the model you created in C#. Thanks.

    // Recursively prints a CNTK Function graph, one line per node-to-input edge,
    // indenting deeper nodes so two graphs can be compared side by side.
    static void PrintGraph(Function function, int spaces, bool useName = false)
    {
        string indent = new string('.', spaces);

        // Leaf node: no inputs, print the node itself.
        if (function.Inputs.Count == 0)
        {
            Console.WriteLine(indent + "(" + (useName ? function.Name : function.Uid) + ")" +
                "(" + function.OpName + ")" + function.AsString());
            return;
        }

        // One line per edge from this node to each of its inputs.
        foreach (var input in function.Inputs)
        {
            Console.WriteLine(indent + "(" + (useName ? function.Name : function.Uid) + ")" +
                "(" + function.OpName + ")" + "->" +
                "(" + (useName ? input.Name : input.Uid) + ")" + input.AsString());
        }

        // Recurse into inputs that are themselves outputs of other functions.
        foreach (var input in function.Inputs)
        {
            if (input.Owner != null)
            {
                PrintGraph(input.Owner, spaces + 4, useName);
            }
        }
    }
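
For example, a quick way to compare the two graphs (the model file name here is a placeholder for whatever you saved from Python, and csharpModel stands for the Function you built in C#):

    // Load the model saved from Python and print both graphs for comparison.
    var device = DeviceDescriptor.CPUDevice;
    Function pythonModel = Function.Load("model.cntk", device);   // placeholder path
    PrintGraph(pythonModel, 0, useName: true);
    PrintGraph(csharpModel, 0, useName: true);                    // the model built in C#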
Liqun Fu

The following example shows a simple feed-forward path built from left to right.

To generate a deeper network, control the for loop per your requirements.

In this example, the loop control alternates the number of nodes between even and odd loop counts. CreateUnitLayer builds one unit layer: LXNodes on the left side connected to LYNodes on the right side. The other variables are self-explanatory.

The ParameterVector NetParamVec is needed to create the trainer; pass it as the parameter if you use one of the CNTKLib learner functions (a rough sketch follows after the code below).

Carefully check the connectivity: the input features into the first layer, the first layer into the intermediate layers, and the last layer finally leading to the sigmoid. Adjust the LXNodes and LYNodes variables appropriately to your needs.

Add this code to a class, or pull it into a method, as appropriate for your application.

If you build all layers with the same node size, set LXNodes = LYNodes = number of nodes per layer.

NetOut represents the final output of the deep network.

Hope this helps you build the net you are looking for.

Best wishes.

List<Function> Layers = new List<Function>();
ParameterVector NetParamVec = new ParameterVector();

// Define the first layer, immediately after the input.
Function layer1 = CreateUnitLayer(features, LXNodes, inputDim, "NetLayer0", InitWeight, InitBias);
Layers.Add(layer1);

// Define the intermediate hidden layers.
for (int i = 1; i < LayerCount; i++)
{
    Function ly;
    if (i % 2 == 0)
        ly = CreateUnitLayer(Layers[i - 1], LXNodes, LYNodes, "NetLayer" + i.ToString(), InitWeight, InitBias);
    else
        ly = CreateUnitLayer(Layers[i - 1], LYNodes, LXNodes, "NetLayer" + i.ToString(), InitWeight, InitBias);
    Layers.Add(ly);
}

// Define the last layer.
int lastDim = LXNodes;
if (LayerCount % 2 == 0) lastDim = LYNodes;
Function layerLast = CreateUnitLayer(Layers[LayerCount - 1], outDim, lastDim, "NetLayerOut", InitWeight, InitBias);
Layers.Add(layerLast);

Function NetOut = CNTKLib.Sigmoid(layerLast);

// Builds one unit layer: LXNodes inputs (LXIn) fully connected to LYNodes outputs,
// and registers the layer's parameters in NetParamVec for the learner.
public Function CreateUnitLayer(Variable LXIn, int LYNodes, int LXNodes, string LYName, float InitWeight, float InitBias)
{
    Parameter weightParamy = new Parameter(new int[] { LYNodes, LXNodes }, DataType.Float, InitWeight, device, "W" + LYName);
    Parameter biasParamy = new Parameter(new int[] { LYNodes }, DataType.Float, InitBias, device, "B" + LYName);
    Function LayerY = CNTKLib.Plus(CNTKLib.Times(weightParamy, LXIn), biasParamy);
    NetParamVec.Add(weightParamy);
    NetParamVec.Add(biasParamy);
    return LayerY;
}
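
For completeness, a rough sketch of how NetParamVec feeds into a learner and trainer once the network above is assembled. The label variable, the learning rate, and binary cross entropy as the loss are example choices only; adjust them to your data.

// Example wiring of the trainer; the labels input, loss and learning rate are placeholders.
Variable labels = Variable.InputVariable(new int[] { outDim }, DataType.Float, "labels");
Function loss = CNTKLib.BinaryCrossEntropy(NetOut, labels);
var learningRate = new TrainingParameterScheduleDouble(0.02, 1);
Learner learner = CNTKLib.SGDLearner(NetParamVec, learningRate);
Trainer trainer = Trainer.CreateTrainer(NetOut, loss, loss, new List<Learner> { learner });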
Milind