Commit

updated samples or moved to test folder to work with changes
artidoro committed Mar 14, 2019
1 parent e63ad50 commit 2e27634
Showing 12 changed files with 15 additions and 14 deletions.
3 changes: 2 additions & 1 deletion docs/samples/Microsoft.ML.Samples/Dynamic/Normalizer.cs
@@ -58,7 +58,8 @@ public static void Example()

// Composing a different pipeline if we wanted to normalize more than one column at a time.
// Using log scale as the normalization mode.
- var multiColPipeline = ml.Transforms.Normalize(NormalizingEstimator.NormalizationMode.LogMeanVariance, new ColumnOptions[] { ("LogInduced", "Induced"), ("LogSpontaneous", "Spontaneous") });
+ var multiColPipeline = ml.Transforms.Normalize("LogInduced", "Induced", NormalizingEstimator.NormalizationMode.LogMeanVariance)
+     .Append(ml.Transforms.Normalize("LogSpontaneous", "Spontaneous", NormalizingEstimator.NormalizationMode.LogMeanVariance));
// The transformed data.
var multiColtransformer = multiColPipeline.Fit(trainData);
var multiColtransformedData = multiColtransformer.Transform(trainData);
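
For orientation, a minimal sketch of the new per-column pattern written as a standalone helper. It assumes an MLContext named ml and an already-loaded IDataView with Induced and Spontaneous columns, as in the sample, and that NormalizingEstimator resolves from the Microsoft.ML.Transforms namespace in this build.

    using Microsoft.ML;
    using Microsoft.ML.Transforms;   // assumed location of NormalizingEstimator in this build

    static class NormalizeSketch
    {
        // One Normalize call per column, chained with Append, replacing the old ColumnOptions[] overload.
        static IDataView LogNormalize(MLContext ml, IDataView trainData)
        {
            var multiColPipeline = ml.Transforms.Normalize("LogInduced", "Induced", NormalizingEstimator.NormalizationMode.LogMeanVariance)
                .Append(ml.Transforms.Normalize("LogSpontaneous", "Spontaneous", NormalizingEstimator.NormalizationMode.LogMeanVariance));
            return multiColPipeline.Fit(trainData).Transform(trainData);
        }
    }

Each column now gets its own Normalize estimator appended to the chain rather than one estimator configured with an array of column pairs.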
@@ -69,10 +69,10 @@ public static void Example()
};

var engine = mlContext.Transforms.Text.TokenizeIntoWords("TokenizedWords", "Sentiment_Text")
- .Append(mlContext.Transforms.Conversion.MapValue(lookupMap, "Words", "Ids", new ColumnOptions[] { ("VariableLenghtFeatures", "TokenizedWords") }))
+ .Append(mlContext.Transforms.Conversion.MapValue(lookupMap, "Words", "Ids", "VariableLenghtFeatures", "TokenizedWords"))
.Append(mlContext.Transforms.CustomMapping(ResizeFeaturesAction, "Resize"))
.Append(tensorFlowModel.ScoreTensorFlowModel(new[] { "Prediction/Softmax" }, new[] { "Features" }))
- .Append(mlContext.Transforms.CopyColumns(("Prediction", "Prediction/Softmax")))
+ .Append(mlContext.Transforms.CopyColumns("Prediction", "Prediction/Softmax"))
.Fit(dataView)
.CreatePredictionEngine<IMDBSentiment, OutputScores>(mlContext);
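
As a rough guide to the reordered positional arguments in the new overloads, a sketch only: mlContext and the lookupMap IDataView come from the sample above, and the column names, including the sample's original VariableLenghtFeatures spelling, are kept as-is.

    using Microsoft.ML;

    static class SentimentMappingSketch
    {
        // Lookup data and its key/value columns come first, then the output column, then the input column.
        static void BuildMappingAndCopy(MLContext mlContext, IDataView lookupMap)
        {
            var mapWords = mlContext.Transforms.Conversion.MapValue(
                lookupMap, "Words", "Ids",        // lookup table and its key/value columns
                "VariableLenghtFeatures",         // output column (sample's spelling kept)
                "TokenizedWords");                // input column
            var copyScores = mlContext.Transforms.CopyColumns("Prediction", "Prediction/Softmax");
        }
    }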

@@ -36,8 +36,8 @@ public static void Example()

var imagesFolder = Path.GetDirectoryName(imagesDataFile);
// Image loading pipeline.
- var pipeline = mlContext.Transforms.LoadImages(imagesFolder, ("ImageObject", "ImagePath"))
- .Append(mlContext.Transforms.ConvertToGrayscale(("Grayscale", "ImageObject")));
+ var pipeline = mlContext.Transforms.LoadImages(imagesFolder, "ImageObject", "ImagePath")
+ .Append(mlContext.Transforms.ConvertToGrayscale("Grayscale", "ImageObject"));

var transformedData = pipeline.Fit(data).Transform(data);
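
A compact sketch of the updated image calls, assuming an MLContext, an IDataView with an ImagePath column, and the imagesFolder path from the sample above; note the folder comes first, then the output column, then the input column.

    using Microsoft.ML;

    static class GrayscaleSketch
    {
        static IDataView LoadAndGray(MLContext mlContext, IDataView data, string imagesFolder)
        {
            // LoadImages resolves ImagePath against imagesFolder; ConvertToGrayscale reads the loaded images.
            var pipeline = mlContext.Transforms.LoadImages(imagesFolder, "ImageObject", "ImagePath")
                .Append(mlContext.Transforms.ConvertToGrayscale("Grayscale", "ImageObject"));
            return pipeline.Fit(data).Transform(data);
        }
    }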

@@ -40,7 +40,7 @@ public static void Example()
// Installing the Microsoft.ML.DNNImageFeaturizer packages copies the models in the
// `DnnImageModels` folder.
// Image loading pipeline.
- var pipeline = mlContext.Transforms.LoadImages(imagesFolder, ("ImageObject", "ImagePath"))
+ var pipeline = mlContext.Transforms.LoadImages(imagesFolder, "ImageObject", "ImagePath")
.Append(mlContext.Transforms.ResizeImages("ImageObject", imageWidth: 224, imageHeight: 224))
.Append(mlContext.Transforms.ExtractPixels("Pixels", "ImageObject"))
.Append(mlContext.Transforms.DnnFeaturizeImage("FeaturizedImage", m => m.ModelSelector.ResNet18(mlContext, m.OutputColumn, m.InputColumn), "Pixels"));
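
For completeness, the same chain assembled as a helper and applied with Fit/Transform; a sketch that assumes the Microsoft.ML.DNNImageFeaturizer ResNet18 package is installed and that data holds an ImagePath column.

    using Microsoft.ML;

    static class DnnFeaturizeSketch
    {
        static IDataView Featurize(MLContext mlContext, IDataView data, string imagesFolder)
        {
            // Load, resize to the network's expected 224x224 input, extract raw pixels, then run ResNet18.
            var pipeline = mlContext.Transforms.LoadImages(imagesFolder, "ImageObject", "ImagePath")
                .Append(mlContext.Transforms.ResizeImages("ImageObject", imageWidth: 224, imageHeight: 224))
                .Append(mlContext.Transforms.ExtractPixels("Pixels", "ImageObject"))
                .Append(mlContext.Transforms.DnnFeaturizeImage("FeaturizedImage",
                    m => m.ModelSelector.ResNet18(mlContext, m.OutputColumn, m.InputColumn), "Pixels"));
            return pipeline.Fit(data).Transform(data);
        }
    }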
@@ -37,7 +37,7 @@ public static void Example()

var imagesFolder = Path.GetDirectoryName(imagesDataFile);
// Image loading pipeline.
- var pipeline = mlContext.Transforms.LoadImages(imagesFolder, ("ImageObject", "ImagePath"))
+ var pipeline = mlContext.Transforms.LoadImages(imagesFolder, "ImageObject", "ImagePath")
.Append(mlContext.Transforms.ResizeImages("ImageObject", imageWidth: 100, imageHeight: 100 ))
.Append(mlContext.Transforms.ExtractPixels("Pixels", "ImageObject"));

@@ -36,7 +36,7 @@ public static void Example()

var imagesFolder = Path.GetDirectoryName(imagesDataFile);
// Image loading pipeline.
- var pipeline = mlContext.Transforms.LoadImages(imagesFolder, ("ImageReal", "ImagePath"));
+ var pipeline = mlContext.Transforms.LoadImages(imagesFolder, "ImageReal", "ImagePath");
var transformedData = pipeline.Fit(data).Transform(data);

// The transformedData IDataView contains the loaded images now
@@ -36,7 +36,7 @@ public static void Example()

var imagesFolder = Path.GetDirectoryName(imagesDataFile);
// Image loading pipeline.
- var pipeline = mlContext.Transforms.LoadImages(imagesFolder, ("ImageReal", "ImagePath"))
+ var pipeline = mlContext.Transforms.LoadImages(imagesFolder, "ImageReal", "ImagePath")
.Append(mlContext.Transforms.ResizeImages("ImageReal", imageWidth: 100, imageHeight: 100));


2 changes: 1 addition & 1 deletion docs/samples/Microsoft.ML.Samples/Dynamic/ValueMapping.cs
@@ -55,7 +55,7 @@ public static void Example()
};

// Constructs the ValueMappingEstimator making the ML.net pipeline
- var pipeline = mlContext.Transforms.Conversion.MapValue(educationKeys, educationValues, ("EducationCategory", "Education"));
+ var pipeline = mlContext.Transforms.Conversion.MapValue(educationKeys, educationValues, "EducationCategory", "Education");

// Fits the ValueMappingEstimator and transforms the data converting the Education to EducationCategory.
IDataView transformedData = pipeline.Fit(trainData).Transform(trainData);
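
A small self-contained sketch of the dictionary-style mapping with the new argument order (output column, then input column); the key/value contents below are illustrative stand-ins, not the sample's exact data, and trainData is assumed to have an Education column.

    using System.Collections.Generic;
    using Microsoft.ML;

    static class EducationMappingSketch
    {
        static IDataView MapEducation(MLContext mlContext, IDataView trainData)
        {
            // Illustrative lookup pairs: each key maps to the value at the same index.
            var educationKeys = new List<string> { "0-5yrs", "6-11yrs", "12+yrs" };
            var educationValues = new List<string> { "Undergraduate", "Postgraduate", "Postgraduate" };

            var pipeline = mlContext.Transforms.Conversion.MapValue(
                educationKeys, educationValues, "EducationCategory", "Education");
            return pipeline.Fit(trainData).Transform(trainData);
        }
    }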
@@ -46,7 +46,7 @@ public static void Example()
};

// Constructs the ValueMappingEstimator making the ML.net pipeline
- var pipeline = mlContext.Transforms.Conversion.MapValue(temperatureKeys, classificationValues, ("TemperatureCategory", "Temperature"));
+ var pipeline = mlContext.Transforms.Conversion.MapValue(temperatureKeys, classificationValues, "TemperatureCategory", "Temperature");

// Fits the ValueMappingEstimator and transforms the data adding the TemperatureCategory column.
IDataView transformedData = pipeline.Fit(trainData).Transform(trainData);
@@ -49,7 +49,7 @@ public static void Example()
};

// Constructs the ValueMappingEstimator making the ML.net pipeline
- var pipeline = mlContext.Transforms.Conversion.MapValue<string, int>(educationKeys, educationValues, ("EducationFeature", "Education"));
+ var pipeline = mlContext.Transforms.Conversion.MapValue<string, int>(educationKeys, educationValues, "EducationFeature", "Education");

// Fits the ValueMappingEstimator and transforms the data adding the EducationFeature column.
IDataView transformedData = pipeline.Fit(trainData).Transform(trainData);
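
The same pattern with explicit type arguments when the mapped values are a different type than the keys; a sketch with illustrative data, again assuming trainData carries an Education column.

    using System.Collections.Generic;
    using Microsoft.ML;

    static class EducationToFeatureSketch
    {
        static IDataView MapEducationToFeature(MLContext mlContext, IDataView trainData)
        {
            // Illustrative string-to-int lookup pairs.
            var educationKeys = new List<string> { "0-5yrs", "6-11yrs", "12+yrs" };
            var educationValues = new List<int> { 1, 5, 12 };

            var pipeline = mlContext.Transforms.Conversion.MapValue<string, int>(
                educationKeys, educationValues, "EducationFeature", "Education");
            return pipeline.Fit(trainData).Transform(trainData);
        }
    }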
@@ -55,8 +55,8 @@ public static void Example()
// Generate the ValueMappingEstimator that will output KeyTypes even though our values are strings.
// The KeyToValueMappingEstimator is added to provide a reverse lookup of the KeyType, converting the KeyType value back
// to the original value.
- var pipeline = mlContext.Transforms.Conversion.MapValue<string, string>(educationKeys, educationValues, true, ("EducationKeyType", "Education"))
- .Append(mlContext.Transforms.Conversion.MapKeyToValue(("EducationCategory", "EducationKeyType")));
+ var pipeline = mlContext.Transforms.Conversion.MapValue(educationKeys, educationValues, "EducationKeyType", "Education", true)
+ .Append(mlContext.Transforms.Conversion.MapKeyToValue("EducationCategory", "EducationKeyType"));

// Fits the ValueMappingEstimator and transforms the data adding the EducationKeyType column.
IDataView transformedData = pipeline.Fit(trainData).Transform(trainData);
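
A sketch of the full round trip shown above: passing true asks MapValue to emit a KeyType column, and MapKeyToValue converts the keys back to readable strings. The lookup data here is illustrative, and trainData is assumed to have an Education column.

    using System.Collections.Generic;
    using Microsoft.ML;

    static class EducationKeyTypeSketch
    {
        static IDataView MapAndRecover(MLContext mlContext, IDataView trainData)
        {
            // Illustrative lookup pairs.
            var educationKeys = new List<string> { "0-5yrs", "6-11yrs", "12+yrs" };
            var educationValues = new List<string> { "Undergraduate", "Postgraduate", "Postgraduate" };

            var pipeline = mlContext.Transforms.Conversion.MapValue(
                    educationKeys, educationValues, "EducationKeyType", "Education", true)  // true => output as KeyType
                .Append(mlContext.Transforms.Conversion.MapKeyToValue("EducationCategory", "EducationKeyType"));
            return pipeline.Fit(trainData).Transform(trainData);
        }
    }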
