
Commit

Merge pull request #80 from Sergio0694/dev
Batch normalization, APIs adjustments
Sergio0694 authored Feb 1, 2018
2 parents e9170bd + 5936f85 commit d9d1089
Showing 39 changed files with 2,130 additions and 308 deletions.
64 changes: 10 additions & 54 deletions NeuralNetwork.NET/APIs/CuDnnNetworkLayers.cs
@@ -1,10 +1,8 @@
using System;
using System.Linq;
using JetBrains.Annotations;
using NeuralNetworkNET.APIs.Delegates;
using NeuralNetworkNET.APIs.Enums;
using NeuralNetworkNET.APIs.Structs;
using NeuralNetworkNET.Extensions;
using NeuralNetworkNET.cuDNN;
using NeuralNetworkNET.Networks.Layers.Cuda;

namespace NeuralNetworkNET.APIs
@@ -17,22 +15,7 @@ public static class CuDnnNetworkLayers
/// <summary>
/// Gets whether or not the Cuda acceleration is supported on the current system
/// </summary>
public static bool IsCudaSupportAvailable
{
get
{
try
{
// Calling this directly would cause a crash in the <Module> loader due to the missing .dll files
return CuDnnSupportHelper.IsGpuAccelerationSupported();
}
catch (TypeInitializationException)
{
// Missing .dll file
return false;
}
}
}
public static bool IsCudaSupportAvailable => CuDnnService.IsAvailable;

/// <summary>
/// Creates a new fully connected layer with the specified number of input and output neurons, and the given activation function
@@ -132,41 +115,14 @@ public static LayerFactory Convolutional(
public static LayerFactory Inception(InceptionInfo info, BiasInitializationMode biasMode = BiasInitializationMode.Zero)
=> input => new CuDnnInceptionLayer(input, info, biasMode);

#region Feature helper

/// <summary>
/// A private class that is used to create a new standalone type that contains the actual test method (decoupling is needed to avoid &lt;Module&gt; loading crashes)
/// Creates a new batch normalization layer
/// </summary>
private static class CuDnnSupportHelper
{
/// <summary>
/// Checks whether or not the Cuda features are currently supported
/// </summary>
public static bool IsGpuAccelerationSupported()
{
try
{
// CUDA test
Alea.Gpu gpu = Alea.Gpu.Default;
if (gpu == null) return false;
if (!Alea.cuDNN.Dnn.IsAvailable) return false; // cuDNN
using (Alea.DeviceMemory<float> sample_gpu = gpu.AllocateDevice<float>(1024))
{
Alea.deviceptr<float> ptr = sample_gpu.Ptr;
void Kernel(int i) => ptr[i] = i;
Alea.Parallel.GpuExtension.For(gpu, 0, 1024, Kernel); // JIT test
float[] sample = Alea.Gpu.CopyToHost(sample_gpu);
return Enumerable.Range(0, 1024).Select<int, float>(i => i).ToArray().ContentEquals(sample);
}
}
catch
{
// Missing .dll or other errors
return false;
}
}
}

#endregion
/// <param name="mode">The normalization mode to use for the new layer</param>
/// <param name="activation">The desired activation function to use in the network layer</param>
[PublicAPI]
[Pure, NotNull]
public static LayerFactory BatchNormalization(NormalizationMode mode, ActivationType activation)
=> input => new CuDnnBatchNormalizationLayer(input, mode, activation);
}
}
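
Note: a minimal usage sketch of the two APIs touched in this file, picking the cuDNN-backed batch normalization layer only when GPU acceleration is available. `CuDnnNetworkLayers.IsCudaSupportAvailable` and `CuDnnNetworkLayers.BatchNormalization` come from the diff above; the CPU-side `NetworkLayers.BatchNormalization` counterpart and the enum values `NormalizationMode.Spatial` and `ActivationType.ReLU` are assumptions made for illustration.

```csharp
using NeuralNetworkNET.APIs;
using NeuralNetworkNET.APIs.Delegates;
using NeuralNetworkNET.APIs.Enums;

public static class LayerFactorySample
{
    // Returns a cuDNN batch normalization layer factory when the CUDA/cuDNN
    // runtime is present, otherwise falls back to an equivalent CPU layer
    public static LayerFactory GetBatchNormalizationFactory()
        => CuDnnNetworkLayers.IsCudaSupportAvailable
            ? CuDnnNetworkLayers.BatchNormalization(NormalizationMode.Spatial, ActivationType.ReLU)
            : NetworkLayers.BatchNormalization(NormalizationMode.Spatial, ActivationType.ReLU);
}
```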
93 changes: 69 additions & 24 deletions NeuralNetwork.NET/APIs/DatasetLoader.cs

Large diffs are not rendered by default.

77 changes: 70 additions & 7 deletions NeuralNetwork.NET/APIs/Datasets/Cifar10.cs
@@ -9,6 +9,9 @@
using NeuralNetworkNET.Extensions;
using NeuralNetworkNET.Helpers;
using NeuralNetworkNET.SupervisedLearning.Progress;
using SixLabors.ImageSharp;
using SixLabors.ImageSharp.Advanced;
using SixLabors.ImageSharp.PixelFormats;

namespace NeuralNetworkNET.APIs.Datasets
{
@@ -25,11 +28,14 @@ public static class Cifar10
// 32*32 RGB images
private const int SampleSize = 3072;

// A single 32*32 image
private const int ImageSize = 1024;

private const String DatasetURL = "https://www.cs.toronto.edu/~kriz/cifar-10-binary.tar.gz";

[NotNull, ItemNotNull]
private static readonly IReadOnlyList<String> TrainingBinFilenames = Enumerable.Range(1, 5).Select(i => $"data_batch_{i}.bin").ToArray();

private const String TestBinFilename = "test_batch.bin";

#endregion
@@ -38,12 +44,13 @@ public static class Cifar10
/// Downloads the CIFAR-10 training datasets and returns a new <see cref="ITestDataset"/> instance
/// </summary>
/// <param name="size">The desired dataset batch size</param>
/// <param name="callback">The optional progress callback</param>
/// <param name="token">An optional cancellation token for the operation</param>
[PublicAPI]
[Pure, ItemCanBeNull]
public static async Task<ITrainingDataset> GetTrainingDatasetAsync(int size, CancellationToken token = default)
public static async Task<ITrainingDataset> GetTrainingDatasetAsync(int size, [CanBeNull] IProgress<HttpProgress> callback = null, CancellationToken token = default)
{
IReadOnlyDictionary<String, Func<Stream>> map = await DatasetsDownloader.GetArchiveAsync(DatasetURL, token);
IReadOnlyDictionary<String, Func<Stream>> map = await DatasetsDownloader.GetArchiveAsync(DatasetURL, callback, token);
if (map == null) return null;
IReadOnlyList<(float[], float[])>[] data = new IReadOnlyList<(float[], float[])>[TrainingBinFilenames.Count];
Parallel.For(0, TrainingBinFilenames.Count, i => data[i] = ParseSamples(map[TrainingBinFilenames[i]], TrainingSamplesInBinFiles)).AssertCompleted();
@@ -54,25 +61,45 @@ public static async Task<ITrainingDataset> GetTrainingDatasetAsync(int size, Can
/// Downloads the CIFAR-10 test datasets and returns a new <see cref="ITestDataset"/> instance
/// </summary>
/// <param name="progress">The optional progress callback to use</param>
/// <param name="callback">The optional progress callback</param>
/// <param name="token">An optional cancellation token for the operation</param>
[PublicAPI]
[Pure, ItemCanBeNull]
public static async Task<ITestDataset> GetTestDatasetAsync([CanBeNull] Action<TrainingProgressEventArgs> progress = null, CancellationToken token = default)
public static async Task<ITestDataset> GetTestDatasetAsync([CanBeNull] Action<TrainingProgressEventArgs> progress = null, [CanBeNull] IProgress<HttpProgress> callback = null, CancellationToken token = default)
{
IReadOnlyDictionary<String, Func<Stream>> map = await DatasetsDownloader.GetArchiveAsync(DatasetURL, token);
IReadOnlyDictionary<String, Func<Stream>> map = await DatasetsDownloader.GetArchiveAsync(DatasetURL, callback, token);
if (map == null) return null;
IReadOnlyList<(float[], float[])> data = ParseSamples(map[TestBinFilename], TrainingSamplesInBinFiles);
return DatasetLoader.Test(data, progress);
}

/// <summary>
/// Downloads and exports the full CIFAR-10 dataset (both training and test samples) to the target directory
/// </summary>
/// <param name="directory">The target directory</param>
/// <param name="token">The cancellation token for the operation</param>
[PublicAPI]
public static async Task<bool> ExportDatasetAsync([NotNull] DirectoryInfo directory, CancellationToken token = default)
{
IReadOnlyDictionary<String, Func<Stream>> map = await DatasetsDownloader.GetArchiveAsync(DatasetURL, null, token);
if (map == null) return false;
if (!directory.Exists) directory.Create();
ParallelLoopResult result = Parallel.ForEach(TrainingBinFilenames.Concat(new[] { TestBinFilename }), (name, state) =>
{
ExportSamples(directory, (name, map[name]), TrainingSamplesInBinFiles, token);
if (token.IsCancellationRequested) state.Stop();
});
return result.IsCompleted && !token.IsCancellationRequested;
}

#region Tools

/// <summary>
/// Parses a CIFAR-10 .bin file
/// </summary>
/// <param name="factory">A <see cref="Func{TResult}"/> that returns the <see cref="Stream"/> to read</param>
/// <param name="count">The number of samples to parse</param>
private static unsafe IReadOnlyList<(float[], float[])> ParseSamples(Func<Stream> factory, int count)
private static unsafe IReadOnlyList<(float[], float[])> ParseSamples([NotNull] Func<Stream> factory, int count)
{
using (Stream stream = factory())
{
@@ -89,8 +116,12 @@ public static async Task<ITestDataset> GetTestDatasetAsync([CanBeNull] Action<Tr
fixed (float* px = x)
{
stream.Read(temp, 0, SampleSize);
for (int j = 0; j < SampleSize; j++)
px[j] = ptemp[j] / 255f; // Normalized samples
for (int j = 0; j < ImageSize; j++)
{
px[j] = ptemp[j] / 255f;
px[j + ImageSize] = ptemp[j + ImageSize] / 255f;
px[j + 2 * ImageSize] = ptemp[j + 2 * ImageSize] / 255f;
}
}
data[i] = (x, y);
}
Expand All @@ -99,6 +130,38 @@ public static async Task<ITestDataset> GetTestDatasetAsync([CanBeNull] Action<Tr
}
}

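Note: both ParseSamples above and ExportSamples below rely on the same CIFAR-10 binary layout: each record is one label byte followed by 3072 pixel bytes stored channel-planar (1024 red bytes, then 1024 green, then 1024 blue), which is why the code indexes `ptemp[j]`, `ptemp[j + ImageSize]` and `ptemp[j + 2 * ImageSize]`. A minimal standalone sketch of reading one record follows; it is illustrative only and, like the library code, assumes the full sample is returned by a single `Stream.Read` call.

```csharp
using System.IO;

public static class CifarRecordSample
{
    private const int ImageSize = 32 * 32;        // one channel plane
    private const int SampleSize = 3 * ImageSize; // full RGB sample

    // Reads a single CIFAR-10 record: 1 label byte + 3072 planar RGB pixel bytes
    public static (byte Label, byte[] Pixels) ReadRecord(Stream stream)
    {
        int label = stream.ReadByte();
        byte[] pixels = new byte[SampleSize];
        stream.Read(pixels, 0, SampleSize);

        // Channel values for pixel j:
        // R = pixels[j], G = pixels[j + ImageSize], B = pixels[j + 2 * ImageSize]
        return ((byte)label, pixels);
    }
}
```
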
/// <summary>
/// Exports a CIFAR-10 .bin file
/// </summary>
/// <param name="folder">The target folder to use to save the images</param>
/// <param name="source">The source filename and a <see cref="Func{TResult}"/> that returns the <see cref="Stream"/> to read</param>
/// <param name="count">The number of samples to parse</param>
/// <param name="token">A token for the operation</param>
private static unsafe void ExportSamples([NotNull] DirectoryInfo folder, (String Name, Func<Stream> Factory) source, int count, CancellationToken token)
{
using (Stream stream = source.Factory())
{
byte[] temp = new byte[SampleSize];
fixed (byte* ptemp = temp)
{
for (int i = 0; i < count; i++)
{
if (token.IsCancellationRequested) return;
int label = stream.ReadByte();
stream.Read(temp, 0, SampleSize);
using (Image<Rgb24> image = new Image<Rgb24>(32, 32))
fixed (Rgb24* p0 = &image.DangerousGetPinnableReferenceToPixelBuffer())
{
for (int j = 0; j < ImageSize; j++)
p0[j] = new Rgb24(ptemp[j], ptemp[j + ImageSize], ptemp[j + 2 * ImageSize]);
using (FileStream file = File.OpenWrite(Path.Combine(folder.FullName, $"[{source.Name}][{i}][{label}].bmp")))
image.SaveAsBmp(file);
}
}
}
}
}

#endregion
}
}
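
Note: a hedged usage sketch of how the new signatures in this file fit together. The method names and parameters (GetTrainingDatasetAsync, ExportDatasetAsync, the IProgress&lt;HttpProgress&gt; callback, the CancellationToken) come from the diff above; the using directive for HttpProgress, the batch size and the export path are assumptions made for illustration.

```csharp
using System;
using System.IO;
using System.Threading;
using System.Threading.Tasks;
using NeuralNetworkNET.APIs.Datasets;
using NeuralNetworkNET.Helpers; // assumed to contain the HttpProgress type

public static class Cifar10Sample
{
    public static async Task RunAsync()
    {
        using (CancellationTokenSource cts = new CancellationTokenSource(TimeSpan.FromMinutes(10)))
        {
            // Download (or reuse the cached archive) and build a training dataset with batches
            // of 128 samples, reporting download progress through the new callback parameter
            var training = await Cifar10.GetTrainingDatasetAsync(
                128,
                new Progress<HttpProgress>(p => Console.WriteLine($"Download progress: {p}")),
                cts.Token);
            if (training == null) return; // Download failed or was canceled

            // Optionally dump the whole dataset to .bmp files for inspection
            bool exported = await Cifar10.ExportDatasetAsync(new DirectoryInfo(@"C:\cifar10-export"), cts.Token);
            Console.WriteLine($"Export completed: {exported}");
        }
    }
}
```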