Azure Functions (C#)


using System.IO;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Azure.WebJobs;
using Microsoft.Azure.WebJobs.Extensions.Http;
using Microsoft.Extensions.Logging;
using Newtonsoft.Json;
using BayesServer.Inference;
using BayesServer.Inference.RelevanceTree;

namespace BayesServer.Azure.Functions
{
    /// <summary>
    /// A simple example of using Bayes Server within an HttpTrigger Azure function.
    /// </summary>
    /// <remarks>
    /// If you are using an Azure Functions Consumption plan, a cold start is required after a period of inactivity.
    /// This can lead to a delay of up to 2 seconds, which is not caused by Bayes Server itself.
    ///
    /// You can use Azure Functions monitoring to provide timing detail and additional information about usage.
    /// Please see the Azure documentation for further information.
    /// </remarks>
    public static class Waste
    {
        // Cache the network so it can be re-used between calls.
        // See the Azure documentation for more advanced caching options.
        private static readonly Network network;
        private static readonly IInferenceFactory factory;

        private static readonly Variable filterState;

        static Waste()
        {
            Waste.network = new Network();

            // In this example the 'Waste' network, which is included as an example with Bayes Server,
            // is loaded from a project resource.
            // See the Azure documentation for the many options for loading resources/data.

            using (var stream = new MemoryStream(Properties.Resources.Waste))
            {
                Waste.network.Load(stream);
            }

            Waste.factory = new RelevanceTreeInferenceFactory();

            Waste.filterState = network.Variables["Filter state", true];
        }

        /// <summary>
        /// HttpTrigger that calculates the log-likelihood of evidence.
        ///
        /// The HTTP(S) request body is expected to be JSON containing an evidence section. For example:
        ///
        /// {
        ///     "evidence": {
        ///         "filterState": "Defect"
        ///     }
        /// }
        ///
        /// </summary>
        [FunctionName("waste")]
        public static async Task<IActionResult> Run(
            [HttpTrigger(AuthorizationLevel.Function, "post", Route = null)]
            HttpRequest req,
            ILogger log)
        {
            log.LogInformation("C# HTTP trigger function processed a request.");

            var requestBody = await new StreamReader(req.Body).ReadToEndAsync();
            dynamic data = JsonConvert.DeserializeObject(requestBody);
            dynamic evidence = data?.evidence;
            string filterStateValue = evidence?.filterState;

            if (filterStateValue == null)
            {
                return new BadRequestObjectResult("filterState not found in json body (evidence section)");
            }

            // In this simple example, we create a new inference engine per call.
            // Ideally, inference engines are re-used over multiple calls.
            // If required, please refer to the Azure documentation on how best to do this.
            // A simple approach would be to use one of the built-in .NET concurrent collections
            // (see the sketch after this listing).

            var inference = factory.CreateInferenceEngine(network);
            var queryOptions = factory.CreateQueryOptions();
            var queryOutput = factory.CreateQueryOutput();
            queryOptions.LogLikelihood = true;

            inference.Evidence.SetState(filterState.States[filterStateValue, true]);

            inference.Query(queryOptions, queryOutput);

            return new OkObjectResult($"Log-likelihood = {queryOutput.LogLikelihood.Value}");
        }
    }
}
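
The comment in Run notes that, ideally, inference engines are re-used over multiple calls, for example via one of the built-in .NET concurrent collections. The sketch below is one way that idea could look. It is illustrative only and not part of the sample: the InferencePool type is hypothetical, and it assumes an engine returned by IInferenceFactory.CreateInferenceEngine can be reset between calls with Evidence.Clear().

using System.Collections.Concurrent;
using BayesServer.Inference;

namespace BayesServer.Azure.Functions
{
    /// <summary>
    /// Minimal sketch of pooling inference engines between calls (illustrative only,
    /// not part of the original example).
    /// </summary>
    public static class InferencePool
    {
        private static readonly ConcurrentBag<IInference> pool = new ConcurrentBag<IInference>();

        public static IInference Rent(IInferenceFactory factory, Network network)
        {
            // Re-use a previously created engine if one is available, otherwise create a new one.
            if (pool.TryTake(out var inference))
            {
                // Assumes evidence left over from the previous call can be cleared like this.
                inference.Evidence.Clear();
                return inference;
            }

            return factory.CreateInferenceEngine(network);
        }

        public static void Return(IInference inference)
        {
            // Make the engine available to subsequent calls.
            pool.Add(inference);
        }
    }
}

With something like this in place, Run could rent an engine from the pool instead of calling factory.CreateInferenceEngine directly, and return it in a finally block once the query has completed.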
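
To exercise the deployed function, a client posts the JSON body shown in the doc comment to the function's URL. The snippet below is a rough client-side sketch; the URL and function key are placeholders, not values from the original example.

using System;
using System.Net.Http;
using System.Text;
using System.Threading.Tasks;

public static class WasteClientExample
{
    // Placeholder endpoint; replace with your deployed function URL and function key.
    private const string FunctionUrl = "https://your-app.azurewebsites.net/api/waste?code=YOUR_FUNCTION_KEY";

    public static async Task Main()
    {
        using (var client = new HttpClient())
        {
            // Request body matching the 'evidence' section expected by the function.
            var json = "{\"evidence\":{\"filterState\":\"Defect\"}}";

            using (var content = new StringContent(json, Encoding.UTF8, "application/json"))
            {
                var response = await client.PostAsync(FunctionUrl, content);
                Console.WriteLine(await response.Content.ReadAsStringAsync());
            }
        }
    }
}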