# Log-likelihood analysis in Python

# __author__ = 'Bayes Server'
# __version__= '0.2'

import jpype    # pip install jpype1    (version 1.2.1 or later)
import jpype.imports
from jpype.types import *

# Path to the Bayes Server Java API jar. TODO: adjust to your local install location.
classpath = "C:\\Program Files\\Bayes Server\\Bayes Server 9.4\\API\\Java\\bayesserver-9.4.jar"

# Launch the JVM. This must run BEFORE the com.bayesserver imports below:
# jpype.imports can only resolve Java packages from a running JVM.
jpype.startJVM(classpath=[classpath])

# import the Java modules (valid only once the JVM is up)
from com.bayesserver import *
from com.bayesserver.inference import *
from com.bayesserver.analysis import *
from jpype import java
# Uncomment the following line and change the license key, if you are using a licensed version
# License.validate("xxx")


# TODO change path to the sample Waste network shipped with Bayes Server
network_path = 'C:\\ProgramData\\Bayes Server 9.4\\Sample Networks\\Waste.bayes'

# Load the Bayesian network definition from disk.
network = Network()
network.load(network_path)

variables = network.getVariables()

# Look up each variable by name. NOTE(review): the second argument (True)
# presumably means "throw if not found" — confirm against the Bayes Server
# VariableCollection.get API docs.

# discrete variables
burning_regimen = variables.get('Burning Regimen', True)
waste_type = variables.get('Waste type', True)
filter_state = variables.get('Filter state', True)

# continuous variables
filter_efficiency = variables.get('Filter efficiency', True)
dust_emission = variables.get('Dust emission', True)
metals_in_waste = variables.get('Metals in waste', True)
co2_concentration = variables.get('CO2 concentration', True)
light_penetrability = variables.get('Light penetrability', True)
metals_emission = variables.get('Metals emission', True)

# You can either create some new evidence to analyze, or you can use
# inference.Evidence if you have evidence on an inference engine you want to analyze

evidence = DefaultEvidence(network)

# Set some evidence: discrete variables take a state object (setState),
# continuous variables take a numeric value (set).
evidence.setState(burning_regimen.getStates().get("Unstable", True))
evidence.setState(filter_state.getStates().get("Defect", True))
evidence.set(filter_efficiency, -0.45)
evidence.set(light_penetrability, 2.0)
evidence.set(metals_emission, 6.3)

# Copy the variables that currently have evidence into a pre-sized Java array,
# then wrap it as a java.util.List for the analysis call.
evidence_to_analyse = JArray(Variable)([None] * evidence.size())    # or you can use [a, b, c] syntax (see the Impact Analysis example)
evidence.getVariables(evidence_to_analyse)
evidence_to_analyse_list = java.util.Arrays.asList(evidence_to_analyse)

# EXCLUDE with max subset size 1: each analysis item removes evidence items
# (here, at most one at a time) and reports the resulting log-likelihood.
options = LogLikelihoodAnalysisOptions()
options.setSubsetMethod(LogLikelihoodAnalysisSubsetMethod.EXCLUDE)
options.setMaxEvidenceSubsetSize(1)

# Run the log-likelihood analysis over the configured evidence subsets.
result = LogLikelihoodAnalysis.calculate(network, evidence, evidence_to_analyse_list, options)

# Baseline log-likelihoods: with no evidence set, and with all evidence set.
baseline = result.getBaseline()
print(f'None={baseline.getLogLikelihoodNone()}\tAll={baseline.getLogLikelihoodAll()}')
print()

# Header row: one column per analysed evidence variable.
print('\t'.join(str(v.getName()) for v in evidence_to_analyse))

# In this example, we have chosen to order by descending log-likelihood, as we are using
# LogLikelihoodAnalysisSubsetMethod.Exclude and wish to understand which evidence
# causes the biggest decrease (e.g. for anomaly detection)
ranked = sorted(result.getItems(), key=lambda entry: entry.getLogLikelihood(), reverse=True)

for entry in ranked:
    # T/F flags mark which evidence items were retained in this subset.
    flags = '\t'.join('T' if retained else 'F' for retained in entry.getEvidenceFlags())
    print(f'{entry.getLogLikelihood()}\t{flags}')


#  Expected output
#  None = 0  All = -25.7131596279843

#  Burning Regimen  Filter state  Filter efficiency  Light penetrability  Metals emission
#  -14.1361346392178  T  T  T  F  T
#  -17.0198859514185  T  T  F  T  T
#  -20.9544632365106  F  T  T  T  T
#  -23.6826206567332  T  T  T  T  F
#  -25.7131596279843  T  F  T  T  T