Simple Naive Bayesian Network
In this example we will work with the simple nominal weather dataset: 14 records with the attributes Outlook, Temperature, Humidity, and Windy, and the class Play (whether or not the game is played).
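The probability tables used below were derived from those 14 records with smoothing (the values are consistent with adding 0.5 to every count). The following sketch is not part of the original example; it assumes the standard 14-record nominal weather dataset from the data mining book and shows how one of the smoothed values, P(Outlook = sunny | Play = yes), is obtained:
# Standard nominal weather dataset: (Outlook, Temperature, Humidity, Windy, Play)
records = [
    ('sunny', 'hot', 'high', 'false', 'no'),
    ('sunny', 'hot', 'high', 'true', 'no'),
    ('overcast', 'hot', 'high', 'false', 'yes'),
    ('rainy', 'mild', 'high', 'false', 'yes'),
    ('rainy', 'cool', 'normal', 'false', 'yes'),
    ('rainy', 'cool', 'normal', 'true', 'no'),
    ('overcast', 'cool', 'normal', 'true', 'yes'),
    ('sunny', 'mild', 'high', 'false', 'no'),
    ('sunny', 'cool', 'normal', 'false', 'yes'),
    ('rainy', 'mild', 'normal', 'false', 'yes'),
    ('sunny', 'mild', 'normal', 'true', 'yes'),
    ('overcast', 'mild', 'high', 'true', 'yes'),
    ('overcast', 'hot', 'normal', 'false', 'yes'),
    ('rainy', 'mild', 'high', 'true', 'no'),
]
# Smoothed estimate: (count + 0.5) / (class count + 0.5 * number of attribute values)
sunny_yes = sum(1 for r in records if r[0] == 'sunny' and r[4] == 'yes')
n_yes = sum(1 for r in records if r[4] == 'yes')
print((sunny_yes + 0.5) / (n_yes + 0.5 * 3))  # about 0.238, as used in the Outlook table below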
# Naive Bayesian Network for the nominal weather dataset
# using smoothed probabilities given in the data mining book
# Note: pomegranate requires that networkx, joblib,
# and pyyaml be installed
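# (This example uses the pomegranate 0.x API; DiscreteDistribution,
# ConditionalProbabilityTable, and this BayesianNetwork interface were
# removed in the pomegranate 1.0 rewrite.)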
from pomegranate import *
# Table for Play is discrete (isn't conditional on any other nodes)
P = DiscreteDistribution({'yes': 0.633, 'no': 0.367})
# Table for Outlook is conditional on Play
# Columns in the table correspond to Play, Outlook prob's
O = ConditionalProbabilityTable(
[['yes', 'sunny', 0.238],
['yes', 'overcast', 0.429],
['yes', 'rainy', 0.333],
['no', 'sunny', 0.538],
['no', 'overcast', 0.077],
['no', 'rainy', 0.385]],
[P])
# Table for Windy is conditional on Play
# Columns in the table correspond to Play, Windy prob's
W = ConditionalProbabilityTable(
[['yes', 'false', 0.650],
['yes', 'true', 0.350],
['no', 'false', 0.417],
['no', 'true', 0.583]],
[P])
# Table for Temperature is conditional on Play
# Columns in the table correspond to Play, Temperature prob's
T = ConditionalProbabilityTable(
[['yes', 'hot', 0.238],
['yes', 'mild', 0.429],
['yes', 'cool', 0.333],
['no', 'hot', 0.385],
['no', 'mild', 0.385],
['no', 'cool', 0.231]],
[P])
# Table for Humidity is conditional on Play
# Columns in the table correspond to Play, Humidity prob's
H = ConditionalProbabilityTable(
[['yes', 'high', 0.350],
['yes', 'normal', 0.650],
['no', 'high', 0.750],
['no', 'normal', 0.250]],
[P])
# Create nodes with their probability tables
s1 = Node(P, name="Play")
s2 = Node(O, name="Outlook")
s3 = Node(W, name="Windy")
s4 = Node(T, name="Temperature")
s5 = Node(H, name="Humidity")
Build the Model
model = BayesianNetwork("Weather Bayesian Network")
model.add_states(s1, s2, s3, s4, s5) # add the nodes
model.add_edge(s1, s2) # add the edges
model.add_edge(s1, s3)
model.add_edge(s1, s4)
model.add_edge(s1, s5)
model.bake() # actually create the network
# Can now make predictions
# Order of list is [Play, Outlook, Windy, Temperature, Humidity]
# i.e., same as order nodes were created
instance = ['yes', 'rainy', 'true', 'cool', 'high']
likelihood_yes = model.probability([instance])
instance[0] = 'no'
likelihood_no = model.probability([instance])
prob_yes = (likelihood_yes / (likelihood_yes + likelihood_no)) * 100
prob_no = (likelihood_no / (likelihood_yes + likelihood_no)) * 100
print(prob_yes, prob_no)
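Because Play is the only parent of every other node, the joint probability that model.probability computes is just the Play prior multiplied by the four conditional probabilities. A quick hand check (not part of the original example), reading the values straight from the tables above:
# Hand computation of the same two likelihoods for
# ['rainy', 'true', 'cool', 'high'] under Play='yes' and Play='no'
hand_yes = 0.633 * 0.333 * 0.350 * 0.333 * 0.350
hand_no = 0.367 * 0.385 * 0.583 * 0.231 * 0.750
print(hand_yes / (hand_yes + hand_no) * 100)  # should agree with prob_yes above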
# Predict the missing (None) values given the observed ones
print(model.predict([['yes', 'rainy', None, None, None]]))
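pomegranate's 0.x BayesianNetwork also provides predict_proba, which returns the posterior distribution over each missing value rather than only the most likely one. A minimal sketch, assuming predict_proba accepts the same partially observed lists as predict:
# Posterior distributions for the None entries given Play='yes' and
# Outlook='rainy'; observed values are echoed back unchanged
beliefs = model.predict_proba([['yes', 'rainy', None, None, None]])
for state, belief in zip(model.states, beliefs[0]):
    print(state.name, belief)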
Non-Naive Bayesian Network
In this example the node M has two parents, G and P, so the network is no longer a naive (single-parent) structure.
# Note: pomegranate requires that networkx, joblib,
# and pyyaml be installed
from pomegranate import *
# Table for G is discrete (isn't conditional on any other nodes)
G = DiscreteDistribution({'A': 1./3, 'B': 1./3, 'C': 1./3})
# Table for P is discrete (isn't conditional on any other nodes)
P = DiscreteDistribution({'A': 1./3, 'B': 1./3, 'C': 1./3})
# Table for M is conditional on other nodes
# Columns in the table correspond to G, P, M, probability
M = ConditionalProbabilityTable(
[['A', 'A', 'A', 0.0],
['A', 'A', 'B', 0.5],
['A', 'A', 'C', 0.5],
['A', 'B', 'A', 0.0],
['A', 'B', 'B', 0.0],
['A', 'B', 'C', 1.0],
['A', 'C', 'A', 0.0],
['A', 'C', 'B', 1.0],
['A', 'C', 'C', 0.0],
['B', 'A', 'A', 0.0],
['B', 'A', 'B', 0.0],
['B', 'A', 'C', 1.0],
['B', 'B', 'A', 0.5],
['B', 'B', 'B', 0.0],
['B', 'B', 'C', 0.5],
['B', 'C', 'A', 1.0],
['B', 'C', 'B', 0.0],
['B', 'C', 'C', 0.0],
['C', 'A', 'A', 0.0],
['C', 'A', 'B', 1.0],
['C', 'A', 'C', 0.0],
['C', 'B', 'A', 1.0],
['C', 'B', 'B', 0.0],
['C', 'B', 'C', 0.0],
['C', 'C', 'A', 0.5],
['C', 'C', 'B', 0.5],
['C', 'C', 'C', 0.0]], [G, P])
# Create nodes with their probability tables
s1 = Node(G, name="G")
s2 = Node(P, name="P")
s3 = Node(M, name="M")
Use the Bayesian Network Model
model = BayesianNetwork("GPM Bayesian Network")
model.add_states(s1, s2, s3) # add the nodes
model.add_edge(s1, s3) # add the edges
model.add_edge(s2, s3)
model.bake() # actually create the network
# Can now make predictions
# For each of these 3 instances, predict the None (which here is
# always the M attribute)
print(model.predict([['A', 'B', None],['A', 'C', None],['C', 'B', None]]))
# For each of these 3 instances, predict the None (which is
# the M attribute for the 1st instance, the P attribute for the
# 2nd instance, and the G attribute for the 3rd instance)
print(model.predict([['A', 'B', None],['A', None, 'C'],[None, 'B', 'A']]))
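As in the weather example, model.probability evaluates the joint probability of a fully specified instance, here P(G) * P(P) * P(M | G, P). A quick check (not in the original code):
# P(G='A') * P(P='B') * P(M='C' | G='A', P='B') = (1/3) * (1/3) * 1.0
print(model.probability([['A', 'B', 'C']]))  # expect roughly 0.111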
Contact Us!
Get instant help at an affordable price.
We provide help with all Naive Bayesian topics. If you are stuck, or need help with any other Machine Learning topic, send us your request or discuss it in the website chat (www.realcode4you.com) and we will help you.