ParametricEM
pyAgrum 0.17.3   

This pyAgrum notebook is licensed under a Creative Commons Attribution-NonCommercial 4.0 International License.

In [1]:
import pyAgrum as gum
import pyAgrum.lib.notebook as gnb

from pyAgrum.lib._utils.oslike import head

import os
# the generated databases will be saved in "out/*.csv"
EMnomissing=os.path.join("out","EM_nomissing.csv")
EMmissing=os.path.join("out","EM_missing.csv")

generating data with missing values (at random)

In [2]:
src=gum.fastBN("A->B<-C->D->E<-B;D->F")
gum.generateCSV(src,EMnomissing,5000,random_order=False)
src
Out[2]:
[structure of the generated BN: A->B, C->B, C->D, B->E, D->E, D->F]
In [3]:
import pandas as pd
import numpy as np

def add_missing(src,dst,proba):
  df=pd.read_csv(src)
  # each cell is independently masked with probability proba (missing completely at random)
  mask=np.random.choice([True, False], size=df.shape,p=[proba,1-proba])
  # masked cells are written as '?' in the destination file
  df.mask(mask).to_csv(dst,na_rep='?',index=False,float_format='%.0f')

gum.generateCSV(src,EMnomissing,5000,random_order=False)
add_missing(EMnomissing,EMmissing,proba=0.1)
In [4]:
print("No missing")
head(EMnomissing)
print("Missing")
head(EMmissing)
No missing
A,B,C,D,E,F
1,1,1,1,0,1
1,1,0,1,0,1
0,1,1,0,1,0
1,0,1,0,0,0
1,0,1,1,0,0
1,1,0,1,0,1
1,1,0,1,0,1
0,0,1,1,1,0
0,1,0,1,0,1

Missing
A,B,C,D,E,F
1,1,?,1,0,1
?,1,0,1,0,1
0,1,1,0,1,0
1,0,1,0,0,?
1,0,1,1,0,0
1,1,0,1,0,1
1,?,0,1,0,?
0,0,1,1,1,0
0,1,0,?,0,1

learning with missing data

In [5]:
learner = gum.BNLearner(EMmissing, ["?"])
print(f"Missing values in {EMmissing} : {learner.hasMissingValues()}")
Missing values in out/EM_missing.csv : True
In [6]:
# this would fail: the database contains missing values and EM is not enabled yet
# learner.learnParameters(src.dag())
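To see the error message without interrupting the notebook, here is a minimal sketch (not part of the original notebook) that wraps the call in a try/except; the generic Exception is caught because the exact pyAgrum exception class is not shown here.

In [ ]:
# without EM, learning parameters from a database with missing values raises an error
try:
    learner.learnParameters(src.dag())
except Exception as e:
    print(f"learning failed: {e}")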
In [7]:
learner.useEM(1e-3)
learner.useAprioriSmoothing()
bn=learner.learnParameters(src.dag())
print(f"# iterations : {learner.nbrIterations()}")
gnb.sideBySide(gnb.getInference(src),gnb.getInference(bn))
# iterations : 5
[side-by-side inference displays: the original BN src (inference in 1.90ms) and the BN learned with EM (inference in 0.88ms)]
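As a quick sanity check (not part of the original notebook), the CPT of a node in the generating network can be compared with the CPT estimated by EM; accessing a CPT by node name and the captions argument of gnb.sideBySide are assumed to be available in this pyAgrum version.

In [ ]:
# optional check: compare the original CPT of B with the CPT estimated by EM
gnb.sideBySide(src.cpt("B"),bn.cpt("B"),
               captions=["CPT of B in src","CPT of B learned by EM"])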

learning with smaller error (and no smoothing)

In [8]:
learner = gum.BNLearner(EMmissing, ["?"])
learner.setVerbosity(True)
learner.useEM(1e-8)
bn2=learner.learnParameters(src.dag())
print(f"# iterations : {learner.nbrIterations()}")
gnb.sideBySide(gnb.getInference(src),gnb.getInference(bn2))
# iterations : 12
[side-by-side inference displays: the original BN src (inference in 0.88ms) and the BN learned with the smaller stopping error (inference in 1.14ms)]
In [9]:
print(learner.history())
(0.3856529800972824, 0.06846982402141066, 0.010562362085800971, 0.0016271382201646326, 0.000313275220750178, 6.747997590156713e-05, 1.4836188104692787e-05, 3.3106785674519545e-06, 7.468753825331639e-07, 1.698633995033125e-07, 3.886876382027633e-08, 8.935576049169853e-09)
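learner.history() contains the error recorded at each EM iteration, so a small sketch (not part of the original notebook) can plot it on a logarithmic scale to visualize the convergence; matplotlib is assumed to be installed.

In [ ]:
import matplotlib.pyplot as plt

# plot the EM error per iteration on a log scale
plt.semilogy(learner.history())
plt.xlabel("iteration")
plt.ylabel("error")
plt.show()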