ParametricEM

This pyAgrum notebook is licensed under a Creative Commons Attribution-NonCommercial 4.0 International License.

In [1]:
import pyAgrum as gum
import pyAgrum.lib.notebook as gnb

from pyAgrum.lib._utils.oslike import head

import os
# the generated databases will be saved in "out/*.csv"
os.makedirs("out", exist_ok=True)
EMnomissing=os.path.join("out","EM_nomissing.csv")
EMmissing=os.path.join("out","EM_missing.csv")

generating data with missing values (at random)

In [2]:
src=gum.fastBN("A->B<-C->D->E<-B;D->F")
gum.generateCSV(src,EMnomissing,5000,random_order=False)
src
Out[2]:
[graph of the BN src : A->B, C->B, C->D, B->E, D->E, D->F]
In [3]:
import pandas as pd
import numpy as np

def add_missing(src,dst,proba):
  df=pd.read_csv(src)
  # each cell is masked (i.e. becomes missing) independently with probability `proba`
  mask=np.random.choice([True, False], size=df.shape,p=[proba,1-proba])
  # masked cells are written as '?' in the destination file
  df.mask(mask).to_csv(dst,na_rep='?',index=False,float_format='%.0f')

gum.generateCSV(src,EMnomissing,5000,random_order=False)
add_missing(EMnomissing,EMmissing,proba=0.1)
In [4]:
print("No missing")
head(EMnomissing)
print("Missing")
head(EMmissing)
No missing
A,B,C,D,E,F
1,1,0,0,1,0
0,1,0,1,0,1
0,0,0,0,1,1
0,0,1,1,1,0
0,0,1,1,1,1
0,1,1,0,1,1
0,0,0,0,1,0
0,0,0,0,1,1
0,0,1,0,1,1

Missing
A,B,C,D,E,F
1,1,0,0,1,0
0,1,0,1,0,1
0,0,0,0,1,1
0,0,1,1,1,0
0,0,1,1,1,1
0,1,1,0,1,1
0,0,0,0,1,0
0,0,0,0,1,1
0,0,1,0,1,1
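
As a quick sanity check, the proportion of '?' cells in the generated file should be close to the requested proba=0.1. A minimal sketch using pandas (nothing pyAgrum-specific; df_check is just a temporary name used here):

import pandas as pd

# re-read the file, interpreting '?' as a missing value
df_check = pd.read_csv(EMmissing, na_values="?")
print(f"proportion of missing cells : {df_check.isna().mean().mean():.3f}")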

learning with missing data

In [5]:
learner = gum.BNLearner(EMmissing, ["?"])
print(f"Missing values in {EMmissing} : {learner.hasMissingValues()}")
Missing values in out/EM_missing.csv : True
In [6]:
# this will fail: missing data!
# learner.learnParameters(src.dag())
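
To actually observe the failure without uncommenting the line above, the call can be wrapped in a try/except. A minimal sketch (the exact exception class raised depends on the pyAgrum version, so a broad except is used here):

try:
    gum.BNLearner(EMmissing, ["?"]).learnParameters(src.dag())
except Exception as e:
    print(f"learning without EM fails : {e}")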
In [7]:
learner.useEM(1e-3)            # activate EM with 1e-3 as stopping criterion
learner.useAprioriSmoothing()  # add a smoothing prior on the counts
bn=learner.learnParameters(src.dag())
print(f"# iterations : {learner.nbrIterations()}")
gnb.sideBySide(gnb.getInference(src),gnb.getInference(bn))
# iterations : 6
[side by side : marginals from exact inference on the original BN src (left) and on the BN bn learned by EM (right)]
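
Beyond the (visually similar) marginals above, individual CPTs of the original and learned BNs can be put side by side. A sketch assuming, as in recent pyAgrum notebooks, that gnb.sideBySide accepts potentials and a captions argument:

gnb.sideBySide(src.cpt("E"), bn.cpt("E"),
               captions=["original P(E|B,D)", "P(E|B,D) learned by EM"])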

learning with a smaller error threshold (and no smoothing)

In [8]:
learner = gum.BNLearner(EMmissing, ["?"])
learner.setVerbosity(True)  # record the error at each EM iteration (see history() below)
learner.useEM(1e-8)
bn2=learner.learnParameters(src.dag())
print(f"# iterations : {learner.nbrIterations()}")
gnb.sideBySide(gnb.getInference(src),gnb.getInference(bn2))
# iterations : 15
[side by side : marginals from exact inference on the original BN src (left) and on the BN bn2 learned by EM with eps=1e-8 (right)]
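
The same kind of comparison between the two EM runs shows the effect of the tighter stopping criterion and of removing the smoothing prior (same assumption on gnb.sideBySide as above):

gnb.sideBySide(bn.cpt("E"), bn2.cpt("E"),
               captions=["eps=1e-3, with smoothing", "eps=1e-8, no smoothing"])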
In [9]:
print(learner.history())
(0.38484466395297656, 0.12192497782703847, 0.032630472733736274, 0.008395405420123384, 0.0021618092721542286, 0.0005622359210526747, 0.00014777844673121603, 3.920251010628199e-05, 1.0479797486923167e-05, 2.8192656788530122e-06, 7.623856161507548e-07, 2.0704767169807346e-07, 5.642824969011388e-08, 1.542355565753063e-08, 4.225827053662707e-09)
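
These successive errors can be plotted to visualise the convergence of EM. A simple sketch with matplotlib (not part of pyAgrum):

import matplotlib.pyplot as plt

# one error value per EM iteration; log scale to show the roughly geometric decrease
plt.plot(learner.history(), "o-")
plt.yscale("log")
plt.xlabel("iteration")
plt.ylabel("error")
plt.title("EM convergence (eps=1e-8)")
plt.show()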