
Understanding how to implement a decision tree algorithm in Python

Updated: October 11, 2018, 10:49:07   Contributed by: laozhang
In this article we share the key points of implementing a decision tree algorithm in Python; readers who need this can use it as a reference.

Data description

Each data item is stored as a list, with the classification result in the last column.

Multiple data items form the dataset:

data=[[d1,d2,d3...dn,result],
      [d1,d2,d3...dn,result],
      ...
      [d1,d2,d3...dn,result]]
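
As a concrete illustration of this format, here is a small hypothetical training set. The column meanings (referrer, country, read-FAQ flag, pages viewed) and the result labels are assumptions made for this sketch; the usage examples further down refer to such a dataset as my_data:

my_data=[['slashdot','USA','yes',18,'None'],
         ['google','France','yes',23,'Premium'],
         ['google','UK','no',21,'Basic'],
         ['kiwitobes','France','yes',19,'Basic'],
         ['google','UK','no',18,'None'],
         ['slashdot','France','yes',19,'None']]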

Decision tree data structure

class DecisionNode:
  '''A node of the decision tree.
  '''

  def __init__(self,col=-1,value=None,results=None,tb=None,fb=None):
    '''Initialize a decision tree node.

    args:
    col -- index of the column the dataset is split on at this node
    value -- reference value used to split column col
    results -- only present on leaf nodes; statistics of the subset that ends
               up here, a dict of the form {'result': number of occurrences}
    tb,fb -- the true and false branches (subtrees)
    '''
    self.col=col
    self.value=value
    self.results=results
    self.tb=tb
    self.fb=fb

The final outcome of decision tree classification is a partition of the data items into several subsets, where every item in a subset has the same result, so each subset is represented as a dict of the form {'result': number of occurrences}.
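
A minimal sketch of the two kinds of nodes (the column index and labels below are made up for illustration): a leaf node only carries this results dict, while an internal node carries the split criterion and its two subtrees.

# hypothetical leaf nodes: their results dicts summarize the subsets they cover
leaf_yes=DecisionNode(results={'yes':3,'no':1})
leaf_no=DecisionNode(results={'no':2})
# hypothetical internal node: split on column 2 with reference value 'yes'
node=DecisionNode(col=2,value='yes',tb=leaf_yes,fb=leaf_no)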

def divideset(rows,column,value):
  '''Split the dataset rows according to how the value in the given column
     compares with the reference value.
     Returns two datasets.
  '''
  split_function=None
  # value is numeric
  if isinstance(value,int) or isinstance(value,float):
    # the lambda returns True when row[column]>=value
    split_function=lambda row:row[column]>=value
  # value is a string
  else:
    # the lambda returns True when row[column]==value
    split_function=lambda row:row[column]==value
  # split the dataset in two
  set1=[row for row in rows if split_function(row)]
  set2=[row for row in rows if not split_function(row)]
  # return both datasets
  return (set1,set2)
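
A quick usage sketch against the hypothetical my_data shown earlier (column 2 is the assumed read-FAQ flag):

# rows whose column 2 equals 'yes' end up in set1, the rest in set2
set1,set2=divideset(my_data,2,'yes')
print(len(set1),len(set2))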
 
def uniquecounts(rows):
  '''Count how many distinct final results appear in the dataset rows and how
     often each one occurs; returns a dict.
  '''
  results={}
  for row in rows:
    r=row[len(row)-1]
    if r not in results: results[r]=0
    results[r]+=1
  return results
 
def giniimpurity(rows):
  '''Return the Gini impurity of the dataset rows.
  '''
  total=len(rows)
  counts=uniquecounts(rows)
  imp=0
  for k1 in counts:
    p1=float(counts[k1])/total
    for k2 in counts:
      if k1==k2: continue
      p2=float(counts[k2])/total
      imp+=p1*p2
  return imp
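
The double loop sums p_i*p_j over all pairs of distinct result labels, which equals 1 - sum(p_i^2), the usual Gini impurity. A quick sanity check on a made-up four-row set with three 'yes' results and one 'no':

# p('yes')=0.75, p('no')=0.25, so the impurity is 2*0.75*0.25 = 0.375
print(giniimpurity([['a','yes'],['b','yes'],['c','yes'],['d','no']]))  # 0.375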
 
def entropy(rows):
  '''Return the entropy of the dataset rows.
  '''
  from math import log
  log2=lambda x:log(x)/log(2) 
  results=uniquecounts(rows)
  ent=0.0
  for r in results.keys():
    p=float(results[r])/len(rows)
    ent=ent-p*log2(p)
  return ent
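
The same made-up four-row set gives entropy -(0.75*log2(0.75)+0.25*log2(0.25)), roughly 0.811:

print(entropy([['a','yes'],['b','yes'],['c','yes'],['d','no']]))  # ~0.811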
 
def build_tree(rows,scoref=entropy):
  '''Build the decision tree recursively.
  '''
  if len(rows)==0: return DecisionNode()
  current_score=scoref(rows)

  # best information gain found so far
  best_gain=0.0
  # best split criterion, as a (column, value) pair
  best_criteria=None
  # best pair of subsets
  best_sets=None

  column_count=len(rows[0])-1
  # iterate over the feature columns to find the best split
  for col in range(0,column_count):
    column_values={}
    # collect the distinct values appearing in this column
    for row in rows:
      column_values[row[col]]=1
    for value in column_values.keys():
      (set1,set2)=divideset(rows,col,value)
      p=float(len(set1))/len(rows)
      # information gain of this split
      gain=current_score-p*scoref(set1)-(1-p)*scoref(set2)
      if gain>best_gain and len(set1)>0 and len(set2)>0:
        best_gain=gain
        best_criteria=(col,value)
        best_sets=(set1,set2)
  # if the split lowers the impurity of the data, split the two subsets further
  if best_gain>0:
    trueBranch=build_tree(best_sets[0],scoref)
    falseBranch=build_tree(best_sets[1],scoref)
    return DecisionNode(col=best_criteria[0],value=best_criteria[1],
            tb=trueBranch,fb=falseBranch)
  # otherwise stop splitting and create a leaf node
  else:
    return DecisionNode(results=uniquecounts(rows))
 
def print_tree(tree,indent=''):
  '''Print the tree as indented text: leaves show their results dict, internal
     nodes show 'col:value?' followed by the T (true) and F (false) branches.
  '''
  if tree.results!=None:
    print(str(tree.results))
  else:
    print(str(tree.col)+':'+str(tree.value)+'? ')
    print(indent+'T->',end='')
    print_tree(tree.tb,indent+' ')
    print(indent+'F->',end='')
    print_tree(tree.fb,indent+' ')
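
Putting build_tree and print_tree together on the hypothetical my_data from earlier:

tree=build_tree(my_data)   # entropy is the default scoring function
print_tree(tree)           # text view of the learned tree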
 
 
def getwidth(tree):
  '''Number of leaves in the tree (used to size the drawing horizontally).'''
  if tree.tb==None and tree.fb==None: return 1
  return getwidth(tree.tb)+getwidth(tree.fb)

def getdepth(tree):
  '''Depth of the tree, counting edges from the root to the deepest leaf.'''
  if tree.tb==None and tree.fb==None: return 0
  return max(getdepth(tree.tb),getdepth(tree.fb))+1
 
 
from PIL import Image, ImageDraw

def drawtree(tree,jpeg='tree.jpg'):
  w=getwidth(tree)*100
  h=getdepth(tree)*100+120

  img=Image.new('RGB',(w,h),(255,255,255))
  draw=ImageDraw.Draw(img)

  drawnode(draw,tree,w/2,20)
  img.save(jpeg,'JPEG')
 
def drawnode(draw,tree,x,y):
  if tree.results==None:
    # Get the width of each branch
    w1=getwidth(tree.fb)*100
    w2=getwidth(tree.tb)*100
 
    # Determine the total space required by this node
    left=x-(w1+w2)/2
    right=x+(w1+w2)/2
 
    # Draw the condition string
    draw.text((x-20,y-10),str(tree.col)+':'+str(tree.value),(0,0,0))
 
    # Draw links to the branches
    draw.line((x,y,left+w1/2,y+100),fill=(255,0,0))
    draw.line((x,y,right-w2/2,y+100),fill=(255,0,0))
   
    # Draw the branch nodes
    drawnode(draw,tree.fb,left+w1/2,y+100)
    drawnode(draw,tree.tb,right-w2/2,y+100)
  else:
    txt=' \n'.join(['%s:%d'%v for v in tree.results.items()])
    draw.text((x-20,y),txt,(0,0,0))
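
Rendering the tree to an image requires the Pillow package; the output filename below is just an example:

drawtree(tree,jpeg='decision_tree.jpg')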

Classifying test data (with handling for missing values)

def mdclassify(observation,tree):
  '''Classify a data item that may have missing values.

  args:
  observation -- the data item, possibly with missing (None) values
  tree -- the trained decision tree

  Returns the results dict representing the classification.
  '''

  # check whether we have reached a leaf node
  if tree.results!=None:
    # at a leaf node, return its results
    return tree.results
  else:
    # look at column col of the data item
    v=observation[tree.col]

    # the value in column col is missing
    if v==None:
      # classify with both subtrees: tr is the results dict from the true
      # branch, fr is the results dict from the false branch
      tr,fr=mdclassify(observation,tree.tb),mdclassify(observation,tree.fb)

      # weight each branch by the share of results it accounts for
      tcount=sum(tr.values())
      fcount=sum(fr.values())
      tw=float(tcount)/(tcount+fcount)
      fw=float(fcount)/(tcount+fcount)
      result={}

      # weighted combination of the two branches
      for k,v in tr.items():
        result[k]=v*tw
      for k,v in fr.items():
        # a result k in fr may not appear in tr; initialize it in result
        if k not in result:
          result[k]=0
        # accumulate fr's results into result
        result[k]+=v*fw
      return result

    # column col is not missing, keep following the decision tree
    else:
      if isinstance(v,int) or isinstance(v,float):
        if v>=tree.value: branch=tree.tb
        else: branch=tree.fb
      else:
        if v==tree.value: branch=tree.tb
        else: branch=tree.fb
      return mdclassify(observation,branch)
 
# train on the dataset and classify two observations with missing (None) values
tree=build_tree(my_data)
print(mdclassify(['google',None,'yes',None],tree))
print(mdclassify(['google','France',None,None],tree))

Decision tree pruning

def prune(tree,mingain):
  '''Prune the decision tree in place.

  args:
  tree -- the decision tree
  mingain -- minimum entropy reduction required to keep a split
  '''
  # recurse into branches that are not leaves
  if tree.tb.results==None:
    prune(tree.tb,mingain)
  if tree.fb.results==None:
    prune(tree.fb,mingain)
  # try to merge two leaf nodes
  if tree.tb.results!=None and tree.fb.results!=None:
    tb,fb=[],[]
    for v,c in tree.tb.results.items():
      tb+=[[v]]*c
    for v,c in tree.fb.results.items():
      fb+=[[v]]*c
    # entropy reduction gained by keeping the split
    delta=entropy(tb+fb)-(entropy(tb)+entropy(fb))/2
    # if the reduction is below mingain, merge the two branches
    if delta<mingain:
      tree.tb,tree.fb=None,None
      tree.results=uniquecounts(tb+fb)
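
A usage sketch; the mingain threshold of 0.1 is an arbitrary value chosen for illustration:

prune(tree,0.1)    # merge sibling leaves whose split gains less than 0.1
print_tree(tree)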
