Commit 1aa10dd

Author: h205c
Commit message: changes
Parent: ac0bfe6

1,482 files changed: +17209556 / -11805 lines changed


algorithms/AbstractAlgorithm.py

Lines changed: 5 additions & 6 deletions
@@ -1,13 +1,12 @@
 from abc import ABCMeta, abstractmethod
 import os, sys
-from preprocessing.prepare_german import prepare_german
-from preprocessing.prepare_adult import prepare_adult
-from preprocessing.prepare_retailer import prepare_retailer
-from preprocessing.prepare_small_retailer import prepare_small_retailer
-from preprocessing.prepare_ricci import prepare_ricci
+from preprocessing.deprecated.prepare_german import prepare_german
+from preprocessing.deprecated.prepare_adult import prepare_adult
+from preprocessing.deprecated.prepare_retailer import prepare_retailer
+from preprocessing.deprecated.prepare_ricci import prepare_ricci
 import numpy as np
 import pandas as pd
-from preprocessing.black_box_auditing import *
+from preprocessing.deprecated.black_box_auditing import *
 from data.german.load_german_data import *
 from data.adult.load_adult import *
 from data.ricci.load_data import *

algorithms/kamishima/KamishimaAlgorithm.py

Lines changed: 58 additions & 8 deletions
@@ -130,7 +130,7 @@ def CV_score(predicted, protected, total_sensitive, total_nonsensitive):
             else:
                 print("Invalid class value in y_control_test")

-        def getScore(fixed_y_test,y_classified_results, var=1): #var = 1-acc/DI, 2-acc, 3-DI
+        def getScore(fixed_y_test,y_classified_results, var=1): #var = |1-DI|/acc, 2-acc, 3-|1-DI|
             total_sensitive = 0
             total_nonsensitive = 0
             for x in self.x_control_test[self.sensitive_attr]:
@@ -142,17 +142,55 @@ def getScore(fixed_y_test,y_classified_results, var=1): #var = 1-acc/DI, 2-acc,
             if var == 2:
                 return accuracy
             elif var == 1:
-                return abs(1 - accuracy/DI_score(y_classified_results, self.x_control_test[self.sensitive_attr], total_sensitive, total_nonsensitive))
+                return abs(1 - DI_score(y_classified_results, self.x_control_test[self.sensitive_attr], total_sensitive, total_nonsensitive))/accuracy
             else:
-                return DI_score(y_classified_results, self.x_control_test[self.sensitive_attr], total_sensitive, total_nonsensitive)
+                return abs(1-DI_score(y_classified_results, self.x_control_test[self.sensitive_attr], total_sensitive, total_nonsensitive))

+        def binMinMax(first, last, var): #var = |1-DI|/acc, 2-acc, 3-|1-DI|
+            if first == last:
+                print(first)
+                y = train_classify(self.sensitive_attr, self.name, x_train_with_sensitive_feature, self.y_train, x_test_with_sensitive_feature, self.y_test, 1, first, self.x_control_test)
+                return y
+            elif (last-1) == first:
+                firstScore = getScore(fixed_y_test, train_classify(self.sensitive_attr, self.name, x_train_with_sensitive_feature, self.y_train, x_test_with_sensitive_feature, self.y_test, 1, first, self.x_control_test),var)
+                lastScore = getScore(fixed_y_test, train_classify(self.sensitive_attr, self.name, x_train_with_sensitive_feature, self.y_train, x_test_with_sensitive_feature, self.y_test, 1, last, self.x_control_test),var)
+                if firstScore <= lastScore:
+                    if var == 1 or var == 3:
+                        return binMinMax(first,first,var)
+                    else:
+                        return binMinMax(last,last,var)
+                else:
+                    if var == 1 or var == 3:
+                        return binMinMax(last,last,var)
+                    else:
+                        return binMinMax(first,first,var)
+            else:
+                mid = (first + last)//2
+                firstScore = getScore(fixed_y_test, train_classify(self.sensitive_attr, self.name, x_train_with_sensitive_feature, self.y_train, x_test_with_sensitive_feature, self.y_test, 1, first, self.x_control_test),var)
+                lastScore = getScore(fixed_y_test, train_classify(self.sensitive_attr, self.name, x_train_with_sensitive_feature, self.y_train, x_test_with_sensitive_feature, self.y_test, 1, last, self.x_control_test),var)
+                midY = y_classified_results = train_classify(self.sensitive_attr, self.name, x_train_with_sensitive_feature, self.y_train, x_test_with_sensitive_feature, self.y_test, 1, mid, self.x_control_test)
+                midScore = getScore(fixed_y_test, midY, var)
+
+                if midScore <= lastScore:
+                    if var == 1 or var == 3:
+                        return binMinMax(first,mid,var)
+                    else:
+                        return binMinMax(mid,last,var)
+                else:
+                    if var == 1 or var == 3:
+                        return binMinMax(mid,last,var)
+                    else:
+                        return binMinMax(first,mid,var)
+
+        '''
         minDI = (1000,[])
         maxacc = (1000,[])
         maxDI = (1000,[])
         if self.data=="ricci":
             minDI = (50,[])
             maxacc = (50,[])
             maxDI = (50,[])
+        '''

         # PRINT ETAS TO SHOW PLATEAU
         if self.data == 'ricci':
@@ -169,16 +207,18 @@ def printScores():
                 scoresBoth.append(getScore(fixed_y_test,y_classified_results))
                 scoresAcc.append(getScore(fixed_y_test,y_classified_results, 2))
                 scoresDI.append(getScore(fixed_y_test,y_classified_results,3))
-            with open("etas/" + self.data + "-etas.csv",'w') as f:
-                f.write('Acc/DI, Acc, DI' + '\n')
+            with open("etas/" + self.data + "-etas-" + self.params['num'] + ".csv",'w') as f:
+                f.write('|1-DI|/Acc, Acc, |1-DI|' + '\n')
                 for i in range(len(scoresBoth)):
                     f.write(str(scoresBoth[i]) + ',' + str(scoresAcc[i]) + ',' + str(scoresDI[i]) +'\n')
                 f.close()
                 print("SCORES WRITTEN.")
         #############################

+        '''
         def binMaxVar(first, last, minDI, maxacc, maxDI, var=1): #var = 1-acc/DI, 2-acc, 3-DI
             if first == last:
+                print(first)
                 if var == 1:
                     return minDI
                 elif var == 2:
@@ -232,16 +272,26 @@ def binMaxVar(first, last, minDI, maxacc, maxDI, var=1): #var = 1-acc/DI, 2-acc,
                     return binMaxVar(midpoint, last, minDI, maxacc, (secondMidScore, second_y_classified_results))
                 else:
                     return binMaxVar(first, midpoint, minDI, maxacc,maxDI)
-
-        printScores()
+
         if self.data == "ricci":
             final = binMaxVar(1,50, minDI, maxacc, maxDI, self.params['var'])
             #final = (0, train_classify(self.sensitive_attr, self.name, x_train_with_sensitive_feature, self.y_train, x_test_with_sensitive_feature, self.y_test, 1, 1, self.x_control_test))
         else:
             final = binMaxVar(1,1000, minDI, maxacc, maxDI, self.params['var'])
         predicted = final[1]
+        '''
+
+        if self.params['print'] == 1:
+            printScores()
+
+        if self.data == "ricci":
+            final = binMinMax(1,50,self.params['var'])
+        else:
+            final = binMinMax(1,1000,self.params['var'])
+
+        # predicted = train_classify(self.sensitive_attr, self.name, x_train_with_sensitive_feature, self.y_train, x_test_with_sensitive_feature, self.y_test, 1, 100, self.x_control_test)

-        kam_actual, kam_predicted, kam_protected = fixed_y_test, predicted, self.x_control_test[self.sensitive_attr]
+        kam_actual, kam_predicted, kam_protected = fixed_y_test, final, self.x_control_test[self.sensitive_attr]
         kam_time = datetime.now() - startTime

         return kam_actual, kam_predicted, kam_protected, kam_time
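
Note on the change above: the commit retires the old binMaxVar search (now commented out) in favor of binMinMax, a recursive binary search over Kamishima's integer eta parameter. Each probe retrains the classifier at a candidate eta and the search keeps the predictions from the eta that minimizes |1-DI|/accuracy (var=1) or |1-DI| (var=3), or maximizes accuracy (var=2). The following is a minimal, self-contained sketch of that strategy, not the repository's code: di_score, score, and train_and_predict are hypothetical stand-ins for DI_score, getScore, and train_classify, which in the real file are nested closures over self.* state, and the exact disparate-impact definition used here is an assumption.

# Sketch of the eta search introduced by this commit (hypothetical stand-ins, not the repo's API).

def di_score(predicted, protected):
    # Disparate impact, assumed as P(pred=1 | protected=1) / P(pred=1 | protected=0);
    # assumes both groups are non-empty and the unprotected group has at least one positive.
    pos_prot = sum(1 for p, a in zip(predicted, protected) if a == 1 and p == 1)
    pos_unprot = sum(1 for p, a in zip(predicted, protected) if a == 0 and p == 1)
    n_prot = sum(1 for a in protected if a == 1)
    n_unprot = len(protected) - n_prot
    return (pos_prot / n_prot) / (pos_unprot / n_unprot)

def score(y_true, y_pred, protected, var=1):
    # var=1: |1-DI|/acc (lower is better), var=2: accuracy (higher is better), var=3: |1-DI| (lower is better).
    acc = sum(1 for t, p in zip(y_true, y_pred) if t == p) / len(y_true)
    if var == 2:
        return acc
    gap = abs(1 - di_score(y_pred, protected))
    return gap / acc if var == 1 else gap

def bin_min_max(first, last, var, train_and_predict, y_true, protected):
    # Binary search over integer eta in [first, last]; train_and_predict(eta) retrains
    # the classifier and returns its test predictions. Assumes the chosen score is
    # roughly unimodal in eta. Minimizes for var 1 and 3, maximizes for var 2.
    minimize = var in (1, 3)
    if first == last:
        return train_and_predict(first)
    if last - first == 1:
        s_first = score(y_true, train_and_predict(first), protected, var)
        s_last = score(y_true, train_and_predict(last), protected, var)
        best = first if (s_first <= s_last) == minimize else last
        return train_and_predict(best)
    mid = (first + last) // 2
    s_mid = score(y_true, train_and_predict(mid), protected, var)
    s_last = score(y_true, train_and_predict(last), protected, var)
    if (s_mid <= s_last) == minimize:
        return bin_min_max(first, mid, var, train_and_predict, y_true, protected)
    return bin_min_max(mid, last, var, train_and_predict, y_true, protected)

The binary search is only sound if the score behaves roughly unimodally (a plateau) in eta, which is what the printScores branch probes by writing per-eta scores to etas/<data>-etas-<num>.csv when self.params['print'] == 1.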

compare.txt

Lines changed: 2 additions & 0 deletions
@@ -0,0 +1,2 @@
0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 0.0, END OF Y_TRAIN
0, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 0, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, 1, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 0, 1, 0, 1, 0, 0, 1, 0, 1, 0, END OF CONTROL
