Donlapark committed on
Commit
7eded29
1 Parent(s): 74fdcd6

Upload 5 files

Files changed (5)
  1. NCD_synthetic.csv +501 -0
  2. README.md +44 -5
  3. app.py +614 -0
  4. configs.json +37 -0
  5. requirements.txt +1 -0
NCD_synthetic.csv ADDED
@@ -0,0 +1,501 @@
1
+ Id,AGE,DM_key,IFG_key,DM_ICD10,DM_drugs,Glucose,HbA1c,eGFR,HTN_key,cvd_key,HTN_ICD10,HTN_drugs,sbp1,dbp1,CKD_key,CKD_ICD10,CKD_drugs,DLP_key,DLP_ICD10,DLP_drugs,LDL-c,DM_label,HTN_label,CKD_label,DLP_label
2
+ 0,57,0,0,0,1,265,13,40,0,0,0,0,150,50,0,0,0,1,0,1,42,0,0,0,1
3
+ 1,35,1,0,1,1,210,7,52,0,0,0,0,100,80,0,0,0,0,0,0,70,1,0,0,0
4
+ 2,55,0,0,0,1,276,15,18,0,1,0,0,140,40,0,0,0,0,1,0,126,0,0,0,0
5
+ 3,74,0,1,0,1,14,16,86,1,1,1,0,120,70,0,0,1,0,0,0,162,0,1,0,0
6
+ 4,16,1,1,1,1,141,11,53,0,1,0,0,130,80,0,0,1,0,0,0,38,1,0,0,0
7
+ 5,17,0,0,0,1,216,8,18,0,0,0,0,100,70,0,0,0,0,0,0,13,0,0,0,0
8
+ 6,69,0,1,0,0,75,7,101,0,1,0,0,150,90,0,0,1,0,0,0,31,0,0,0,0
9
+ 7,40,0,0,0,1,79,10,83,0,0,0,0,150,60,0,0,1,0,0,0,171,0,0,0,0
10
+ 8,68,0,1,0,1,230,12,9,0,0,0,0,140,60,0,0,0,0,0,0,89,0,0,0,0
11
+ 9,80,0,1,0,0,104,14,46,0,1,0,1,140,70,0,0,0,0,0,0,94,0,0,0,0
12
+ 10,86,0,0,0,0,273,2,89,0,1,0,0,130,80,0,0,0,0,0,1,49,0,0,0,1
13
+ 11,11,0,0,0,0,6,2,20,0,1,0,0,80,70,0,1,1,0,0,1,175,0,0,0,1
14
+ 12,41,0,1,0,0,99,6,117,1,0,1,1,170,90,0,0,0,0,0,0,140,0,1,0,0
15
+ 13,21,0,0,0,1,77,7,23,0,1,1,0,130,90,0,0,0,0,0,0,118,0,1,0,0
16
+ 14,87,0,1,1,1,231,3,40,0,1,0,0,100,90,0,0,0,0,0,0,60,0,0,0,0
17
+ 15,41,0,1,0,0,56,0,49,0,1,0,0,150,80,0,0,0,0,0,1,44,0,0,0,0
18
+ 16,87,1,0,1,0,294,4,36,0,0,0,0,90,70,0,0,0,0,0,0,131,1,0,0,0
19
+ 17,68,0,0,0,0,281,14,48,0,0,0,0,130,60,1,0,1,0,0,0,157,0,0,1,0
20
+ 18,30,0,1,1,1,257,11,45,0,0,0,0,130,60,1,0,1,0,0,0,30,0,0,1,0
21
+ 19,23,0,0,0,0,68,4,100,0,0,1,0,120,70,0,0,0,0,0,0,20,0,1,0,0
22
+ 20,68,0,0,0,0,170,17,66,0,1,0,0,100,90,0,0,0,0,0,0,0,0,0,0,0
23
+ 21,34,0,1,1,0,149,17,35,0,1,0,0,140,60,0,0,0,0,0,0,123,0,0,0,0
24
+ 22,40,0,0,0,0,298,12,102,0,0,0,0,120,70,1,0,1,0,0,0,7,0,0,1,0
25
+ 23,48,0,0,1,0,20,10,8,0,1,0,0,150,40,0,0,1,0,0,0,54,0,0,0,0
26
+ 24,48,1,1,1,0,292,8,32,0,1,0,0,80,80,0,1,1,0,0,0,25,1,0,0,0
27
+ 25,83,0,1,0,0,206,5,34,0,1,0,0,130,90,0,0,0,0,0,0,183,0,0,0,0
28
+ 26,24,0,1,0,1,141,8,60,0,1,0,0,100,60,0,0,0,0,1,0,125,0,0,0,0
29
+ 27,12,1,0,1,0,26,1,19,0,1,0,0,150,80,0,0,0,0,1,1,50,1,0,0,0
30
+ 28,60,0,0,0,0,105,11,61,0,0,0,0,100,80,0,0,0,0,0,0,126,0,0,0,0
31
+ 29,28,0,1,0,0,318,13,88,0,0,0,0,80,60,0,0,0,0,0,0,8,0,0,0,0
32
+ 30,17,0,0,0,0,214,8,10,0,0,0,0,140,70,0,0,0,0,0,0,35,0,0,0,0
33
+ 31,85,0,1,0,0,219,10,21,0,1,0,0,120,70,0,0,0,0,0,0,133,0,0,0,0
34
+ 32,78,0,0,0,0,118,11,110,0,1,0,0,100,70,0,0,1,0,0,1,81,0,0,0,0
35
+ 33,50,0,1,0,0,71,13,12,0,1,0,0,80,90,0,0,0,0,0,0,64,0,0,0,0
36
+ 34,33,0,1,1,0,299,15,114,1,1,1,1,100,90,0,0,0,1,0,1,61,0,1,0,1
37
+ 35,59,1,0,1,0,17,15,121,0,0,0,0,110,80,0,0,0,1,0,1,15,1,0,0,1
38
+ 36,16,0,1,0,0,205,15,98,0,1,0,1,120,90,0,1,1,0,0,0,26,0,0,0,0
39
+ 37,4,0,1,0,0,57,7,64,0,1,0,1,90,90,0,0,0,1,0,1,63,0,0,0,1
40
+ 38,14,0,0,0,1,51,5,126,0,0,0,0,130,90,0,0,0,0,0,0,111,0,0,0,0
41
+ 39,18,0,0,0,0,140,4,22,0,0,0,0,100,90,0,0,0,0,0,0,101,0,0,0,0
42
+ 40,39,0,1,0,1,125,0,32,0,0,0,0,110,80,0,0,1,0,0,0,116,0,0,0,0
43
+ 41,64,0,0,0,0,92,15,114,0,1,0,0,140,90,0,0,0,0,0,0,24,0,0,0,0
44
+ 42,86,0,1,0,0,189,14,48,0,0,0,0,120,60,0,0,0,1,0,1,120,0,0,0,1
45
+ 43,5,0,0,0,0,241,2,42,0,1,0,0,130,70,0,0,0,0,0,0,108,0,0,0,0
46
+ 44,68,0,1,1,0,281,6,105,1,0,1,0,120,90,0,1,0,0,0,0,150,0,1,0,0
47
+ 45,50,0,1,0,0,295,4,34,0,1,0,0,120,80,0,0,0,0,0,1,130,0,0,0,0
48
+ 46,71,0,0,1,0,85,8,78,0,0,0,0,140,80,1,1,1,0,0,0,28,0,0,1,0
49
+ 47,67,0,0,0,0,250,15,87,0,1,1,0,130,70,1,0,1,0,0,0,85,0,1,1,0
50
+ 48,3,0,1,0,0,22,7,38,0,0,0,0,110,80,0,0,0,0,0,1,77,0,0,0,0
51
+ 49,71,0,1,0,1,214,3,26,0,0,0,0,140,60,1,1,1,0,0,0,32,0,0,1,0
52
+ 50,73,0,0,0,0,135,0,108,0,0,0,0,100,80,0,0,0,0,0,0,79,0,0,0,0
53
+ 51,71,0,0,0,0,168,8,84,0,0,0,0,110,90,0,0,0,0,0,1,124,0,0,0,1
54
+ 52,28,0,1,0,0,150,17,116,0,0,0,0,80,70,0,0,0,0,0,0,140,0,0,0,0
55
+ 53,30,0,1,0,0,261,10,30,0,0,1,1,130,90,0,0,0,0,0,1,132,0,1,0,0
56
+ 54,50,0,0,0,0,253,10,57,1,1,0,0,130,90,0,0,0,0,0,0,11,0,1,0,0
57
+ 55,29,0,1,0,0,84,7,125,0,1,0,0,80,90,0,0,1,0,0,0,23,0,0,0,0
58
+ 56,15,0,1,0,1,52,2,113,0,0,0,0,150,60,0,0,0,0,0,0,58,0,0,0,0
59
+ 57,5,0,0,0,0,247,16,129,0,1,0,1,150,60,0,0,0,0,0,0,45,0,0,0,0
60
+ 58,56,0,0,0,0,206,16,107,0,0,0,0,100,70,0,0,0,0,0,0,77,0,0,0,0
61
+ 59,22,0,0,0,0,220,0,20,0,1,0,0,130,90,0,0,0,0,0,0,161,0,0,0,0
62
+ 60,43,0,1,0,1,164,16,85,0,1,0,1,100,90,0,0,0,1,0,1,172,0,1,0,1
63
+ 61,2,0,1,0,0,284,2,105,0,0,0,0,130,60,1,1,1,0,0,0,61,0,0,1,0
64
+ 62,38,0,0,0,0,289,8,35,0,0,0,0,110,50,0,0,0,0,0,0,166,0,0,0,0
65
+ 63,11,0,0,0,1,0,16,92,0,1,0,0,120,70,0,0,0,0,0,0,154,0,0,0,0
66
+ 64,31,0,0,0,0,268,12,0,1,0,1,0,120,70,0,1,0,0,0,0,180,0,1,0,0
67
+ 65,6,0,1,0,0,201,11,65,0,0,0,0,120,80,0,0,1,1,0,1,88,0,0,0,1
68
+ 66,66,0,1,0,0,126,8,8,0,1,0,0,120,80,1,1,1,0,0,0,85,0,0,1,0
69
+ 67,84,1,0,1,0,2,10,70,0,0,0,1,100,90,0,0,0,1,0,1,181,1,0,0,1
70
+ 68,15,0,1,0,1,162,2,115,0,1,0,0,120,60,0,0,0,0,0,0,75,0,0,0,0
71
+ 69,10,0,1,0,1,182,6,52,0,0,0,0,140,90,0,0,0,0,0,0,93,0,0,0,0
72
+ 70,55,0,0,0,0,111,13,110,0,0,0,0,130,90,0,0,0,0,0,0,109,0,0,0,0
73
+ 71,69,0,1,0,1,288,11,24,0,0,0,0,140,80,0,0,1,0,0,1,116,0,0,0,1
74
+ 72,33,0,1,0,0,207,13,82,0,1,0,0,100,80,0,0,1,0,0,0,12,0,0,0,0
75
+ 73,17,1,1,1,1,60,12,100,0,0,0,0,130,70,0,0,1,0,0,0,33,1,1,0,0
76
+ 74,82,0,0,0,0,113,2,111,0,1,0,0,130,70,0,0,0,0,0,0,90,0,0,0,0
77
+ 75,50,0,1,0,0,61,8,54,0,0,0,0,110,70,0,1,0,0,0,0,64,0,0,0,0
78
+ 76,44,0,0,0,0,134,14,41,0,1,0,0,110,80,0,0,1,0,0,1,142,0,0,0,0
79
+ 77,56,0,0,0,0,61,0,21,0,1,0,1,100,40,0,0,0,0,0,0,169,0,0,0,0
80
+ 78,15,1,1,1,1,277,10,76,0,0,0,1,130,90,0,0,0,0,0,0,100,1,1,0,0
81
+ 79,13,0,1,0,0,114,7,33,0,0,0,0,130,70,0,0,1,0,0,0,30,0,0,0,0
82
+ 80,10,0,1,0,0,205,9,5,0,0,0,0,140,60,0,0,0,0,0,1,189,0,0,0,0
83
+ 81,10,0,1,0,0,274,17,26,0,1,0,0,100,60,0,0,0,1,0,1,62,0,0,0,1
84
+ 82,22,0,0,0,0,153,4,12,0,1,0,0,110,60,0,0,0,0,0,1,72,0,0,0,0
85
+ 83,27,0,1,0,0,208,11,19,0,0,0,0,80,80,0,0,0,0,0,0,136,0,0,0,0
86
+ 84,35,0,0,0,0,23,9,86,0,0,0,0,120,80,0,0,1,0,0,0,56,0,0,0,0
87
+ 85,33,0,0,1,0,212,14,57,0,1,1,0,130,90,0,0,0,0,0,0,120,0,1,0,0
88
+ 86,20,0,0,0,0,142,1,111,0,0,0,0,120,80,0,1,0,1,0,1,124,0,0,0,1
89
+ 87,7,0,0,0,0,275,6,72,0,1,0,1,140,90,0,0,0,0,0,0,171,0,0,0,0
90
+ 88,15,0,0,0,0,26,11,108,0,1,0,0,110,80,0,1,1,0,0,0,153,0,0,0,0
91
+ 89,86,0,1,0,0,192,6,6,0,0,0,0,130,60,0,0,0,0,0,0,116,0,0,0,0
92
+ 90,40,0,0,0,0,200,15,94,0,1,0,1,110,80,0,0,1,0,0,0,165,0,0,0,0
93
+ 91,25,0,0,0,0,139,11,50,1,1,0,0,130,90,0,0,0,0,0,0,179,0,1,0,0
94
+ 92,62,0,0,0,0,312,7,43,0,0,0,1,130,90,0,0,1,0,0,0,5,0,1,0,0
95
+ 93,12,0,1,0,0,10,13,14,0,0,0,0,140,50,0,0,1,0,0,0,6,0,0,0,0
96
+ 94,25,0,1,0,1,187,14,70,0,0,0,0,100,70,0,0,1,0,0,0,144,0,1,0,0
97
+ 95,53,0,1,0,0,85,6,84,0,0,0,0,130,70,0,0,0,0,0,0,169,0,0,0,0
98
+ 96,70,0,0,0,0,121,12,13,0,1,0,0,140,70,0,0,1,0,0,0,23,0,0,0,0
99
+ 97,54,0,1,0,0,197,1,24,0,0,0,0,110,40,0,0,0,1,0,1,107,0,0,0,1
100
+ 98,58,0,0,0,0,197,16,32,0,0,0,0,150,90,0,0,0,0,0,0,63,0,0,0,0
101
+ 99,31,0,0,0,0,167,14,111,0,0,0,0,110,50,0,0,0,0,0,0,55,0,0,0,0
102
+ 100,65,0,1,0,0,204,12,70,0,1,0,0,130,90,0,0,1,0,0,1,75,,,,
103
+ 101,46,0,0,0,1,41,17,121,0,0,0,0,150,60,0,0,0,0,0,0,120,,,,
104
+ 102,75,0,0,0,0,191,8,121,1,0,0,0,120,90,0,0,0,1,0,1,119,,,,
105
+ 103,27,0,0,0,0,52,13,31,0,1,0,0,130,60,0,0,1,0,0,1,128,,,,
106
+ 104,12,1,1,1,1,158,3,65,0,1,0,0,110,60,0,1,0,0,0,0,29,,,,
107
+ 105,33,0,0,0,1,136,10,81,1,1,1,0,120,90,0,0,0,0,0,0,122,,,,
108
+ 106,24,0,1,0,1,209,14,106,0,0,0,0,130,80,0,0,0,0,0,0,109,,,,
109
+ 107,13,0,0,0,0,70,4,63,0,0,0,0,110,90,0,0,0,0,0,1,187,,,,
110
+ 108,64,0,0,0,0,81,1,109,0,0,0,0,80,50,0,1,0,0,0,0,37,,,,
111
+ 109,5,0,0,0,0,257,7,110,0,0,0,0,120,80,0,1,0,0,0,1,188,,,,
112
+ 110,60,0,0,0,0,263,9,29,0,1,0,0,140,80,0,0,0,0,0,0,137,,,,
113
+ 111,36,0,1,0,0,82,3,121,0,1,0,0,120,50,0,0,0,0,0,0,41,,,,
114
+ 112,49,0,0,0,1,65,8,124,0,1,0,0,100,40,1,1,1,0,0,0,156,,,,
115
+ 113,77,0,1,0,0,135,1,24,0,1,0,0,110,80,0,0,1,0,0,0,15,,,,
116
+ 114,58,0,0,0,0,217,2,33,0,0,0,0,110,90,0,0,0,0,0,1,74,,,,
117
+ 115,36,0,0,0,0,86,1,120,0,1,0,0,120,60,0,0,0,0,1,0,85,,,,
118
+ 116,4,0,1,0,0,95,6,96,0,0,0,0,110,60,0,0,0,0,0,0,173,,,,
119
+ 117,60,0,0,0,0,305,15,122,0,0,0,0,90,80,0,0,0,0,0,1,71,,,,
120
+ 118,36,0,0,0,0,307,6,79,0,1,0,0,100,90,0,0,0,0,0,0,37,,,,
121
+ 119,74,0,0,1,1,66,15,92,0,1,0,0,100,80,0,0,0,0,0,0,11,,,,
122
+ 120,23,1,0,1,1,269,17,64,0,1,0,0,120,70,0,0,1,0,0,0,75,,,,
123
+ 121,44,0,0,0,0,45,17,113,0,0,0,1,110,80,0,0,0,0,0,1,14,,,,
124
+ 122,81,0,0,1,0,97,11,47,1,0,1,1,120,70,0,0,0,0,0,0,167,,,,
125
+ 123,37,0,1,0,1,53,2,103,0,1,0,0,150,80,0,0,1,0,0,0,55,,,,
126
+ 124,5,0,0,0,0,133,13,73,0,1,0,0,130,80,0,0,0,0,0,0,98,,,,
127
+ 125,48,0,1,0,0,231,12,123,0,0,0,0,100,70,0,0,0,0,0,0,139,,,,
128
+ 126,31,0,0,0,0,267,9,83,0,1,0,0,120,90,0,0,1,0,0,0,135,,,,
129
+ 127,25,0,1,0,1,46,17,5,0,1,0,0,150,60,1,1,1,0,0,0,163,,,,
130
+ 128,17,1,1,1,1,223,5,32,1,0,0,1,120,90,0,0,1,0,0,0,36,,,,
131
+ 129,49,0,1,0,0,226,9,51,0,0,0,0,150,90,0,0,1,0,0,0,162,,,,
132
+ 130,70,1,1,1,1,315,9,2,0,0,0,0,140,70,0,0,0,0,0,1,21,,,,
133
+ 131,1,0,1,0,0,32,16,59,0,1,0,0,120,60,0,0,1,0,0,0,111,,,,
134
+ 132,59,0,0,0,1,251,5,116,0,0,0,0,100,70,0,0,1,0,0,0,96,,,,
135
+ 133,82,1,1,1,0,42,2,85,0,0,0,0,120,70,0,0,0,0,0,1,5,,,,
136
+ 134,55,0,0,0,0,211,14,119,0,0,0,1,120,90,0,0,0,0,0,0,80,,,,
137
+ 135,46,0,1,1,0,171,5,5,0,0,0,1,140,80,0,0,0,1,0,1,47,,,,
138
+ 136,66,0,0,0,0,291,1,96,0,1,0,0,100,40,0,0,0,0,0,0,104,,,,
139
+ 137,65,0,1,0,0,168,14,65,0,1,0,0,150,70,0,0,0,0,0,0,192,,,,
140
+ 138,38,0,1,0,0,202,10,71,0,1,0,0,120,70,0,0,0,0,1,0,84,,,,
141
+ 139,58,0,0,0,1,296,4,57,0,0,0,0,110,90,0,0,0,0,0,0,0,,,,
142
+ 140,37,0,0,0,0,78,1,52,0,1,0,0,80,90,0,0,0,0,0,0,108,,,,
143
+ 141,20,0,1,0,0,18,8,75,0,1,0,0,100,80,0,0,0,0,0,0,78,,,,
144
+ 142,42,0,1,0,1,31,4,86,0,0,0,0,80,80,0,0,0,0,0,0,80,,,,
145
+ 143,77,0,0,0,0,112,13,117,0,0,0,0,80,80,0,0,0,0,0,0,45,,,,
146
+ 144,16,0,0,0,0,139,8,50,0,1,0,0,150,90,0,0,0,0,0,0,51,,,,
147
+ 145,34,0,0,0,0,1,17,54,0,0,0,0,140,90,0,0,0,0,0,1,19,,,,
148
+ 146,22,1,0,1,1,112,3,94,1,1,0,0,130,70,0,0,0,0,0,0,51,,,,
149
+ 147,6,0,1,0,0,270,17,68,1,1,1,0,120,90,0,0,0,0,0,0,172,,,,
150
+ 148,34,0,1,0,0,292,12,15,0,1,0,0,100,60,0,0,1,0,0,0,49,,,,
151
+ 149,49,0,1,0,0,154,13,123,1,0,0,0,130,90,0,0,0,0,0,1,96,,,,
152
+ 150,28,0,1,0,1,169,0,103,1,1,1,1,120,70,0,0,0,0,1,0,9,,,,
153
+ 151,47,0,1,0,0,72,6,2,0,0,0,0,150,80,0,0,0,0,0,0,73,,,,
154
+ 152,21,0,1,0,0,35,5,15,0,0,0,0,140,70,0,0,0,1,0,1,190,,,,
155
+ 153,52,1,1,1,1,100,8,95,0,0,0,0,150,80,0,0,0,0,0,0,149,,,,
156
+ 154,1,0,0,0,0,270,0,3,0,1,0,0,100,50,0,0,0,0,0,0,16,,,,
157
+ 155,32,0,0,0,0,196,6,46,0,1,0,0,120,60,0,0,1,0,0,1,134,,,,
158
+ 156,33,0,0,0,0,43,13,38,0,0,0,1,130,80,0,0,0,0,0,0,20,,,,
159
+ 157,6,0,0,0,0,200,16,49,0,1,0,0,120,80,0,0,0,0,1,0,114,,,,
160
+ 158,67,0,0,0,0,310,17,58,0,0,0,0,130,60,0,1,1,0,1,0,167,,,,
161
+ 159,79,0,0,0,0,166,14,125,0,1,0,0,100,70,0,0,0,0,0,0,113,,,,
162
+ 160,20,0,1,0,0,173,10,101,0,0,0,0,130,60,0,0,0,0,0,0,2,,,,
163
+ 161,21,0,1,0,1,249,4,123,0,1,0,0,130,80,0,0,0,0,0,0,62,,,,
164
+ 162,65,0,0,0,0,130,5,41,0,0,0,0,140,50,0,1,0,0,0,0,175,,,,
165
+ 163,30,1,1,1,0,240,3,63,1,0,0,1,120,90,0,0,1,0,1,0,42,,,,
166
+ 164,45,0,1,0,0,199,2,42,0,1,0,0,150,90,0,1,0,0,0,0,34,,,,
167
+ 165,14,0,0,0,0,313,7,39,0,1,0,0,100,70,0,0,1,0,0,0,57,,,,
168
+ 166,80,0,0,0,0,178,10,60,0,0,0,0,130,70,0,0,0,0,0,0,94,,,,
169
+ 167,28,0,0,0,1,42,12,51,0,1,0,0,120,80,0,0,0,0,0,0,161,,,,
170
+ 168,54,0,0,1,1,156,8,17,0,1,0,0,150,80,0,0,0,0,0,0,40,,,,
171
+ 169,56,0,0,0,0,122,13,119,0,0,0,0,150,40,1,0,1,0,0,0,47,,,,
172
+ 170,64,0,1,0,0,8,11,21,0,1,0,1,120,80,0,0,0,0,0,0,156,,,,
173
+ 171,47,0,1,0,0,299,12,92,0,1,0,0,120,90,0,0,0,0,0,0,170,,,,
174
+ 172,3,1,1,1,1,308,0,40,0,1,0,0,120,40,0,0,0,0,0,1,143,,,,
175
+ 173,43,0,0,0,0,93,7,99,0,1,0,0,130,70,0,0,0,0,0,0,95,,,,
176
+ 174,37,0,1,0,0,19,9,3,0,0,0,0,80,40,0,0,0,1,0,1,117,,,,
177
+ 175,43,0,0,0,1,115,5,16,0,0,0,0,140,90,0,0,0,0,0,1,159,,,,
178
+ 176,34,0,1,0,0,318,15,17,0,1,1,0,120,90,0,0,0,0,0,1,93,,,,
179
+ 177,59,0,1,0,0,225,9,62,0,1,0,0,150,80,0,0,1,0,0,0,92,,,,
180
+ 178,31,0,0,0,0,317,9,25,0,0,0,0,120,80,0,0,0,0,0,0,62,,,,
181
+ 179,45,0,1,0,0,66,6,55,0,0,0,0,100,60,0,1,1,0,0,1,17,,,,
182
+ 180,7,0,0,0,0,58,11,4,1,1,1,1,100,90,0,0,0,0,0,0,112,,,,
183
+ 181,10,0,1,1,0,236,7,52,0,1,0,0,120,50,0,0,0,0,0,0,97,,,,
184
+ 182,23,0,1,0,0,176,9,87,0,1,0,0,120,70,0,0,0,0,0,0,180,,,,
185
+ 183,42,0,1,0,0,74,5,102,1,1,0,1,120,70,0,0,0,0,0,0,148,,,,
186
+ 184,87,0,0,0,0,45,3,112,0,0,0,0,120,40,0,0,0,0,0,0,87,,,,
187
+ 185,67,0,0,0,0,48,2,77,0,0,0,0,140,60,0,0,0,0,0,0,164,,,,
188
+ 186,56,0,1,0,0,44,2,92,1,1,0,1,120,70,1,0,1,0,0,0,42,,,,
189
+ 187,12,0,1,0,0,227,13,88,0,1,0,0,130,50,0,0,1,0,0,1,68,,,,
190
+ 188,74,0,0,0,0,101,4,74,0,0,0,0,80,80,0,0,0,0,0,0,128,,,,
191
+ 189,50,0,1,0,0,69,4,56,0,1,0,0,130,80,0,0,1,0,0,0,178,,,,
192
+ 190,7,0,0,0,0,244,13,74,0,1,0,0,110,80,0,0,0,0,0,0,46,,,,
193
+ 191,66,1,1,1,1,98,15,45,0,1,0,0,130,40,0,0,1,0,0,0,55,,,,
194
+ 192,17,0,1,0,0,264,7,85,0,0,0,0,110,90,1,0,1,0,1,0,187,,,,
195
+ 193,13,0,1,0,0,242,11,83,0,1,0,0,120,40,0,0,1,0,0,0,63,,,,
196
+ 194,40,0,0,0,1,266,15,25,0,0,0,0,130,40,0,0,0,0,0,0,43,,,,
197
+ 195,3,0,1,0,1,133,15,15,0,1,0,0,120,80,0,0,0,1,0,1,182,,,,
198
+ 196,52,0,0,0,0,140,0,6,0,1,0,0,100,60,0,0,0,0,0,0,12,,,,
199
+ 197,4,0,1,0,0,304,15,71,0,0,0,0,150,80,0,0,0,0,0,0,58,,,,
200
+ 198,48,0,0,0,0,314,0,78,0,0,0,0,120,50,0,0,0,0,0,0,10,,,,
201
+ 199,23,1,1,1,1,284,12,88,0,1,0,0,150,70,0,1,1,0,0,0,39,,,,
202
+ 200,26,0,1,0,0,145,9,7,1,0,1,0,100,70,0,0,1,0,0,1,176,,,,
203
+ 201,41,0,0,0,1,137,5,28,1,0,0,1,130,90,0,0,0,0,0,1,82,,,,
204
+ 202,11,0,1,0,0,40,2,33,0,1,0,0,110,80,0,0,0,0,0,0,123,,,,
205
+ 203,72,0,0,0,0,254,3,112,0,0,0,0,130,90,0,0,0,0,0,0,184,,,,
206
+ 204,70,0,0,0,0,12,10,54,1,0,0,0,120,90,0,0,0,0,0,0,178,,,,
207
+ 205,59,0,0,1,0,278,4,101,0,1,0,0,140,80,1,0,1,1,0,1,68,,,,
208
+ 206,36,0,0,0,0,179,15,105,0,1,0,0,120,90,0,0,0,0,0,0,3,,,,
209
+ 207,12,0,1,0,0,238,8,63,1,1,1,1,130,90,0,0,0,0,0,0,71,,,,
210
+ 208,83,0,0,0,0,234,1,53,1,1,0,1,130,90,0,0,0,0,0,0,99,,,,
211
+ 209,63,0,0,0,0,227,4,106,0,1,0,0,140,70,0,0,1,0,1,0,52,,,,
212
+ 210,6,0,0,0,0,291,1,73,0,0,0,1,110,60,0,0,0,0,0,1,131,,,,
213
+ 211,8,1,0,1,1,204,1,79,0,0,0,0,90,80,0,0,0,0,0,0,69,,,,
214
+ 212,87,0,0,0,1,11,12,115,0,1,0,0,120,60,0,0,0,0,1,1,152,,,,
215
+ 213,57,0,0,0,0,130,11,22,0,1,0,0,130,80,0,0,1,0,0,0,47,,,,
216
+ 214,9,0,0,0,0,233,9,128,0,0,0,0,150,80,0,0,0,0,0,0,181,,,,
217
+ 215,44,0,1,0,0,193,14,69,0,1,0,0,100,90,0,0,0,0,0,0,190,,,,
218
+ 216,63,0,0,0,0,192,12,100,0,1,0,0,100,70,0,0,0,0,0,0,46,,,,
219
+ 217,24,0,1,0,0,110,0,79,0,0,0,0,110,90,0,0,0,1,0,1,49,,,,
220
+ 218,39,0,1,0,0,263,11,45,0,1,0,0,140,40,0,0,0,0,0,0,143,,,,
221
+ 219,74,0,0,0,0,315,16,119,0,1,0,1,100,60,0,0,1,0,1,0,102,,,,
222
+ 220,26,0,1,0,0,245,12,96,0,0,0,0,90,50,0,0,1,0,0,0,194,,,,
223
+ 221,15,0,1,0,0,283,3,72,0,1,0,0,80,70,0,0,1,0,0,0,78,,,,
224
+ 222,65,0,1,0,0,172,11,70,0,1,0,0,130,60,0,0,1,0,0,0,82,,,,
225
+ 223,11,0,0,0,1,184,17,67,0,1,0,0,150,80,0,1,1,0,0,0,132,,,,
226
+ 224,25,0,0,0,0,97,1,47,0,1,0,0,150,70,1,1,1,0,1,0,160,,,,
227
+ 225,78,0,1,0,0,317,9,102,0,0,0,0,140,90,0,0,0,0,0,0,177,,,,
228
+ 226,35,0,1,0,0,267,3,73,0,0,0,0,120,60,0,0,0,0,0,0,135,,,,
229
+ 227,61,0,1,0,0,93,12,54,0,0,0,0,130,70,0,0,0,0,0,0,8,,,,
230
+ 228,8,0,1,0,1,33,9,0,1,1,1,1,120,90,0,0,1,0,0,0,173,,,,
231
+ 229,54,0,1,0,0,136,9,127,0,0,0,0,150,80,0,0,0,0,0,1,162,,,,
232
+ 230,47,0,0,0,1,304,6,126,0,0,0,0,130,80,0,0,0,0,0,0,102,,,,
233
+ 231,29,0,1,0,0,175,16,76,0,0,0,0,100,80,0,0,0,0,0,0,7,,,,
234
+ 232,15,0,0,0,0,132,17,0,0,1,0,0,150,70,1,1,1,0,1,0,6,,,,
235
+ 233,67,0,1,0,1,132,5,29,0,0,0,0,150,80,0,0,0,0,0,0,122,,,,
236
+ 234,75,0,0,0,0,148,15,81,1,1,0,1,130,90,0,0,1,0,0,0,87,,,,
237
+ 235,52,0,0,0,0,119,11,109,0,1,0,0,130,80,0,0,0,0,0,0,91,,,,
238
+ 236,65,0,0,0,0,180,2,14,0,0,0,0,100,90,0,0,0,0,0,0,137,,,,
239
+ 237,32,0,0,0,0,159,0,106,0,1,0,0,90,50,0,0,0,0,1,1,114,,,,
240
+ 238,26,0,0,0,0,43,2,22,0,1,0,0,150,40,0,0,1,0,0,0,117,,,,
241
+ 239,79,0,1,1,0,170,5,11,0,0,0,0,150,80,0,0,1,0,0,1,193,,,,
242
+ 240,39,0,1,0,0,89,17,116,0,0,0,0,120,40,0,0,0,0,1,1,48,,,,
243
+ 241,75,0,0,0,0,173,0,11,0,0,0,0,100,80,0,0,0,0,0,0,189,,,,
244
+ 242,1,0,0,0,0,111,3,42,0,1,0,0,100,70,0,0,0,0,0,1,141,,,,
245
+ 243,9,1,0,1,1,301,3,8,0,1,0,0,120,80,0,0,0,0,0,1,57,,,,
246
+ 244,19,1,1,1,1,282,14,38,0,1,0,0,110,90,0,0,0,0,0,0,188,,,,
247
+ 245,46,0,1,0,0,108,7,78,0,1,0,0,150,40,0,0,0,0,0,0,147,,,,
248
+ 246,69,0,1,0,0,31,17,61,0,0,0,0,140,50,0,0,0,0,0,0,112,,,,
249
+ 247,43,1,0,1,1,0,17,127,0,0,0,0,150,80,0,0,0,1,0,1,180,,,,
250
+ 248,24,0,1,0,1,152,12,98,0,1,0,0,120,50,0,0,1,0,0,0,163,,,,
251
+ 249,51,0,0,0,0,39,17,37,0,0,0,0,120,60,0,0,0,0,0,1,155,,,,
252
+ 250,38,0,0,0,0,94,1,104,0,1,0,1,150,90,0,0,0,0,0,0,176,,,,
253
+ 251,31,0,0,0,0,300,15,13,0,0,0,0,110,70,0,0,1,1,0,1,69,,,,
254
+ 252,20,0,1,0,0,24,14,90,0,0,0,1,150,70,0,0,0,0,0,0,1,,,,
255
+ 253,59,0,1,0,0,116,1,6,0,0,0,0,110,80,0,0,0,0,0,0,159,,,,
256
+ 254,32,0,1,0,0,183,11,117,0,1,0,0,140,40,0,0,0,0,0,0,60,,,,
257
+ 255,81,0,1,0,0,161,11,94,0,1,0,0,120,60,0,0,0,0,0,0,22,,,,
258
+ 256,7,0,0,0,1,44,13,25,0,1,0,1,80,90,0,0,0,0,0,1,36,,,,
259
+ 257,36,0,1,0,0,143,11,65,0,0,0,0,140,90,1,0,1,0,0,0,136,,,,
260
+ 258,72,0,1,0,1,280,9,25,0,0,0,0,120,90,0,0,1,0,0,0,170,,,,
261
+ 259,60,0,1,0,0,293,17,89,0,0,0,0,150,90,0,0,1,0,0,0,133,,,,
262
+ 260,77,0,1,0,0,232,1,60,0,0,0,0,110,80,0,0,1,0,0,0,193,,,,
263
+ 261,26,0,1,0,1,37,2,19,0,0,0,0,80,70,0,0,0,0,0,0,26,,,,
264
+ 262,3,0,0,0,0,228,6,82,0,0,0,0,120,60,0,0,0,0,0,0,124,,,,
265
+ 263,80,0,0,0,0,90,3,127,0,0,0,0,150,80,0,0,1,1,0,1,129,,,,
266
+ 264,5,0,1,0,0,75,10,105,1,1,1,0,130,70,0,0,0,0,0,0,113,,,,
267
+ 265,16,0,0,0,0,213,16,30,0,0,0,1,140,40,0,0,0,0,0,1,138,,,,
268
+ 266,46,0,0,0,0,102,5,43,0,1,0,0,80,90,0,0,0,0,1,0,3,,,,
269
+ 267,77,1,0,1,1,218,0,21,0,1,0,0,100,50,0,0,0,0,0,0,185,,,,
270
+ 268,57,0,0,0,0,17,3,51,0,0,0,0,100,60,0,0,0,0,0,0,194,,,,
271
+ 269,18,0,1,0,1,71,4,59,0,0,0,0,140,60,0,0,1,1,0,1,83,,,,
272
+ 270,4,1,1,1,0,55,0,126,0,0,0,1,120,80,0,0,0,0,0,1,106,,,,
273
+ 271,69,1,0,1,1,106,8,18,0,1,0,1,130,70,0,0,0,0,0,0,154,,,,
274
+ 272,42,0,0,0,0,245,12,89,0,0,0,0,140,80,1,1,1,0,0,0,13,,,,
275
+ 273,9,0,0,0,0,232,1,66,0,0,0,0,150,70,0,0,1,0,0,0,104,,,,
276
+ 274,73,0,0,0,0,91,5,87,0,1,0,0,140,70,0,0,0,0,0,0,65,,,,
277
+ 275,65,0,1,0,0,18,7,98,0,1,0,0,140,90,0,1,0,0,0,1,1,,,,
278
+ 276,85,0,1,0,0,190,1,11,0,1,0,0,150,90,0,0,0,0,0,1,27,,,,
279
+ 277,6,0,0,0,0,246,6,35,0,0,0,0,150,90,0,0,0,0,0,0,18,,,,
280
+ 278,77,1,0,1,0,184,6,75,0,1,1,1,120,90,0,0,0,0,0,0,152,,,,
281
+ 279,67,0,0,1,1,155,3,117,0,0,0,0,150,70,0,0,0,0,0,1,70,,,,
282
+ 280,76,1,0,1,0,282,7,37,0,0,0,0,140,80,0,0,0,0,0,0,4,,,,
283
+ 281,67,0,0,0,0,169,10,95,0,0,0,0,100,70,0,0,0,0,0,0,110,,,,
284
+ 282,40,0,1,0,0,37,12,57,0,0,0,0,140,90,0,0,0,0,0,0,191,,,,
285
+ 283,38,0,1,0,0,96,5,84,1,1,0,0,120,90,0,0,0,0,0,0,41,,,,
286
+ 284,19,0,1,1,0,30,1,34,0,0,0,0,140,90,0,0,0,0,0,1,72,,,,
287
+ 285,78,1,0,1,1,127,17,50,0,1,0,0,140,90,0,0,0,0,0,0,51,,,,
288
+ 286,26,0,1,0,0,256,2,73,0,0,0,1,100,70,0,0,0,0,0,0,167,,,,
289
+ 287,21,0,0,1,0,181,7,19,0,0,0,0,80,70,0,0,0,0,0,0,52,,,,
290
+ 288,66,0,1,0,0,244,14,84,0,0,0,0,140,80,0,0,0,0,0,1,16,,,,
291
+ 289,48,0,0,0,0,8,6,17,0,1,0,0,120,40,0,0,0,0,0,0,113,,,,
292
+ 290,32,0,0,0,0,57,8,55,0,1,0,0,150,70,0,0,0,0,0,0,27,,,,
293
+ 291,82,0,1,0,0,251,5,31,0,0,0,0,80,60,0,0,0,0,0,1,147,,,,
294
+ 292,62,0,1,0,0,222,13,9,0,1,0,0,140,40,0,0,1,0,0,0,100,,,,
295
+ 293,61,1,0,1,1,165,0,9,0,0,0,0,130,90,0,0,0,1,0,1,157,,,,
296
+ 294,80,0,0,0,0,67,17,66,1,0,0,0,130,90,0,0,0,0,0,0,103,,,,
297
+ 295,33,0,0,0,0,14,0,64,0,0,0,1,110,60,0,0,0,0,0,0,174,,,,
298
+ 296,29,0,1,0,0,229,9,95,0,1,0,0,110,80,0,0,0,0,0,1,27,,,,
299
+ 297,14,0,0,0,1,59,5,118,0,1,0,0,100,40,0,0,0,0,0,0,14,,,,
300
+ 298,53,0,1,0,0,233,3,39,0,1,0,0,100,70,0,0,0,0,0,0,4,,,,
301
+ 299,22,0,1,0,0,33,13,16,0,1,0,0,100,40,0,0,0,0,0,0,86,,,,
302
+ 300,8,0,1,0,0,210,0,24,1,1,1,1,120,90,0,0,0,0,0,0,171,,,,
303
+ 301,18,0,0,0,1,222,0,86,0,0,0,0,140,80,0,0,0,1,0,0,58,,,,
304
+ 302,45,0,0,0,0,9,15,79,0,1,0,0,140,60,0,0,0,1,0,1,156,,,,
305
+ 303,36,0,1,0,0,34,14,58,0,1,0,0,140,80,0,0,0,0,0,0,29,,,,
306
+ 304,62,0,1,0,1,80,13,71,1,0,0,0,130,90,0,0,0,0,0,1,19,,,,
307
+ 305,85,0,1,0,0,215,14,128,1,0,0,1,130,90,0,0,0,0,0,0,185,,,,
308
+ 306,83,0,0,0,0,117,0,33,0,0,0,0,150,70,0,0,0,0,0,0,35,,,,
309
+ 307,45,0,1,0,0,137,8,23,0,1,0,0,120,80,0,0,0,0,0,0,76,,,,
310
+ 308,63,0,1,0,0,221,3,37,0,0,0,0,140,60,0,0,1,0,0,1,70,,,,
311
+ 309,35,0,1,0,0,220,13,103,0,0,0,0,150,70,0,0,0,0,0,0,103,,,,
312
+ 310,75,1,1,1,0,286,14,128,1,0,1,0,100,70,0,0,0,0,0,0,95,,,,
313
+ 311,68,0,1,0,0,272,1,0,0,0,0,0,150,90,0,0,0,0,0,0,133,,,,
314
+ 312,43,0,0,0,0,186,3,125,0,1,1,0,130,90,0,0,0,0,0,0,158,,,,
315
+ 313,4,0,1,0,0,129,7,108,0,0,0,0,150,90,0,0,1,0,0,0,174,,,,
316
+ 314,49,1,0,1,1,302,8,27,0,0,0,0,120,70,0,0,0,1,0,1,105,,,,
317
+ 315,14,1,1,1,1,23,14,44,1,1,1,0,120,70,0,0,1,0,0,0,8,,,,
318
+ 316,16,0,1,0,1,285,3,6,0,0,0,0,130,60,0,0,0,0,0,0,185,,,,
319
+ 317,46,0,0,1,0,87,15,124,0,1,0,0,100,80,0,1,0,0,0,1,184,,,,
320
+ 318,39,0,0,0,0,56,6,120,0,1,0,0,110,70,0,0,1,0,0,0,145,,,,
321
+ 319,72,0,0,0,0,241,14,110,0,0,0,0,80,70,0,0,1,0,0,0,38,,,,
322
+ 320,64,0,0,0,1,146,6,75,0,0,0,1,120,70,0,0,0,0,0,0,53,,,,
323
+ 321,60,0,0,0,0,259,7,10,0,0,0,0,150,90,0,0,0,0,0,0,95,,,,
324
+ 322,0,0,1,0,0,58,8,104,0,1,0,0,130,80,1,1,1,0,0,0,184,,,,
325
+ 323,76,0,1,0,1,13,2,3,1,1,0,1,100,90,0,0,0,0,0,0,160,,,,
326
+ 324,23,0,0,0,0,90,10,46,0,0,0,0,150,60,0,0,1,0,0,0,139,,,,
327
+ 325,75,0,1,0,0,188,8,112,0,1,0,0,110,60,0,0,0,0,0,0,56,,,,
328
+ 326,37,0,0,0,0,114,6,104,0,1,0,0,140,60,0,0,1,0,0,0,97,,,,
329
+ 327,83,0,0,0,0,235,6,128,1,1,1,0,120,90,0,0,0,0,0,0,0,,,,
330
+ 328,30,0,0,0,0,249,3,38,0,1,0,0,110,80,0,0,0,0,0,0,59,,,,
331
+ 329,24,0,0,0,0,161,6,80,1,0,0,1,130,70,0,0,0,1,0,1,31,,,,
332
+ 330,80,0,1,0,0,223,10,58,0,1,0,0,110,60,0,0,0,0,0,0,90,,,,
333
+ 331,2,0,1,0,0,50,16,13,0,0,0,0,100,50,0,0,0,0,0,0,115,,,,
334
+ 332,12,0,0,1,0,143,13,107,0,0,0,0,90,60,0,0,1,0,0,1,64,,,,
335
+ 333,63,0,1,0,1,216,2,60,0,0,0,0,100,90,0,0,0,0,0,0,18,,,,
336
+ 334,54,0,0,0,0,309,17,93,0,1,0,0,110,80,0,0,0,0,1,0,157,,,,
337
+ 335,28,0,0,0,0,254,16,91,0,1,0,0,80,70,0,0,0,0,0,0,66,,,,
338
+ 336,66,0,0,0,0,269,14,97,0,1,0,0,80,40,0,0,0,0,1,0,174,,,,
339
+ 337,21,0,1,0,0,81,17,48,0,1,0,0,140,60,0,0,1,0,0,0,50,,,,
340
+ 338,78,0,1,0,0,295,1,109,0,1,0,0,100,60,1,1,1,0,0,0,32,,,,
341
+ 339,82,0,1,0,0,293,17,41,0,1,0,0,120,90,0,0,0,0,0,0,28,,,,
342
+ 340,61,0,0,0,0,252,7,4,0,1,0,0,140,40,0,0,1,0,0,0,74,,,,
343
+ 341,37,0,0,0,0,103,11,62,0,0,0,0,100,80,0,0,0,0,0,0,69,,,,
344
+ 342,18,0,0,0,0,49,4,27,0,1,0,0,150,60,1,0,1,0,0,1,179,,,,
345
+ 343,17,0,0,0,0,181,4,48,0,1,0,1,140,70,0,0,0,0,0,0,35,,,,
346
+ 344,49,0,1,0,0,242,0,67,0,0,1,1,100,90,0,0,0,0,0,0,127,,,,
347
+ 345,53,0,0,0,0,25,16,91,0,1,0,0,120,80,0,0,0,0,0,0,89,,,,
348
+ 346,7,0,1,0,0,32,0,41,0,1,0,0,150,80,1,1,1,0,0,0,30,,,,
349
+ 347,80,0,0,0,0,297,8,123,0,0,0,0,120,70,0,0,1,0,0,0,46,,,,
350
+ 348,48,0,0,0,0,247,17,129,0,1,0,0,150,70,0,0,0,0,0,1,193,,,,
351
+ 349,27,0,0,0,0,49,14,120,0,1,0,0,120,60,0,0,1,0,0,0,115,,,,
352
+ 350,21,0,1,0,0,264,4,27,0,1,0,0,150,70,0,0,0,0,0,0,101,,,,
353
+ 351,78,0,1,0,0,3,6,129,1,0,1,0,150,90,0,0,0,0,0,0,135,,,,
354
+ 352,1,0,0,0,1,189,14,55,0,1,0,0,130,70,0,0,0,0,0,0,1,,,,
355
+ 353,30,0,0,0,1,27,15,62,0,1,0,0,150,70,0,0,0,0,0,1,67,,,,
356
+ 354,35,0,0,0,0,88,11,106,0,0,0,0,140,60,0,0,0,0,0,0,83,,,,
357
+ 355,22,0,1,0,0,63,10,36,0,1,0,0,130,80,0,0,0,0,0,0,176,,,,
358
+ 356,9,0,0,0,0,182,16,47,0,0,0,0,110,90,0,0,1,0,0,1,60,,,,
359
+ 357,19,0,0,1,0,149,2,63,0,0,0,0,150,90,0,0,0,0,1,0,108,,,,
360
+ 358,84,0,0,0,0,151,16,39,1,1,1,1,120,90,0,0,0,0,1,1,40,,,,
361
+ 359,72,0,1,0,0,177,12,49,0,1,1,1,130,70,0,0,1,0,0,0,132,,,,
362
+ 360,54,0,1,0,0,109,12,122,0,1,0,0,90,80,0,0,0,1,0,1,137,,,,
363
+ 361,64,0,1,0,0,186,10,46,1,0,1,1,130,70,0,0,0,0,0,0,105,,,,
364
+ 362,69,0,0,0,0,76,4,67,0,1,0,0,100,60,0,0,0,0,0,1,121,,,,
365
+ 363,6,0,1,0,0,243,1,77,0,1,0,0,150,70,0,0,1,0,0,0,59,,,,
366
+ 364,7,0,1,0,0,2,5,81,0,0,0,0,120,80,0,0,1,0,0,0,175,,,,
367
+ 365,52,0,1,0,0,153,1,15,0,1,1,0,120,90,0,0,0,1,0,1,119,,,,
368
+ 366,16,0,0,0,0,302,11,29,0,0,0,0,110,40,0,0,0,0,1,0,107,,,,
369
+ 367,20,1,1,1,1,237,14,113,0,0,0,0,120,60,0,0,0,0,0,1,106,,,,
370
+ 368,27,0,0,0,0,239,4,10,0,1,0,0,110,90,0,0,0,0,0,0,131,,,,
371
+ 369,55,0,1,0,0,128,7,64,1,1,0,1,120,70,0,0,0,0,0,0,29,,,,
372
+ 370,9,0,1,0,0,4,13,2,0,0,0,1,130,70,0,0,1,0,0,0,9,,,,
373
+ 371,54,0,1,1,0,203,12,91,0,0,0,0,130,80,0,0,0,0,0,0,66,,,,
374
+ 372,86,0,1,0,0,157,13,68,1,0,1,0,120,70,0,0,0,0,0,0,127,,,,
375
+ 373,66,0,0,0,0,157,1,23,0,1,0,0,120,90,0,0,0,0,0,1,118,,,,
376
+ 374,42,0,1,0,0,235,10,50,0,1,0,1,130,90,0,0,0,0,0,0,122,,,,
377
+ 375,71,0,1,0,0,310,13,88,0,0,0,0,100,70,0,0,0,0,0,0,143,,,,
378
+ 376,53,1,0,1,0,39,7,11,0,1,0,0,120,70,0,0,0,0,0,0,166,,,,
379
+ 377,71,0,1,0,0,285,5,30,1,1,0,1,120,90,0,0,0,1,0,0,92,,,,
380
+ 378,22,0,0,1,0,138,13,99,0,1,0,0,130,80,0,1,1,0,0,0,130,,,,
381
+ 379,11,0,0,1,0,199,11,40,0,1,0,0,130,70,1,1,1,0,0,0,190,,,,
382
+ 380,29,0,1,0,0,224,0,67,0,1,0,0,110,90,0,0,1,0,0,0,183,,,,
383
+ 381,42,0,0,1,0,138,9,28,0,0,0,0,130,70,0,0,0,0,0,0,191,,,,
384
+ 382,70,0,1,0,0,100,12,97,0,0,0,0,140,50,0,0,0,0,0,0,126,,,,
385
+ 383,51,0,1,0,0,183,11,20,1,0,1,0,130,90,0,0,0,0,0,0,163,,,,
386
+ 384,29,0,0,0,0,221,3,93,0,1,0,0,130,90,0,0,1,0,0,0,81,,,,
387
+ 385,76,0,0,1,0,119,10,28,0,1,0,0,150,90,0,0,0,0,0,0,153,,,,
388
+ 386,73,0,1,0,0,34,12,76,0,0,0,0,130,40,0,0,0,0,0,0,83,,,,
389
+ 387,0,0,1,0,0,120,2,58,0,0,0,0,150,70,0,0,0,0,0,0,88,,,,
390
+ 388,72,0,0,0,0,301,5,80,0,1,0,0,120,80,0,0,1,0,0,1,177,,,,
391
+ 389,41,0,1,0,0,298,15,44,0,0,0,0,120,70,0,0,0,0,0,0,79,,,,
392
+ 390,61,0,1,0,0,54,13,103,1,1,1,1,120,90,0,0,0,0,0,0,37,,,,
393
+ 391,50,0,0,0,0,98,5,9,0,1,0,0,140,40,0,0,0,0,0,0,168,,,,
394
+ 392,76,0,1,0,0,72,16,113,0,1,0,0,140,70,0,0,1,0,0,0,125,,,,
395
+ 393,10,0,1,0,0,122,0,35,0,1,0,0,120,70,0,0,0,0,0,0,9,,,,
396
+ 394,2,0,1,0,0,113,10,109,0,0,0,0,120,90,0,0,0,0,0,0,94,,,,
397
+ 395,70,0,1,1,0,38,15,1,1,1,0,1,120,70,0,0,0,0,0,0,73,,,,
398
+ 396,79,0,1,0,1,178,10,118,0,1,0,0,110,80,0,0,0,0,0,0,182,,,,
399
+ 397,60,0,1,0,0,194,11,69,0,0,0,0,150,60,0,0,1,1,0,1,141,,,,
400
+ 398,74,0,0,0,0,211,10,36,0,1,0,1,150,40,0,0,1,0,0,0,20,,,,
401
+ 399,56,0,1,0,0,99,1,76,0,1,0,0,100,90,0,0,0,0,0,1,160,,,,
402
+ 400,62,0,1,0,0,276,1,108,0,0,0,0,140,80,0,0,0,0,0,0,67,,,,
403
+ 401,35,0,1,0,0,35,4,111,1,0,0,1,130,70,0,0,0,0,0,0,32,,,,
404
+ 402,63,0,0,0,0,83,3,13,0,0,0,0,140,70,0,0,0,0,0,1,139,,,,
405
+ 403,44,0,0,0,0,258,4,59,0,0,0,1,140,90,0,0,0,0,0,0,76,,,,
406
+ 404,74,0,1,1,0,290,17,98,0,0,0,0,150,80,0,0,0,0,0,1,89,,,,
407
+ 405,8,0,1,0,0,224,8,56,0,1,0,0,130,50,0,0,0,0,0,0,150,,,,
408
+ 406,70,0,1,0,0,163,9,16,1,0,0,0,120,90,0,0,0,0,0,0,10,,,,
409
+ 407,58,0,1,0,0,105,3,61,1,1,1,0,120,90,0,0,0,0,0,1,138,,,,
410
+ 408,53,0,1,0,0,15,7,23,0,0,0,0,150,50,0,0,0,0,0,0,26,,,,
411
+ 409,14,0,0,0,0,160,10,97,0,1,0,0,120,40,0,0,0,0,0,0,50,,,,
412
+ 410,52,0,1,0,1,198,17,37,0,1,0,0,100,40,0,1,0,0,0,0,111,,,,
413
+ 411,55,0,0,0,0,190,16,59,0,0,0,0,100,90,0,0,1,0,0,0,186,,,,
414
+ 412,84,0,0,0,0,51,15,115,0,0,0,0,140,80,0,0,0,0,0,0,155,,,,
415
+ 413,18,0,1,0,0,177,8,49,1,0,1,0,120,90,0,0,0,0,0,0,19,,,,
416
+ 414,8,0,0,0,0,96,17,17,0,1,0,0,120,80,0,0,0,0,0,1,93,,,,
417
+ 415,20,0,0,0,0,148,2,99,0,1,0,0,100,40,0,0,1,0,0,0,179,,,,
418
+ 416,68,1,0,1,0,229,15,94,0,1,0,0,100,40,0,0,0,0,0,0,43,,,,
419
+ 417,69,0,1,0,1,62,6,72,0,1,0,0,140,80,0,0,0,0,0,0,117,,,,
420
+ 418,0,0,0,0,0,120,11,126,0,1,0,0,100,90,0,0,0,0,0,0,119,,,,
421
+ 419,19,0,1,0,0,252,14,18,0,0,0,0,120,80,0,0,0,0,0,0,53,,,,
422
+ 420,61,0,1,0,0,237,12,83,0,0,0,0,120,70,0,0,0,0,0,0,129,,,,
423
+ 421,1,0,1,0,0,30,4,44,0,0,0,0,120,60,0,0,0,0,0,0,76,,,,
424
+ 422,13,0,1,0,0,287,13,12,0,0,0,0,130,70,0,0,0,0,0,0,28,,,,
425
+ 423,81,0,0,0,0,306,7,77,0,1,0,0,130,80,0,0,0,0,0,0,148,,,,
426
+ 424,84,0,0,0,1,103,5,120,0,0,0,0,130,70,0,0,0,0,1,0,91,,,,
427
+ 425,0,0,1,0,1,134,8,95,0,1,0,0,140,90,0,0,0,0,0,0,6,,,,
428
+ 426,38,0,0,0,0,212,6,122,0,1,0,0,140,60,0,0,1,0,0,0,11,,,,
429
+ 427,64,0,0,0,0,159,13,39,0,0,0,0,120,90,0,0,0,0,0,1,168,,,,
430
+ 428,13,0,1,0,0,305,13,91,0,0,0,0,150,50,0,0,1,0,0,0,44,,,,
431
+ 429,47,0,1,0,0,83,1,27,0,0,0,0,140,50,0,0,0,0,0,1,21,,,,
432
+ 430,32,0,0,0,0,311,3,31,0,1,0,0,120,90,0,0,0,0,0,1,134,,,,
433
+ 431,41,0,0,0,0,248,12,5,0,1,0,0,120,80,0,0,1,0,0,0,168,,,,
434
+ 432,13,0,1,0,0,36,16,74,0,0,0,0,140,70,0,0,0,0,0,0,155,,,,
435
+ 433,77,0,1,0,1,174,3,10,0,0,0,1,80,60,1,0,1,0,0,0,17,,,,
436
+ 434,2,1,0,1,1,239,15,114,0,1,0,0,140,90,0,0,0,0,0,0,149,,,,
437
+ 435,72,0,1,0,0,47,6,51,0,0,0,0,130,90,0,0,0,0,0,0,65,,,,
438
+ 436,14,0,0,0,0,16,1,77,0,0,0,0,150,90,0,0,0,0,0,1,18,,,,
439
+ 437,27,0,0,0,0,260,10,43,0,0,0,0,120,70,0,0,0,0,1,0,142,,,,
440
+ 438,57,0,1,0,0,5,17,1,0,0,0,0,120,90,1,1,1,0,0,1,57,,,,
441
+ 439,83,0,0,0,1,316,0,112,0,1,0,0,120,90,0,0,0,1,0,1,48,,,,
442
+ 440,19,0,1,0,0,160,0,81,0,0,0,0,150,60,0,0,0,0,1,0,56,,,,
443
+ 441,44,0,0,0,0,7,12,7,0,0,0,1,150,40,0,0,0,0,0,0,73,,,,
444
+ 442,23,0,1,0,0,203,12,53,0,1,0,0,140,70,0,0,0,0,0,0,181,,,,
445
+ 443,34,0,1,0,0,89,11,125,0,0,0,0,140,80,0,0,1,0,0,0,65,,,,
446
+ 444,10,0,0,0,0,195,15,99,0,0,0,1,100,90,0,0,0,0,0,0,99,,,,
447
+ 445,63,0,0,0,0,265,4,101,0,1,0,0,150,60,0,0,0,0,0,0,3,,,,
448
+ 446,82,0,1,0,0,128,14,56,0,1,0,1,130,80,0,0,1,0,0,0,141,,,,
449
+ 447,5,0,1,0,0,124,16,26,0,1,0,0,100,70,0,0,0,0,0,1,17,,,,
450
+ 448,39,0,0,0,0,124,9,8,0,0,0,0,120,50,0,1,1,0,0,1,158,,,,
451
+ 449,86,0,1,0,0,48,16,62,0,0,0,0,120,60,0,0,0,0,0,1,110,,,,
452
+ 450,29,0,1,0,0,188,9,72,0,0,0,0,150,60,0,0,0,0,0,1,25,,,,
453
+ 451,45,0,0,1,0,29,3,1,1,0,1,1,170,90,0,0,1,0,0,0,145,,,,
454
+ 452,0,0,1,0,0,271,5,124,0,1,0,0,140,50,0,0,0,0,0,1,34,,,,
455
+ 453,47,1,0,1,0,6,5,1,0,0,0,0,100,70,0,0,0,0,0,0,10,,,,
456
+ 454,85,1,0,1,1,68,16,71,1,0,0,1,120,70,0,1,0,0,0,1,112,,,,
457
+ 455,85,1,1,1,1,129,6,69,0,1,0,0,110,70,0,0,0,0,1,1,98,,,,
458
+ 456,79,0,0,0,1,144,3,28,0,0,0,0,150,70,1,0,1,0,0,0,188,,,,
459
+ 457,4,0,0,1,0,142,15,116,0,1,0,0,140,90,0,0,1,0,0,0,40,,,,
460
+ 458,39,0,0,0,0,202,2,80,0,0,0,0,150,70,0,0,1,0,0,1,86,,,,
461
+ 459,82,0,1,0,0,209,9,43,1,0,1,0,130,90,0,0,0,1,0,0,148,,,,
462
+ 460,79,0,0,0,0,123,8,44,0,0,0,0,110,80,0,0,0,0,0,0,186,,,,
463
+ 461,53,0,1,0,0,28,7,100,0,1,0,0,150,80,0,0,1,1,0,1,136,,,,
464
+ 462,51,0,0,0,0,46,2,87,0,0,0,0,100,50,0,0,0,0,0,0,24,,,,
465
+ 463,32,0,0,0,0,246,4,66,0,1,0,0,110,70,0,0,0,0,0,1,22,,,,
466
+ 464,76,0,0,0,0,131,15,22,0,0,0,0,80,90,0,0,0,0,0,1,182,,,,
467
+ 465,11,0,0,0,0,147,14,90,0,1,0,0,110,60,0,0,1,0,0,0,54,,,,
468
+ 466,43,0,0,0,0,16,2,90,1,0,0,1,120,90,0,0,0,1,0,1,2,,,,
469
+ 467,3,0,1,0,0,311,5,12,0,0,0,0,140,60,0,0,0,0,0,0,173,,,,
470
+ 468,27,0,1,0,0,260,3,31,0,0,1,0,120,90,0,0,0,0,0,0,33,,,,
471
+ 469,78,0,1,1,0,262,5,122,0,0,0,0,150,70,0,0,0,0,0,0,164,,,,
472
+ 470,84,0,0,0,0,266,9,55,0,1,0,0,110,90,0,0,0,0,0,1,68,,,,
473
+ 471,58,0,1,0,0,180,9,4,0,0,0,0,100,90,0,0,0,0,0,1,48,,,,
474
+ 472,62,0,0,0,0,25,16,85,0,1,0,0,140,80,0,0,0,0,0,1,183,,,,
475
+ 473,76,0,0,0,1,155,0,16,1,0,0,1,130,90,0,0,0,0,0,0,5,,,,
476
+ 474,47,0,1,0,1,271,7,45,0,1,0,0,120,90,0,0,0,0,0,0,43,,,,
477
+ 475,0,0,0,0,0,238,16,56,0,0,0,0,140,70,0,0,0,0,0,1,106,,,,
478
+ 476,51,0,1,0,0,300,8,90,0,0,0,0,90,80,0,0,0,0,0,1,21,,,,
479
+ 477,51,0,0,0,0,151,16,104,0,1,0,0,150,50,0,0,0,0,0,1,39,,,,
480
+ 478,49,0,1,0,0,303,16,29,0,0,0,0,140,60,0,0,0,0,0,0,118,,,,
481
+ 479,81,0,0,0,0,47,4,107,1,1,1,1,120,90,0,0,0,0,0,0,24,,,,
482
+ 480,45,0,1,0,0,185,9,118,0,1,0,0,150,90,0,0,0,0,1,0,98,,,,
483
+ 481,62,0,1,0,1,92,10,82,0,0,0,0,130,70,0,0,0,0,0,1,146,,,,
484
+ 482,73,1,1,1,0,279,4,118,0,1,0,0,150,60,0,0,1,0,0,0,151,,,,
485
+ 483,3,0,0,0,0,73,1,80,0,0,0,0,130,80,0,0,0,1,0,1,103,,,,
486
+ 484,55,0,1,0,1,21,9,75,1,1,0,1,120,70,0,0,0,0,0,0,194,,,,
487
+ 485,24,0,0,0,0,64,4,34,0,1,0,0,110,80,0,0,0,0,0,1,82,,,,
488
+ 486,31,0,1,0,1,303,10,14,0,0,0,0,100,60,0,0,0,0,0,0,146,,,,
489
+ 487,87,0,1,0,0,10,6,7,0,0,0,1,110,60,0,0,0,0,0,0,84,,,,
490
+ 488,25,0,1,0,1,79,9,89,0,1,0,0,110,90,0,0,0,0,0,0,81,,,,
491
+ 489,86,0,1,0,0,187,9,53,0,1,0,0,110,80,0,0,0,0,0,0,121,,,,
492
+ 490,25,0,0,0,0,172,16,61,0,1,0,0,100,70,0,0,0,0,0,0,192,,,,
493
+ 491,57,0,1,0,0,3,7,68,0,1,0,0,130,90,0,0,0,0,0,0,15,,,,
494
+ 492,26,0,1,0,0,107,0,115,0,0,0,0,140,70,0,0,0,1,0,1,172,,,,
495
+ 493,73,0,1,0,0,106,14,42,0,1,0,0,80,50,0,0,0,0,0,1,102,,,,
496
+ 494,61,1,1,1,1,255,2,97,0,0,0,0,100,40,0,0,1,0,0,1,144,,,,
497
+ 495,81,0,1,0,0,319,12,26,0,1,0,0,120,90,0,0,1,0,0,0,165,,,,
498
+ 496,75,1,0,1,1,102,7,3,0,0,0,0,80,50,0,0,0,0,0,0,147,,,,
499
+ 497,40,0,0,0,0,121,5,93,0,0,0,0,100,80,0,0,0,1,0,1,140,,,,
500
+ 498,18,0,0,0,1,53,10,74,0,0,0,0,130,90,0,1,1,0,0,1,151,,,,
501
+ 499,2,0,0,0,0,64,2,96,0,1,0,0,140,80,0,0,0,0,0,0,53,,,,
README.md CHANGED
@@ -1,13 +1,52 @@
1
  ---
2
- title: XLabel
3
  emoji: 💻
4
- colorFrom: indigo
5
  colorTo: gray
6
  sdk: streamlit
7
- sdk_version: 1.17.0
8
  app_file: app.py
9
- pinned: false
10
  license: apache-2.0
11
  ---
12
 
13
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
1
  ---
2
+ title: "XLabel: eXplainable Labeling Assistant"
3
  emoji: 💻
4
+ colorFrom: pink
5
  colorTo: gray
6
  sdk: streamlit
7
+ sdk_version: 1.15.2
8
  app_file: app.py
9
+ pinned: true
10
  license: apache-2.0
11
  ---
12
 
13
+ # XLabel: e**X**plainable **Label**ing Assistant
14
+
15
+ XLabel is an open-source [Streamlit](https://streamlit.io/) app that takes an explainable machine learning approach to visual-interactive data labeling.
16
+
17
+ This is the official code of the following paper:
18
+ [An Explainable Machine Learning Approach to Visual-Interactive Labeling: A Case Study on Non-communicable Disease Data](https://arxiv.org/abs/2209.12778)
19
+ Donlapark Ponnoprat, Parichart Pattarapanitchai, Phimphaka Taninpong, Suthep Suantai
20
+
21
+ ## News (01/01/2023)
22
+ * Use tabs instead of radio buttons for multiple labels.
23
+ * The app now requires `streamlit>=1.16.0` for the tabs and `interpret>=0.3.0` for handling missing data.
24
+
25
+ ## Features
26
+ XLabel can:
27
+ * Predict the most probable labels using Explainable Boosting Machine (EBM).
28
+ * Show the contributions of each feature towards the predicted labels.
29
+ * Provide an option to write the labels directly into the data file (use `XLabel.py`) or save them in a separate file (use `XLabelDL.py`).
30
+ * Support data with multiple labels and multiple classes.
31
+ * Support data with missing values ([thanks to EBM](https://github.com/interpretml/interpret/issues/18)) and/or non-numeric categorical features.
32
+
33
+ ## Usage
34
+ Before using XLabel, the data file must follow these tabular conventions:
35
+ * The file must be in either CSV or Excel format.
36
+ * The first row of the file must be the names of the columns.
37
+ * The first column must contain a unique identifier (id) for each row.
38
+ * The label columns must appear last.
39
+ In addition, a few instances must already be labeled, with each class of every label appearing at least once (for example, if a label has five possible classes, then at least five labeled instances are required).
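+
+ For instance, the bundled [`NCD_synthetic.csv`](NCD_synthetic.csv) already follows these conventions: the `Id` column comes first, the four `*_label` columns come last, and unlabeled rows simply leave the label cells empty. The excerpt below shows its header row, one labeled row, and one unlabeled row:
+ ```
+ Id,AGE,DM_key,IFG_key,DM_ICD10,DM_drugs,Glucose,HbA1c,eGFR,HTN_key,cvd_key,HTN_ICD10,HTN_drugs,sbp1,dbp1,CKD_key,CKD_ICD10,CKD_drugs,DLP_key,DLP_ICD10,DLP_drugs,LDL-c,DM_label,HTN_label,CKD_label,DLP_label
+ 0,57,0,0,0,1,265,13,40,0,0,0,0,150,50,0,0,0,1,0,1,42,0,0,0,1
+ 100,65,0,1,0,0,204,12,70,0,1,0,0,130,90,0,0,1,0,0,1,75,,,,
+ ```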
40
+
41
+ With your data file satisfying these conditions, you can now start data labeling with XLabel!
42
+ 1. Copy `XLabel.py` to the directory that contains the data file and run the `streamlit` command:
43
+ ```
44
+ streamlit run XLabel.py
45
+ ```
46
+ * By design, `XLabel.py` will write the labeled data back to the original data file. If you would rather download the labeled data as a separate file, use `XLabelDL.py` instead.
47
+ * You can assign a specific list of input features to each label by editing `configs.json` and copying it along with `XLabel.py`. There are also other sidebar options that you can experiment with. Here is an example of [`configs.json`](configs.json).
48
+ 2. Upload a data file (only on the first run), select the options on the sidebar, and then click "**Sample**". The samples with the lowest prediction confidence are shown first on the main screen.
49
+ 3. Check the suggested labels; you can keep the correct ones and change the wrong ones.
50
+ 4. Click the "**Submit Labels**" button at the bottom of the page to save the labels.
51
+ * If you are using `XLabel.py`, the labels will be saved directly to the original data file.
52
+ * If you are using `XLabelDL.py`, you need to click the `Download labeled data` button in the sidebar to download the labeled data as a new file.
app.py ADDED
@@ -0,0 +1,614 @@
1
+ """A Streamlit app designed to help with data labeling with explainable
2
+ machine learning approach. It can handle data with many labels and many
3
+ classes. For each label, an Explainable Boosting Machine model is
4
+ trained on the labeled data, then it makes class predictions and
5
+ provides per-instance local explanations, which are then used to make
6
+ heatmaps displayed on the main screen.
7
+ Model page: https://huggingface.co/Donlapark/XLabel
8
+ """
9
+ import io
+ import json
10
+ import math
11
+ import os
12
+ import pickle as pkle
13
+
14
+ from interpret.glassbox.ebm.ebm import ExplainableBoostingClassifier
15
+
16
+ import numpy as np
17
+ import pandas as pd
18
+
19
+ import altair as alt
20
+ import streamlit as st
21
+ from streamlit import session_state as _state
22
+ import streamlit.components.v1 as components
23
+
24
+ _PERSIST_STATE_KEY = f"{__name__}_PERSIST"
25
+ _CONFIGS_FILE = "configs.json"
26
+ _MODEL = "_saved_models.pickle"
27
+ _NUM_FEAT_PER_ROW = 11
28
+
29
+ st.set_page_config(layout="wide")
30
+
31
+
32
+ def main():
33
+ """The main Streamlit app."""
34
+ if "configs" not in _state:
35
+ try:
36
+ with open(_CONFIGS_FILE, "r") as _file:
37
+ _state["configs"] = json.load(_file)
38
+ except FileNotFoundError:
39
+ create_config_file()
40
+
41
+ _state["loaded_new_file"] = True
42
+
43
+ st.sidebar.write("Current database: " + _state.configs["db_filename"])
44
+
45
+ st.sidebar.file_uploader("Upload a CSV or Excel file",
46
+ type=["csv", "xlsx", "xls"],
47
+ key="uploaded_files",
48
+ accept_multiple_files=False,
49
+ on_change=update_file)
50
+
51
+ with st.sidebar.form("sidebar"):
52
+ st.slider("Number of labels",
53
+ min_value=1,
54
+ max_value=20,
55
+ value=_state.configs["sidebar"]["num_labels"],
56
+ step=1,
57
+ key="num_labels")
58
+
59
+ st.selectbox("Include data with label/prediction mismatches?",
60
+ ("Yes", "No"),
61
+ key="relabel",
62
+ index=("Yes",
63
+ "No").index(_state.configs["sidebar"]["relabel"]))
64
+
65
+ st.selectbox("Sampling mode",
66
+ ("Fixed sample size", "Confidence threshold"),
67
+ key="mode",
68
+ index=("Fixed sample size", "Confidence threshold").index(
69
+ _state.configs["sidebar"]["mode"]))
70
+
71
+ st.slider("Sample size (for \"Fixed sample size\" mode)",
72
+ min_value=1,
73
+ max_value=500,
74
+ value=_state.configs["sidebar"]["n_samples"],
75
+ step=1,
76
+ key="n_samples")
77
+
78
+ st.slider("Threshold (for \"Confidence threshold\" mode)",
79
+ min_value=0.00,
80
+ max_value=1.00,
81
+ value=_state.configs["sidebar"]["threshold"],
82
+ step=0.01,
83
+ format="%.2f",
84
+ key="threshold")
85
+
86
+ form_cols = st.columns((2, 2, 2))
87
+ form_cols[1].form_submit_button("Sample", on_click=sample_and_predict)
88
+
89
+ if "pages" in _state:
90
+ page_list = list(_state.pages)
91
+ tabs = st.tabs(page_list)
92
+ for i, tab in enumerate(tabs):
93
+ with tab:
94
+ display_main_screen(page_list[i])
95
+
96
+ filename = _state.configs["db_filename"]
97
+ file_pre, file_ext = os.path.splitext(filename)
98
+ if "database" in _state:
99
+ data, mime = convert_to_downloadable(_state.database, file_ext)
100
+
101
+ st.sidebar.download_button(label="Download labeled data",
102
+ data=data,
103
+ file_name=filename,
104
+ mime=mime)
105
+
106
+
107
+ def update_file():
108
+ """Update the state parameters after a file has been uploaded"""
109
+ if _state.uploaded_files is not None:
110
+ _state.configs["db_filename"] = _state.uploaded_files.name
111
+ _state.loaded_new_file = True
112
+
113
+
114
+ def init_state_params():
115
+ """Initialize all state parameters.
116
+
117
+ This function will be called by sample_and_predict() when
118
+ _state.pages has not been initialized.
119
+
120
+ State parameters:
121
+ database: The pandas dataframe of the database.
122
+ configs: The saved configs of sidebar widgets.
123
+ pages: The list of all labels.
124
+ classes: The dict of all classes for each label.
125
+ class_to_num: The encoding dict of classes into integers.
126
+ num_to_class: The decoding dict of integers into classes.
129
+ previous_: The index of the previous page.
130
+ next_: The index of the next page.
131
+ next_clicked: The index of the current page.
132
+ local_results: A dict of outputs of EBM
133
+ used to write predictions and plot heatmaps on screen.
134
+ models: A dict of EBM models to predict the labels.
135
+ models_params: A dict of models' attributes, which will be
136
+ saved as a pickle file.
137
+ predictions: A pandas dataframe; each column contains EBM's
138
+ predictions of each label.
139
+ unlabeled_index: A pandas index of unlabeled rows. When new
140
+ labels are added to the database, compute_unlabeled_index()
141
+ needs to be called to track the changes.
142
+ """
143
+ if _state.uploaded_files is not None:
144
+ data_file = _state.uploaded_files
145
+ _state.configs["db_filename"] = data_file.name
146
+ else:
147
+ data_file = _state.configs["db_filename"]
148
+
149
+ filename = _state.configs["db_filename"]
150
+ if filename == "None":
151
+ return
152
+
153
+ file_pre, file_ext = os.path.splitext(filename)
154
+ if file_ext == ".csv":
155
+ _state.database = pd.read_csv(data_file, index_col=0)
156
+ elif (file_ext == ".xlsx") or (file_ext == ".xls"):
157
+ _state.database = pd.read_excel(data_file, index_col=0)
158
+
159
+ create_pages()
160
+
161
+
162
+ def create_pages():
163
+ """Add or change state parameters that are related to labeling pages
164
+
165
+ These parameters assign the labels to multiple pages, with
166
+ one label per page.
167
+ """
168
+ _state["pages"] = _state.database.columns[-_state.num_labels:]
169
+ _state["classes"] = {
170
+ label: sorted(list(_state.database[label].dropna().unique()))
171
+ for label in _state.pages
172
+ }
173
+ _state["num_to_class"] = {
174
+ label: dict(enumerate(_state.classes[label]))
175
+ for label in _state.pages
176
+ }
177
+ _state["class_to_num"] = {
178
+ label: {c: i
179
+ for i, c in enumerate(_state.classes[label])}
180
+ for label in _state.pages
181
+ }
182
+
183
+ _state.update({
184
+ 'local_results': {}
185
+ })
186
+
187
+ _state["predictions"] = pd.DataFrame(index=_state.database.index,
188
+ columns=_state.pages)
189
+
190
+ file_pre, file_ext = os.path.splitext(_state.configs["db_filename"])
191
+ try:
192
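+ # Restore previously trained EBMs by loading their pickled attribute dicts into fresh classifier objects.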
+ with open(file_pre + str(_state.num_labels) + _MODEL, 'rb') as _file:
193
+ _state["models_params"] = pkle.load(_file)
194
+ _state["models"] = {}
195
+ for label in _state.pages:
196
+ _state.models[label] = ExplainableBoostingClassifier()
197
+ _state.models[label].__dict__.update(
198
+ _state.models_params[label])
199
+ except FileNotFoundError:
200
+ _state["models"], _state["models_params"] = initialize_models()
201
+
202
+ compute_unlabeled_index()
203
+
204
+
205
+ def initialize_models():
206
+ """initialize and train EBMs for all labels.
207
+
208
+ If a pickle file of EBM models (stored in _MODEL) is not found
209
+ in the directory, this function will be called by init_state_params()
210
+ to initialize the models.
211
+ """
212
+ models = {}
213
+ models_params = {}
214
+ for label in _state.pages:
215
+ y = _state.database[label].dropna().map(_state.class_to_num[label])
216
+ X = subset_features(_state.database, label)
217
+ X = X.loc[y.index, :]
218
+ models[label] = ExplainableBoostingClassifier().fit(X, y)
219
+ models_params[label] = models[label].__dict__
220
+
221
+ return models, models_params
222
+
223
+
224
+ def subset_features(X, label):
225
+ """Returns a subset of features specified in state's input_features parameters
226
+ Args:
227
+ X: a Pandas DataFrame.
228
+ label: The column name of the labels.
229
+
230
+ Returns:
231
+ A Pandas DataFrame consisting of a subset of features in X.
232
+ """
233
+ input_features = _state.configs["input_features"]
234
+ if label not in input_features.keys():
235
+ X = X.iloc[:, :-_state.num_labels]
236
+ else:
237
+ X = X.loc[:, input_features[label]]
238
+ return X
239
+
240
+
241
+ def compute_unlabeled_index(new_labeled_index=None, label=None):
242
+ """Track the indices of unlabeled data after introducing new labels.
243
+
244
+ Args:
245
+ new_labeled_index: A pandas index of newly labeled data.
246
+ label: The column name of the new labels.
247
+ """
248
+ if new_labeled_index is not None:
249
+ _state.unlabeled_index[label] = _state.unlabeled_index[
250
+ label].difference(new_labeled_index)
251
+ else:
252
+ all_index = _state.database.index
253
+ _state.unlabeled_index = {
254
+ label: all_index[_state.database[label].isna()]
255
+ for label in _state.pages
256
+ }
257
+
258
+
259
+ def create_config_file():
260
+ """Create a new config file"""
261
+ _state["configs"] = {
262
+ "db_filename": "None",
263
+ "sidebar": {
264
+ "num_labels": 1,
265
+ "relabel": "Yes",
266
+ "mode": "Fixed sample size",
267
+ "n_samples": 50,
268
+ "threshold": 0.95
269
+ },
270
+ "input_features": {}
271
+ }
272
+ with open(_CONFIGS_FILE, "w") as _file:
273
+ json.dump(_state.configs, _file, indent=4)
274
+
275
+
276
+ @st.experimental_memo
277
+ def convert_to_downloadable(data, file_type):
278
+ """Convert a dataframe to a downloadable format
279
+
280
+ Args:
281
+ data: A Pandas DataFrame.
282
+ file_type: A target format for the conversion: ".csv", ".xls" or ".xlsx".
283
+
284
+ Returns:
285
+ converted_data: Data converted to the specified format.
286
+ """
287
+ if file_type == ".csv":
288
+ converted_data = data.to_csv().encode('utf-8')
289
+ mime = "text/csv"
290
+ elif (file_type == ".xlsx") or (file_type == ".xls"):
291
+ # DataFrame.to_excel() writes to a path or buffer and returns None, so write into an in-memory buffer and return its bytes.
+ excel_buffer = io.BytesIO()
+ data.to_excel(excel_buffer)
+ converted_data = excel_buffer.getvalue()
292
+ mime = "application/vnd.ms-excel"
293
+ else:
294
+ raise ValueError('file_type must be ".csv", ".xlsx" or ".xls"')
295
+
296
+ return converted_data, mime
297
+
298
+
299
+ def display_main_screen(label):
300
+ """Display predictions and heatmaps on the main screen.
301
+
302
+ This function is called after EBM has been trained on the labeled data.
303
+ The predictions and explanations (displayed as heatmaps) will be shown
304
+ on the main screen.
305
+
306
+ Args:
307
+ label: the column name of the predictions.
308
+ """
309
+ main_cols = st.columns((4, 4, 4))
310
+ if _state.unlabeled_index[label].empty:
311
+ main_cols[1].write("All " + label + " data are labeled.")
312
+ else:
313
+ with st.form(label + "Label form"):
314
+ if _state.local_results[label] == {}:
315
+ main_cols[1].write("""There are some unlabeled data left. \n \
316
+ This means that the confidences of the remaining data are \
317
+ above the threshold. \n You can either let the model label \
318
+ these data automatically \n or change the sampling mode to \
319
+ \"Fixed sample size\".""")
320
+ else:
321
+ input_features = _state.configs["input_features"]
322
+ if label not in input_features.keys():
323
+ num_features = _state.database.shape[1] - _state.num_labels
324
+ else:
325
+ num_features = len(input_features[label])
326
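+ # Split each instance's heatmap into rows of at most _NUM_FEAT_PER_ROW features.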
+ num_heatmap_rows = math.ceil(num_features / _NUM_FEAT_PER_ROW)
327
+ for page in _state.local_results[label]:
328
+ current_plot = plot_all_features(
329
+ _state.local_results[label][page]['data'],
330
+ title=str(page),
331
+ height=50,
332
+ num_rows=num_heatmap_rows)
333
+ cols = st.columns((6, 1))
334
+ #with cols[0]:
335
+ # if _state.text1 is not None:
336
+ # st.write(_state.data[_state.text1][page])
337
+ # if _state.text2 is not None:
338
+ # st.write(_state.data[_state.text2][page])
339
+
340
+ cols[0].altair_chart(current_plot,
341
+ use_container_width=True)
342
+
343
+ prediction = _state.local_results[label][page][
344
+ 'prediction']
345
+ cols[1].radio("Label",
346
+ options=_state.classes[label],
347
+ key=label + str(page),
348
+ index=int(prediction))
349
+ results = report_results(page, label)
350
+ for result in results:
351
+ cols[1].write(result)
352
+ st.markdown("""---""")
353
+
354
+ label_from_cols = st.columns((4, 4, 4))
355
+
356
+ label_from_cols[1].radio("Automatically label the remaining data?",
357
+ ("Yes", "No"),
358
+ index=1,
359
+ key=label+"_auto")
360
+
361
+ label_from_cols[1].form_submit_button("Submit Labels",
362
+ on_click=update_and_save,
363
+ args=(label, ))
364
+
365
+
366
+ @st.experimental_memo
367
+ def plot_all_features(data, title, height, num_rows):
368
+ """Plot all rows of the heatmap of EBM's per-instance explanation.
369
+
370
+ Args:
371
+ data: Per-instance local explanations from EBM.
372
+ title: The plot's title.
373
+ height: The height of the plot.
374
+ num_rows: The number of rows of the heatmap.
375
+
376
+ Returns:
377
+ obj: An Altair plot object.
378
+ """
379
+ plot_list = [None] * num_rows
380
+ if num_rows == 1:
381
+ plot_list[0] = plot(data, title, height)
382
+ else:
383
+ plot_list[0] = plot(data.iloc[0:_NUM_FEAT_PER_ROW], title, height)
384
+ for i in range(1, num_rows - 1):
385
+ plot_list[i] = plot(
386
+ data.iloc[_NUM_FEAT_PER_ROW * i:_NUM_FEAT_PER_ROW * (i + 1)],
387
+ "", height)
388
+ plot_list[-1] = plot(data.iloc[_NUM_FEAT_PER_ROW * (num_rows - 1):],
389
+ "", height)
390
+
391
+ obj = alt.vconcat(*plot_list).configure_axis(
392
+ labelFontSize=13, titleFontSize=16, labelAngle=0,
393
+ title=None).configure_title(fontSize=16)
394
+
395
+ return obj
396
+
397
+
398
+ def plot(data, title, height):
399
+ """Plot each row of the heatmap of EBM's per-instance explanation.
400
+
401
+ Args:
402
+ data: Per-instance local explanations from EBM.
403
+ title: The plot's title.
404
+ height: The height of the plot.
405
+
406
+ Returns:
407
+ obj: An Altair plot object.
408
+ """
409
+ base = alt.Chart(data).encode(x=alt.X('features', sort=None))
410
+
411
+ heatmap = base.mark_rect().encode(color=alt.Color(
412
+ 'scores:Q',
413
+ scale=alt.Scale(scheme='redblue', reverse=True, domain=[0, 1]),
414
+ legend=alt.Legend(direction='vertical')))
415
+
416
+ # Configure text
417
+ text = base.mark_text(baseline='middle', fontSize=14).encode(
418
+ text='values:N',
419
+ color=alt.condition(
420
+ (alt.datum.scores > 0.8) | (alt.datum.scores < 0.2),
421
+ alt.value('white'), alt.value('black')))
422
+
423
+ obj = (heatmap + text).properties(height=height, width=550, title=title)
424
+
425
+ return obj
426
+
427
+
428
+ @st.experimental_memo
429
+ def report_results(idx, col_name):
430
+ """Create a list that contains current label (if exists) and confidence score.
431
+
432
+ Args:
433
+ idx: A row's index in the database.
434
+ col_name: A column's name in the database.
435
+
436
+ Returns:
437
+ results: A list of current label (if exists) and confidence score.
438
+ """
439
+ results = []
440
+ current_label = _state.database[col_name][idx]
441
+ if not pd.isna(current_label):
442
+ results.append(f"Current label: {current_label}")
443
+
444
+ confidence = _state.local_results[col_name][idx]['confidence']
445
+ results.append(f"Confidence: {confidence:.2f}")
446
+
447
+ return results
448
+
449
+
450
+ def sample_and_predict():
451
+ """Sample data and make a dict of predictions and explanations.
452
+
453
+ This function calls EBM to predict the labels and give per-instance
454
+ local explanations, then calls generate_explanation() to store
455
+ the predictions and explanations in a dictionary.
456
+ """
457
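+ # Clear cached plots and reports from the previous sample before computing a new one.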
+ st.experimental_memo.clear()
458
+
459
+ if _state.loaded_new_file:
460
+ init_state_params()
461
+ _state.loaded_new_file = False
462
+ else:
463
+ if "database" not in _state:
464
+ st.error("No database has been uploaded.")
465
+ return
466
+ if _state.configs["sidebar"]["num_labels"] != _state.num_labels:
467
+ create_pages()
468
+
469
+ _state.local_results = dict.fromkeys(_state.pages)
470
+
471
+ for label in _state.pages:
472
+ X = subset_features(_state.database, label)
473
+ if _state.relabel == "No":
474
+ X_unlabeled = X.loc[_state.unlabeled_index[label], :]
475
+ else:
476
+ X_unlabeled = X
477
+ _state.local_results[label] = {}
478
+
479
+ model = _state.models[label]
480
+ generate_explanation(X_unlabeled, label, model)
481
+
482
+ for k in _state.configs["sidebar"].keys():
483
+ _state.configs["sidebar"][k] = _state[k]
484
+ with open(_CONFIGS_FILE, "w") as _file:
485
+ json.dump(_state.configs, _file, indent=4)
486
+
487
+
488
+ def update_and_save(label):
489
+ """Update the labels, then retrain and save the models.
490
+
491
+ Store the user's labels in the database, which is then saved to
492
+ a local disk. EBM is then retrained on the database with additional
493
+ labels, after which a new list of predictions and explanations
494
+ will be shown on the main screen. This function calls
495
+ generate_explanation() to store the predictions and explanations
496
+ in a dictionary.
497
+
498
+ Args:
499
+ label: the column name of the label.
500
+ """
501
+ new_labeled_index = list(_state.local_results[label].keys())
502
+ _state.database.loc[new_labeled_index, label] = [
503
+ _state[label + str(ix)] for ix in new_labeled_index
504
+ ]
505
+ compute_unlabeled_index(new_labeled_index, label)
506
+
507
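+ # If auto-labeling was requested, fill the remaining unlabeled rows with the model's predictions.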
+ if _state[label + "_auto"] == "Yes":
508
+ unlabeled_idx = _state.unlabeled_index[label]
509
+ class_pred = _state.predictions.loc[unlabeled_idx, label]
510
+ _state.database.loc[unlabeled_idx, label] = class_pred
511
+ _state.unlabeled_index[label] = pd.Index([])
512
+ labeled_index = _state.database.index
513
+ else:
514
+ labeled_index = _state.database.index.difference(
515
+ _state.unlabeled_index[label])
516
+
517
+ X = subset_features(_state.database, label)
518
+ X_train = X.loc[labeled_index, :]
519
+ ytrain = _state.database.loc[labeled_index, label]
520
+ ebm = ExplainableBoostingClassifier()
521
+ ebm.fit(X_train, ytrain.map(_state.class_to_num[label]))
522
+ _state.models[label] = ebm
523
+ _state.models_params[label] = ebm.__dict__
524
+
525
+ filename = _state.configs["db_filename"]
526
+ file_pre, file_ext = os.path.splitext(filename)
527
+ with open(file_pre + str(_state.num_labels) + _MODEL, 'wb') as _file:
528
+ pkle.dump(_state.models_params, _file, protocol=pkle.HIGHEST_PROTOCOL)
529
+
530
+ _state.local_results[label] = {}
531
+ if _state[label + "_auto"] == "No":
532
+ X = X.loc[_state.unlabeled_index[label], :]
533
+ generate_explanation(X, label, ebm)
534
+
535
+
536
+ def generate_explanation(X, label, model):
537
+ """Create a dict of predictions and explanations of a sample.
538
+
539
+ Make label predictions and per-instance local explanations,
540
+ which are then stored as a dict in _state.local_results.
541
+
542
+ Args:
543
+ X: A set of unlabeled data.
544
+ label: The column name of a label.
545
+ model: A model to predict labels and provide explanations.
546
+ """
547
+ n_samples = X.shape[0]
548
+ n_features = X.shape[1]
549
+
550
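+ # Per-instance explanation records from interpret's local explanation object (one entry per row of X).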
+ localx = model.explain_local(X)._internal_obj['specific']
551
+ ypred = np.array([
552
+ _state.num_to_class[label][localx[j]['perf']['predicted']]
553
+ for j in range(n_samples)
554
+ ])
555
+ _state.predictions.loc[X.index, label] = ypred
556
+ y = _state.database.loc[X.index, label]
557
+
558
+ p = np.array(
559
+ [localx[j]['perf']['predicted_score'] for j in range(n_samples)])
560
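+ # A row whose existing label disagrees with the prediction gets score 0, so label/prediction mismatches are prioritized when sampling.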
+ scores = np.minimum(p, (pd.isnull(y) | (ypred == y)))
561
+
562
+ if _state.mode == "Confidence threshold":
563
+ top_ind = np.where(scores <= _state.threshold)[0]
564
+ else:
565
+ n_samples = np.minimum(_state.n_samples, scores.shape[0] - 1)
566
+ top_ind = np.argpartition(scores, n_samples)[:n_samples]
567
+
568
+ X_ = X.iloc[top_ind, :].copy()
569
+ ypred = ypred[top_ind]
570
+
571
+ id_idx_pair = dict(zip(X_.index, top_ind))
572
+
573
+ try:
574
+ data_by_class = [X_[ypred == c] for c in _state.classes[label]]
575
+ except KeyError:
576
+ return
577
+
578
+ feature_names = X.columns
579
+
580
+ for sgn_data in data_by_class:
581
+ current_dict = _state.local_results[label]
582
+ for j in sgn_data.index:
583
+ localxi = localx[id_idx_pair[j]]
584
+
585
+ if len(_state.classes[label]) == 2:
586
+ feature_contrib = localxi['scores'][:n_features]
587
+ else:
588
+ feature_contrib = [
589
+ localxi['scores'][k][localxi['perf']['predicted']]
590
+ for k in range(n_features)
591
+ ]
592
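+ # 1 / (1 + 1/exp(x)) equals the sigmoid 1 / (1 + exp(-x)); it maps each feature contribution into (0, 1) for the heatmap color scale.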
+ heatmap_data = pd.DataFrame({
593
+ 'features':
594
+ feature_names,
595
+ 'values':
596
+ localxi['values'][:n_features],
597
+ 'scores':
598
+ 1 / (1 + 1 / np.exp(feature_contrib))
599
+ })
600
+ heatmap_data = heatmap_data.astype({
601
+ 'features': str,
602
+ 'values': str,
603
+ 'scores': float
604
+ })
605
+ current_dict[j] = {
606
+ 'actual': localxi['perf']['actual'],
607
+ 'prediction': localxi['perf']['predicted'],
608
+ 'confidence': localxi['perf']['predicted_score'],
609
+ 'data': heatmap_data
610
+ }
611
+
612
+
613
+ if __name__ == "__main__":
614
+ main()
configs.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "db_filename": "NCD_synthetic.csv",
3
+ "sidebar": {
4
+ "num_labels": 4,
5
+ "relabel": "Yes",
6
+ "mode": "Fixed sample size",
7
+ "n_samples": 50,
8
+ "threshold": 0.95
9
+ },
10
+ "input_features": {
11
+ "DM_label": [
12
+ "DM_key",
13
+ "DM_ICD10",
14
+ "DM_drugs",
15
+ "Glucose",
16
+ "HbA1c"
17
+ ],
18
+ "HTN_label": [
19
+ "HTN_key",
20
+ "HTN_ICD10",
21
+ "HTN_drugs",
22
+ "sbp1",
23
+ "dbp1"
24
+ ],
25
+ "CKD_label": [
26
+ "CKD_key",
27
+ "CKD_ICD10",
28
+ "eGFR"
29
+ ],
30
+ "DLP_label": [
31
+ "DLP_key",
32
+ "DLP_ICD10",
33
+ "AGE",
34
+ "LDL-c"
35
+ ]
36
+ }
37
+ }
requirements.txt ADDED
@@ -0,0 +1 @@
1
+ interpret==0.3.2