Here’s my take (no state extensions):
sdn.pl
:- module(sdn, [sdn/2,eval_sdn/2]).
:- dynamic sdn_cache/5.
% sdn(+I, -P): P is a shortest SDN instruction list that prints the
% integer I, over the ops i (increment), s (square), d (decrement)
% and o (output the current value).  0 and 1 are base cases; every
% other program necessarily starts [i,i] (reaching value 2), so the
% search proper starts from accumulator value 2.
sdn(0,[o]) :- !.
sdn(1,[i,o]) :- !.
sdn(I,[i,i|P]) :-
% NOTE(review): the memo table is wiped on every top-level call, so
% results are not reused across queries — presumably deliberate, to
% keep no state between calls; confirm before caching across calls.
retractall(sdn_cache(_,_,_,_,_)),
sdn(I, 2, P, [o], _).
% sdn(+I, +A, -P, +T, -L): memoised wrapper around sdn_/5.
% P is a shortest op list taking the current accumulator value A to
% the target I, built as a difference against the tail T (always [o]
% here); L is the number of ops added in front of T.
sdn(I, A, P, T, L) :-
sdn_cache(I, A, P, T, L),   % cache hit: reuse the stored result
!.
sdn(I, I, T, T, 0) :- !.    % already at the target: emit only the tail
sdn(I, A, P, T, L) :-
sdn_(I, A, P, T, L),
asserta(sdn_cache(I, A, P, T, L)).   % memoise the computed result
% sdn_(+I, +A, -P, +T, -L): uncached search step.
% Below the target (A < I) the useful moves are increment (B = A+1)
% and square (C = A*A).  Once the value is ABOVE the target, both i
% and s move it further away, so the only way back is one d per unit
% of overshoot: a square branch costs exactly 1 + (C - I) ops.  The
% increment branch costs at most 1 + (I - B) ops (all-increments
% bound), so when C - I > I - B the square branch cannot win and is
% pruned without recursing into it.
sdn_(I, A, P, T, L) :-
A < I,
!,
C is A * A,
B is A + 1,
( C - I > I - B
-> sdn(I, B, P0, T, L0),
P = [i|P0],
L is L0 + 1
; sdn(I, C, P1, T, L1),
sdn(I, B, P0, T, L0),
( L0 < L1
-> P = [i|P0],
L is L0 + 1
; P = [s|P1],   % on a tie the square branch is preferred
L is L1 + 1
)
).
% Above the target (A > I here; A =:= I is caught by the caller's
% base clause), the only useful move is a decrement.
sdn_(I, A, [d|P], T, L) :-
B is A - 1,
sdn(I, B, P, T, L0),
L is L0 + 1.
% eval_sdn(+Program, -Value): run an SDN program with the
% accumulator starting at 0; Value is the final accumulator.
% Each o instruction prints the current value as a side effect.
eval_sdn(Ops, Value) :-
eval_sdn(Ops, 0, Value).
% eval_sdn(+Ops, +Acc0, -Acc): thread the accumulator through Ops,
% applying one instruction at a time.
eval_sdn([], Acc, Acc).
eval_sdn([Op|Rest], Acc0, Acc) :-
eval_sdn_(Op, Acc0, Acc1),
eval_sdn(Rest, Acc1, Acc).
% eval_sdn_(+Op, +V0, -V): apply a single SDN instruction to the
% current value V0, giving V.  The o instruction leaves the value
% unchanged and prints it.
eval_sdn_(i, V0, V) :- V is V0 + 1.   % increment
eval_sdn_(s, V0, V) :- V is V0 * V0.  % square
eval_sdn_(d, V0, V) :- V is V0 - 1.   % decrement
eval_sdn_(o, V, V) :- writeln(V).     % output, value unchanged
This gives:
?- forall(between(0,255,I),(time(sdn(I,P)),eval_sdn(P,I))).
% -1 inferences, 0.000 CPU in 0.000 seconds (47% CPU, -125000 Lips)
0
% -1 inferences, 0.000 CPU in 0.000 seconds (55% CPU, -166667 Lips)
1
% 2 inferences, 0.003 CPU in 0.003 seconds (98% CPU, 688 Lips)
2
% 10 inferences, 0.000 CPU in 0.000 seconds (74% CPU, 588235 Lips)
3
% 23 inferences, 0.000 CPU in 0.000 seconds (81% CPU, 1277778 Lips)
4
% 31 inferences, 0.000 CPU in 0.000 seconds (77% CPU, 1550000 Lips)
5
% 39 inferences, 0.000 CPU in 0.000 seconds (80% CPU, 1258065 Lips)
6
% 64 inferences, 0.000 CPU in 0.000 seconds (86% CPU, 1684211 Lips)
7
% 65 inferences, 0.000 CPU in 0.000 seconds (82% CPU, 1805556 Lips)
8
% 67 inferences, 0.000 CPU in 0.000 seconds (85% CPU, 1763158 Lips)
9
% 75 inferences, 0.000 CPU in 0.000 seconds (86% CPU, 1829268 Lips)
10
% 118 inferences, 0.000 CPU in 0.000 seconds (86% CPU, 1843750 Lips)
11
% 120 inferences, 0.000 CPU in 0.000 seconds (84% CPU, 1875000 Lips)
12
% 123 inferences, 0.000 CPU in 0.000 seconds (87% CPU, 1921875 Lips)
13
% 124 inferences, 0.000 CPU in 0.000 seconds (81% CPU, 1850746 Lips)
14
% 126 inferences, 0.000 CPU in 0.000 seconds (85% CPU, 947368 Lips)
15
% 187 inferences, 0.000 CPU in 0.000 seconds (86% CPU, 1255034 Lips)
16
% 189 inferences, 0.000 CPU in 0.000 seconds (86% CPU, 1235294 Lips)
17
% 191 inferences, 0.000 CPU in 0.000 seconds (87% CPU, 1193750 Lips)
18
% 193 inferences, 0.000 CPU in 0.000 seconds (86% CPU, 1054645 Lips)
19
% 195 inferences, 0.000 CPU in 0.000 seconds (87% CPU, 1189024 Lips)
20
% 197 inferences, 0.000 CPU in 0.000 seconds (86% CPU, 1223602 Lips)
21
% 269 inferences, 0.000 CPU in 0.000 seconds (88% CPU, 1154506 Lips)
22
% 271 inferences, 0.000 CPU in 0.000 seconds (87% CPU, 1575581 Lips)
23
% 273 inferences, 0.000 CPU in 0.000 seconds (87% CPU, 1605882 Lips)
24
% 275 inferences, 0.000 CPU in 0.000 seconds (87% CPU, 1608187 Lips)
25
% 277 inferences, 0.000 CPU in 0.000 seconds (87% CPU, 1573864 Lips)
26
% 279 inferences, 0.000 CPU in 0.000 seconds (88% CPU, 989362 Lips)
27
% 281 inferences, 0.000 CPU in 0.000 seconds (88% CPU, 1185654 Lips)
28
% 366 inferences, 0.000 CPU in 0.000 seconds (88% CPU, 1224080 Lips)
29
% 368 inferences, 0.000 CPU in 0.000 seconds (88% CPU, 1210526 Lips)
30
% 370 inferences, 0.000 CPU in 0.000 seconds (88% CPU, 1213115 Lips)
31
% 371 inferences, 0.000 CPU in 0.000 seconds (88% CPU, 1466403 Lips)
32
% 373 inferences, 0.000 CPU in 0.000 seconds (88% CPU, 1457031 Lips)
33
% 375 inferences, 0.000 CPU in 0.000 seconds (88% CPU, 1404494 Lips)
34
% 377 inferences, 0.000 CPU in 0.000 seconds (89% CPU, 1406716 Lips)
35
% 379 inferences, 0.000 CPU in 0.000 seconds (89% CPU, 1403704 Lips)
36
% 476 inferences, 0.000 CPU in 0.000 seconds (89% CPU, 1408284 Lips)
37
% 478 inferences, 0.000 CPU in 0.000 seconds (89% CPU, 1377522 Lips)
38
% 480 inferences, 0.000 CPU in 0.000 seconds (90% CPU, 1445783 Lips)
39
% 482 inferences, 0.001 CPU in 0.001 seconds (90% CPU, 950690 Lips)
40
% 484 inferences, 0.000 CPU in 0.000 seconds (90% CPU, 1301075 Lips)
41
% 486 inferences, 0.000 CPU in 0.000 seconds (90% CPU, 1302949 Lips)
42
% 488 inferences, 0.000 CPU in 0.000 seconds (90% CPU, 1277487 Lips)
43
% 489 inferences, 0.000 CPU in 0.000 seconds (90% CPU, 1124138 Lips)
44
% 491 inferences, 0.000 CPU in 0.000 seconds (90% CPU, 1252551 Lips)
45
% 600 inferences, 0.000 CPU in 0.000 seconds (89% CPU, 1360544 Lips)
46
% 602 inferences, 0.001 CPU in 0.001 seconds (96% CPU, 517627 Lips)
47
% 604 inferences, 0.000 CPU in 0.000 seconds (88% CPU, 1830303 Lips)
48
% 606 inferences, 0.000 CPU in 0.000 seconds (89% CPU, 1792899 Lips)
49
% 608 inferences, 0.000 CPU in 0.000 seconds (89% CPU, 1767442 Lips)
50
% 610 inferences, 0.000 CPU in 0.000 seconds (89% CPU, 1502463 Lips)
51
% 612 inferences, 0.001 CPU in 0.001 seconds (89% CPU, 976077 Lips)
52
% 614 inferences, 0.001 CPU in 0.001 seconds (90% CPU, 1060449 Lips)
53
% 616 inferences, 0.000 CPU in 0.001 seconds (90% CPU, 1299578 Lips)
54
% 618 inferences, 0.000 CPU in 0.000 seconds (89% CPU, 1457547 Lips)
55
% 739 inferences, 0.001 CPU in 0.001 seconds (90% CPU, 1282986 Lips)
56
% 741 inferences, 0.001 CPU in 0.001 seconds (90% CPU, 1224793 Lips)
57
% 742 inferences, 0.001 CPU in 0.001 seconds (90% CPU, 1257627 Lips)
58
% 744 inferences, 0.001 CPU in 0.001 seconds (90% CPU, 1461690 Lips)
59
% 746 inferences, 0.001 CPU in 0.001 seconds (90% CPU, 1404896 Lips)
60
% 748 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 1435701 Lips)
61
% 750 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 1409774 Lips)
62
% 752 inferences, 0.001 CPU in 0.002 seconds (96% CPU, 516838 Lips)
63
% 754 inferences, 0.000 CPU in 0.000 seconds (88% CPU, 1830097 Lips)
64
% 756 inferences, 0.001 CPU in 0.001 seconds (88% CPU, 1138554 Lips)
65
% 758 inferences, 0.001 CPU in 0.001 seconds (88% CPU, 1191824 Lips)
66
% 891 inferences, 0.001 CPU in 0.001 seconds (90% CPU, 1197581 Lips)
67
% 893 inferences, 0.001 CPU in 0.001 seconds (90% CPU, 1242003 Lips)
68
% 895 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 1110422 Lips)
69
% 897 inferences, 0.001 CPU in 0.001 seconds (90% CPU, 1220408 Lips)
70
% 900 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 1444623 Lips)
71
% 901 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 1427892 Lips)
72
% 903 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 1413146 Lips)
73
% 904 inferences, 0.002 CPU in 0.002 seconds (95% CPU, 591623 Lips)
74
% 906 inferences, 0.001 CPU in 0.001 seconds (89% CPU, 1709434 Lips)
75
% 908 inferences, 0.001 CPU in 0.001 seconds (89% CPU, 1709981 Lips)
76
% 910 inferences, 0.001 CPU in 0.001 seconds (89% CPU, 1059371 Lips)
77
% 912 inferences, 0.001 CPU in 0.001 seconds (89% CPU, 1008850 Lips)
78
% 1,057 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 1183651 Lips)
79
% 1,059 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 1159912 Lips)
80
% 1,061 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 1128723 Lips)
81
% 1,063 inferences, 0.002 CPU in 0.002 seconds (95% CPU, 668553 Lips)
82
% 1,065 inferences, 0.001 CPU in 0.001 seconds (90% CPU, 1475069 Lips)
83
% 1,067 inferences, 0.001 CPU in 0.001 seconds (90% CPU, 1459644 Lips)
84
% 1,069 inferences, 0.001 CPU in 0.001 seconds (90% CPU, 1436828 Lips)
85
% 1,071 inferences, 0.001 CPU in 0.001 seconds (90% CPU, 1465116 Lips)
86
% 1,073 inferences, 0.001 CPU in 0.001 seconds (90% CPU, 1546110 Lips)
87
% 1,075 inferences, 0.001 CPU in 0.001 seconds (90% CPU, 1535714 Lips)
88
% 1,077 inferences, 0.002 CPU in 0.002 seconds (95% CPU, 476549 Lips)
89
% 1,079 inferences, 0.001 CPU in 0.001 seconds (90% CPU, 1259043 Lips)
90
% 1,081 inferences, 0.001 CPU in 0.001 seconds (90% CPU, 1342857 Lips)
91
% 1,237 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 1211557 Lips)
92
% 1,239 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 1333692 Lips)
93
% 1,241 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 1334409 Lips)
94
% 1,243 inferences, 0.002 CPU in 0.002 seconds (95% CPU, 739001 Lips)
95
% 1,245 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 1335837 Lips)
96
% 1,247 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 1269857 Lips)
97
% 1,249 inferences, 0.001 CPU in 0.001 seconds (92% CPU, 1288958 Lips)
98
% 1,251 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 1470035 Lips)
99
% 1,253 inferences, 0.002 CPU in 0.002 seconds (94% CPU, 742739 Lips)
100
% 1,255 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 946456 Lips)
101
% 1,257 inferences, 0.001 CPU in 0.001 seconds (90% CPU, 1161738 Lips)
102
% 1,259 inferences, 0.001 CPU in 0.001 seconds (90% CPU, 1151876 Lips)
103
% 1,261 inferences, 0.001 CPU in 0.001 seconds (90% CPU, 1227848 Lips)
104
% 1,263 inferences, 0.002 CPU in 0.002 seconds (94% CPU, 794340 Lips)
105
% 1,432 inferences, 0.001 CPU in 0.001 seconds (92% CPU, 1175698 Lips)
106
% 1,434 inferences, 0.001 CPU in 0.001 seconds (89% CPU, 1211149 Lips)
107
% 1,436 inferences, 0.001 CPU in 0.001 seconds (92% CPU, 1223169 Lips)
108
% 1,438 inferences, 0.002 CPU in 0.002 seconds (94% CPU, 719360 Lips)
109
% 1,440 inferences, 0.001 CPU in 0.001 seconds (92% CPU, 1226576 Lips)
110
% 1,442 inferences, 0.001 CPU in 0.001 seconds (92% CPU, 1209732 Lips)
111
% 1,443 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 1300000 Lips)
112
% 1,445 inferences, 0.003 CPU in 0.003 seconds (94% CPU, 543846 Lips)
113
% 1,447 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 1153907 Lips)
114
% 1,449 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 1231096 Lips)
115
% 1,451 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 1384542 Lips)
116
% 1,453 inferences, 0.002 CPU in 0.002 seconds (94% CPU, 796601 Lips)
117
% 1,455 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 1463783 Lips)
118
% 1,457 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 1429833 Lips)
119
% 1,459 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 1321558 Lips)
120
% 1,640 inferences, 0.002 CPU in 0.002 seconds (95% CPU, 719930 Lips)
121
% 1,642 inferences, 0.001 CPU in 0.002 seconds (92% CPU, 1100536 Lips)
122
% 1,644 inferences, 0.001 CPU in 0.002 seconds (92% CPU, 1171775 Lips)
123
% 1,646 inferences, 0.002 CPU in 0.002 seconds (94% CPU, 781206 Lips)
124
% 1,648 inferences, 0.002 CPU in 0.002 seconds (92% CPU, 785510 Lips)
125
% 1,650 inferences, 0.002 CPU in 0.002 seconds (92% CPU, 989802 Lips)
126
% 1,652 inferences, 0.002 CPU in 0.002 seconds (94% CPU, 727113 Lips)
127
% 1,654 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 1291179 Lips)
128
% 1,656 inferences, 0.001 CPU in 0.002 seconds (90% CPU, 1212299 Lips)
129
% 1,658 inferences, 0.002 CPU in 0.002 seconds (94% CPU, 849821 Lips)
130
% 1,660 inferences, 0.001 CPU in 0.002 seconds (92% CPU, 1199422 Lips)
131
% 1,662 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 1278462 Lips)
132
% 1,664 inferences, 0.002 CPU in 0.002 seconds (94% CPU, 800000 Lips)
133
% 1,665 inferences, 0.001 CPU in 0.001 seconds (92% CPU, 1306907 Lips)
134
% 1,667 inferences, 0.001 CPU in 0.001 seconds (91% CPU, 1250563 Lips)
135
% 1,669 inferences, 0.002 CPU in 0.002 seconds (94% CPU, 879347 Lips)
136
% 1,862 inferences, 0.003 CPU in 0.003 seconds (92% CPU, 734227 Lips)
137
% 1,864 inferences, 0.002 CPU in 0.002 seconds (93% CPU, 910601 Lips)
138
% 1,866 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 713849 Lips)
139
% 1,868 inferences, 0.002 CPU in 0.002 seconds (92% CPU, 1125301 Lips)
140
% 1,870 inferences, 0.002 CPU in 0.002 seconds (92% CPU, 1090379 Lips)
141
% 1,872 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 694105 Lips)
142
% 1,874 inferences, 0.002 CPU in 0.002 seconds (92% CPU, 1067198 Lips)
143
% 1,876 inferences, 0.002 CPU in 0.002 seconds (93% CPU, 1100939 Lips)
144
% 1,878 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 704955 Lips)
145
% 1,880 inferences, 0.002 CPU in 0.002 seconds (93% CPU, 1150551 Lips)
146
% 1,882 inferences, 0.002 CPU in 0.002 seconds (92% CPU, 1149664 Lips)
147
% 1,884 inferences, 0.002 CPU in 0.003 seconds (95% CPU, 787625 Lips)
148
% 1,886 inferences, 0.003 CPU in 0.003 seconds (92% CPU, 731008 Lips)
149
% 1,888 inferences, 0.002 CPU in 0.002 seconds (93% CPU, 912959 Lips)
150
% 1,890 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 706542 Lips)
151
% 1,892 inferences, 0.002 CPU in 0.002 seconds (92% CPU, 1092379 Lips)
152
% 1,894 inferences, 0.002 CPU in 0.002 seconds (93% CPU, 1073696 Lips)
153
% 2,099 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 685500 Lips)
154
% 2,101 inferences, 0.002 CPU in 0.002 seconds (93% CPU, 1001907 Lips)
155
% 2,103 inferences, 0.003 CPU in 0.003 seconds (94% CPU, 763894 Lips)
156
% 2,105 inferences, 0.002 CPU in 0.002 seconds (92% CPU, 1067444 Lips)
157
% 2,106 inferences, 0.003 CPU in 0.003 seconds (94% CPU, 792922 Lips)
158
% 2,108 inferences, 0.002 CPU in 0.002 seconds (93% CPU, 1137615 Lips)
159
% 2,110 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 822612 Lips)
160
% 2,112 inferences, 0.003 CPU in 0.003 seconds (93% CPU, 727523 Lips)
161
% 2,114 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 654692 Lips)
162
% 2,116 inferences, 0.002 CPU in 0.002 seconds (93% CPU, 1073022 Lips)
163
% 2,118 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 835503 Lips)
164
% 2,120 inferences, 0.002 CPU in 0.002 seconds (93% CPU, 1143474 Lips)
165
% 2,122 inferences, 0.002 CPU in 0.003 seconds (95% CPU, 867539 Lips)
166
% 2,124 inferences, 0.002 CPU in 0.002 seconds (93% CPU, 1130990 Lips)
167
% 2,126 inferences, 0.002 CPU in 0.003 seconds (94% CPU, 869530 Lips)
168
% 2,128 inferences, 0.002 CPU in 0.002 seconds (93% CPU, 1107756 Lips)
169
% 2,130 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 832682 Lips)
170
% 2,132 inferences, 0.002 CPU in 0.002 seconds (93% CPU, 1144391 Lips)
171
% 2,349 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 828279 Lips)
172
% 2,351 inferences, 0.003 CPU in 0.004 seconds (93% CPU, 722717 Lips)
173
% 2,353 inferences, 0.004 CPU in 0.004 seconds (94% CPU, 652343 Lips)
174
% 2,355 inferences, 0.002 CPU in 0.003 seconds (93% CPU, 978803 Lips)
175
% 2,357 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 720575 Lips)
176
% 2,359 inferences, 0.002 CPU in 0.003 seconds (93% CPU, 955835 Lips)
177
% 2,361 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 733686 Lips)
178
% 2,363 inferences, 0.002 CPU in 0.002 seconds (93% CPU, 1049290 Lips)
179
% 2,365 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 790441 Lips)
180
% 2,367 inferences, 0.002 CPU in 0.002 seconds (93% CPU, 1103497 Lips)
181
% 2,369 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 875786 Lips)
182
% 2,371 inferences, 0.002 CPU in 0.002 seconds (93% CPU, 1167980 Lips)
183
% 2,372 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 868546 Lips)
184
% 2,374 inferences, 0.002 CPU in 0.002 seconds (93% CPU, 1140798 Lips)
185
% 2,376 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 847663 Lips)
186
% 2,378 inferences, 0.002 CPU in 0.002 seconds (93% CPU, 1166258 Lips)
187
% 2,380 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 815627 Lips)
188
% 2,382 inferences, 0.002 CPU in 0.002 seconds (94% CPU, 1052120 Lips)
189
% 2,384 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 820937 Lips)
190
% 2,613 inferences, 0.002 CPU in 0.003 seconds (93% CPU, 1082436 Lips)
191
% 2,615 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 799694 Lips)
192
% 2,617 inferences, 0.002 CPU in 0.003 seconds (94% CPU, 1047638 Lips)
193
% 2,619 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 805846 Lips)
194
% 2,621 inferences, 0.002 CPU in 0.002 seconds (94% CPU, 1195712 Lips)
195
% 2,623 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 903548 Lips)
196
% 2,625 inferences, 0.002 CPU in 0.002 seconds (94% CPU, 1291831 Lips)
197
% 2,627 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 967587 Lips)
198
% 2,629 inferences, 0.002 CPU in 0.002 seconds (94% CPU, 1332489 Lips)
199
% 2,631 inferences, 0.004 CPU in 0.004 seconds (96% CPU, 684978 Lips)
200
% 2,633 inferences, 0.003 CPU in 0.003 seconds (94% CPU, 1039069 Lips)
201
% 2,635 inferences, 0.003 CPU in 0.003 seconds (94% CPU, 800912 Lips)
202
% 2,637 inferences, 0.002 CPU in 0.003 seconds (94% CPU, 1118795 Lips)
203
% 2,639 inferences, 0.003 CPU in 0.004 seconds (95% CPU, 791304 Lips)
204
% 2,641 inferences, 0.002 CPU in 0.003 seconds (94% CPU, 1100875 Lips)
205
% 2,643 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 845219 Lips)
206
% 2,645 inferences, 0.002 CPU in 0.002 seconds (94% CPU, 1281492 Lips)
207
% 2,647 inferences, 0.003 CPU in 0.003 seconds (96% CPU, 869294 Lips)
208
% 2,649 inferences, 0.002 CPU in 0.002 seconds (94% CPU, 1306213 Lips)
209
% 2,651 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 937412 Lips)
210
% 2,892 inferences, 0.002 CPU in 0.003 seconds (94% CPU, 1176566 Lips)
211
% 2,893 inferences, 0.003 CPU in 0.004 seconds (96% CPU, 838551 Lips)
212
% 2,895 inferences, 0.003 CPU in 0.004 seconds (93% CPU, 852725 Lips)
213
% 2,897 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 942420 Lips)
214
% 2,899 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 986390 Lips)
215
% 2,901 inferences, 0.003 CPU in 0.003 seconds (94% CPU, 1030551 Lips)
216
% 2,903 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 933140 Lips)
217
% 2,905 inferences, 0.003 CPU in 0.004 seconds (95% CPU, 830237 Lips)
218
% 2,907 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 1044932 Lips)
219
% 2,909 inferences, 0.004 CPU in 0.004 seconds (95% CPU, 829957 Lips)
220
% 2,911 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 904318 Lips)
221
% 2,913 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 967774 Lips)
222
% 2,915 inferences, 0.003 CPU in 0.003 seconds (93% CPU, 910084 Lips)
223
% 2,917 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 969425 Lips)
224
% 2,919 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 1006205 Lips)
225
% 2,921 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 1016000 Lips)
226
% 2,923 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 1026334 Lips)
227
% 2,925 inferences, 0.003 CPU in 0.003 seconds (94% CPU, 903335 Lips)
228
% 2,927 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 1023785 Lips)
229
% 2,929 inferences, 0.003 CPU in 0.004 seconds (95% CPU, 837815 Lips)
230
% 2,931 inferences, 0.004 CPU in 0.004 seconds (94% CPU, 833855 Lips)
231
% 3,184 inferences, 0.004 CPU in 0.004 seconds (95% CPU, 903519 Lips)
232
% 3,186 inferences, 0.004 CPU in 0.004 seconds (94% CPU, 756590 Lips)
233
% 3,188 inferences, 0.004 CPU in 0.004 seconds (95% CPU, 870800 Lips)
234
% 3,190 inferences, 0.004 CPU in 0.004 seconds (94% CPU, 837270 Lips)
235
% 3,193 inferences, 0.003 CPU in 0.004 seconds (95% CPU, 939118 Lips)
236
% 3,195 inferences, 0.004 CPU in 0.004 seconds (94% CPU, 873428 Lips)
237
% 3,198 inferences, 0.003 CPU in 0.004 seconds (95% CPU, 948680 Lips)
238
% 3,199 inferences, 0.004 CPU in 0.004 seconds (95% CPU, 850120 Lips)
239
% 3,201 inferences, 0.003 CPU in 0.004 seconds (95% CPU, 927826 Lips)
240
% 3,203 inferences, 0.004 CPU in 0.004 seconds (94% CPU, 855731 Lips)
241
% 3,204 inferences, 0.004 CPU in 0.004 seconds (95% CPU, 871363 Lips)
242
% 3,206 inferences, 0.003 CPU in 0.004 seconds (95% CPU, 959019 Lips)
243
% 3,208 inferences, 0.003 CPU in 0.003 seconds (95% CPU, 982843 Lips)
244
% 3,210 inferences, 0.004 CPU in 0.005 seconds (94% CPU, 742884 Lips)
245
% 3,212 inferences, 0.003 CPU in 0.004 seconds (95% CPU, 941383 Lips)
246
% 3,214 inferences, 0.004 CPU in 0.005 seconds (95% CPU, 741407 Lips)
247
% 3,216 inferences, 0.003 CPU in 0.004 seconds (95% CPU, 921226 Lips)
248
% 3,218 inferences, 0.004 CPU in 0.004 seconds (94% CPU, 837803 Lips)
249
% 3,220 inferences, 0.004 CPU in 0.004 seconds (95% CPU, 796635 Lips)
250
% 3,222 inferences, 0.004 CPU in 0.004 seconds (95% CPU, 864966 Lips)
251
% 3,224 inferences, 0.003 CPU in 0.004 seconds (95% CPU, 950472 Lips)
252
% 3,226 inferences, 0.004 CPU in 0.004 seconds (95% CPU, 897857 Lips)
253
% 3,491 inferences, 0.004 CPU in 0.004 seconds (96% CPU, 904404 Lips)
254
% 3,493 inferences, 0.005 CPU in 0.005 seconds (95% CPU, 771934 Lips)
255
true.