#pike __REAL_VERSION__ 
constant description = "Runs some built-in Pike benchmarks.";
constant help = #"
Benchmarks Pike with %d built-in benchmark tests.
Arguments: 
 
-h, --help 
  Shows this help text. 
 
-l, --list 
  Shows a list of available tests. 
 
-s<number>, --max-seconds=<number>
  Runs each test for at most <number> seconds, rounded up to the
  closest complete test. Defaults to 3.
 
-t<glob>[,<glob>...], --tests=<glob>[,<glob>...] 
  Only run the specified tests. 
 
--json, -j
  Output the result as JSON instead of human-readable text.
 
--compare=<file>, -c <file>
  Read a result previously created by saving the output of --json and
  print the relative results.
"; 
 
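// Typical usage (illustrative only; assumes this file is installed as the
// "benchmark" standalone tool, i.e. run as "pike -x benchmark"):
//
//   pike -x benchmark --json > baseline.json      save a baseline
//   pike -x benchmark --compare=baseline.json     print change vs. the baseline
//   pike -x benchmark -t '<glob>'                 run only matching tests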
 
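// Pad the string a out to width characters with a " ." dot leader.
// With al set the text is left-aligned (text first, dots after),
// otherwise it is right-aligned (dots first, text last).
// The odd argument is currently unused.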
string dot( string a, int width, bool al, bool odd ) 
{ 
  string pre="",post=""; 
  int wanted = (width-strlen(a)); 
  string pad = " ."*(wanted/2+1); 
  if( al ) 
    return pre+a+" "+pad[wanted&1..wanted-1-!(wanted&1)]+post; 
  return pre+pad[1..wanted-1]+" "+a+post; 
} 
 
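// Pick an ANSI colour for a relative change given in percent:
// bright red above +20%, bright yellow above +10%,
// bright green below -10%, and no colour otherwise.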
string color( float pct ) 
{ 
  if( pct > 0 ) 
  { 
    if( pct > 20 ) 
      return "\e[31;1m"; 
    if( pct > 10 ) 
      return "\e[33;1m"; 
    return ""; 
  } 
  if( pct < -10 ) 
    return  "\e[32;1m" ; 
  return ""; 
} 
 
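// Runtime state filled in by main(): the available tests, the output
// mode, any comparison data loaded with --compare and the test selection.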
mapping(string:Tools.Shoot.Test) tests; 
bool json; 
mapping comparison; 
int seconds_per_test = 3; 
array(string) test_globs = ({"*"}); 
 
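// Parse the command line, print the result table header and start
// run_tests() in its own thread; returns -1 to keep the backend running.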
int main(int num, array(string) args) 
{ 
  tests = Tools.Shoot.tests(); 
  foreach(Getopt.find_all_options(args, ({ 
     ({ "help",    Getopt.NO_ARG,  "-h,--help"/"," }), 
     ({ "maxsec",  Getopt.HAS_ARG, "-s,--max-seconds"/"," }), 
     ({ "tests",   Getopt.HAS_ARG, "-t,--tests"/"," }), 
     ({ "json",    Getopt.NO_ARG,  "-j,--json"/"," }), 
     ({ "compare", Getopt.HAS_ARG, "-c,--compare"/"," }), 
     ({ "list",    Getopt.NO_ARG,  "-l,--list"/"," }), 
   })), array opt) 
   { 
    switch(opt[0]) 
    { 
      case "json": 
        json = true; 
        break; 
      case "compare": 
        /* Convenience: When using make benchmark there will be some 
           junk output before the actual json. 
        */ 
        string data = Stdio.read_file( opt[1] ); 
        if( !data ) 
          exit(1,"Failed to read comparison file %q\n", 
               combine_path( getcwd(), opt[1] )); 
        if( sscanf( data, "%*[^{]{%s", data ) ) 
          data = "{"+data; 
        comparison = Standards.JSON.decode( data ); 
        break; 
      case "help": 
        write(help, sizeof(tests)); 
        write("\nAvailable tests:\n%{    %s\n%}", sort(indices(tests))); 
        return 0; 
      case "maxsec": 
        seconds_per_test = (int)opt[1]; 
        break; 
      case "tests": 
        test_globs = opt[1] / ","; 
        break; 
      case "list": 
        write("Available tests:\n%{  %s\n%}", 
              sort(indices(tests)-({"Overhead"}))); 
        return 0; 
    } 
  } 
 
  if( json ) 
    write("{\n"); 
  else if( !comparison ) 
    write("-"*59+"\n%-40s%19s\n"+"-"*59+"\n", 
          "Test","Result"); 
  else 
    write("-"*65+"\n%-40s%18s%7s\n"+"-"*65+"\n", 
          "Test","Result","Change"); 
 
  call_out(Thread.Thread, 0, run_tests); 
  return -1; 
} 
 
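// Run the selected benchmarks: first the "Overhead" test to estimate the
// per-iteration overhead, then every test matching the --tests globs for
// up to seconds_per_test seconds each, printing the results as JSON, as a
// comparison against a previous --json run, or as a plain table.
// Exits the process when done.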
void run_tests() 
{ 
  mixed err = catch { 
   /* Run overhead check first. */ 
   float overhead_time; 
   array(string) to_run = glob(test_globs, sort(indices(tests)-({"Overhead"})));
   mapping res = Tools.Shoot.run( tests["Overhead"], 1, 0.0 ); 
   float total_pct; 
   int n_tests; 
   overhead_time = res->time / res->n; 
   bool odd; 
   bool isatty = Stdio.Terminfo.is_tty(); 
 
   foreach (to_run; int i; string id) 
   { 
     n_tests++; 
     res = Tools.Shoot.run( tests[id], seconds_per_test, overhead_time ); 
 
     if( json ) 
     { 
       if( comparison ) 
       { 
         if( int on = comparison[id]->n_over_time ) 
         { 
           res->delta  = res->n_over_time - on; 
           res->delta_pct = res->delta*100.0 / on; 
         } 
       } 
       write( "%s%-40s", (i?",\n  ":"  "),"\""+id+"\":" ); 
       write( Standards.JSON.encode( res ) ); 
     } 
     else if( comparison ) 
     { 
       int on = comparison[id]->n_over_time; 
       if( !on ) 
         write( dot(res->readable,19,false,odd)+"\n"); 
       else 
       { 
         int diff = res->n_over_time - on; 
         float pct = diff*100.0 / on; 
         total_pct += pct; 
         if( isatty ) 
           write( color( pct ) ); 
         write("%42s%s %5.1f%%\n", 
               dot(id,42,true,odd=!odd), 
               dot(res->readable,16,false,odd), 
               pct); 
         if( isatty ) write( "\e[0m" ); 
       } 
     } 
     else 
     { 
       write(dot(id,42,true,odd=!odd) + 
             dot(res->readable,17,false,odd)+"\n"); 
     } 
   } 
   if( json ) 
     write( "\n}\n"); 
   else if( comparison ) 
   { 
     write("-"*65+"\n"+ 
           " "*40+"%24.1f%%\n"+ 
           "-"*65+"\n", 
           total_pct / n_tests); 
   } 
   else 
     write("-"*59+"\n"); 
    }; 
  if( err ) 
  { 
    write("\n"+describe_backtrace(err)); 
    exit(1); 
  } 
  exit(0); 
}