pike.git / bin / htmlify_docs.lpc

#!/usr/local/bin/ulpc

// Parse BMML (Black Magic Markup Language) to HTML
// Written by Fredrik Hubinette, dark sourceror and inventor of BMML

mapping efuns = all_efuns();
mapping pages = ([]);
mapping short_descs = ([]);
mapping keywords = ([]);
mapping subpages = ([]);

string new_path;

/*
 * Implode an array of strings to an English 'list',
 * i.e. ({"foo","bar","gazonk"}) becomes "foo, bar and gazonk"
 */
string implode_nicely(string *foo)
{
  switch(sizeof(foo))
  {
  case 0: return "";
  case 1: return foo[0];
  default: return foo[0..sizeof(foo)-2]*", "+" and "+foo[-1];
  }
}

/*
 * Make a 'header'
 */
string smallcaps(string foo)
{
  string *ret;
  ret=({"<b>"});
  foreach(explode(foo," "),foo)
  {
    ret+=({"<font size=+1>"+foo[0..0]+"</font><font size=-1>"+foo[1..0x7fffffff]+"</font>"});
  }
  return implode(ret," ")+"</b>";
}

/*
 * convert original path to internal format
 */
string fippel_path(string path)
{
  sscanf(path,"./%s",path);
  return replace(path,"/","_")+".html";
}

string even_more_magic(string block, int indent)
{
  if(-1==search(block,"\t"))
  {
    return replace(block,"\n","<br>\n");
  }else{
    int e,d;
    mixed tmp,tmp2;

    tmp=explode(block,"\n")+({});
    for(e=0;e<sizeof(tmp);e++)
    {
      tmp[e]=explode(tmp[e],"\t");
      if(sscanf(tmp[e][0],"%*[ ]%s",tmp2) && tmp2=="")
      {
        int q;
        for(q=e-1;q>0;q--)
        {
          if(!tmp[q]) continue;

          if(sizeof(tmp[q])>=sizeof(tmp[e]))
          {
            for(d=1;d<sizeof(tmp[e]);d++)
            {
              tmp[q][d]+=" "+tmp[e][d];
            }
            tmp[e]=0;
          }
          break;
        }
      }
    }
    tmp-=({0});

    for(e=0;e<sizeof(tmp);e++)
    {
      tmp[e]=implode(tmp[e]," </td><td> ");
    }

    return "<table border=0 cellpadding=0 cellspacing=0>\n<tr valign=top><td>"+
      implode(tmp,"<br></td></tr>\n<tr valign=top><td>")+
      "<br></td></tr>\n</table>\n";
  }
}

string more_magic(string s, int quote)
{
  int e;
  string *tmp;
  int *ilevel=({0});
  string output="";
  string accumulator="";

  if(!quote && -1==search("\n"+s,"\n ") && -1==search(s,"\t"))
  {
    return s;
  }

#define FLUSH() output+=even_more_magic(accumulator,ilevel[-1]); accumulator=""
#define POP() output+="</dl>"; ilevel=ilevel[0..sizeof(ilevel)-2]

  tmp=explode(s,"\n");
  for(e=0;e<sizeof(tmp);e++)
  {
    string spaces, rest;

    sscanf(tmp[e],"%[ ]%s",spaces,rest);
    if(strlen(spaces) > ilevel[-1])
    {
      FLUSH();
      output+="<dl><dt><dd>";
      ilevel+=({ strlen(spaces) });
    }
    else if(strlen(spaces) < ilevel[-1])
    {
      FLUSH();
      while(strlen(spaces) < ilevel[-1] && strlen(spaces) <= ilevel[-2])
      {
        POP();
      }
    }
    accumulator+=rest+"\n";
  }

  FLUSH();

  while(sizeof(ilevel)>1)
  {
    POP();
  }

  return output;
}

string magic(string s, int quote)
{
  string *ret;
  ret=({});

  foreach(explode(s,"\n\n"),s)
  {
    sscanf(s,"\t%s",s);
    s=replace(s,"\n\t","\n");
    ret += ({ more_magic(s, quote) });
  }

  return implode(ret,"\n<p>");
}
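/*
 * The two regexp objects inherited below are initialized in main() and
 * used by syntax_magic(): "megamagic" replaces an &lt;identifier&gt;
 * placeholder with the identifier in italics, and "lastident" picks out
 * the trailing identifier (the parameter name) in each argument of a
 * prototype line ending in ");".
 */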
inherit "/precompiled/regexp" : lastident;
inherit "/precompiled/regexp" : megamagic;

string syntax_magic(string s)
{
  string *tmp;
  int e;

  while(tmp=megamagic::split(s))
  {
    s=tmp[0]+"<I>"+tmp[1]+"</I>"+tmp[2];
  }

  tmp=explode(s,"\n");
  for(e=0;e<sizeof(tmp);e++)
  {
    string a,b;
    if(sscanf(tmp[e],"%s(%s",a,b) && strlen(b)>1 && b[-1]==';' && b[-2]==')')
    {
      string *tmp2;
      int d;

      tmp2=explode(b[0..strlen(b)-3],",");
      for(d=0;d<sizeof(tmp2);d++)
      {
        string *tmp3;
        // perror("<"+tmp2[d]+">");
        if(tmp3=lastident::split(tmp2[d]))
        {
          tmp2[d]=tmp3[0]+"<I>"+tmp3[1]+"</I>"+tmp3[2];
        }
      }
      tmp[e]=a+"("+implode(tmp2,",")+");";
    }
  }
  s=implode(tmp,"\n");
  return "<tt>"+magic(s,1)+"</tt>";
}

string html_quote(string s)
{
  return replace(s,({"&","<",">"}),({"&amp;","&lt;","&gt;"}));
}

string html_unquote(string s)
{
  return replace(s,({"&amp;","&lt;","&gt;"}),({"&","<",">"}));
}
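/*
 * Convert one BMML page to HTML.  A page starts with "NAME\n\t<name> - <short
 * description>"; parts are separated by a line of '=' characters, and each
 * part is made up of sections ("NAME", "DESCRIPTION", "SYNTAX", "KEYWORDS",
 * "SEE ALSO", ...) that are separated by blank lines and begin with a
 * capitalized header word.
 */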
string convert_page(string path, string fname)
{
  string output, short;
  int headno;
  string cont, section, name, part;

  output="";

  cont=read_bytes(path);

  cont=html_quote(cont);

  if(sscanf(cont,"NAME\n\t%s - %s\n",name,short))
  {
    int partno;

    short_descs[name]=short;

    string *parts=explode(cont,"============================================================================\n");
    for(partno=0;partno<sizeof(parts);partno++)
    {
      string part_name="error";
      string *sections;
      string part;
      int section;

      part=parts[partno];
      if(!strlen(part)) continue;

      sections=explode(part,"\n\n");

      for(section=0;section<sizeof(sections);section++)
      {
        if(!strlen(sections[section]) ||
           sections[section][0] < 'A' ||
           sections[section][0] > 'Z')
        {
          sections[section-1]+="\n\n"+sections[section];
          sections=sections[0..section-1]+sections[section+1..0x7fffffff];
          section--;
        }
      }

      for(headno=0;headno<sizeof(sections);headno++)
      {
        string type, rest;
        mixed a, b;
        sscanf(sections[headno],"%s\n%s",type,rest);

        switch(type)
        {
        case "NAME":
          if(sscanf(rest,"\t%s - %s",part_name,b)!=2)
            perror("Warning NAME section broken!\n");
          rest="\t<tt>"+part_name+"</tt> - "+b;

        case "DESCRIPTION":
        case "NOTA BENE":
        case "BUGS":
          rest=magic(rest,0);
          break;

        default:
          perror("Warning: Unknown header: "+type+".\n");
          rest=magic(rest,0);
          break;

        case "KEYWORDS":
          a=replace(rest,({"\n"," ","\t"}),({"","",""}))/",";
          b=({});
          foreach(a,a)
          {
            // fixme!!
            keywords[a] = ( keywords[a] || ({}) ) + ({ name });
            b+=({ "<a href=index.html#"+a+">"+a+"</a>" });
          }
          rest=implode_nicely(b);
          break;

        case "SEE ALSO":
          rest=replace(rest,({"\n"," ","\t"}),({"","",""}));
          a=rest/",";
          b=({});
          foreach(a,a)
          {
            string tmp;
            tmp=a;
            a=explode(a,"/")[-1];
            if(pages[a])
            {
              b+=({ "<a href="+pages[a]+">" + a + "</a>" });
            }else if(subpages[a]){
              b+=({ "<a href="+subpages[a]+">" + a + "</a>" });
            }else if(subpages[fname+"-&gt;"+a]){
              b+=({ "<a href="+subpages[name+"-&gt;"+a]+">" + a + "</a>" });
            }else{
              perror("Warning, unlinked SEE ALSO: "+a+"\n");
              b+=({ tmp });
            }
          }
          rest=implode_nicely(b);
          break;

        case "SYNTAX":
        case "SYNTAX EXAMPLE":
          rest=syntax_magic(rest);
          break;

        case "EXAMPLES":
        case "EXAMPLE":
        case "DIRECTIVE":
        case "PREPROCESSOR DIRECTIVES":
          rest="<tt>"+magic(rest,1)+"</tt>";
        }

        sections[headno]="<dt>"+
          smallcaps(type)+
          "<dd>\n"+rest+"\n<p>";
      }
      parts[partno]="<dl>\n"+implode(sections,"\n")+"\n</dl>\n";
      if(part_name)
      {
        parts[partno]="<a name="+part_name+">\n"+
          parts[partno]+
          "\n</a>\n";
      }
    }
    output="<html><title>uLPC: "+name+"</title>"+
      implode(parts,"<hr noshade size=1>\n");
  }
  return output;
}

void scanfiles(string path, string fname)
{
  string nf,np;
  nf=convert_page(path, fname);

  if(nf && strlen(nf))
  {
    np=combine_path(new_path,fippel_path(path));
    write("Writing "+np+".\n");
    if(file_size(np)>=0)
      rm(np);
    write_file(np,nf);
  }
}

void scanlinks(string path, string fname)
{
  string cont,name;
  cont=read_bytes(path);
  cont=html_quote(cont);

  if(sscanf(cont,"NAME\n\t%s -",name))
  {
    path=fippel_path(path);
    pages[name]=path;

    int e;
    string *parts=explode(cont,"============================================================================\n");
    for(e=1;e<sizeof(parts);e++)
    {
      string part_name;
      if(sscanf(parts[e],"NAME\n\t%s -",part_name))
      {
        subpages[fname+"-&gt;"+part_name]=path+"#"+part_name;
      }
    }
  }else{
    perror("Warning: not converting "+path+".\n");
  }
}

void traversedir(string path,function fun)
{
  string file;
  foreach(get_dir(path) - ({"CVS","RCS"}),file)
  {
    string tmp;
    if(file[-1]=='~') continue;
    if(file[0]=='#' && file[-1]=='#') continue;
    if(file[0]=='.' && file[1]=='#') continue;

    tmp=path+"/"+file;

    if(file_size(tmp)==-2)
    {
      traversedir(tmp,fun);
    }else{
      fun(tmp,file);
    }
  }
}
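/*
 * Build the top level index.html: a heading with the interpreter version
 * string, one list per keyword, an alphabetical list of all builtin
 * functions (with a warning for any efun that lacks a page), the builtin
 * programs (pages whose names contain a '/'), and finally any remaining
 * pages.
 */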
string mkindex()
{
  string ret;
  string a,b;
  mapping tmp=pages+([]);

  ret="";

  ret+="<H1>"+version()+"</h1>\n";

  ret+="<H1>Keyword lists</H1>\n<dl>\n";
  foreach(sort_array(m_indices(keywords)),b)
  {
    ret+="<a name="+b+">";
    ret+="<dt><h2>"+capitalize(b);
    if(short_descs[b]) ret+=" - "+short_descs[b];
    ret+="</h2><dd>\n";
    ret+="<ul>\n";
    foreach(keywords[b],a)
    {
      ret+="<li><a href="+pages[a]+">"+a+"</a> - "+short_descs[a]+"\n";
      m_delete(tmp,a);
    }
    ret+="</ul></a>\n";
  }
  ret+="</dl>\n";

  ret+="<H1>All builtin functions:</H1>\n<ul>\n";

  foreach(sort_array(m_indices(all_efuns())),a)
  {
    a=html_quote(a);
    if(pages[a])
    {
      ret+="<li><a href="+pages[a]+">"+a+"</a> - "+short_descs[a]+"\n";
    }else{
      perror("Warning: no page for function: "+a+".\n");
    }
    m_delete(tmp,a);
  }
  ret+="</ul>\n";

  ret+="</dl><H1>Builtin programs:</H1>\n<ul>\n";
  foreach(sort_array(m_indices(tmp)),a)
  {
    if(-1 == search(a,"/")) continue;

    ret+="<li><a href="+pages[a]+">"+a+"</a> - "+short_descs[a]+"\n";
    m_delete(tmp,a);
  }

  ret+="</ul>\n";

  ret+="<H1>Other pages</H1>\n<ul>\n";
  foreach(sort_array(m_indices(tmp)),a)
  {
    ret+="<li><a href="+pages[a]+">"+a+"</a> - "+short_descs[a]+"\n";
  }

  ret+="</ul>\n";
  return ret;
}

int main(int argc, string *argv)
{
  string np;

  megamagic::create("^(.*)&lt;([a-z_0-9][a-z_0-9]*)&gt;(.*)$");
  lastident::create("^(.*[^<>a-z_0-9])([a-z_0-9][a-z_0-9]*)([^<>a-z_0-9]*)$");

  write("Scanning links.\n");
  new_path=combine_path(getcwd(),argv[2]);
  cd(argv[1]);
  traversedir(".",scanlinks);
  write("Processing pages.\n");
  traversedir(".",scanfiles);

  write("Making index.\n");
  np=combine_path(new_path,"index.html");
  if(file_size(np)>=0)
    rm(np);
  write_file(np,mkindex());
}
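main() takes the directory of BMML source files as its first argument and the output directory for the generated .html files as its second: it scans every file once for cross-reference targets (scanlinks), converts each page (scanfiles), and then writes index.html. A rough invocation sketch, with doc/ and html/ standing in for whatever directories are actually used:

  ulpc bin/htmlify_docs.lpc doc/ html/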