wikiheaders.pl 66 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802
#!/usr/bin/perl -w

use warnings;
use strict;
use File::Path;
use Text::Wrap;

# Let long unbreakable tokens (URLs, etc.) overflow the wrap column instead
# of being split mid-token.
$Text::Wrap::huge = 'overflow';

# Project configuration defaults. Most of these can be overridden by the
# options file (see '--options=' handling below) or command-line arguments.
my $projectfullname = 'Simple Directmedia Layer';
my $projectshortname = 'SDL';
my $wikisubdir = '';
my $incsubdir = 'include';
my $readmesubdir = undef;
my $apiprefixregex = undef;
my $versionfname = 'include/SDL_version.h';
my $versionmajorregex = '\A\#define\s+SDL_MAJOR_VERSION\s+(\d+)\Z';
my $versionminorregex = '\A\#define\s+SDL_MINOR_VERSION\s+(\d+)\Z';
my $versionpatchregex = '\A\#define\s+SDL_PATCHLEVEL\s+(\d+)\Z';
my $mainincludefname = 'SDL.h';
my $selectheaderregex = '\ASDL.*?\.h\Z';
my $projecturl = 'https://libsdl.org/';
my $wikiurl = 'https://wiki.libsdl.org';
my $bugreporturl = 'https://github.com/libsdl-org/sdlwiki/issues/new';
my $srcpath = undef;   # path to the source code git clone (first bare argument).
my $wikipath = undef;  # path to the wiki git clone (second bare argument).
my $wikireadmesubdir = 'README';
my $warn_about_missing = 0;
my $copy_direction = 0;  # 1 == to headers, -1 == to wiki, -2 == to manpages (see @ARGV parsing).
my $optionsfname = undef;
my $wikipreamble = undef;
my $wikiheaderfiletext = 'Defined in %fname%';
my $manpageheaderfiletext = 'Defined in %fname%';
my $changeformat = undef;
my $manpath = undef;
my $gitrev = undef;
# Parse command-line arguments. Flags may appear anywhere; the first two
# bare (non-option) arguments become the source path and wiki path, in order.
foreach (@ARGV) {
    $warn_about_missing = 1, next if $_ eq '--warn-about-missing';
    $copy_direction = 1, next if $_ eq '--copy-to-headers';
    $copy_direction = 1, next if $_ eq '--copy-to-header';
    $copy_direction = -1, next if $_ eq '--copy-to-wiki';
    $copy_direction = -2, next if $_ eq '--copy-to-manpages';
    if (/\A--options=(.*)\Z/) {
        $optionsfname = $1;
        next;
    } elsif (/\A--changeformat=(.*)\Z/) {
        $changeformat = $1;
        next;
    } elsif (/\A--manpath=(.*)\Z/) {
        $manpath = $1;
        next;
    } elsif (/\A--rev=(.*)\Z/) {
        $gitrev = $1;
        next;
    }
    $srcpath = $_, next if not defined $srcpath;
    $wikipath = $_, next if not defined $wikipath;
}
  56. my $default_optionsfname = '.wikiheaders-options';
  57. $default_optionsfname = "$srcpath/$default_optionsfname" if defined $srcpath;
  58. if ((not defined $optionsfname) && (-f $default_optionsfname)) {
  59. $optionsfname = $default_optionsfname;
  60. }
  61. if (defined $optionsfname) {
  62. open OPTIONS, '<', $optionsfname or die("Failed to open options file '$optionsfname': $!\n");
  63. while (<OPTIONS>) {
  64. chomp;
  65. if (/\A(.*?)\=(.*)\Z/) {
  66. my $key = $1;
  67. my $val = $2;
  68. $key =~ s/\A\s+//;
  69. $key =~ s/\s+\Z//;
  70. $val =~ s/\A\s+//;
  71. $val =~ s/\s+\Z//;
  72. $warn_about_missing = int($val), next if $key eq 'warn_about_missing';
  73. $srcpath = $val, next if $key eq 'srcpath';
  74. $wikipath = $val, next if $key eq 'wikipath';
  75. $apiprefixregex = $val, next if $key eq 'apiprefixregex';
  76. $projectfullname = $val, next if $key eq 'projectfullname';
  77. $projectshortname = $val, next if $key eq 'projectshortname';
  78. $wikisubdir = $val, next if $key eq 'wikisubdir';
  79. $incsubdir = $val, next if $key eq 'incsubdir';
  80. $readmesubdir = $val, next if $key eq 'readmesubdir';
  81. $versionmajorregex = $val, next if $key eq 'versionmajorregex';
  82. $versionminorregex = $val, next if $key eq 'versionminorregex';
  83. $versionpatchregex = $val, next if $key eq 'versionpatchregex';
  84. $versionfname = $val, next if $key eq 'versionfname';
  85. $mainincludefname = $val, next if $key eq 'mainincludefname';
  86. $selectheaderregex = $val, next if $key eq 'selectheaderregex';
  87. $projecturl = $val, next if $key eq 'projecturl';
  88. $wikiurl = $val, next if $key eq 'wikiurl';
  89. $bugreporturl = $val, next if $key eq 'bugreporturl';
  90. $wikipreamble = $val, next if $key eq 'wikipreamble';
  91. $wikiheaderfiletext = $val, next if $key eq 'wikiheaderfiletext';
  92. $manpageheaderfiletext = $val, next if $key eq 'manpageheaderfiletext';
  93. }
  94. }
  95. close(OPTIONS);
  96. }
  97. my $wordwrap_mode = 'mediawiki';
  98. sub wordwrap_atom { # don't call this directly.
  99. my $str = shift;
  100. my $retval = '';
  101. # wordwrap but leave links intact, even if they overflow.
  102. if ($wordwrap_mode eq 'mediawiki') {
  103. while ($str =~ s/(.*?)\s*(\[https?\:\/\/.*?\s+.*?\])\s*//ms) {
  104. $retval .= fill('', '', $1); # wrap it.
  105. $retval .= "\n$2\n"; # don't wrap it.
  106. }
  107. } elsif ($wordwrap_mode eq 'md') {
  108. while ($str =~ s/(.*?)\s*(\[.*?\]\(https?\:\/\/.*?\))\s*//ms) {
  109. $retval .= fill('', '', $1); # wrap it.
  110. $retval .= "\n$2\n"; # don't wrap it.
  111. }
  112. }
  113. return $retval . fill('', '', $str);
  114. }
  115. sub wordwrap_with_bullet_indent { # don't call this directly.
  116. my $bullet = shift;
  117. my $str = shift;
  118. my $retval = '';
  119. #print("WORDWRAP BULLET ('$bullet'):\n\n$str\n\n");
  120. # You _can't_ (at least with Pandoc) have a bullet item with a newline in
  121. # MediaWiki, so _remove_ wrapping!
  122. if ($wordwrap_mode eq 'mediawiki') {
  123. $retval = "$bullet$str";
  124. $retval =~ s/\n/ /gms;
  125. $retval =~ s/\s+$//gms;
  126. #print("WORDWRAP BULLET DONE:\n\n$retval\n\n");
  127. return "$retval\n";
  128. }
  129. my $bulletlen = length($bullet);
  130. # wrap it and then indent each line to be under the bullet.
  131. $Text::Wrap::columns -= $bulletlen;
  132. my @wrappedlines = split /\n/, wordwrap_atom($str);
  133. $Text::Wrap::columns += $bulletlen;
  134. my $prefix = $bullet;
  135. my $usual_prefix = ' ' x $bulletlen;
  136. foreach (@wrappedlines) {
  137. s/\s*\Z//;
  138. $retval .= "$prefix$_\n";
  139. $prefix = $usual_prefix;
  140. }
  141. return $retval;
  142. }
# Wrap a single paragraph. Bullet lists ("* " or "- " items) are split into
# items, each wrapped with a hanging indent; plain prose is wrapped normally.
sub wordwrap_one_paragraph {  # don't call this directly.
    my $retval = '';
    my $p = shift;
    #print "\n\n\nPARAGRAPH: [$p]\n\n\n";
    if ($p =~ s/\A([\*\-] )//) {  # bullet list, starts with "* " or "- ".
        my $bullet = $1;
        my $item = '';
        my @items = split /\n/, $p;
        foreach (@items) {
            if (s/\A([\*\-] )//) {
                # Reached the next bullet: flush the item accumulated so far.
                $retval .= wordwrap_with_bullet_indent($bullet, $item);
                $item = '';
            }
            s/\A\s*//;
            $item .= "$_\n";  # accumulate lines until we hit the end or another bullet.
        }
        if ($item ne '') {
            # Flush the final item (the loop only flushes on seeing a new bullet).
            $retval .= wordwrap_with_bullet_indent($bullet, $item);
        }
    } else {
        $retval = wordwrap_atom($p) . "\n";
    }
    return $retval;
}
  167. sub wordwrap_paragraphs { # don't call this directly.
  168. my $str = shift;
  169. my $retval = '';
  170. my @paragraphs = split /\n\n/, $str;
  171. foreach (@paragraphs) {
  172. next if $_ eq '';
  173. $retval .= wordwrap_one_paragraph($_);
  174. $retval .= "\n";
  175. }
  176. return $retval;
  177. }
  178. my $wordwrap_default_columns = 76;
  179. sub wordwrap {
  180. my $str = shift;
  181. my $columns = shift;
  182. $columns = $wordwrap_default_columns if not defined $columns;
  183. $columns += $wordwrap_default_columns if $columns < 0;
  184. $Text::Wrap::columns = $columns;
  185. my $retval = '';
  186. #print("\n\nWORDWRAP:\n\n$str\n\n\n");
  187. $str =~ s/\A\n+//ms;
  188. while ($str =~ s/(.*?)(\`\`\`.*?\`\`\`|\<syntaxhighlight.*?\<\/syntaxhighlight\>)//ms) {
  189. #print("\n\nWORDWRAP BLOCK:\n\n$1\n\n ===\n\n$2\n\n\n");
  190. $retval .= wordwrap_paragraphs($1); # wrap it.
  191. $retval .= "$2\n\n"; # don't wrap it.
  192. }
  193. $retval .= wordwrap_paragraphs($str); # wrap what's left.
  194. $retval =~ s/\n+\Z//ms;
  195. #print("\n\nWORDWRAP DONE:\n\n$retval\n\n\n");
  196. return $retval;
  197. }
# This assumes you're moving from Markdown (in the Doxygen data) to Wiki, which
# is why the 'md' section is so sparse.
#
# Convert one chunk of Markdown prose (plus an optional trailing code block)
# to the requested wiki format.
#
# $wikitype - target format: 'mediawiki' or 'md'.
# $str      - Markdown prose to convert.
# $codelang - language tag for $code, or undef.
# $code     - verbatim code block appended unmodified, or undef.
sub wikify_chunk {
    my $wikitype = shift;
    my $str = shift;
    my $codelang = shift;
    my $code = shift;
    #print("\n\nWIKIFY CHUNK:\n\n$str\n\n\n");
    if ($wikitype eq 'mediawiki') {
        # convert `code` things first, so they aren't mistaken for other markdown items.
        my $codedstr = '';
        while ($str =~ s/\A(.*?)\`(.*?)\`//ms) {
            my $codeblock = $2;
            # Recurse on the prose before the code span (no code attached).
            $codedstr .= wikify_chunk($wikitype, $1, undef, undef);
            if (defined $apiprefixregex) {
                # Convert obvious API things to wikilinks, even inside `code` blocks.
                $codeblock =~ s/\b($apiprefixregex[a-zA-Z0-9_]+)/[[$1]]/gms;
            }
            $codedstr .= "<code>$codeblock</code>";
        }
        # Convert obvious API things to wikilinks.
        if (defined $apiprefixregex) {
            $str =~ s/\b($apiprefixregex[a-zA-Z0-9_]+)/[[$1]]/gms;
        }
        # Make some Markdown things into MediaWiki...
        # links
        $str =~ s/\[(.*?)\]\((https?\:\/\/.*?)\)/\[$2 $1\]/g;
        # bold+italic
        $str =~ s/\*\*\*(.*?)\*\*\*/'''''$1'''''/gms;
        # bold
        $str =~ s/\*\*(.*?)\*\*/'''$1'''/gms;
        # italic
        $str =~ s/\*(.*?)\*/''$1''/gms;
        # bullets
        $str =~ s/^\- /* /gm;
        $str = $codedstr . $str;
        if (defined $code) {
            $str .= "<syntaxhighlight lang='$codelang'>$code<\/syntaxhighlight>";
        }
    } elsif ($wikitype eq 'md') {
        # convert `code` things first, so they aren't mistaken for other markdown items.
        my $codedstr = '';
        while ($str =~ s/\A(.*?)(\`.*?\`)//ms) {
            my $codeblock = $2;
            $codedstr .= wikify_chunk($wikitype, $1, undef, undef);
            if (defined $apiprefixregex) {
                # Convert obvious API things to wikilinks, even inside `code` blocks,
                # BUT ONLY IF the entire code block is the API thing,
                # So something like "just call `SDL_Whatever`" will become
                # "just call [`SDL_Whatever`](SDL_Whatever)", but
                # "just call `SDL_Whatever(7)`" will not. It's just the safest
                # way to do this without resorting to wrapping things in html <code> tags.
                $codeblock =~ s/\A\`($apiprefixregex[a-zA-Z0-9_]+)\`\Z/[`$1`]($1)/gms;
            }
            $codedstr .= $codeblock;
        }
        # Convert obvious API things to wikilinks.
        if (defined $apiprefixregex) {
            $str =~ s/\b($apiprefixregex[a-zA-Z0-9_]+)/[$1]($1)/gms;
        }
        $str = $codedstr . $str;
        if (defined $code) {
            $str .= "```$codelang$code```";
        }
    }
    #print("\n\nWIKIFY CHUNK DONE:\n\n$str\n\n\n");
    return $str;
}
  266. sub wikify {
  267. my $wikitype = shift;
  268. my $str = shift;
  269. my $retval = '';
  270. #print("WIKIFY WHOLE:\n\n$str\n\n\n");
  271. while ($str =~ s/\A(.*?)\`\`\`(c\+\+|c)(.*?)\`\`\`//ms) {
  272. $retval .= wikify_chunk($wikitype, $1, $2, $3);
  273. }
  274. $retval .= wikify_chunk($wikitype, $str, undef, undef);
  275. #print("WIKIFY WHOLE DONE:\n\n$retval\n\n\n");
  276. return $retval;
  277. }
my $dewikify_mode = 'md';
my $dewikify_manpage_code_indent = 1;

# Convert one chunk of wiki text (plus an optional code block) to either
# Markdown (for headers) or man-page markup, depending on $dewikify_mode.
#
# $wikitype - source format of $str: 'mediawiki' or 'md'.
# $str      - wiki prose to convert.
# $codelang - language tag for $code, or undef (only used in 'md' mode).
# $code     - verbatim code block to append, or undef.
sub dewikify_chunk {
    my $wikitype = shift;
    my $str = shift;
    my $codelang = shift;
    my $code = shift;
    #print("\n\nDEWIKIFY CHUNK:\n\n$str\n\n\n");
    if ($dewikify_mode eq 'md') {
        if ($wikitype eq 'mediawiki') {
            # Doxygen supports Markdown (and it just simply looks better than MediaWiki
            # when looking at the raw headers), so do some conversions here as necessary.
            # Dump obvious wikilinks.
            if (defined $apiprefixregex) {
                $str =~ s/\[\[($apiprefixregex[a-zA-Z0-9_]+)\]\]/$1/gms;
            }
            # links
            $str =~ s/\[(https?\:\/\/.*?)\s+(.*?)\]/\[$2\]\($1\)/g;
            # <code></code> is also popular. :/
            $str =~ s/\<code>(.*?)<\/code>/`$1`/gms;
            # bold+italic
            $str =~ s/'''''(.*?)'''''/***$1***/gms;
            # bold
            $str =~ s/'''(.*?)'''/**$1**/gms;
            # italic
            $str =~ s/''(.*?)''/*$1*/gms;
            # bullets
            $str =~ s/^\* /- /gm;
        } elsif ($wikitype eq 'md') {
            # Dump obvious wikilinks. The rest can just passthrough.
            if (defined $apiprefixregex) {
                $str =~ s/\[(\`?$apiprefixregex[a-zA-Z0-9_]+\`?)\]\($apiprefixregex[a-zA-Z0-9_]+\)/$1/gms;
            }
        }
        if (defined $code) {
            $str .= "```$codelang$code```";
        }
    } elsif ($dewikify_mode eq 'manpage') {
        $str =~ s/\./\\[char46]/gms;  # make sure these can't become control codes.
        if ($wikitype eq 'mediawiki') {
            # Dump obvious wikilinks.
            if (defined $apiprefixregex) {
                $str =~ s/\s*\[\[($apiprefixregex[a-zA-Z0-9_]+)\]\]\s*/\n.BR $1\n/gms;
            }
            # links
            $str =~ s/\[(https?\:\/\/.*?)\s+(.*?)\]/\n.URL "$1" "$2"\n/g;
            # <code></code> is also popular. :/
            $str =~ s/\s*\<code>(.*?)<\/code>\s*/\n.BR $1\n/gms;
            # bold+italic (this looks bad, just make it bold).
            $str =~ s/\s*'''''(.*?)'''''\s*/\n.B $1\n/gms;
            # bold
            $str =~ s/\s*'''(.*?)'''\s*/\n.B $1\n/gms;
            # italic
            $str =~ s/\s*''(.*?)''\s*/\n.I $1\n/gms;
            # bullets
            $str =~ s/^\* /\n\\\(bu /gm;
        } elsif ($wikitype eq 'md') {
            # Dump obvious wikilinks.
            if (defined $apiprefixregex) {
                $str =~ s/\[(\`?$apiprefixregex[a-zA-Z0-9_]+\`?)\]\($apiprefixregex[a-zA-Z0-9_]+\)/\n.BR $1\n/gms;
            }
            # links
            $str =~ s/\[(.*?)]\((https?\:\/\/.*?)\)/\n.URL "$2" "$1"\n/g;
            # <code></code> is also popular. :/
            $str =~ s/\s*\`(.*?)\`\s*/\n.BR $1\n/gms;
            # bold+italic (this looks bad, just make it bold).
            $str =~ s/\s*\*\*\*(.*?)\*\*\*\s*/\n.B $1\n/gms;
            # bold
            $str =~ s/\s*\*\*(.*?)\*\*\s*/\n.B $1\n/gms;
            # italic
            $str =~ s/\s*\*(.*?)\*\s*/\n.I $1\n/gms;
            # bullets
            $str =~ s/^\- /\n\\\(bu /gm;
        } else {
            die("Unexpected wikitype when converting to manpages\n");  # !!! FIXME: need to handle Markdown wiki pages.
        }
        if (defined $code) {
            $code =~ s/\A\n+//gms;
            $code =~ s/\n+\Z//gms;
            if ($dewikify_manpage_code_indent) {
                $str .= "\n.IP\n"
            } else {
                $str .= "\n.PP\n"
            }
            $str .= ".EX\n$code\n.EE\n.PP\n";
        }
    } else {
        die("Unexpected dewikify_mode\n");
    }
    #print("\n\nDEWIKIFY CHUNK DONE:\n\n$str\n\n\n");
    return $str;
}
  370. sub dewikify {
  371. my $wikitype = shift;
  372. my $str = shift;
  373. return '' if not defined $str;
  374. #print("DEWIKIFY WHOLE:\n\n$str\n\n\n");
  375. $str =~ s/\A[\s\n]*\= .*? \=\s*?\n+//ms;
  376. $str =~ s/\A[\s\n]*\=\= .*? \=\=\s*?\n+//ms;
  377. my $retval = '';
  378. while ($str =~ s/\A(.*?)<syntaxhighlight lang='?(.*?)'?>(.*?)<\/syntaxhighlight\>//ms) {
  379. $retval .= dewikify_chunk($wikitype, $1, $2, $3);
  380. }
  381. $retval .= dewikify_chunk($wikitype, $str, undef, undef);
  382. #print("DEWIKIFY WHOLE DONE:\n\n$retval\n\n\n");
  383. return $retval;
  384. }
  385. sub filecopy {
  386. my $src = shift;
  387. my $dst = shift;
  388. my $endline = shift;
  389. $endline = "\n" if not defined $endline;
  390. open(COPYIN, '<', $src) or die("Failed to open '$src' for reading: $!\n");
  391. open(COPYOUT, '>', $dst) or die("Failed to open '$dst' for writing: $!\n");
  392. while (<COPYIN>) {
  393. chomp;
  394. s/[ \t\r\n]*\Z//;
  395. print COPYOUT "$_$endline";
  396. }
  397. close(COPYOUT);
  398. close(COPYIN);
  399. }
# Print the command-line usage string and abort the script.
sub usage {
    die("USAGE: $0 <source code git clone path> <wiki git clone path> [--copy-to-headers|--copy-to-wiki|--copy-to-manpages] [--warn-about-missing] [--manpath=<man path>]\n\n");
}
# Both tree paths are mandatory; bail out with usage text if either is missing.
usage() if not defined $srcpath;
usage() if not defined $wikipath;
#usage() if $copy_direction == 0;

# Default the manpage output directory to "man" under the source tree.
if (not defined $manpath) {
    $manpath = "$srcpath/man";
}
# Canonical ordering of wiki page sections, used when (re)generating pages.
my @standard_wiki_sections = (
    'Draft',
    '[Brief]',
    'Deprecated',
    'Header File',
    'Syntax',
    'Function Parameters',
    'Return Value',
    'Remarks',
    'Thread Safety',
    'Version',
    'Code Examples',
    'See Also'
);

# Sections that only ever exist in the wiki and shouldn't be deleted when
# not found in the headers.
my %only_wiki_sections = ( # The ones don't mean anything, I just need to check for key existence.
    'Draft', 1,
    'Code Examples', 1,
    'Header File', 1
);

# Symbol tables populated by the header scan below and the wiki scan after it.
my %headers = ();  # $headers{"SDL_audio.h"} -> reference to an array of all lines of text in SDL_audio.h.
my %headersyms = ();  # $headersyms{"SDL_OpenAudio"} -> string of header documentation for SDL_OpenAudio, with comment '*' bits stripped from the start. Newlines embedded!
my %headerdecls = ();  # $headerdecls{"SDL_OpenAudio"} -> declaration text for the symbol.  NOTE(review): inferred from the header-scanning loop; confirm against later uses.
my %headersymslocation = ();  # $headersymslocation{"SDL_OpenAudio"} -> name of header holding SDL_OpenAudio define ("SDL_audio.h" in this case).
my %headersymschunk = ();  # $headersymschunk{"SDL_OpenAudio"} -> offset in array in %headers that should be replaced for this symbol.
my %headersymshasdoxygen = ();  # $headersymshasdoxygen{"SDL_OpenAudio"} -> 1 if there was no existing doxygen for this function.
my %headersymstype = ();  # $headersymstype{"SDL_OpenAudio"} -> 1 (function), 2 (macro), 3 (struct), 4 (enum)
my %wikitypes = ();  # contains string of wiki page extension, like $wikitypes{"SDL_OpenAudio"} == 'mediawiki'
my %wikisyms = ();  # contains references to hash of strings, each string being the full contents of a section of a wiki page, like $wikisyms{"SDL_OpenAudio"}{"Remarks"}.
my %wikisectionorder = ();  # contains references to array, each array item being a key to a wikipage section in the correct order, like $wikisectionorder{"SDL_OpenAudio"}[2] == 'Remarks'
# Write a "symbols in the headers but not the wiki" report section to $fh.
#
# $fh      - open filehandle to print the Markdown report to.
# $typestr - human-readable plural name for the symbol type (e.g. "Functions").
# $typeval - numeric symbol type to match against %headersymstype.
sub print_undocumented_section {
    my $fh = shift;
    my $typestr = shift;
    my $typeval = shift;

    print $fh "## $typestr defined in the headers, but not in the wiki\n\n";
    my $header_only_sym = 0;
    foreach (sort keys %headersyms) {
        my $sym = $_;
        if ((not defined $wikisyms{$sym}) && ($headersymstype{$sym} == $typeval)) {
            print $fh "- [$sym]($sym)\n";
            $header_only_sym = 1;
        }
    }
    if (!$header_only_sym) {
        print $fh "(none)\n";
    }
    print $fh "\n";

    # Deliberately disabled: the reverse report (wiki-only symbols).
    if (0) { # !!! FIXME: this lists things that _shouldn't_ be in the headers, like MigrationGuide, etc, but also we don't know if they're functions, macros, etc at this point (can we parse that from the wiki page, though?)
        print $fh "## $typestr defined in the wiki, but not in the headers\n\n";
        my $wiki_only_sym = 0;
        foreach (sort keys %wikisyms) {
            my $sym = $_;
            if ((not defined $headersyms{$sym}) && ($headersymstype{$sym} == $typeval)) {
                print $fh "- [$sym]($sym)\n";
                $wiki_only_sym = 1;
            }
        }
        if (!$wiki_only_sym) {
            print $fh "(none)\n";
        }
        print $fh "\n";
    }
}
# Compute the concrete filesystem paths from the configured subdirectories.
my $incpath = "$srcpath";
$incpath .= "/$incsubdir" if $incsubdir ne '';

my $wikireadmepath = "$wikipath/$wikireadmesubdir";
my $readmepath = undef;
if (defined $readmesubdir) {
    $readmepath = "$srcpath/$readmesubdir";
}
# Scan every selected public header, collecting each documented symbol's
# doxygen text, declaration, and location into the %headersyms* tables, and
# keeping a line-accurate copy of each header in %headers for later rewriting.
opendir(DH, $incpath) or die("Can't opendir '$incpath': $!\n");
while (my $d = readdir(DH)) {
    my $dent = $d;
    next if not $dent =~ /$selectheaderregex/;  # just selected headers.
    open(FH, '<', "$incpath/$dent") or die("Can't open '$incpath/$dent': $!\n");
    my @contents = ();
    while (<FH>) {
        chomp;
        my $symtype = 0;  # nothing, yet.
        my $decl;
        my @templines;
        my $str;
        my $has_doxygen = 1;
        # Since a lot of macros are just preprocessor logic spam and not all macros are worth documenting anyhow, we only pay attention to them when they have a Doxygen comment attached.
        # Functions and other things are a different story, though!
        if (/\A\s*extern\s+(SDL_DEPRECATED\s+|)DECLSPEC/) {  # a function declaration without a doxygen comment?
            $symtype = 1;  # function declaration
            @templines = ();
            $decl = $_;
            $str = '';
            $has_doxygen = 0;
        } elsif (not /\A\/\*\*\s*\Z/) {  # not doxygen comment start?
            push @contents, $_;
            next;
        } else {  # Start of a doxygen comment, parse it out.
            @templines = ( $_ );
            while (<FH>) {
                chomp;
                push @templines, $_;
                last if /\A\s*\*\/\Z/;
                if (s/\A\s*\*\s*\`\`\`/```/) {  # this is a hack, but a lot of other code relies on the whitespace being trimmed, but we can't trim it in code blocks...
                    $str .= "$_\n";
                    # Inside a fenced code block: strip only the leading " * "
                    # of the comment, preserving the code's own indentation.
                    while (<FH>) {
                        chomp;
                        push @templines, $_;
                        s/\A\s*\*\s?//;
                        if (s/\A\s*\`\`\`/```/) {
                            $str .= "$_\n";
                            last;
                        } else {
                            $str .= "$_\n";
                        }
                    }
                } else {
                    s/\A\s*\*\s*//;
                    $str .= "$_\n";
                }
            }
            # The line right after the doxygen comment should be a declaration.
            $decl = <FH>;
            $decl = '' if not defined $decl;
            chomp($decl);
            if ($decl =~ /\A\s*extern\s+(SDL_DEPRECATED\s+|)DECLSPEC/) {
                $symtype = 1;  # function declaration
            } elsif ($decl =~ /\A\s*\#\s*define\s+/) {
                $symtype = 2;  # macro
            } else {
                #print "Found doxygen but no function sig:\n$str\n\n";
                foreach (@templines) {
                    push @contents, $_;
                }
                push @contents, $decl;
                next;
            }
        }
        my @decllines = ( $decl );
        my $sym = '';
        if ($symtype == 1) {  # a function
            # Gather continuation lines until the closing ");" is seen.
            if (not $decl =~ /\)\s*;/) {
                while (<FH>) {
                    chomp;
                    push @decllines, $_;
                    s/\A\s+//;
                    s/\s+\Z//;
                    $decl .= " $_";
                    last if /\)\s*;/;
                }
            }
            $decl =~ s/\s+\);\Z/);/;
            $decl =~ s/\s+\Z//;
            # Capture group 6 is the function's name.
            if ($decl =~ /\A\s*extern\s+(SDL_DEPRECATED\s+|)DECLSPEC\s+(const\s+|)(unsigned\s+|)(.*?)\s*(\*?)\s*SDLCALL\s+(.*?)\s*\((.*?)\);/) {
                $sym = $6;
                #$decl =~ s/\A\s*extern\s+DECLSPEC\s+(.*?)\s+SDLCALL/$1/;
            } else {
                #print "Found doxygen but no function sig:\n$str\n\n";
                foreach (@templines) {
                    push @contents, $_;
                }
                foreach (@decllines) {
                    push @contents, $_;
                }
                next;
            }
            $decl = '';  # build this with the line breaks, since it looks better for syntax highlighting.
            foreach (@decllines) {
                if ($decl eq '') {
                    $decl = $_;
                    $decl =~ s/\Aextern\s+(SDL_DEPRECATED\s+|)DECLSPEC\s+(.*?)\s+(\*?)SDLCALL\s+/$2$3 /;
                } else {
                    my $trimmed = $_;
                    # !!! FIXME: trim space for SDL_DEPRECATED if it was used, too.
                    $trimmed =~ s/\A\s{24}//;  # 24 for shrinking to match the removed "extern DECLSPEC SDLCALL "
                    $decl .= $trimmed;
                }
                $decl .= "\n";
            }
        } elsif ($symtype == 2) {  # a macro
            if ($decl =~ /\A\s*\#\s*define\s+(.*?)(\(.*?\)|)\s+/) {
                $sym = $1;
                #$decl =~ s/\A\s*extern\s+DECLSPEC\s+(.*?)\s+SDLCALL/$1/;
            } else {
                #print "Found doxygen but no macro:\n$str\n\n";
                foreach (@templines) {
                    push @contents, $_;
                }
                foreach (@decllines) {
                    push @contents, $_;
                }
                next;
            }
            # Multi-line macros continue while the line ends with a backslash.
            while ($decl =~ /\\\Z/) {
                my $l = <FH>;
                last if not $l;
                chomp($l);
                push @decllines, $l;
                #$l =~ s/\A\s+//;
                $l =~ s/\s+\Z//;
                $decl .= "\n$l";
            }
        } else {
            die("Unexpected symtype $symtype\n");
        }
        #print("DECL: [$decl]\n");
        #print("$sym:\n$str\n\n");
        # There might be multiple declarations of a function due to #ifdefs,
        # and only one of them will have documentation. If we hit an
        # undocumented one before, delete the placeholder line we left for
        # it so it doesn't accumulate a new blank line on each run.
        my $skipsym = 0;
        if (defined $headersymshasdoxygen{$sym}) {
            if ($headersymshasdoxygen{$sym} == 0) {  # An undocumented declaration already exists, nuke its placeholder line.
                delete $contents[$headersymschunk{$sym}];  # delete DOES NOT RENUMBER existing elements!
            } else {  # documented function already existed?
                $skipsym = 1;  # don't add this copy to the list of functions.
                if ($has_doxygen) {
                    print STDERR "WARNING: Symbol '$sym' appears to be documented in multiple locations. Only keeping the first one we saw!\n";
                }
                push @contents, join("\n", @decllines);  # just put the existing declation in as-is.
            }
        }
        if (!$skipsym) {
            $headersyms{$sym} = $str;
            $headerdecls{$sym} = $decl;
            $headersymslocation{$sym} = $dent;
            $headersymschunk{$sym} = scalar(@contents);
            $headersymshasdoxygen{$sym} = $has_doxygen;
            $headersymstype{$sym} = $symtype;
            push @contents, join("\n", @templines);
            push @contents, join("\n", @decllines);
        }
    }
    close(FH);
    $headers{$dent} = \@contents;
}
closedir(DH);
  644. # !!! FIXME: we need to parse enums and typedefs and structs and defines and and and and and...
  645. # !!! FIXME: (but functions are good enough for now.)
# Pass 2: parse every wiki page into per-section text so it can be diffed
# against / merged with the header documentation gathered above.
opendir(DH, $wikipath) or die("Can't opendir '$wikipath': $!\n");
while (my $d = readdir(DH)) {
    my $dent = $d;
    my $type = '';   # wiki markup flavor: 'md' or 'mediawiki', from the file extension.
    if ($dent =~ /\.(md|mediawiki)\Z/) {
        $type = $1;
    } else {
        next;  # only dealing with wiki pages.
    }

    # The symbol name is the filename without its extension.
    my $sym = $dent;
    $sym =~ s/\..*\Z//;

    # Ignore FrontPage.
    next if $sym eq 'FrontPage';

    # Ignore "Category*" pages.
    next if ($sym =~ /\ACategory/);

    open(FH, '<', "$wikipath/$dent") or die("Can't open '$wikipath/$dent': $!\n");

    # Split the page into named sections. '[start]' holds anything before the
    # first heading; '[Brief]' is the section whose heading matches the symbol
    # name; '[footer]' is everything after a '----' horizontal rule.
    my $current_section = '[start]';
    my @section_order = ( $current_section );
    my %sections = ();
    $sections{$current_section} = '';

    my $firstline = 1;  # true until the first non-blank content line is seen.

    while (<FH>) {
        chomp;
        my $orig = $_;   # keep the unstripped line; section text preserves original spacing.
        s/\A\s*//;
        s/\s*\Z//;
        if ($type eq 'mediawiki') {
            if (defined($wikipreamble) && $firstline && /\A\=\=\=\=\=\= (.*?) \=\=\=\=\=\=\Z/ && ($1 eq $wikipreamble)) {
                $firstline = 0;  # skip this.
                next;
            } elsif (/\A\= (.*?) \=\Z/) {
                # Level-1 heading starts a new section. NOTE: no `next` here,
                # so the heading line itself is appended to the new section's
                # text below; downstream code strips a leading "= ... =" line
                # from '[Brief]' to compensate.
                $firstline = 0;
                $current_section = ($1 eq $sym) ? '[Brief]' : $1;
                die("Doubly-defined section '$current_section' in '$dent'!\n") if defined $sections{$current_section};
                push @section_order, $current_section;
                $sections{$current_section} = '';
            } elsif (/\A\=\= (.*?) \=\=\Z/) {
                # Level-2 heading starts a new section; the heading line is dropped.
                $firstline = 0;
                $current_section = ($1 eq $sym) ? '[Brief]' : $1;
                die("Doubly-defined section '$current_section' in '$dent'!\n") if defined $sections{$current_section};
                push @section_order, $current_section;
                $sections{$current_section} = '';
                next;
            } elsif (/\A\-\-\-\-\Z/) {
                # Horizontal rule: everything that follows is the page footer.
                $firstline = 0;
                $current_section = '[footer]';
                die("Doubly-defined section '$current_section' in '$dent'!\n") if defined $sections{$current_section};
                push @section_order, $current_section;
                $sections{$current_section} = '';
                next;
            }
        } elsif ($type eq 'md') {
            if (defined($wikipreamble) && $firstline && /\A\#\#\#\#\#\# (.*?)\Z/ && ($1 eq $wikipreamble)) {
                $firstline = 0;  # skip this.
                next;
            } elsif (/\A\#+ (.*?)\Z/) {
                # Any ATX heading starts a new section; the heading line is dropped.
                $firstline = 0;
                $current_section = ($1 eq $sym) ? '[Brief]' : $1;
                die("Doubly-defined section '$current_section' in '$dent'!\n") if defined $sections{$current_section};
                push @section_order, $current_section;
                $sections{$current_section} = '';
                next;
            } elsif (/\A\-\-\-\-\Z/) {
                # Horizontal rule: everything that follows is the page footer.
                $firstline = 0;
                $current_section = '[footer]';
                die("Doubly-defined section '$current_section' in '$dent'!\n") if defined $sections{$current_section};
                push @section_order, $current_section;
                $sections{$current_section} = '';
                next;
            }
        } else {
            die("Unexpected wiki file type. Fixme!\n");
        }

        # Swallow blank lines at the very top of the file; once a non-blank
        # line is seen, everything (including later blanks) is section text.
        if ($firstline) {
            $firstline = ($_ ne '');
        }
        if (!$firstline) {
            $sections{$current_section} .= "$orig\n";
        }
    }
    close(FH);

    # Normalize each section: strip surrounding blank lines, then guarantee
    # exactly one trailing newline (an empty section becomes just "\n").
    foreach (keys %sections) {
        $sections{$_} =~ s/\A\n+//;
        $sections{$_} =~ s/\n+\Z//;
        $sections{$_} .= "\n";
    }

    # older section name we used, migrate over from it.
    if (defined $sections{'Related Functions'}) {
        if (not defined $sections{'See Also'}) {
            $sections{'See Also'} = $sections{'Related Functions'};
        }
        delete $sections{'Related Functions'};
    }

    if (0) {  # debug dump of the parsed sections; disabled.
        foreach (@section_order) {
            print("$sym SECTION '$_':\n");
            print($sections{$_});
            print("\n\n");
        }
    }

    # Record the parse results in the file-scoped wiki tables.
    $wikitypes{$sym} = $type;
    $wikisyms{$sym} = \%sections;
    $wikisectionorder{$sym} = \@section_order;
}
closedir(DH);
  751. delete $wikisyms{"Undocumented"};
  752. {
  753. my $path = "$wikipath/Undocumented.md";
  754. open(my $fh, '>', $path) or die("Can't open '$path': $!\n");
  755. print $fh "# Undocumented\n\n";
  756. print_undocumented_section($fh, 'Functions', 1);
  757. print_undocumented_section($fh, 'Macros', 2);
  758. close($fh);
  759. }
  760. if ($warn_about_missing) {
  761. foreach (keys %wikisyms) {
  762. my $sym = $_;
  763. if (not defined $headersyms{$sym}) {
  764. print("WARNING: $sym defined in the wiki but not the headers!\n");
  765. }
  766. }
  767. foreach (keys %headersyms) {
  768. my $sym = $_;
  769. if (not defined $wikisyms{$sym}) {
  770. print("WARNING: $sym defined in the headers but not the wiki!\n");
  771. }
  772. }
  773. }
  774. if ($copy_direction == 1) { # --copy-to-headers
  775. my %changed_headers = ();
  776. $dewikify_mode = 'md';
  777. $wordwrap_mode = 'md'; # the headers use Markdown format.
  778. foreach (keys %headersyms) {
  779. my $sym = $_;
  780. next if not defined $wikisyms{$sym}; # don't have a page for that function, skip it.
  781. my $wikitype = $wikitypes{$sym};
  782. my $sectionsref = $wikisyms{$sym};
  783. my $remarks = $sectionsref->{'Remarks'};
  784. my $params = $sectionsref->{'Function Parameters'};
  785. my $returns = $sectionsref->{'Return Value'};
  786. my $threadsafety = $sectionsref->{'Thread Safety'};
  787. my $version = $sectionsref->{'Version'};
  788. my $related = $sectionsref->{'See Also'};
  789. my $deprecated = $sectionsref->{'Deprecated'};
  790. my $brief = $sectionsref->{'[Brief]'};
  791. my $addblank = 0;
  792. my $str = '';
  793. $headersymshasdoxygen{$sym} = 1; # Added/changed doxygen for this header.
  794. $brief = dewikify($wikitype, $brief);
  795. $brief =~ s/\A(.*?\.) /$1\n/; # \brief should only be one sentence, delimited by a period+space. Split if necessary.
  796. my @briefsplit = split /\n/, $brief;
  797. $brief = shift @briefsplit;
  798. if (defined $remarks) {
  799. $remarks = join("\n", @briefsplit) . dewikify($wikitype, $remarks);
  800. }
  801. if (defined $brief) {
  802. $str .= "\n" if $addblank; $addblank = 1;
  803. $str .= wordwrap($brief) . "\n";
  804. }
  805. if (defined $remarks) {
  806. $str .= "\n" if $addblank; $addblank = 1;
  807. $str .= wordwrap($remarks) . "\n";
  808. }
  809. if (defined $deprecated) {
  810. # !!! FIXME: lots of code duplication in all of these.
  811. $str .= "\n" if $addblank; $addblank = 1;
  812. my $v = dewikify($wikitype, $deprecated);
  813. my $whitespacelen = length("\\deprecated") + 1;
  814. my $whitespace = ' ' x $whitespacelen;
  815. $v = wordwrap($v, -$whitespacelen);
  816. my @desclines = split /\n/, $v;
  817. my $firstline = shift @desclines;
  818. $str .= "\\deprecated $firstline\n";
  819. foreach (@desclines) {
  820. $str .= "${whitespace}$_\n";
  821. }
  822. }
  823. if (defined $params) {
  824. $str .= "\n" if $addblank; $addblank = (defined $returns) ? 0 : 1;
  825. my @lines = split /\n/, dewikify($wikitype, $params);
  826. if ($wikitype eq 'mediawiki') {
  827. die("Unexpected data parsing MediaWiki table") if (shift @lines ne '{|'); # Dump the '{|' start
  828. while (scalar(@lines) >= 3) {
  829. my $name = shift @lines;
  830. my $desc = shift @lines;
  831. my $terminator = shift @lines; # the '|-' or '|}' line.
  832. last if ($terminator ne '|-') and ($terminator ne '|}'); # we seem to have run out of table.
  833. $name =~ s/\A\|\s*//;
  834. $name =~ s/\A\*\*(.*?)\*\*/$1/;
  835. $name =~ s/\A\'\'\'(.*?)\'\'\'/$1/;
  836. $desc =~ s/\A\|\s*//;
  837. #print STDERR "SYM: $sym NAME: $name DESC: $desc TERM: $terminator\n";
  838. my $whitespacelen = length($name) + 8;
  839. my $whitespace = ' ' x $whitespacelen;
  840. $desc = wordwrap($desc, -$whitespacelen);
  841. my @desclines = split /\n/, $desc;
  842. my $firstline = shift @desclines;
  843. $str .= "\\param $name $firstline\n";
  844. foreach (@desclines) {
  845. $str .= "${whitespace}$_\n";
  846. }
  847. }
  848. } elsif ($wikitype eq 'md') {
  849. my $l;
  850. $l = shift @lines;
  851. die("Unexpected data parsing Markdown table") if (not $l =~ /\A\s*\|\s*\|\s*\|\s*\Z/);
  852. $l = shift @lines;
  853. die("Unexpected data parsing Markdown table") if (not $l =~ /\A\s*\|\s*\-*\s*\|\s*\-*\s*\|\s*\Z/);
  854. while (scalar(@lines) >= 1) {
  855. $l = shift @lines;
  856. if ($l =~ /\A\s*\|\s*(.*?)\s*\|\s*(.*?)\s*\|\s*\Z/) {
  857. my $name = $1;
  858. my $desc = $2;
  859. $name =~ s/\A\*\*(.*?)\*\*/$1/;
  860. $name =~ s/\A\'\'\'(.*?)\'\'\'/$1/;
  861. #print STDERR "SYM: $sym NAME: $name DESC: $desc\n";
  862. my $whitespacelen = length($name) + 8;
  863. my $whitespace = ' ' x $whitespacelen;
  864. $desc = wordwrap($desc, -$whitespacelen);
  865. my @desclines = split /\n/, $desc;
  866. my $firstline = shift @desclines;
  867. $str .= "\\param $name $firstline\n";
  868. foreach (@desclines) {
  869. $str .= "${whitespace}$_\n";
  870. }
  871. } else {
  872. last; # we seem to have run out of table.
  873. }
  874. }
  875. } else {
  876. die("write me");
  877. }
  878. }
  879. if (defined $returns) {
  880. $str .= "\n" if $addblank; $addblank = 1;
  881. my $r = dewikify($wikitype, $returns);
  882. my $retstr = "\\returns";
  883. if ($r =~ s/\AReturn(s?) //) {
  884. $retstr = "\\return$1";
  885. }
  886. my $whitespacelen = length($retstr) + 1;
  887. my $whitespace = ' ' x $whitespacelen;
  888. $r = wordwrap($r, -$whitespacelen);
  889. my @desclines = split /\n/, $r;
  890. my $firstline = shift @desclines;
  891. $str .= "$retstr $firstline\n";
  892. foreach (@desclines) {
  893. $str .= "${whitespace}$_\n";
  894. }
  895. }
  896. if (defined $threadsafety) {
  897. # !!! FIXME: lots of code duplication in all of these.
  898. $str .= "\n" if $addblank; $addblank = 1;
  899. my $v = dewikify($wikitype, $threadsafety);
  900. my $whitespacelen = length("\\threadsafety") + 1;
  901. my $whitespace = ' ' x $whitespacelen;
  902. $v = wordwrap($v, -$whitespacelen);
  903. my @desclines = split /\n/, $v;
  904. my $firstline = shift @desclines;
  905. $str .= "\\threadsafety $firstline\n";
  906. foreach (@desclines) {
  907. $str .= "${whitespace}$_\n";
  908. }
  909. }
  910. if (defined $version) {
  911. # !!! FIXME: lots of code duplication in all of these.
  912. $str .= "\n" if $addblank; $addblank = 1;
  913. my $v = dewikify($wikitype, $version);
  914. my $whitespacelen = length("\\since") + 1;
  915. my $whitespace = ' ' x $whitespacelen;
  916. $v = wordwrap($v, -$whitespacelen);
  917. my @desclines = split /\n/, $v;
  918. my $firstline = shift @desclines;
  919. $str .= "\\since $firstline\n";
  920. foreach (@desclines) {
  921. $str .= "${whitespace}$_\n";
  922. }
  923. }
  924. if (defined $related) {
  925. # !!! FIXME: lots of code duplication in all of these.
  926. $str .= "\n" if $addblank; $addblank = 1;
  927. my $v = dewikify($wikitype, $related);
  928. my @desclines = split /\n/, $v;
  929. foreach (@desclines) {
  930. s/\A(\:|\* )//;
  931. s/\(\)\Z//; # Convert "SDL_Func()" to "SDL_Func"
  932. s/\[\[(.*?)\]\]/$1/; # in case some wikilinks remain.
  933. s/\[(.*?)\]\(.*?\)/$1/; # in case some wikilinks remain.
  934. s/\A\/*//;
  935. $str .= "\\sa $_\n";
  936. }
  937. }
  938. my $header = $headersymslocation{$sym};
  939. my $contentsref = $headers{$header};
  940. my $chunk = $headersymschunk{$sym};
  941. my @lines = split /\n/, $str;
  942. my $addnewline = (($chunk > 0) && ($$contentsref[$chunk-1] ne '')) ? "\n" : '';
  943. my $output = "$addnewline/**\n";
  944. foreach (@lines) {
  945. chomp;
  946. s/\s*\Z//;
  947. if ($_ eq '') {
  948. $output .= " *\n";
  949. } else {
  950. $output .= " * $_\n";
  951. }
  952. }
  953. $output .= " */";
  954. #print("$sym:\n$output\n\n");
  955. $$contentsref[$chunk] = $output;
  956. #$$contentsref[$chunk+1] = $headerdecls{$sym};
  957. $changed_headers{$header} = 1;
  958. }
  959. foreach (keys %changed_headers) {
  960. my $header = $_;
  961. # this is kinda inefficient, but oh well.
  962. my @removelines = ();
  963. foreach (keys %headersymslocation) {
  964. my $sym = $_;
  965. next if $headersymshasdoxygen{$sym};
  966. next if $headersymslocation{$sym} ne $header;
  967. # the index of the blank line we put before the function declaration in case we needed to replace it with new content from the wiki.
  968. push @removelines, $headersymschunk{$sym};
  969. }
  970. my $contentsref = $headers{$header};
  971. foreach (@removelines) {
  972. delete $$contentsref[$_]; # delete DOES NOT RENUMBER existing elements!
  973. }
  974. my $path = "$incpath/$header.tmp";
  975. open(FH, '>', $path) or die("Can't open '$path': $!\n");
  976. foreach (@$contentsref) {
  977. print FH "$_\n" if defined $_;
  978. }
  979. close(FH);
  980. rename($path, "$incpath/$header") or die("Can't rename '$path' to '$incpath/$header': $!\n");
  981. }
  982. if (defined $readmepath) {
  983. if ( -d $wikireadmepath ) {
  984. mkdir($readmepath); # just in case
  985. opendir(DH, $wikireadmepath) or die("Can't opendir '$wikireadmepath': $!\n");
  986. while (readdir(DH)) {
  987. my $dent = $_;
  988. if ($dent =~ /\A(.*?)\.md\Z/) { # we only bridge Markdown files here.
  989. next if $1 eq 'FrontPage';
  990. filecopy("$wikireadmepath/$dent", "$readmepath/README-$dent", "\n");
  991. }
  992. }
  993. closedir(DH);
  994. }
  995. }
  996. } elsif ($copy_direction == -1) { # --copy-to-wiki
  997. if (defined $changeformat) {
  998. $dewikify_mode = $changeformat;
  999. $wordwrap_mode = $changeformat;
  1000. }
  1001. foreach (keys %headersyms) {
  1002. my $sym = $_;
  1003. next if not $headersymshasdoxygen{$sym};
  1004. my $symtype = $headersymstype{$sym};
  1005. my $origwikitype = defined $wikitypes{$sym} ? $wikitypes{$sym} : 'md'; # default to MarkDown for new stuff.
  1006. my $wikitype = (defined $changeformat) ? $changeformat : $origwikitype;
  1007. die("Unexpected wikitype '$wikitype'\n") if (($wikitype ne 'mediawiki') and ($wikitype ne 'md') and ($wikitype ne 'manpage'));
  1008. #print("$sym\n"); next;
  1009. $wordwrap_mode = $wikitype;
  1010. my $raw = $headersyms{$sym}; # raw doxygen text with comment characters stripped from start/end and start of each line.
  1011. next if not defined $raw;
  1012. $raw =~ s/\A\s*\\brief\s+//; # Technically we don't need \brief (please turn on JAVADOC_AUTOBRIEF if you use Doxygen), so just in case one is present, strip it.
  1013. my @doxygenlines = split /\n/, $raw;
  1014. my $brief = '';
  1015. while (@doxygenlines) {
  1016. last if $doxygenlines[0] =~ /\A\\/; # some sort of doxygen command, assume we're past the general remarks.
  1017. last if $doxygenlines[0] =~ /\A\s*\Z/; # blank line? End of paragraph, done.
  1018. my $l = shift @doxygenlines;
  1019. chomp($l);
  1020. $l =~ s/\A\s*//;
  1021. $l =~ s/\s*\Z//;
  1022. $brief .= "$l ";
  1023. }
  1024. $brief =~ s/\s+\Z//;
  1025. $brief =~ s/\A(.*?\.) /$1\n\n/; # \brief should only be one sentence, delimited by a period+space. Split if necessary.
  1026. my @briefsplit = split /\n/, $brief;
  1027. next if not defined $briefsplit[0]; # No brief text? Probably a bogus Doxygen comment, skip it.
  1028. $brief = wikify($wikitype, shift @briefsplit) . "\n";
  1029. @doxygenlines = (@briefsplit, @doxygenlines);
  1030. my $remarks = '';
  1031. # !!! FIXME: wordwrap and wikify might handle this, now.
  1032. while (@doxygenlines) {
  1033. last if $doxygenlines[0] =~ /\A\\/; # some sort of doxygen command, assume we're past the general remarks.
  1034. my $l = shift @doxygenlines;
  1035. if ($l =~ /\A\`\`\`/) { # syntax highlighting, don't reformat.
  1036. $remarks .= "$l\n";
  1037. while ((@doxygenlines) && (not $l =~ /\`\`\`\Z/)) {
  1038. $l = shift @doxygenlines;
  1039. $remarks .= "$l\n";
  1040. }
  1041. } else {
  1042. $l =~ s/\A\s*//;
  1043. $l =~ s/\s*\Z//;
  1044. $remarks .= "$l\n";
  1045. }
  1046. }
  1047. #print("REMARKS:\n\n $remarks\n\n");
  1048. $remarks = wordwrap(wikify($wikitype, $remarks));
  1049. $remarks =~ s/\A\s*//;
  1050. $remarks =~ s/\s*\Z//;
  1051. my $decl = $headerdecls{$sym};
  1052. #$decl =~ s/\*\s+SDLCALL/ *SDLCALL/; # Try to make "void * Function" become "void *Function"
  1053. #$decl =~ s/\A\s*extern\s+(SDL_DEPRECATED\s+|)DECLSPEC\s+(.*?)\s+(\*?)SDLCALL/$2$3/;
  1054. my $syntax = '';
  1055. if ($wikitype eq 'mediawiki') {
  1056. $syntax = "<syntaxhighlight lang='c'>\n$decl</syntaxhighlight>\n";
  1057. } elsif ($wikitype eq 'md') {
  1058. $syntax = "```c\n$decl\n```\n";
  1059. } else { die("Expected wikitype '$wikitype'\n"); }
  1060. my %sections = ();
  1061. $sections{'[Brief]'} = $brief; # include this section even if blank so we get a title line.
  1062. $sections{'Remarks'} = "$remarks\n" if $remarks ne '';
  1063. $sections{'Syntax'} = $syntax;
  1064. my @params = (); # have to parse these and build up the wiki tables after, since Markdown needs to know the length of the largest string. :/
  1065. while (@doxygenlines) {
  1066. my $l = shift @doxygenlines;
  1067. if ($l =~ /\A\\param\s+(.*?)\s+(.*)\Z/) {
  1068. my $arg = $1;
  1069. my $desc = $2;
  1070. while (@doxygenlines) {
  1071. my $subline = $doxygenlines[0];
  1072. $subline =~ s/\A\s*//;
  1073. last if $subline =~ /\A\\/; # some sort of doxygen command, assume we're past this thing.
  1074. shift @doxygenlines; # dump this line from the array; we're using it.
  1075. if ($subline eq '') { # empty line, make sure it keeps the newline char.
  1076. $desc .= "\n";
  1077. } else {
  1078. $desc .= " $subline";
  1079. }
  1080. }
  1081. $desc =~ s/[\s\n]+\Z//ms;
  1082. # We need to know the length of the longest string to make Markdown tables, so we just store these off until everything is parsed.
  1083. push @params, $arg;
  1084. push @params, $desc;
  1085. } elsif ($l =~ /\A\\r(eturns?)\s+(.*)\Z/) {
  1086. my $retstr = "R$1"; # "Return" or "Returns"
  1087. my $desc = $2;
  1088. while (@doxygenlines) {
  1089. my $subline = $doxygenlines[0];
  1090. $subline =~ s/\A\s*//;
  1091. last if $subline =~ /\A\\/; # some sort of doxygen command, assume we're past this thing.
  1092. shift @doxygenlines; # dump this line from the array; we're using it.
  1093. if ($subline eq '') { # empty line, make sure it keeps the newline char.
  1094. $desc .= "\n";
  1095. } else {
  1096. $desc .= " $subline";
  1097. }
  1098. }
  1099. $desc =~ s/[\s\n]+\Z//ms;
  1100. $sections{'Return Value'} = wordwrap("$retstr " . wikify($wikitype, $desc)) . "\n";
  1101. } elsif ($l =~ /\A\\deprecated\s+(.*)\Z/) {
  1102. my $desc = $1;
  1103. while (@doxygenlines) {
  1104. my $subline = $doxygenlines[0];
  1105. $subline =~ s/\A\s*//;
  1106. last if $subline =~ /\A\\/; # some sort of doxygen command, assume we're past this thing.
  1107. shift @doxygenlines; # dump this line from the array; we're using it.
  1108. if ($subline eq '') { # empty line, make sure it keeps the newline char.
  1109. $desc .= "\n";
  1110. } else {
  1111. $desc .= " $subline";
  1112. }
  1113. }
  1114. $desc =~ s/[\s\n]+\Z//ms;
  1115. $sections{'Deprecated'} = wordwrap(wikify($wikitype, $desc)) . "\n";
  1116. } elsif ($l =~ /\A\\since\s+(.*)\Z/) {
  1117. my $desc = $1;
  1118. while (@doxygenlines) {
  1119. my $subline = $doxygenlines[0];
  1120. $subline =~ s/\A\s*//;
  1121. last if $subline =~ /\A\\/; # some sort of doxygen command, assume we're past this thing.
  1122. shift @doxygenlines; # dump this line from the array; we're using it.
  1123. if ($subline eq '') { # empty line, make sure it keeps the newline char.
  1124. $desc .= "\n";
  1125. } else {
  1126. $desc .= " $subline";
  1127. }
  1128. }
  1129. $desc =~ s/[\s\n]+\Z//ms;
  1130. $sections{'Version'} = wordwrap(wikify($wikitype, $desc)) . "\n";
  1131. } elsif ($l =~ /\A\\threadsafety\s+(.*)\Z/) {
  1132. my $desc = $1;
  1133. while (@doxygenlines) {
  1134. my $subline = $doxygenlines[0];
  1135. $subline =~ s/\A\s*//;
  1136. last if $subline =~ /\A\\/; # some sort of doxygen command, assume we're past this thing.
  1137. shift @doxygenlines; # dump this line from the array; we're using it.
  1138. if ($subline eq '') { # empty line, make sure it keeps the newline char.
  1139. $desc .= "\n";
  1140. } else {
  1141. $desc .= " $subline";
  1142. }
  1143. }
  1144. $desc =~ s/[\s\n]+\Z//ms;
  1145. $sections{'Thread Safety'} = wordwrap(wikify($wikitype, $desc)) . "\n";
  1146. } elsif ($l =~ /\A\\sa\s+(.*)\Z/) {
  1147. my $sa = $1;
  1148. $sa =~ s/\(\)\Z//; # Convert "SDL_Func()" to "SDL_Func"
  1149. $sections{'See Also'} = '' if not defined $sections{'See Also'};
  1150. if ($wikitype eq 'mediawiki') {
  1151. $sections{'See Also'} .= ":[[$sa]]\n";
  1152. } elsif ($wikitype eq 'md') {
  1153. $sections{'See Also'} .= "* [$sa]($sa)\n";
  1154. } else { die("Expected wikitype '$wikitype'\n"); }
  1155. }
  1156. }
  1157. my $hfiletext = $wikiheaderfiletext;
  1158. $hfiletext =~ s/\%fname\%/$headersymslocation{$sym}/g;
  1159. $sections{'Header File'} = "$hfiletext\n";
  1160. # Make sure this ends with a double-newline.
  1161. $sections{'See Also'} .= "\n" if defined $sections{'See Also'};
  1162. # We can build the wiki table now that we have all the data.
  1163. if (scalar(@params) > 0) {
  1164. my $str = '';
  1165. if ($wikitype eq 'mediawiki') {
  1166. while (scalar(@params) > 0) {
  1167. my $arg = shift @params;
  1168. my $desc = wikify($wikitype, shift @params);
  1169. $str .= ($str eq '') ? "{|\n" : "|-\n";
  1170. $str .= "|'''$arg'''\n";
  1171. $str .= "|$desc\n";
  1172. }
  1173. $str .= "|}\n";
  1174. } elsif ($wikitype eq 'md') {
  1175. my $longest_arg = 0;
  1176. my $longest_desc = 0;
  1177. my $which = 0;
  1178. foreach (@params) {
  1179. if ($which == 0) {
  1180. my $len = length($_) + 4;
  1181. $longest_arg = $len if ($len > $longest_arg);
  1182. $which = 1;
  1183. } else {
  1184. my $len = length(wikify($wikitype, $_));
  1185. $longest_desc = $len if ($len > $longest_desc);
  1186. $which = 0;
  1187. }
  1188. }
  1189. # Markdown tables are sort of obnoxious.
  1190. $str .= '| ' . (' ' x ($longest_arg+4)) . ' | ' . (' ' x $longest_desc) . " |\n";
  1191. $str .= '| ' . ('-' x ($longest_arg+4)) . ' | ' . ('-' x $longest_desc) . " |\n";
  1192. while (@params) {
  1193. my $arg = shift @params;
  1194. my $desc = wikify($wikitype, shift @params);
  1195. $str .= "| **$arg** " . (' ' x ($longest_arg - length($arg))) . "| $desc" . (' ' x ($longest_desc - length($desc))) . " |\n";
  1196. }
  1197. } else {
  1198. die("Unexpected wikitype!\n"); # should have checked this elsewhere.
  1199. }
  1200. $sections{'Function Parameters'} = $str;
  1201. }
  1202. my $path = "$wikipath/$_.${wikitype}.tmp";
  1203. open(FH, '>', $path) or die("Can't open '$path': $!\n");
  1204. my $sectionsref = $wikisyms{$sym};
  1205. foreach (@standard_wiki_sections) {
  1206. # drop sections we either replaced or removed from the original wiki's contents.
  1207. if (not defined $only_wiki_sections{$_}) {
  1208. delete($$sectionsref{$_});
  1209. }
  1210. }
  1211. my $wikisectionorderref = $wikisectionorder{$sym};
  1212. # Make sure there's a footer in the wiki that puts this function in CategoryAPI...
  1213. if (not $$sectionsref{'[footer]'}) {
  1214. $$sectionsref{'[footer]'} = '';
  1215. push @$wikisectionorderref, '[footer]';
  1216. }
  1217. # If changing format, convert things that otherwise are passed through unmolested.
  1218. if (defined $changeformat) {
  1219. if (($dewikify_mode eq 'md') and ($origwikitype eq 'mediawiki')) {
  1220. $$sectionsref{'[footer]'} =~ s/\[\[(Category[a-zA-Z0-9_]+)\]\]/[$1]($1)/g;
  1221. } elsif (($dewikify_mode eq 'mediawiki') and ($origwikitype eq 'md')) {
  1222. $$sectionsref{'[footer]'} =~ s/\[(Category[a-zA-Z0-9_]+)\]\(.*?\)/[[$1]]/g;
  1223. }
  1224. foreach (keys %only_wiki_sections) {
  1225. my $sect = $_;
  1226. if (defined $$sectionsref{$sect}) {
  1227. $$sectionsref{$sect} = wikify($wikitype, dewikify($origwikitype, $$sectionsref{$sect}));
  1228. }
  1229. }
  1230. }
  1231. my $footer = $$sectionsref{'[footer]'};
  1232. my $symtypename;
  1233. if ($symtype == 1) {
  1234. $symtypename = 'Function';
  1235. } elsif ($symtype == 2) {
  1236. $symtypename = 'Macro';
  1237. } else {
  1238. die("Unexpected symbol type $symtype!\n");
  1239. }
  1240. if ($wikitype eq 'mediawiki') {
  1241. $footer =~ s/\[\[CategoryAPI\]\],?\s*//g;
  1242. $footer =~ s/\[\[CategoryAPI${symtypename}\]\],?\s*//g;
  1243. $footer = "[[CategoryAPI]], [[CategoryAPI$symtypename]]" . (($footer eq '') ? "\n" : ", $footer");
  1244. } elsif ($wikitype eq 'md') {
  1245. $footer =~ s/\[CategoryAPI\]\(CategoryAPI\),?\s*//g;
  1246. $footer =~ s/\[CategoryAPI${symtypename}\]\(CategoryAPI${symtypename}\),?\s*//g;
  1247. $footer = "[CategoryAPI](CategoryAPI), [CategoryAPI$symtypename](CategoryAPI$symtypename)" . (($footer eq '') ? '' : ', ') . $footer;
  1248. } else { die("Unexpected wikitype '$wikitype'\n"); }
  1249. $$sectionsref{'[footer]'} = $footer;
  1250. if (defined $wikipreamble) {
  1251. my $wikified_preamble = wikify($wikitype, $wikipreamble);
  1252. if ($wikitype eq 'mediawiki') {
  1253. print FH "====== $wikified_preamble ======\n";
  1254. } elsif ($wikitype eq 'md') {
  1255. print FH "###### $wikified_preamble\n";
  1256. } else { die("Unexpected wikitype '$wikitype'\n"); }
  1257. }
  1258. my $prevsectstr = '';
  1259. my @ordered_sections = (@standard_wiki_sections, defined $wikisectionorderref ? @$wikisectionorderref : ()); # this copies the arrays into one.
  1260. foreach (@ordered_sections) {
  1261. my $sect = $_;
  1262. next if $sect eq '[start]';
  1263. next if (not defined $sections{$sect} and not defined $$sectionsref{$sect});
  1264. my $section = defined $sections{$sect} ? $sections{$sect} : $$sectionsref{$sect};
  1265. if ($sect eq '[footer]') {
  1266. # Make sure previous section ends with two newlines.
  1267. if (substr($prevsectstr, -1) ne "\n") {
  1268. print FH "\n\n";
  1269. } elsif (substr($prevsectstr, -2) ne "\n\n") {
  1270. print FH "\n";
  1271. }
  1272. print FH "----\n"; # It's the same in Markdown and MediaWiki.
  1273. } elsif ($sect eq '[Brief]') {
  1274. if ($wikitype eq 'mediawiki') {
  1275. print FH "= $sym =\n\n";
  1276. } elsif ($wikitype eq 'md') {
  1277. print FH "# $sym\n\n";
  1278. } else { die("Unexpected wikitype '$wikitype'\n"); }
  1279. } else {
  1280. if ($wikitype eq 'mediawiki') {
  1281. print FH "\n== $sect ==\n\n";
  1282. } elsif ($wikitype eq 'md') {
  1283. print FH "\n## $sect\n\n";
  1284. } else { die("Unexpected wikitype '$wikitype'\n"); }
  1285. }
  1286. my $sectstr = defined $sections{$sect} ? $sections{$sect} : $$sectionsref{$sect};
  1287. print FH $sectstr;
  1288. $prevsectstr = $sectstr;
  1289. # make sure these don't show up twice.
  1290. delete($sections{$sect});
  1291. delete($$sectionsref{$sect});
  1292. }
  1293. print FH "\n\n";
  1294. close(FH);
  1295. if (defined $changeformat and ($origwikitype ne $wikitype)) {
  1296. system("cd '$wikipath' ; git mv '$_.${origwikitype}' '$_.${wikitype}'");
  1297. unlink("$wikipath/$_.${origwikitype}");
  1298. }
  1299. rename($path, "$wikipath/$_.${wikitype}") or die("Can't rename '$path' to '$wikipath/$_.${wikitype}': $!\n");
  1300. }
  1301. if (defined $readmepath) {
  1302. if ( -d $readmepath ) {
  1303. mkdir($wikireadmepath); # just in case
  1304. opendir(DH, $readmepath) or die("Can't opendir '$readmepath': $!\n");
  1305. while (my $d = readdir(DH)) {
  1306. my $dent = $d;
  1307. if ($dent =~ /\AREADME\-(.*?\.md)\Z/) { # we only bridge Markdown files here.
  1308. my $wikifname = $1;
  1309. next if $wikifname eq 'FrontPage.md';
  1310. filecopy("$readmepath/$dent", "$wikireadmepath/$wikifname", "\n");
  1311. }
  1312. }
  1313. closedir(DH);
  1314. my @pages = ();
  1315. opendir(DH, $wikireadmepath) or die("Can't opendir '$wikireadmepath': $!\n");
  1316. while (my $d = readdir(DH)) {
  1317. my $dent = $d;
  1318. if ($dent =~ /\A(.*?)\.(mediawiki|md)\Z/) {
  1319. my $wikiname = $1;
  1320. next if $wikiname eq 'FrontPage';
  1321. push @pages, $wikiname;
  1322. }
  1323. }
  1324. closedir(DH);
  1325. open(FH, '>', "$wikireadmepath/FrontPage.md") or die("Can't open '$wikireadmepath/FrontPage.md': $!\n");
  1326. print FH "# All READMEs available here\n\n";
  1327. foreach (sort @pages) {
  1328. my $wikiname = $_;
  1329. print FH "- [$wikiname]($wikiname)\n";
  1330. }
  1331. close(FH);
  1332. }
  1333. }
  1334. } elsif ($copy_direction == -2) { # --copy-to-manpages
  1335. # This only takes from the wiki data, since it has sections we omit from the headers, like code examples.
  1336. $manpath .= "/man3";
  1337. File::Path::make_path($manpath);
  1338. $dewikify_mode = 'manpage';
  1339. $wordwrap_mode = 'manpage';
  1340. my $introtxt = '';
  1341. if (0) {
  1342. open(FH, '<', "$srcpath/LICENSE.txt") or die("Can't open '$srcpath/LICENSE.txt': $!\n");
  1343. while (<FH>) {
  1344. chomp;
  1345. $introtxt .= ".\\\" $_\n";
  1346. }
  1347. close(FH);
  1348. }
  1349. if (!$gitrev) {
  1350. $gitrev = `cd "$srcpath" ; git rev-list HEAD~..`;
  1351. chomp($gitrev);
  1352. }
  1353. # !!! FIXME
  1354. open(FH, '<', "$srcpath/$versionfname") or die("Can't open '$srcpath/$versionfname': $!\n");
  1355. my $majorver = 0;
  1356. my $minorver = 0;
  1357. my $patchver = 0;
  1358. while (<FH>) {
  1359. chomp;
  1360. if (/$versionmajorregex/) {
  1361. $majorver = int($1);
  1362. } elsif (/$versionminorregex/) {
  1363. $minorver = int($1);
  1364. } elsif (/$versionpatchregex/) {
  1365. $patchver = int($1);
  1366. }
  1367. }
  1368. close(FH);
  1369. my $fullversion = "$majorver.$minorver.$patchver";
  1370. foreach (keys %headersyms) {
  1371. my $sym = $_;
  1372. next if not defined $wikisyms{$sym}; # don't have a page for that function, skip it.
  1373. my $wikitype = $wikitypes{$sym};
  1374. my $sectionsref = $wikisyms{$sym};
  1375. my $remarks = $sectionsref->{'Remarks'};
  1376. my $params = $sectionsref->{'Function Parameters'};
  1377. my $returns = $sectionsref->{'Return Value'};
  1378. my $version = $sectionsref->{'Version'};
  1379. my $threadsafety = $sectionsref->{'Thread Safety'};
  1380. my $related = $sectionsref->{'See Also'};
  1381. my $examples = $sectionsref->{'Code Examples'};
  1382. my $deprecated = $sectionsref->{'Deprecated'};
  1383. my $headerfile = $manpageheaderfiletext;
  1384. $headerfile =~ s/\%fname\%/$headersymslocation{$sym}/g;
  1385. $headerfile .= "\n";
  1386. my $brief = $sectionsref->{'[Brief]'};
  1387. my $decl = $headerdecls{$sym};
  1388. my $str = '';
  1389. $brief = "$brief";
  1390. $brief =~ s/\A[\s\n]*\= .*? \=\s*?\n+//ms;
  1391. $brief =~ s/\A[\s\n]*\=\= .*? \=\=\s*?\n+//ms;
  1392. $brief =~ s/\A(.*?\.) /$1\n/; # \brief should only be one sentence, delimited by a period+space. Split if necessary.
  1393. my @briefsplit = split /\n/, $brief;
  1394. $brief = shift @briefsplit;
  1395. $brief = dewikify($wikitype, $brief);
  1396. if (defined $remarks) {
  1397. $remarks = dewikify($wikitype, join("\n", @briefsplit) . $remarks);
  1398. }
$str .= $introtxt;
# roff comment lines (".\" ...") documenting the license and provenance of the
# generated page. In the Perl source, "\\\"" emits a literal backslash followed
# by a double quote, producing the roff comment marker \".
$str .= ".\\\" This manpage content is licensed under Creative Commons\n";
$str .= ".\\\" Attribution 4.0 International (CC BY 4.0)\n";
$str .= ".\\\" https://creativecommons.org/licenses/by/4.0/\n";
$str .= ".\\\" This manpage was generated from ${projectshortname}'s wiki page for $sym:\n";
$str .= ".\\\" $wikiurl/$sym\n";
$str .= ".\\\" Generated with SDL/build-scripts/wikiheaders.pl\n";
$str .= ".\\\" revision $gitrev\n" if $gitrev ne '';
$str .= ".\\\" Please report issues in this manpage's content at:\n";
$str .= ".\\\" $bugreporturl\n";
$str .= ".\\\" Please report issues in the generation of this manpage from the wiki at:\n";
$str .= ".\\\" https://github.com/libsdl-org/SDL/issues/new?title=Misgenerated%20manpage%20for%20$sym\n";
$str .= ".\\\" $projectshortname can be found at $projecturl\n";
# Define a .URL macro. The "www.tmac" thing decides if we're using GNU roff (which has a .URL macro already), and if so, overrides the macro we just created.
# This wizardry is from https://web.archive.org/web/20060102165607/http://people.debian.org/~branden/talks/wtfm/wtfm.pdf
$str .= ".de URL\n";
$str .= '\\$2 \(laURL: \\$1 \(ra\\$3' . "\n";
$str .= "..\n";
$str .= '.if \n[.g] .mso www.tmac' . "\n";
# Title header: symbol name, man section 3, plus version/footer strings.
$str .= ".TH $sym 3 \"$projectshortname $fullversion\" \"$projectfullname\" \"$projectshortname$majorver FUNCTIONS\"\n";
$str .= ".SH NAME\n";
$str .= "$sym";
$str .= " \\- $brief" if (defined $brief);  # classic "name \- one-line description" form.
$str .= "\n";
# Optional sections, emitted only when the wiki page provided them.
if (defined $deprecated) {
    $str .= ".SH DEPRECATED\n";
    $str .= dewikify($wikitype, $deprecated) . "\n";
}
if (defined $headerfile) {
    $str .= ".SH HEADER FILE\n";
    $str .= dewikify($wikitype, $headerfile) . "\n";
}
$str .= ".SH SYNOPSIS\n";
$str .= ".nf\n";  # no-fill mode: preserve the declaration's own line breaks.
$str .= ".B #include \\(dq$mainincludefname\\(dq\n";  # \(dq is roff for a literal double quote.
$str .= ".PP\n";
# Emit the C declaration one line at a time in bold.
my @decllines = split /\n/, $decl;
foreach (@decllines) {
    # NOTE(review): the opening quote after .BI is never closed on this line;
    # appears intentional (roff terminates the quoted argument at end of line)
    # -- confirm against groff behavior before "fixing".
    $str .= ".BI \"$_\n";
}
$str .= ".fi\n";  # back to fill mode.
if (defined $remarks) {
    $str .= ".SH DESCRIPTION\n";
    $str .= $remarks . "\n";  # already dewikified above.
}
if (defined $params) {
    $str .= ".SH FUNCTION PARAMETERS\n";
    my @lines = split /\n/, $params;
    if ($wikitype eq 'mediawiki') {
        # MediaWiki table layout expected here:
        #   {|
        #   | '''name'''
        #   | description text
        #   |-            (row separator; the final row ends with |} instead)
        die("Unexpected data parsing MediaWiki table") if (shift @lines ne '{|'); # Dump the '{|' start
        while (scalar(@lines) >= 3) {  # each row: name line, desc line, terminator line.
            my $name = shift @lines;
            my $desc = shift @lines;
            my $terminator = shift @lines; # the '|-' or '|}' line.
            last if ($terminator ne '|-') and ($terminator ne '|}'); # we seem to have run out of table.
            $name =~ s/\A\|\s*//;               # strip leading '|' cell marker...
            $name =~ s/\A\*\*(.*?)\*\*/$1/;     # ...and Markdown bold markup...
            $name =~ s/\A\'\'\'(.*?)\'\'\'/$1/; # ...or MediaWiki bold markup.
            $desc =~ s/\A\|\s*//;
            $desc = dewikify($wikitype, $desc);
            #print STDERR "FN: $sym NAME: $name DESC: $desc TERM: $terminator\n";
            # .TP tagged paragraph: italic parameter name, then its description.
            $str .= ".TP\n";
            $str .= ".I $name\n";
            $str .= "$desc\n";
        }
    } elsif ($wikitype eq 'md') {
        # Markdown table layout: an empty "| | |" header row, a "|---|---|"
        # divider, then one "| name | description |" row per parameter.
        my $l;
        $l = shift @lines;
        die("Unexpected data parsing Markdown table") if (not $l =~ /\A\s*\|\s*\|\s*\|\s*\Z/);
        $l = shift @lines;
        die("Unexpected data parsing Markdown table") if (not $l =~ /\A\s*\|\s*\-*\s*\|\s*\-*\s*\|\s*\Z/);
        while (scalar(@lines) >= 1) {
            $l = shift @lines;
            if ($l =~ /\A\s*\|\s*(.*?)\s*\|\s*(.*?)\s*\|\s*\Z/) {
                my $name = $1;
                my $desc = $2;
                $name =~ s/\A\*\*(.*?)\*\*/$1/;     # strip bold markup from the name.
                $name =~ s/\A\'\'\'(.*?)\'\'\'/$1/;
                $desc = dewikify($wikitype, $desc);
                $str .= ".TP\n";
                $str .= ".I $name\n";
                $str .= "$desc\n";
            } else {
                last; # we seem to have run out of table.
            }
        }
    } else {
        die("write me");  # unknown wiki format; needs explicit support here.
    }
}
# More optional sections, each emitted only if the wiki page supplied content.
if (defined $returns) {
    $str .= ".SH RETURN VALUE\n";
    $str .= dewikify($wikitype, $returns) . "\n";
}
if (defined $examples) {
    $str .= ".SH CODE EXAMPLES\n";
    # Temporarily disable the extra indent dewikify applies to code blocks in
    # manpage output; restore it once the examples are rendered.
    $dewikify_manpage_code_indent = 0;
    $str .= dewikify($wikitype, $examples) . "\n";
    $dewikify_manpage_code_indent = 1;
}
if (defined $threadsafety) {
    $str .= ".SH THREAD SAFETY\n";
    $str .= dewikify($wikitype, $threadsafety) . "\n";
}
if (defined $version) {
    $str .= ".SH AVAILABILITY\n";
    $str .= dewikify($wikitype, $version) . "\n";
}
if (defined $related) {
    $str .= ".SH SEE ALSO\n";
    # !!! FIXME: lots of code duplication in all of these.
    # Normalize each related entry down to a bare symbol name, then emit it as
    # a ".BR name (3)" cross-reference, comma-separated between entries.
    my $v = dewikify($wikitype, $related);
    my @desclines = split /\n/, $v;
    my $nextstr = '';
    foreach (@desclines) {
        s/\A(\:|\* )//;          # strip list-item markers.
        s/\(\)\Z//; # Convert "SDL_Func()" to "SDL_Func"
        s/\[\[(.*?)\]\]/$1/; # in case some wikilinks remain.
        s/\[(.*?)\]\(.*?\)/$1/; # in case some wikilinks remain.
        s/\A\*\s*\Z//;           # drop a lone '*' bullet.
        s/\A\/*//;               # drop leading slashes.
        s/\A\.BR\s+//; # dewikify added this, but we want to handle it.
        s/\A\.I\s+//; # dewikify added this, but we want to handle it.
        s/\A\s+//;
        s/\s+\Z//;
        next if $_ eq '';        # skip entries that normalized to nothing.
        $str .= "$nextstr.BR $_ (3)";
        $nextstr = ",\n";        # separator placed before every later entry.
    }
    $str .= "\n";
}
# Deliberately disabled (if (0)): an alternate COPYRIGHT section rendered with
# the .UR/.UE URL macros. Kept for reference; the same license/provenance
# information is already emitted as roff comments at the top of the page.
if (0) {
    $str .= ".SH COPYRIGHT\n";
    $str .= "This manpage is licensed under\n";
    $str .= ".UR https://creativecommons.org/licenses/by/4.0/\n";
    $str .= "Creative Commons Attribution 4.0 International (CC BY 4.0)\n";
    $str .= ".UE\n";
    $str .= ".PP\n";
    $str .= "This manpage was generated from\n";
    $str .= ".UR $wikiurl/$sym\n";
    $str .= "${projectshortname}'s wiki\n";
    $str .= ".UE\n";
    $str .= "using SDL/build-scripts/wikiheaders.pl";
    $str .= " revision $gitrev" if $gitrev ne '';
    $str .= ".\n";
    $str .= "Please report issues in this manpage at\n";
    $str .= ".UR $bugreporturl\n";
    $str .= "our bugtracker!\n";
    $str .= ".UE\n";
}
# Write the finished page to a .tmp file first, then rename() it into place so
# readers never observe a partially-written manpage.
# NOTE(review): $_ here comes from an enclosing foreach loop outside this view;
# it is presumably the same symbol name as $sym -- confirm against the caller.
my $path = "$manpath/$_.3.tmp";
open(FH, '>', $path) or die("Can't open '$path': $!\n");
print FH $str;
close(FH);
rename($path, "$manpath/$_.3") or die("Can't rename '$path' to '$manpath/$_.3': $!\n");
}
}
  1556. # end of wikiheaders.pl ...