# Makefile to update the HTML files in this directory. May be run from
# cron or by hand. Some useful targets:
#
# make (or: make all) - update all HTML, RSS and Atom files
# make foo.html - for any foo, rebuild just that file
# make foo.atom - for any foo, rebuild just that file
# make commit - update all files and commit in CVS
#
# foo.html is generated from foo.tmpl, which is itself an HTML file,
# but may contain inclusions (handled with hxincl). Some of these
# inclusions are themselves generated by extracting information from
# other HTML files or from Atom feeds. Different methods are used for
# building those inclusions. In general, there are the following kinds
# of files:
#
# .html - HTML file to serve on-line, typically generated
# .tmpl - template to generate a .html file from, typically hand-made
# .inc - HTML fragment for inclusion in other files, typically hand-made
# .ini - Config file to generate .gen files from, typically hand-made
# .gen - HTML fragment for inclusion in other files, typically generated
# .rss - RSS file, typically generated from a .html file
# .atom - Atom file, typically generated from a .html file
# .stored - local copy of a remote file
# .trans1 - a set of elements pointing to translations, generated
# .trans2 - a text pointing to the text in .trans4, if any; generated
# .trans3 - a feedback form (only on non-English pages), generated
# .trans4 - a set of elements pointing to translations, generated
# .addr - a footer with info about the page's author
# .var (typically .foo.var) - database of translations of foo.html
# .php3 - members.*.php3 uses PHP to get a current list of WG members
#
# Some of the .gen files:
#
# news.*.gen - HTML fragment with most recent news items from several pages
# wgnews.gen - HTML fragment with the most recent CSS blog entries
# roadmap*.gen - HTML fragments from the table of publications
#
# Localization:
#
# The AWK programs are localized with GNU gettext.
# Translations of
# boilerplate text are in the directories XXX/LC_MESSAGES/ for each
# locale XXX. Unfortunately, this requires the corresponding locale to
# be installed on the machine that runs gawk. Under Debian, install
# localizations with: dpkg-reconfigure locales
#
# To extract localizable strings from the AWK scripts, use:
# gawk --gen-po -f foo.awk >foo.po
#
# Translations are named after a language where possible (foo.fr.html,
# foo.de.html) unless there are significant differences between
# regions, in which case the region is included, too (foo.pt_BR.html,
# foo.pt_PT).
#
# Apache does language negotiation after content type negotiation.
# So, to exclude files from being negotiated, add a low qs at the end
# of the *Content-Type* line in the corresponding .foo.var file.
#
# Locales also have to be present in the OS. Under Debian,
# "dpkg-reconfigure locales" can be used to add missing locales to the
# system.
#
# Adding new files:
#
# A new translation of an existing file only has to be added to the
# relevant .var file, not to this Makefile.
#
# A completely new file needs its basename added to BASENAMES below and
# it needs a .var file. Create the latter with "make .foo.var" where foo
# is the basename.
#
# To do:
#
# As of March 2009, the feedback form is still disabled, for lack of a
# backend to process the submissions.
#
# Created: 28 November 2008
# Author: Bert Bos

# What other directories the articles for articlelists come from.
# NOTE(review): several entries contain spaces in the original text
# ("../Examples/007 Test SAC Buttons", "../Examples/011 Test/Fonts");
# make treats each space-separated word as a separate directory —
# confirm these are really distinct directories (Test, SAC, Buttons,
# Test/Fonts) and not path names containing spaces.
#
DIRS = .. ../Examples/007 Test SAC Buttons ../LieBos2e/enter ../2011 \
	../LieBos2e/history ../History ../Examples/011 Test/Fonts

# Rather than list all the TARGETS here explicitly, we grep them from the
# various .var files.
#
# Hand-maintained list of page basenames; everything else in this
# Makefile is derived from it (see "Adding new files" in the header).
BASENAMES := Disclosures Overview articlelist current-work learning \
	members read software specs translating all-properties all-descriptors

# All HTML/PHP files to build, grepped from the "URI:" lines of the
# per-page .var files.  The trailing /dev/null keeps grep well-behaved
# (and non-blocking on stdin) even if the .var file list were empty.
# ":=" (simple expansion) runs the shell command exactly once at parse
# time; the original "=" re-ran grep over every .var file each time
# TARGETS was expanded (COMMITS, both foreach/eval loops below, and
# Makefile.common all expand it).
TARGETS := $(sort $(shell \
	grep -hE '^URI:.*(html|php3)$$' $(BASENAMES:%=.%.var) /dev/null|cut -d: -f2-))

# Feed files (Atom/RSS), generated from the corresponding .html pages.
FEEDS := current-work.atom current-work.rss learning.atom learning.rss \
	Overview.atom Overview.rss software.atom software.rss

# Files to commit to CVS ("make commit", defined elsewhere,
# presumably in Makefile.common — TODO confirm).
# COMMITS = $(TARGETS) $(BASENAMES:=.var) $(FEEDS) daily-color.css
COMMITS := $(TARGETS) $(FEEDS) daily-color.css

all: $(COMMITS) # subdirs

#subdirs:; @$(foreach f,$(DIRS),$(MAKE) -C $(f); )

# The Overview.* files depend on some news files, in addition to the
# various other files that all HTML files depend on (see
# Makefile.common)
#
# The $(foreach ...) extracts the set of language codes from the
# target names (e.g. "Overview.fr.html" -> "fr") and emits one extra
# prerequisite line per language via $(eval ...).
$(foreach lang,$(sort $(subst .,,$(suffix $(basename $(TARGETS))))),\
$(eval Overview.$(lang).proto: news.$(lang).gen wgnews.gen roadmap-compl.gen \
roadmap-high.gen))

# The articlelist.* files depend on sitemap.* files, in addition to the
# various other files that all HTML files depend on (see
# Makefile.common)
#
$(foreach lang,$(sort $(subst .,,$(suffix $(basename $(TARGETS))))),\
$(eval articlelist.$(lang).proto: sitemap.$(lang).gen))

# The specs.*.html files are generated from the specs.*.proto files,
# but with an extra step to add some links and translated text.
# This rule takes precedence over the generic rule in Makefile.common
# to make .html from .proto
#
# Pipeline: normalize the proto file to XML, convert to hxpipe's
# line-based format, run specs.awk (localized via LC_ALL) to add the
# links/translated text, convert back, then re-normalize.  The result
# only replaces $@ when $(COMPARE2) says it differs (ignoring the @@@
# token), so unchanged files keep their timestamps.
# NOTE(review): $(AWK), $(COMPARE2) and the per-language LOCALE_*
# variables are not defined here — presumably in Makefile.common;
# confirm there.
specs.%.html: specs.%.proto
	@echo "* Adding some links and normalizing -> $@"
	@hxnormalize -x -i 0 -l 10000 $< | \
	hxpipe | LC_ALL=$(LOCALE_$*) $(AWK) -f specs.awk | hxunpipe | \
	hxunent -b | xml2asc | asc2xml | hxnormalize -i 1 >$@.tmp
	@$(COMPARE2) -s -t @@@ $@.tmp $@ && touch $@ || cp $@.tmp $@
	@rm $@.tmp

# Build the "recent news" fragment for language %: concatenate the
# news-bearing pages (the % translation AND the English originals,
# with hxcopy rebasing relative links as if included from Overview),
# then let sort.awk + news.awk pick the most recent items.
# LANGUAGE= is cleared so only LC_ALL controls gettext's language.
news.%.gen: learning.%.html software.%.html current-work.%.html\
		learning.en.html software.en.html current-work.en.html\
		sort.awk news.awk
	@echo "* Extracting recent news -> $@"
	@(hxnormalize -x learning.$*.html |\
	hxcopy -s -i learning.$*.html -o Overview; \
	hxnormalize -x current-work.$*.html |\
	hxcopy -s -i current-work.$*.html -o Overview;\
	hxnormalize -x software.$*.html |\
	hxcopy -s -i software.$*.html -o Overview; \
	hxnormalize -x learning.en.html |\
	hxcopy -s -i learning.en.html -o Overview; \
	hxnormalize -x current-work.en.html |\
	hxcopy -s -i current-work.en.html -o Overview;\
	hxnormalize -x software.en.html |\
	hxcopy -s -i software.en.html -o Overview) |\
	hxpipe |\
	LANGUAGE= LC_ALL=$(LOCALE_$*) gawk -f sort.awk -f news.awk |\
	hxunpipe >$@

# Most recent CSS blog entries, extracted from the locally stored copy
# of the blog's Atom feed (see blog.stored below).
wgnews.gen: blog.stored blognews.awk
	@echo "* Extracting blog items -> $@"
	@hxunent -b $< | hxpipe | \
	LANGUAGE= LC_ALL=$(DEFAULT) gawk -f blognews.awk | hxuncdata >$@

# Roadmap fragments: rows of the publications table following the
# '#completed' / '#stable' anchor rows in current-work.en.html
# ('tr' '+' 'tr' selects the sibling rows after the anchor).
roadmap-compl.gen: current-work.en.html
	@echo "* Extracting roadmap (completed) -> $@"
	@hxnormalize -x $< | hxcopy -s -i $< -o Overview |\
	hxselect '#completed' 'tr' '+' 'tr' >$@

roadmap-high.gen: current-work.en.html
	@echo "* Extracting roadmap (stable) -> $@"
	@hxnormalize -x $< | hxcopy -s -i $< -o Overview |\
	hxselect '#stable' 'tr' '+' 'tr' >$@

# current-work.*.html depends also on deliverables.*.gen
# and publications.*.gen
# deliverables.*.gen is generated from deliverables.ini and translated.
# publications.*.gen is generated from publications.ini and translated.
#
# Add the per-language deliverables/publications prerequisites to
# every current-work.<lang>.proto target.
$(foreach lang,$(sort $(subst .,,$(suffix $(basename $(TARGETS))))),\
$(eval \
current-work.$(lang).proto: deliverables.$(lang).gen publications.$(lang).gen))

# Generate the (localized) table of deliverables from the .ini file.
# %.mo-is-ok is presumably a stamp ensuring the compiled gettext
# catalog for language % is up to date (rule likely in Makefile.common
# — TODO confirm).  LANGUAGE= is cleared so LC_ALL alone selects the
# translation.
deliverables.%.gen: deliverables.ini deliverables.awk %.mo-is-ok
	@echo "* Generating table of deliverables -> $@"
	@LANGUAGE= LC_ALL=$(LOCALE_$*) gawk -f deliverables.awk $< >$@

publications.%.gen: publications.ini publications.awk %.mo-is-ok
	@echo "* Generating publication news -> $@"
	@LANGUAGE= LC_ALL=$(LOCALE_$*) gawk -f publications.awk $< >$@

# When current-work.%.tmpl is absent for some language, don't generate
# an empty file (as in Makefile.common), but include publications in
# it.
#
# NOTE(review): the recipe only writes a single empty line; the
# "include publications" part mentioned above is not visible here —
# presumably the inclusion happens via the dependencies added by the
# $(eval) loop above, or part of this recipe was lost. TODO confirm.
current-work.%.tmpl:
	@echo "" >$@

# Get the blog no more than once per hour. And if the online version
# hasn't changed, don't change the time stamp.
#
blog.stored: blog.retrieved
	@echo "* Checking if blog entries changed -> $@"
	@cmp -s $< $@ || cp $< $@

# Re-fetch only if the local copy is missing or older than 60 minutes
# (`find -mmin +60` prints the name, making the test true, only then).
blog.retrieved: always
	@if [ ! -f $@ ] || [ `find $@ -mmin +60` ]; then \
	echo "* Retrieving blog entries -> $@"; \
	curl -s -L https://www.w3.org/blog/CSS/feed/atom >$@; \
	fi

# Make a list of all pages, translated if available, English otherwise
#
PAGELISTS = $(addsuffix /titles1.gen,$(DIRS) .)

# Ask each subdirectory's Makefile for its page titles, rebasing the
# links into this directory with hxcopy.
# NOTE(review): the bare "%" (a directory) as a normal prerequisite
# means a directory mtime change triggers a rebuild — presumably
# intentional here; confirm.
%/titles1.gen: %
	@echo "* Checking sitemap -> $*/"
	@$(MAKE) -C $* titles | hxcopy -i $*/ -o ./ >$@

titles1.gen: titles.gen
	@echo "* Checking sitemap -> ."
	@cp $< $@

# NOTE(review): this recipe only appends an empty line to
# sitemap.$*.tmp and then compares/installs it; the step that actually
# runs mksitemap.awk over $(PAGELISTS) appears to be missing —
# possibly lost from this copy of the file. TODO recover the original
# generation command before relying on this rule.
sitemap.%.gen: $(PAGELISTS) mksitemap.awk
	@echo "" >>sitemap.$*.tmp
	@cmp -s sitemap.$*.tmp $@ && rm sitemap.$*.tmp || mv sitemap.$*.tmp $@

# The CSS.po files are updated with new line numbers and new strings
# to translate when the CSS.pot file changes. (But obviously any new
# strings are not automatically translated. That's also why this
# doesn't need to be in Makefile.common: no translations are changed,
# so no files need to be regenerated.)
#
# The CSS.pot file itself is made by extracting the strings to
# translate from the various scripts and from the list of
# deliverables.
#
# The sed command adjusts the path to the program files and removes
# line numbers, because the line numbers sometimes change and cause
# unnecessary CVS commits.
#
# The 's/^"/" /' works around a bug in gawk 4.0.1 that causes a space
# between words to disappear when a string is split over two lines.
# NOTE(review): that substitution does not appear in the sed command
# below — the comment may be stale or part of the recipe lost; confirm.
#
# Merge new/changed msgids from the master template into each
# language's .po file (via a $@~ backup, since msgmerge reads and
# writes the same file).
%/LC_MESSAGES/CSS.po: en_US/LC_MESSAGES/CSS.pot
	@echo "* Updating strings to translate -> $@";
	@cp $@ $@~
	@msgmerge -N -q -i -o $@ $@~ $<

# The master template: concatenation of the per-script .pot files.
en_US/LC_MESSAGES/CSS.pot: deliverables.ini.pot address.awk.pot \
		deliverables.awk.pot banner.awk.pot \
		translations2.awk.pot translations3.awk.pot \
		translations4.awk.pot specs.awk.pot \
		publications.awk.pot publications.ini.pot
	@echo "* Collecting all strings to translate -> $@";
	@msgcat -i -o $@ $^

# Extract translatable strings from an AWK script (gawk -g = --gen-po),
# rewrite the source path and strip line numbers (see comment above).
%.awk.pot: %.awk
	@echo "* Extracting strings to translate -> $@";
	@gawk -g -f $< | \
	sed -e 's|#: |#: ../../|' -e '/^#:/s/:[0-9][0-9]*//g' | \
	msguniq -i >$@

# Extract the "title =" and "note =" values of an .ini file as msgids.
# ($$ is make-escaping for a literal $ passed to gawk.)
%.ini.pot: %.ini
	@echo "* Extracting strings to translate -> $@";
	@gawk -- 'BEGIN {\
	print "msgid \"\"";\
	print "msgstr \"Content-Type: text/plain; charset=UTF-8\\n\"\n"}\
	/^(title|note) *= *[^ ]/ {\
	sub(/^[^=]*= */, "");\
	gsub(/"/, "\\\"");\
	printf "#: ../../%s\n", FILENAME;\
	printf "msgid \"%s\"\n", $$0;\
	printf "msgstr \"\"\n\n"}' $< | msguniq -i >$@

# A bit of fun: change the color hue if it hasn't changed in 23 hours
# (Used in combination with floatspan-n.css.)
#
# date +%j (day of year) is used as the hue; 1380 minutes = 23 hours.
daily-color.css: always
	@if [ ! -f $@ ]; then \
	echo "* Creating background color -> $@"; \
	echo 'body {background-color: hsl('`date +%j`',95%,17%)}' >$@; \
	elif [ `find $@ -mmin +1380` ]; then \
	echo "* Updating background color -> $@"; \
	echo 'body {background-color: hsl('`date +%j`',95%,17%)}' >$@; \
	fi

# Double-colon rules: Makefile.common presumably contributes its own
# clean:: / very-clean:: recipes as well.
clean::
	@rm -f deliverables.ini.pot address.awk.pot \
	deliverables.awk.pot banner.awk.pot translations2.awk.pot \
	translations3.awk.pot translations4.awk.pot

very-clean::
	@rm -f $(FEEDS) blog.stored blog.retrieved

include Makefile.common

.PHONY: all subdirs always

# Keep blog.retrieved out of parallel builds (GNU make >= 4.4 honors
# per-target .NOTPARALLEL prerequisites; older makes treat this as a
# whole-file .NOTPARALLEL — TODO confirm which behavior is intended).
.NOTPARALLEL: blog.retrieved