################################################################################
#
# File: $Id$
#
# Standard rules for a variety of situations. Includes standard suffixes and
# paths.
#
################################################################################


################################################################################
#
# Required make variables. These should be defined by the calling makefile.
#
# PAPER_NUMBER
#	The paper number for the current paper, e.g., 212.
#
PAPER_NUMBER?=$(error The required make variable PAPER_NUMBER has not been defined. Please set it to the correct value)
#
# IMGDIR
#	The path to the directory that contains images, e.g., images.
#
IMGDIR?=$(error The required make variable IMGDIR has not been defined. Please set it to the directory in which images are located)
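#
# If either variable is left unset by the calling makefile, the $(error ...)
# fires the first time the variable is expanded, stopping the build with a
# useful message. A calling makefile would typically look something like this
# (values purely illustrative):
#
#	PAPER_NUMBER = 212
#	IMGDIR = images
#	include make-includes/standard_rules.make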


################################################################################
#
# Default rules.
#
#
# PDF from LaTeX via DVI (typically for images drawn using PSTricks).
#
%.pdf %-print.pdf: $(IMGDIR)/%.tex
	$(LATEX) --jobname=$(IMGDIR)/$* $<
	$(DVIPS) -q -f $(IMGDIR)/$* | $(PS2EPS) --quiet --loose | $(EPSTOPDF) --filter --outfile=$(IMGDIR)/$@
#
# PDF from LaTeX.
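# The compiler is run twice so that cross-references and the like settle.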
#
%.pdf: %.tex
ifdef UNICODE
	$(XELATEX) $<
	$(XELATEX) $<
else
	$(PDFLATEX) $<
	$(PDFLATEX) $<
endif
#
# 2-up PDF from 1-up PDF.
#
%-2up.pdf: %.pdf
	$(PDFNUP) $< --nup 2x1 --outfile $@
#
# Derived XML with inclusions from source XML without inclusions.
#
# This is done by simply running the original source through xmllint
# with the --xinclude option to process any xi:include elements.
# The result of this could just be piped into the XSLT processor,
# except that (a) not all of the processors support input from stdin,
# and (b) combining both xmllint and XSLT processing into one command
# means that make won't stop if there are any errors from xmllint.
#
# Sed is used to add a comment to the derived XML file, warning that it is
# generated and shouldn't be edited. Sed is used because the <?xml?>
# processing instruction MUST be on the first line, so the warning has to be
# appended after line 1 (the "1a" command) rather than simply prepended. The
# implication is that all input source files have an <?xml?> processing
# instruction on their first line (which they should anyway).
#
%-derived.xml: %.xml
	$(XMLLINT) --xinclude $< > $@
	@$(SED) --in-place --expression='1a <!-- THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT! -->' $@
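#
# The first two lines of the derived file then look something like this
# (illustrative; the exact <?xml?> line comes from the source file):
#
#	<?xml version="1.0"?>
#	<!-- THIS FILE IS AUTOMATICALLY GENERATED. DO NOT EDIT! -->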
#
# LaTeX from derived XML.
#
%.tex: %-derived.xml
ifdef UNICODE
	$(call xslt,$<,xml2xelatex.xsl,$(call xslt_parameter,subject-code,'$(SUBJECT_CODE)'),$(call xslt_parameter,paper-number,'$(PAPER_NUMBER)'),$(call xslt_parameter,paper-year,'$(PAPER_YEAR)'),$(call xslt_parameter,period-code,'$(PAPER_PERIOD)')) > $@
else
	$(call xslt,$<,xml2latex.xsl,$(call xslt_parameter,subject-code,'$(SUBJECT_CODE)'),$(call xslt_parameter,paper-number,'$(PAPER_NUMBER)'),$(call xslt_parameter,paper-year,'$(PAPER_YEAR)'),$(call xslt_parameter,period-code,'$(PAPER_PERIOD)')) > $@
endif
#
# HTML from derived XML.
#
%.html: %-derived.xml
ifdef UNICODE
	$(call xslt,$<,xml2xhtml.xsl,$(call xslt_parameter,subject-code,'$(SUBJECT_CODE)'),$(call xslt_parameter,paper-number,'$(PAPER_NUMBER)'),$(call xslt_parameter,paper-year,'$(PAPER_YEAR)'),$(call xslt_parameter,period-code,'$(PAPER_PERIOD)')) > $@
else
	$(call xslt,$<,xml2html.xsl,$(call xslt_parameter,subject-code,'$(SUBJECT_CODE)'),$(call xslt_parameter,paper-number,'$(PAPER_NUMBER)'),$(call xslt_parameter,paper-year,'$(PAPER_YEAR)'),$(call xslt_parameter,period-code,'$(PAPER_PERIOD)')) > $@
endif
#
# Image rules can be a bit messy, because the prerequisite might be in any one
# of several different formats, but we want to apply the same commands anyway
# (e.g., ImageMagick convert) to generate the target. It would be nice if we
# could specify variable file types in a prerequisite of a pattern rule (e.g.,
# %-foo.png: %.png %.jpg %.tif), but unfortunately we can't :(. The solution is
# to create a make function to refactor the commands, and then specify as many
# rules as required depending on the number of file formats. We still end up
# repeating the rule actions, but at least the guts of the action are coded
# only once in the function.
#
# PNG from TIFF, JPEG, PICT (normal image)
# Note: PNG prerequisites are dealt with further down.
#
convert_normal = $(CONVERT) "$(1)" "$(2)"
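#
# For example (purely illustrative, with IMGDIR = images and CONVERT = convert):
#
#	$(call convert_normal,foo.tif,images/foo.png)
#
# expands to:
#
#	convert "foo.tif" "images/foo.png"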

%.png %-print.png %-web.png: %.tif
	$(call convert_normal,$<,$(IMGDIR)/$@)

%.png %-print.png %-web.png: %.jpg
	$(call convert_normal,$<,$(IMGDIR)/$@)

%.png %-print.png %-web.png: %.pict
	$(call convert_normal,$<,$(IMGDIR)/$@)
#
# PNG from PNG, TIFF, JPEG, PICT (slide background)
#
# We lighten these so that they don't overwhelm the text. Note that 15% is
# too light for most data projectors, so let's try 33% and see what
# happens... (this looks like far too much on screen, but data projectors
# tend to wash things out much, much more).
#
# Includes both blurred and unblurred versions, use as required.
#
# Unblurred:
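# $(1) is the source image and $(2) the output. The first convert produces an
# all-white canvas the same size as the source (via -threshold "-1"), and the
# composite then dissolves the source over it at 33% opacity to lighten it.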
convert_bg = $(CONVERT) "$(1)" -threshold "-1" -depth 16 - | $(COMPOSITE) -dissolve 33% "$(1)" - "$(2)"

%-BG.png: %.tif
	$(call convert_bg,$<,$(IMGDIR)/$@)

%-BG.png: %.png
	$(call convert_bg,$<,$(IMGDIR)/$@)

%-BG.png: %.jpg
	$(call convert_bg,$<,$(IMGDIR)/$@)

%-BG.png: %.pict
	$(call convert_bg,$<,$(IMGDIR)/$@)
#
# Blurred:
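# $(1) is the source image, $(2) a temporary file for the blurred copy, and
# $(3) the final output; the temporary file is removed at the end.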
convert_bg_blur = $(CONVERT) -blur 0.5 "$(1)" "$(2)"; $(CONVERT) "$(2)" -threshold "-1" -depth 16 - | $(COMPOSITE) -dissolve 33% "$(2)" - "$(3)"; rm -f "$(2)"

%-BG-blur.png: %.tif
	$(call convert_bg_blur,$<,$(IMGDIR)/$*-tmp.png,$(IMGDIR)/$@)

%-BG-blur.png: %.png
	$(call convert_bg_blur,$<,$(IMGDIR)/$*-tmp.png,$(IMGDIR)/$@)

%-BG-blur.png: %.jpg
	$(call convert_bg_blur,$<,$(IMGDIR)/$*-tmp.png,$(IMGDIR)/$@)

%-BG-blur.png: %.pict
	$(call convert_bg_blur,$<,$(IMGDIR)/$*-tmp.png,$(IMGDIR)/$@)
#
# PNG from XCF.
#
%.png: %.xcf
	$(XCF2PNG) -o $(IMGDIR)/$@ $<
#
# PNG from EPS.
#
%.png %-print.png %-web.png: %.eps
	$(PS2EPS) --ignoreBB --nohires --loose --gsbbox < $< | \
		$(GS) -q -dNOPAUSE -dBATCH -dSAFER -sDEVICE=png16m -dEPSCrop -r96 \
		-dTextAlphaBits=4 -dGraphicsAlphaBits=4 -sOutputFile=$(IMGDIR)/$@ -

%-web-zoom.png: %.eps
	$(PS2EPS) --ignoreBB --nohires --loose --gsbbox < $< | \
		$(GS) -q -dNOPAUSE -dBATCH -dSAFER -sDEVICE=png16m -dEPSCrop -r144 \
		-dTextAlphaBits=4 -dGraphicsAlphaBits=4 -sOutputFile=$(IMGDIR)/$@ -
#
# PNG from PS.
#
%-web.png: %.ps
	@$(PS2EPS) --ignoreBB --nohires --loose --gsbbox < $< | \
		$(GS) -q -dNOPAUSE -dBATCH -dSAFER -sDEVICE=png16m -dEPSCrop -r96 \
		-dTextAlphaBits=4 -dGraphicsAlphaBits=4 -sOutputFile=$(IMGDIR)/$@ -

%-web-zoom.png: %.ps
	@$(PS2EPS) --ignoreBB --nohires --loose --gsbbox < $< | \
		$(GS) -q -dNOPAUSE -dBATCH -dSAFER -sDEVICE=png16m -dEPSCrop -r144 \
		-dTextAlphaBits=4 -dGraphicsAlphaBits=4 -sOutputFile=$(IMGDIR)/$@ -
#
# PNG from PDF.
#
%-web.png: %.pdf
	$(GS) -q -dNOPAUSE -dBATCH -dSAFER -sDEVICE=png16m -r96 \
		-dTextAlphaBits=4 -dGraphicsAlphaBits=4 -sOutputFile=$(IMGDIR)/$@ $<

%-web-zoom.png: %.pdf
	$(GS) -q -dNOPAUSE -dBATCH -dSAFER -sDEVICE=png16m -r144 \
		-dTextAlphaBits=4 -dGraphicsAlphaBits=4 -sOutputFile=$(IMGDIR)/$@ $<
#
# PDF from Ploticus.
#
%.pdf %-print.pdf: %.plo
	$(PLOTICUS) -eps -tightcrop -o stdout $< | $(SHIFTBBOX) | $(EPSTOPDF) --filter --outfile=$(IMGDIR)/$@
#
# PDF from R.
# This assumes that the first argument to the R script is the output filename
# for the resultant PDF.
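# (Within the script this would typically be picked up with something like
# commandArgs(trailingOnly=TRUE)[1]; how the scripts actually read the
# argument is up to them.)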
#
%.pdf %-print.pdf: %.R
	$(R) --slave --file=$< --args "$(IMGDIR)/$@"
	$(PDFCROP) $(IMGDIR)/$@ $(IMGDIR)/$*-crop.pdf
	-$(MV) $(IMGDIR)/$*-crop.pdf $(IMGDIR)/$@
#
# PDF from PS
#
%.pdf %-print.pdf: %.ps
	$(PS2EPS) --quiet --ignoreBB --nohires --loose < $< | $(EPSTOPDF) --filter --outfile=$(IMGDIR)/$@
#
# PDF from EPS
#
%.pdf %-print.pdf: %.eps
	$(EPSTOPDF) --outfile=$(IMGDIR)/$@ $<
# 	$(PS2EPS) --ignoreBB --nohires --loose < $< | $(PS2PDF) -dEPSCrop - $(IMGDIR)/$@
#
# PDF from SVG (via Inkscape)
# The --export-area-drawing option now appears to behave correctly with
# PDFs (previously it over-cropped slightly).
#
%.pdf %-print.pdf: %.svg
	$(INKSCAPE) --file=$< --without-gui --export-area-drawing --export-pdf=$(IMGDIR)/$@
#
# PNG from SVG (via Inkscape)
#
%.png %-print.png %-web.png: %.svg
	$(INKSCAPE) --file=$< --without-gui --export-area-drawing --export-background-opacity=1.0 --export-dpi=96 --export-png=$(IMGDIR)/$@

%-web-zoom.png: %.svg
	$(INKSCAPE) --file=$< --without-gui --export-area-drawing --export-background-opacity=1.0 --export-dpi=144 --export-png=$(IMGDIR)/$@

%-print-zoom.png: %.svg
	$(INKSCAPE) --file=$< --without-gui --export-area-drawing --export-background-opacity=1.0 --export-dpi=300 --export-png=$(IMGDIR)/$@

%-print-zoom-transparent.png: %.svg
	$(INKSCAPE) --file=$< --without-gui --export-area-drawing --export-background-opacity=0 --export-dpi=300 --export-png=$(IMGDIR)/$@

%-transparent.png: %.svg
	$(INKSCAPE) --file=$< --without-gui --export-area-drawing --export-background-opacity=0 --export-dpi=96 --export-png=$(IMGDIR)/$@
#
# PNG from PNG (change the name).
# These appear to need to be separate rules. If the rules are combined into
# one and both variants need to be created (i.e., both %-web.png and
# %-print.png), make only generates one of them (most likely %-web.png, as
# it's first in the rule). This is Kind of Weird.
#
%-print.png: %.png
	$(CP) $< $(IMGDIR)/$@

%-web.png: %.png
	$(CP) $< $(IMGDIR)/$@
#
# CSS from Sass (both syntaxes).
#
%.css: %.sass
	$(SASS) $< $@

%.css: %.scss
	$(SASS) $< $@