__all__ = [
# Globals
- "cfg", # configuration from webber.ini
- "directories", # global hash of directories, by rel_path
- "files", # global hash of files, by rel_path
- "functions", # all exported template functions
+ "cfg", # configuration from webber.ini
+ "directories", # global hash of directories, by rel_path
+ "files", # global hash of files, by rel_path
+ "functions", # all exported template functions
# Functions
- "set_hook", # decorator for hook-functions
- "set_macro", # define macro
+ "set_hook", # decorator for hook-functions
+ "set_macro", # define macro
"set_function", # define functions for the template
"get_file_for",
"get_link_from",
"get_current_file", # because mako-called functions cannot access the
- # current File object
+ # current File object
"get_program_directory",
- "log", # misc logging functions
+ "log", # misc logging functions
"info",
"warning",
"error",
# Warn about long titles / long linktitles
if len(self.linktitle) > 20:
- log('%s: define a shorter "linktitle: xxx"')
+ log('%s: define a shorter linktitle' % self.rel_path)
self.contents = "".join(txt)
def get_file_for(name):
"""webber.files is an hash of File objects, but keyed on the real file name.
This function returns a File object for a specific linktitle."""
-
+
try:
return _get_file_for_cache[name]
except:
if rel_path.startswith("./"):
rel_path = rel_path[2:]
#print " from path:", source.out_path
- #print " to path: ", out_path
+ #print " to path: ", out_path
#print " rel path: ", rel_path
return rel_path
#
# Logging
#
-# 1 Error
-# 2 Warning
-# 3 Info
-# 4 Log
+# 1 Error
+# 2 Warning
+# 3 Info
+# 4 Log
# 5... Debug
#
def log(s, level=4):
# IkiWiki does something like this:
# At startup:
-# getopt modify ARGV
-# checkconfig check configuration
-# refresh allow plugins to build source files
+# getopt modify ARGV
+# checkconfig check configuration
+# refresh allow plugins to build source files
# While scanning files:
-# needsbuild detect if page needs to be rebuild
-# filter arbitrary changes
-# scan collect metadata
+# needsbuild detect if page needs to be rebuilt
+# filter arbitrary changes
+# scan collect metadata
# While rendering files:
-# filter arbitrary changes
-# preprocess execute macros
-# linkify change wikilinks into links
-# htmlize turns text into html
-# sanitize sanitize html
-# templatefile allows changing of the template on a per-file basis
-# pagetemplate fill template with page
-# format similar to sanitize, but act on whole page body
+# filter arbitrary changes
+# preprocess execute macros
+# linkify change wikilinks into links
+# htmlize turns text into html
+# sanitize sanitize html
+# templatefile allows changing of the template on a per-file basis
+# pagetemplate fill template with page
+# format similar to sanitize, but act on whole page body
# At the end:
-# savestate plugins can save their state
+# savestate plugins can save their state
#
#
# We do something like this:
#
# At startup:
-# addoptions allow plugins to add command-line options
-# checkconfig check configuration
-# start
+# addoptions allow plugins to add command-line options
+# checkconfig check configuration
+# start
# While reading files:
-# read ask any reader (plugins!) to read the file
-# filter ask anybody to filter the contents
+# read ask any reader (plugins!) to read the file
+# filter ask anybody to filter the contents
# While scanning files:
-# scan called per file, let plugins act on file data
-# scan_done Allows post-processing of scanned data
+# scan called per file, let plugins act on file data
+# scan_done Allows post-processing of scanned data
# While rendering files:
-# htmlize turns text into html-part
-# linkify convert link macros to HTML
-# pagetemplate ask template engine (plugin!) to generate HTML out
-# of template and body part
+# htmlize turns text into html-part
+# linkify convert link macros to HTML
+# pagetemplate ask template engine (plugin!) to generate HTML out
+# of template and body part
# At the end:
# finish
#
def load_plugins():
"""Loads all plugins in the plugins directory."""
sys.path.append(os.path.join(get_program_directory(), "plugins"))
+ if cfg.has_key("plugin_dirs"):
+ for s in cfg.plugin_dirs:
+ sys.path.append(s)
for s in cfg.plugins:
- #print "import:", s
- #try:
exec "import %s" % s
- #except:
- # print "Could not import plugin '%s'" % s
- # sys.exit(1)
def set_hook(name, last=False):
return_holder=False)
if not contents:
return
+ file.contents = contents
log("filtering file %s" % file.rel_path, level=6)
- file.contents = contents
res = run_hooks("filter",
direc=direc,
file=file)
)
file.inheritFrom(direc)
read_file(direc, file)
-
+
walk(dirpath)
#
reMacro = re.compile(r'''
- \[\[\! # Begin of macro
+ \[\[\! # Begin of macro
\s*
- ([^\s\]]+) # Macro name
+ ([^\s\]]+) # Macro name
(?:
- \s+ # optional space
- ([^\]]+) # optional argumens
+ \s+ # optional space
+ ([^\]]+) # optional arguments
)?
- \]\] # End of macro
+ \]\] # End of macro
''', re.VERBOSE)
reMacroArgs = re.compile(r'''
([-_\w]+) # parameter name
=
\s*
(?:
- "([^"]*)" # single-quoted
+ "([^"]*)" # single-quoted
|
- (\S+) # unquoted
+ (\S+) # unquoted
)
)?
''', re.VERBOSE)
s = reMacro.sub(do_macro, contents)
#print s
return s
-
+
def scan_files():
info("Scanning files ...")
direc = directories[file.direc]
+ # Compute ("berechnen") the output filename
+ if file.render and file.render == "html":
+ file.out_path = os.path.splitext(s)[0] + ".html"
+
run_hooks("scan",
direc=direc,
file=file)
continue
file.contents = contents
- # Output-Filename "berechnen"
- file.out_path = os.path.splitext(fname_in)[0] + ".html"
-
for fname_in in files:
file = files[fname_in]
current_file = file
continue
direc = directories[file.direc]
- contents = run_hooks("linkify",
+ run_hooks("linkify",
direc=direc,
file=file,
- return_holder=False)
+ return_holder=True)
#print "contents after 'linkify':", contents
- if not contents:
+ if not file.contents:
continue
- file.contents = contents
# TODO: einige Fragmente sollen u.U. in eine andere
# Webseite eingebaut werden und sollten daher nicht in
return parser
-
+
@set_hook("checkconfig", last=True)
def checkconfig(params):
# Ensure absolute paths that end in '/'.