lilypond-devel

Sketch of not remaking html files (issue 5498093)


From: PhilEHolmes
Subject: Sketch of not remaking html files (issue 5498093)
Date: Sat, 31 Dec 2011 15:53:53 +0000

Reviewers: percivall,

Message:
Please review

Description:
Should probably have associated this with
http://code.google.com/p/lilypond/issues/detail?id=2028.  It's an
attempt to stop make doc recreating the whole of
build/out-www/offline-root/ every time it's run, whether or not any of
the files there have changed.  It changes the makefile so it no longer
blindly deletes that directory, and changes www_post.py to allow for
the fact that the directory (or directories) might already exist.  It
then checks whether the target file is newer than the source before
recreating the target.  On my quick machine, make LANGS='' doc now
takes only 5 seconds when there is no work to do, as opposed to nearly
20.  On my slow VM, it reduces the time from 2m 30s to 40s.  I've
tested it with a fresh make doc and confirmed that all the required
files exist after the make.
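
The core of the change is a plain modification-time comparison.
Roughly, it amounts to this sketch (needs_rebuild is a hypothetical
helper name for illustration, not something in the patch):

    import os

    def needs_rebuild (source, target):
        # Rebuild only when the target is missing or older than the source.
        if not os.path.exists (target):
            return True
        return os.path.getmtime (target) < os.path.getmtime (source)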

Please review this at http://codereview.appspot.com/5498093/

Affected files:
  M GNUmakefile.in
  M python/auxiliar/postprocess_html.py
  M scripts/build/www_post.py


Index: GNUmakefile.in
diff --git a/GNUmakefile.in b/GNUmakefile.in
index aa74dd6bd570c9b62fb43a686f4b1de13493a3d9..8b5d86eba0b0da89e5e01b8dd0aa66fb28929928 100644
--- a/GNUmakefile.in
+++ b/GNUmakefile.in
@@ -122,7 +122,7 @@ WWW-post:
 # need UTF8 setting in case this is hosted on a website.
 	echo -e 'AddDefaultCharset utf-8\nAddCharset utf-8 .html\nAddCharset utf-8 .en\nAddCharset utf-8 .nl\nAddCharset utf-8 .txt\n' > $(top-build-dir)/.htaccess
 	$(buildscript-dir)/mutopia-index -o $(outdir)/examples.html input/
-	find $(outdir) -name '*-root' | xargs rm -rf
+	# find $(outdir) -name '*-root' | xargs rm -rf
 	$(buildscript-dir)/www_post $(PACKAGE_NAME) $(TOPLEVEL_VERSION) $(outdir) "$(WEB_TARGETS)"
 	find $(outdir)/offline-root -type l | xargs rm -f
 endif # ifeq ($(out),www)
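
With the blanket deletion commented out rather than removed, the
*-root directories are left in place between runs; the www_post.py
changes below make the directory creation and hard-linking tolerant of
output that already exists.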
Index: python/auxiliar/postprocess_html.py
diff --git a/python/auxiliar/postprocess_html.py b/python/auxiliar/postprocess_html.py
index eed34d15863a884010fe5e3ab969be78adeed6c9..fe76e3bbe1a7149acf73fc832418a5578551ef4c 100644
--- a/python/auxiliar/postprocess_html.py
+++ b/python/auxiliar/postprocess_html.py
@@ -350,30 +350,36 @@ def process_html_files (package_name = '',
     for prefix, ext_list in pages_dict.items ():
         for lang_ext in ext_list:
             file_name = langdefs.lang_file_name (prefix, lang_ext, '.html')
-            in_f = open (file_name)
-            s = in_f.read()
-            in_f.close()
-
-            s = s.replace ('%', '%%')
-            s = hack_urls (s, prefix, target, bool (int (versiontup[1]) % 2))
-            s = add_header (s, prefix)
-
-            ### add footer
-            if footer_tag_re.search (s) == None:
-                if 'web' in file_name:
-                    s = add_footer (s, footer_tag + web_footer)
-                else:
-                    s = add_footer (s, footer_tag + footer)
-
-                available, missing = find_translations (prefix, lang_ext)
-                page_flavors = process_links (s, prefix, lang_ext, file_name, missing, target)
-                # Add menu after stripping: must not have autoselection for language menu.
-                page_flavors = add_menu (page_flavors, prefix, available, target, translation)
-            for k in page_flavors:
-                page_flavors[k][1] = page_flavors[k][1] % subst[page_flavors[k][0]]
-                out_f = open (name_filter (k), 'w')
-                out_f.write (page_flavors[k][1])
-                out_f.close()
+            SourceTime = os.path.getmtime(file_name)
+            DestTime = 0
+            if os.path.exists(name_filter(file_name)):
+                DestTime = os.path.getmtime(name_filter(file_name))
+            if DestTime < SourceTime:
+
+                in_f = open (file_name)
+                s = in_f.read()
+                in_f.close()
+
+                s = s.replace ('%', '%%')
+                s = hack_urls (s, prefix, target, bool (int (versiontup[1]) % 2))
+                s = add_header (s, prefix)
+
+                ### add footer
+                if footer_tag_re.search (s) == None:
+                    if 'web' in file_name:
+                        s = add_footer (s, footer_tag + web_footer)
+                    else:
+                        s = add_footer (s, footer_tag + footer)
+
+                    available, missing = find_translations (prefix, lang_ext)
+                    page_flavors = process_links (s, prefix, lang_ext, file_name, missing, target)
+                    # Add menu after stripping: must not have autoselection for language menu.
+                    page_flavors = add_menu (page_flavors, prefix, available, target, translation)
+                for k in page_flavors:
+                    page_flavors[k][1] = page_flavors[k][1] % subst[page_flavors[k][0]]
+                    out_f = open (name_filter (k), 'w')
+                    out_f.write (page_flavors[k][1])
+                    out_f.close()
         # if the page is translated, a .en.html symlink is necessary for content negotiation
         if target == 'online' and ext_list != ['']:
             os.symlink (os.path.basename (prefix) + '.html', name_filter (prefix + '.en.html'))
Index: scripts/build/www_post.py
diff --git a/scripts/build/www_post.py b/scripts/build/www_post.py
index fec7f0e20cd144eda3bb99b02dd0a0ee38575aec..18fa05bfc50653158f434613d5e62540b0c0ed2b 100644
--- a/scripts/build/www_post.py
+++ b/scripts/build/www_post.py
@@ -70,10 +70,22 @@ strip_re = re.compile (outdir + '/')
 for t in targets:
     out_root = target_pattern % t
     strip_file_name[t] = lambda s: os.path.join (target_pattern % t, (strip_re.sub ('', s)))
-    os.mkdir (out_root)
-    map (os.mkdir, [os.path.join (out_root, d) for d in dirs])
+    try:
+        os.mkdir (out_root)
+    except Exception:
+        sys.exc_clear()
+    # map (os.mkdir, [os.path.join (out_root, d) for d in dirs])
+    for d in dirs:
+        NewDir = os.path.join (out_root, d)
+        try:
+            os.mkdir (NewDir)
+        except Exception:
+            sys.exc_clear()
     for f in hardlinked_files:
-        os.link (f, strip_file_name[t] (f))
+        try:
+            os.link (f, strip_file_name[t] (f))
+        except Exception:
+            sys.exc_clear()
     for l in symlinks:
         p = mirrortree.new_link_path (os.path.normpath (os.readlink (l)), os.path.dirname (l), strip_re)
         dest = strip_file_name[t] (l)
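
For what it's worth, catching a bare Exception and calling
sys.exc_clear() also swallows unrelated failures (permissions, a
missing parent directory).  A narrower variant, sketched here rather
than taken from the patch, would ignore only an already-existing path:

    import errno
    import os

    def mkdir_if_missing (path):
        # Ignore only "already exists"; let other OS errors propagate.
        try:
            os.mkdir (path)
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise

The same test on e.errno would work for the os.link call above.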




