2 # Copyright 2013 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
6 # The optimization code is based on pngslim (http://goo.gl/a0XHg)
# and executes a similar pipeline to optimize the png file size.
8 # The steps that require pngoptimizercl/pngrewrite/deflopt are omitted,
9 # but this runs all other processes, including:
10 # 1) various color-dependent optimizations using optipng.
11 # 2) optimize the number of huffman blocks.
12 # 3) randomize the huffman table.
13 # 4) Further optimize using optipng and advdef (zlib stream).
# Due to step 3), each run may produce slightly different results.
16 # Note(oshima): In my experiment, advdef didn't reduce much. I'm keeping it
17 # for now as it does not take much time to run.
23 chrome/browser/resources
24 chrome/renderer/resources
30 # Files larger than this file size (in bytes) will
31 # use the optimization parameters tailored for large files.
32 LARGE_FILE_THRESHOLD=3000
34 # Constants used for optimization
35 readonly DEFAULT_MIN_BLOCK_SIZE=128
36 readonly DEFAULT_LIMIT_BLOCKS=256
37 readonly DEFAULT_RANDOM_TRIALS=100
38 # Taken from the recommendation in the pngslim's readme.txt.
39 readonly LARGE_MIN_BLOCK_SIZE=1
40 readonly LARGE_LIMIT_BLOCKS=2
41 readonly LARGE_RANDOM_TRIALS=1
43 # Global variables for stats
49 declare -a THROBBER_STR=('-' '\\' '|' '/')
52 # Show throbber character at current cursor position.
54 echo -ne "${THROBBER_STR[$THROBBER_COUNT]}\b"
55 let THROBBER_COUNT=($THROBBER_COUNT+1)%4
58 # Usage: pngout_loop <file> <png_out_options> ...
59 # Optimize the png file using pngout with the given options
60 # using various block split thresholds and filter types.
61 function pngout_loop {
65 if [ $OPTIMIZE_LEVEL == 1 ]; then
66 for j in $(seq 0 5); do
68 pngout -q -k1 -s1 -f$j $opts $file
71 for i in 0 128 256 512; do
72 for j in $(seq 0 5); do
74 pngout -q -k1 -s1 -b$i -f$j $opts $file
80 # Usage: get_color_depth_list
81 # Returns the list of color depth options for current optimization level.
82 function get_color_depth_list {
83 if [ $OPTIMIZE_LEVEL == 1 ]; then
86 echo "-d1 -d2 -d4 -d8"
90 # Usage: process_grayscale <file>
91 # Optimize grayscale images for all color bit depths.
93 # TODO(oshima): Experiment with -d0 w/o -c0.
94 function process_grayscale {
96 for opt in $(get_color_depth_list); do
97 pngout_loop $file -c0 $opt
101 # Usage: process_grayscale_alpha <file>
102 # Optimize grayscale images with alpha for all color bit depths.
103 function process_grayscale_alpha {
105 pngout_loop $file -c4
106 for opt in $(get_color_depth_list); do
107 pngout_loop $file -c3 $opt
111 # Usage: process_rgb <file>
112 # Optimize rgb images with or without alpha for all color bit depths.
113 function process_rgb {
115 for opt in $(get_color_depth_list); do
116 pngout_loop $file -c3 $opt
118 pngout_loop $file -c2
119 pngout_loop $file -c6
122 # Usage: huffman_blocks <file>
123 # Optimize the huffman blocks.
124 function huffman_blocks {
127 local size=$(stat -c%s $file)
128 local min_block_size=$DEFAULT_MIN_BLOCK_SIZE
129 local limit_blocks=$DEFAULT_LIMIT_BLOCKS
131 if [ $size -gt $LARGE_FILE_THRESHOLD ]; then
132 min_block_size=$LARGE_MIN_BLOCK_SIZE
133 limit_blocks=$LARGE_LIMIT_BLOCKS
135 let max_blocks=$size/$min_block_size
136 if [ $max_blocks -gt $limit_blocks ]; then
137 max_blocks=$limit_blocks
140 for i in $(seq 2 $max_blocks); do
142 pngout -q -k1 -ks -s1 -n$i $file
146 # Usage: random_huffman_table_trial <file>
147 # Try compressing by randomizing the initial huffman table.
149 # TODO(oshima): Try adjusting different parameters for large files to
151 function random_huffman_table_trial {
154 local old_size=$(stat -c%s $file)
155 local trials_count=$DEFAULT_RANDOM_TRIALS
157 if [ $old_size -gt $LARGE_FILE_THRESHOLD ]; then
158 trials_count=$LARGE_RANDOM_TRIALS
160 for i in $(seq 1 $trials_count); do
162 pngout -q -k1 -ks -s0 -r $file
164 local new_size=$(stat -c%s $file)
165 if [ $new_size -lt $old_size ]; then
166 random_huffman_table_trial $file
170 # Usage: final_comprssion <file>
171 # Further compress using optipng and advdef.
172 # TODO(oshima): Experiment with 256.
173 function final_compression {
176 if [ $OPTIMIZE_LEVEL == 2 ]; then
177 for i in 32k 16k 8k 4k 2k 1k 512; do
179 optipng -q -nb -nc -zw$i -zc1-9 -zm1-9 -zs0-3 -f0-5 $file
182 for i in $(seq 1 4); do
184 advdef -q -z -$i $file
189 # Usage: get_color_type <file>
190 # Returns the color type name of the png file. Here is the list of names
191 # for each color type codes.
197 # See http://en.wikipedia.org/wiki/Portable_Network_Graphics#Color_depth
198 # for details about the color type code.
199 function get_color_type {
201 echo $(file $file | awk -F, '{print $3}' | awk '{print $2}')
204 # Usage: optimize_size <file>
205 # Performs png file optimization.
206 function optimize_size {
211 advdef -q -z -4 $file
213 pngout -q -s4 -c0 -force $file $file.tmp.png
214 if [ -f $file.tmp.png ]; then
216 process_grayscale $file
217 process_grayscale_alpha $file
219 pngout -q -s4 -c4 -force $file $file.tmp.png
220 if [ -f $file.tmp.png ]; then
222 process_grayscale_alpha $file
229 local old_color_type=$(get_color_type $file)
230 optipng -q -zc9 -zm8 -zs0-3 -f0-5 $file -out $file.tmp.png
231 local new_color_type=$(get_color_type $file.tmp.png)
232 # optipng may corrupt a png file when reducing the color type
233 # to grayscale/grayscale+alpha. Just skip such cases until
234 # the bug is fixed. See crbug.com/174505, crbug.com/174084.
235 # The issue is reported in
236 # https://sourceforge.net/tracker/?func=detail&aid=3603630&group_id=151404&atid=780913
237 if [[ $old_color_type == "RGBA" && $new_color_type =~ gray.* ]] ; then
239 echo -n "[skip opting]"
241 mv $file.tmp.png $file
243 pngout -q -k1 -s1 $file
247 # TODO(oshima): Experiment with strategy 1.
249 if [ $OPTIMIZE_LEVEL == 2 ]; then
251 pngout -q -k1 -ks -s$i $file
254 pngout -q -k1 -ks -s1 $file
257 if [ $OPTIMIZE_LEVEL == 2 ]; then
258 random_huffman_table_trial $file
261 final_compression $file
264 # Usage: process_file <file>
265 function process_file {
267 local name=$(basename $file)
268 # -rem alla removes all ancillary chunks except for tRNS
269 pngcrush -d $TMP_DIR -brute -reduce -rem alla $file > /dev/null
271 if [ $OPTIMIZE_LEVEL != 0 ]; then
272 optimize_size $TMP_DIR/$name
276 # Usage: optimize_file <file>
277 function optimize_file {
279 if $using_cygwin ; then
280 file=$(cygpath -w $file)
283 local name=$(basename $file)
284 local old=$(stat -c%s $file)
285 local tmp_file=$TMP_DIR/$name
289 local new=$(stat -c%s $tmp_file)
291 let percent=($diff*100)/$old
295 if [ $new -lt $old ]; then
296 echo -ne "$file : $old => $new ($diff bytes : $percent %)\n"
297 mv "$tmp_file" "$file"
298 let TOTAL_OLD_BYTES+=$old
299 let TOTAL_NEW_BYTES+=$new
300 let PROCESSED_FILE+=1
302 if [ $OPTIMIZE_LEVEL == 0 ]; then
303 echo -ne "$file : skipped\r"
309 function optimize_dir {
311 if $using_cygwin ; then
312 dir=$(cygpath -w $dir)
315 for f in $(find $dir -name "*.png"); do
320 function install_if_not_installed {
323 which $program > /dev/null 2>&1
324 if [ "$?" != "0" ]; then
325 if $using_cygwin ; then
326 echo "Couldn't find $program. " \
327 "Please run cygwin's setup.exe and install the $package package."
330 read -p "Couldn't find $program. Do you want to install? (y/n)"
331 [ "$REPLY" == "y" ] && sudo apt-get install $package
332 [ "$REPLY" == "y" ] || exit
337 function fail_if_not_installed {
340 which $program > /dev/null 2>&1
342 echo "Couldn't find $program. Please download and install it from $url ."
348 local program=$(basename $0)
350 "Usage: $program [options] dir ...
352 $program is a utility to reduce the size of png files by removing
353 unnecessary chunks and compressing the image.
356 -o<optimize_level> Specify optimization level: (default is 1)
357 0 Just run pngcrush. It removes unnecessary chunks and perform basic
358 optimization on the encoded data.
359 1 Optimize png files using pngout/optipng and advdef. This can further
360 reduce addtional 5~30%. This is the default level.
361 2 Aggressively optimize the size of png files. This may produce
362 addtional 1%~5% reduction. Warning: this is *VERY*
363 slow and can take hours to process all files.
364 -r<revision> If this is specified, the script processes only png files
365 changed since this revision. The <dir> options will be used
366 to narrow down the files under specific directories.
367 -h Print this help text."
371 if [ ! -e ../.gclient ]; then
372 echo "$0 must be run in src directory"
376 if [ "$(expr substr $(uname -s) 1 6)" == "CYGWIN" ]; then
384 while getopts o:r:h opts
388 COMMIT=$(git svn find-rev r$OPTARG | tail -1) || exit
389 if [ -z "$COMMIT" ] ; then
390 echo "Revision $OPTARG not found"
395 if [[ ! "$OPTARG" =~ [012] ]] ; then
398 OPTIMIZE_LEVEL=$OPTARG
405 # Remove options from argument list.
406 shift $(($OPTIND -1))
408 # Make sure we have all necessary commands installed.
409 install_if_not_installed pngcrush pngcrush
410 if [ $OPTIMIZE_LEVEL -ge 1 ]; then
411 install_if_not_installed optipng optipng
413 if $using_cygwin ; then
414 fail_if_not_installed advdef "http://advancemame.sourceforge.net/comp-readme.html"
416 install_if_not_installed advdef advancecomp
419 if $using_cygwin ; then
420 pngout_url="http://www.advsys.net/ken/utils.htm"
422 pngout_url="http://www.jonof.id.au/kenutils"
424 fail_if_not_installed pngout $pngout_url
427 # Create tmp directory for crushed png file.
429 if $using_cygwin ; then
430 TMP_DIR=$(cygpath -w $TMP_DIR)
433 # Make sure we cleanup temp dir
434 trap "rm -rf $TMP_DIR" EXIT
436 # If no directories are specified, optimize all directories.
438 set ${DIRS:=$ALL_DIRS}
440 echo "Optimize level=$OPTIMIZE_LEVEL"
441 if [ -n "$COMMIT" ] ; then
442 ALL_FILES=$(git diff --name-only $COMMIT HEAD $DIRS | grep "png$")
443 ALL_FILES_LIST=( $ALL_FILES )
444 echo "Processing ${#ALL_FILES_LIST[*]} files"
445 for f in $ALL_FILES; do
449 echo "Skipping deleted file: $f";
454 echo "Optimizing png files in $d"
461 if [ $PROCESSED_FILE == 0 ]; then
462 echo "Did not find any files (out of $TOTAL_FILE files)" \
463 "that could be optimized" \
464 "in $(date -u -d @$SECONDS +%T)s"
466 let diff=$TOTAL_OLD_BYTES-$TOTAL_NEW_BYTES
467 let percent=$diff*100/$TOTAL_OLD_BYTES
468 echo "Processed $PROCESSED_FILE files (out of $TOTAL_FILE files)" \
469 "in $(date -u -d @$SECONDS +%T)s"
470 echo "Result : $TOTAL_OLD_BYTES => $TOTAL_NEW_BYTES bytes" \
471 "($diff bytes : $percent %)"