2 # Copyright 2013 The Chromium Authors. All rights reserved.
3 # Use of this source code is governed by a BSD-style license that can be
4 # found in the LICENSE file.
6 # The optimization code is based on pngslim (http://goo.gl/a0XHg)
7 # and executes a similar pipeline to optimize the png file size.
8 # The steps that require pngoptimizercl/pngrewrite/deflopt are omitted,
9 # but this runs all other processes, including:
10 # 1) various color-dependent optimizations using optipng.
11 # 2) optimize the number of huffman blocks.
12 # 3) randomize the huffman table.
13 # 4) Further optimize using optipng and advdef (zlib stream).
14 # Due to the step 3), each run may produce slightly different results.
16 # Note(oshima): In my experiment, advdef didn't reduce much. I'm keeping it
17 # for now as it does not take much time to run.
21 chrome/android/java/res
23 chrome/browser/resources
24 chrome/renderer/resources
25 content/public/android/java/res
26 content/renderer/resources
27 content/shell/resources
31 ui/webui/resources/images
33 win8/metro_driver/resources
36 # Files larger than this file size (in bytes) will
37 # use the optimization parameters tailored for large files.
38 LARGE_FILE_THRESHOLD=3000
40 # Constants used for optimization
# Defaults applied to files at or below LARGE_FILE_THRESHOLD bytes
# (consumed by huffman_blocks and random_huffman_table_trial below).
41 readonly DEFAULT_MIN_BLOCK_SIZE=128
42 readonly DEFAULT_LIMIT_BLOCKS=256
43 readonly DEFAULT_RANDOM_TRIALS=100
44 # Taken from the recommendation in the pngslim's readme.txt.
# Cheaper settings used for files above LARGE_FILE_THRESHOLD bytes.
45 readonly LARGE_MIN_BLOCK_SIZE=1
46 readonly LARGE_LIMIT_BLOCKS=2
47 readonly LARGE_RANDOM_TRIALS=1
49 # Global variables for stats
# Spinner frames cycled by the throbber code to show progress.
56 declare -a THROBBER_STR=('-' '\\' '|' '/')
61 # Echo only if verbose option is set.
# NOTE(review): the info() definition itself was dropped from this chunk;
# the comment above documents it.
68 # Show throbber character at current cursor position.
# Print the current spinner frame, then backspace so the next call
# overwrites it in place.
70 info -ne "${THROBBER_STR[$THROBBER_COUNT]}\b"
# Advance the frame index, wrapping at 4 (the length of THROBBER_STR).
71 let THROBBER_COUNT=$THROBBER_COUNT+1
72 let THROBBER_COUNT=$THROBBER_COUNT%4
75 # Usage: pngout_loop <file> <png_out_options> ...
76 # Optimize the png file using pngout with the given options
77 # using various block split thresholds and filter types.
# NOTE(review): this chunk is missing intermediate lines (the local
# assignments of $file/$opts, the loop 'done's, the else/fi and the
# closing brace); the comments below describe only what is visible.
78 function pngout_loop {
# Level 1: sweep only the six pngout filter types (-f0..-f5).
82 if [ $OPTIMIZE_LEVEL == 1 ]; then
83 for j in $(eval echo {0..5}); do
85 pngout -q -k1 -s1 -f$j $opts $file
# Presumably the level-2 path: additionally sweep block split
# thresholds (-b) of 0/128/256/512 — the 'else' itself is not visible.
88 for i in 0 128 256 512; do
89 for j in $(eval echo {0..5}); do
91 pngout -q -k1 -s1 -b$i -f$j $opts $file
97 # Usage: get_color_depth_list
98 # Returns the list of color depth options for current optimization level.
99 function get_color_depth_list {
100 if [ $OPTIMIZE_LEVEL == 1 ]; then
# NOTE(review): original lines 101-102 are missing from this chunk; the
# echo below may belong to the other (non-level-1) branch — confirm
# against the full file before relying on it.
103 echo "-d1 -d2 -d4 -d8"
107 # Usage: process_grayscale <file>
108 # Optimize grayscale images for all color bit depths.
110 # TODO(oshima): Experiment with -d0 w/o -c0.
111 function process_grayscale {
# Overwrite the 8-character status area with the current phase name.
112 info -ne "\b\b\b\b\b\b\b\bgray...."
# Try pngout color type 0 (grayscale) at each candidate bit depth.
# NOTE(review): the 'local file=$1' line and the loop 'done' are not
# visible in this chunk.
113 for opt in $(get_color_depth_list); do
114 pngout_loop $file -c0 $opt
118 # Usage: process_grayscale_alpha <file>
119 # Optimize grayscale images with alpha for all color bit depths.
120 function process_grayscale_alpha {
121 info -ne "\b\b\b\b\b\b\b\bgray-a.."
# First try -c4 (grayscale + alpha), then -c3 (palette) at each
# candidate bit depth. (Loop 'done' and '}' not visible here.)
122 pngout_loop $file -c4
123 for opt in $(get_color_depth_list); do
124 pngout_loop $file -c3 $opt
128 # Usage: process_rgb <file>
129 # Optimize rgb images with or without alpha for all color bit depths.
130 function process_rgb {
131 info -ne "\b\b\b\b\b\b\b\brgb....."
# -c3: palette at each candidate bit depth, then true color without
# (-c2) and with (-c6) an alpha channel.
132 for opt in $(get_color_depth_list); do
133 pngout_loop $file -c3 $opt
135 pngout_loop $file -c2
136 pngout_loop $file -c6
139 # Usage: huffman_blocks <file>
140 # Optimize the huffman blocks.
141 function huffman_blocks {
142 info -ne "\b\b\b\b\b\b\b\bhuffman."
# The file size decides which tuning constants apply.
144 local size=$(stat -c%s $file)
145 local min_block_size=$DEFAULT_MIN_BLOCK_SIZE
146 local limit_blocks=$DEFAULT_LIMIT_BLOCKS
# Large files use the cheaper pngslim-recommended settings.
148 if [ $size -gt $LARGE_FILE_THRESHOLD ]; then
149 min_block_size=$LARGE_MIN_BLOCK_SIZE
150 limit_blocks=$LARGE_LIMIT_BLOCKS
# Cap the block-count search space: size/min_block_size, clamped to
# limit_blocks. (The 'fi' terminators are not visible in this chunk.)
152 let max_blocks=$size/$min_block_size
153 if [ $max_blocks -gt $limit_blocks ]; then
154 max_blocks=$limit_blocks
# Try every huffman block count from 2 up to max_blocks (-n<i>).
157 for i in $(eval echo {2..$max_blocks}); do
159 pngout -q -k1 -ks -s1 -n$i $file
163 # Usage: random_huffman_table_trial <file>
164 # Try compressing by randomizing the initial huffman table.
166 # TODO(oshima): Try adjusting different parameters for large files to
168 function random_huffman_table_trial {
169 info -ne "\b\b\b\b\b\b\b\brandom.."
171 local old_size=$(stat -c%s $file)
172 local trials_count=$DEFAULT_RANDOM_TRIALS
# Large files get far fewer random trials (each trial is slow).
174 if [ $old_size -gt $LARGE_FILE_THRESHOLD ]; then
175 trials_count=$LARGE_RANDOM_TRIALS
# -r: randomized initial huffman table; repeat trials_count times.
177 for i in $(eval echo {1..$trials_count}); do
179 pngout -q -k1 -ks -s0 -r $file
# If any trial shrank the file, recurse to keep trying from the new,
# smaller size.
181 local new_size=$(stat -c%s $file)
182 if [ $new_size -lt $old_size ]; then
183 random_huffman_table_trial $file
187 # Usage: final_compression <file>
188 # Further compress using optipng and advdef.
189 # TODO(oshima): Experiment with 256.
190 function final_compression {
191 info -ne "\b\b\b\b\b\b\b\bfinal..."
# Level 2 only: sweep optipng zlib window sizes together with the full
# range of compression levels (-zc), memory levels (-zm), strategies
# (-zs) and filters (-f).
193 if [ $OPTIMIZE_LEVEL == 2 ]; then
194 for i in 32k 16k 8k 4k 2k 1k 512; do
196 optipng -q -nb -nc -zw$i -zc1-9 -zm1-9 -zs0-3 -f0-5 $file
# Recompress the zlib stream with advdef at each level 1-4.
199 for i in $(eval echo {1..4}); do
201 advdef -q -z -$i $file
204 # Clear the current line.
206 printf "\033[0G\033[K"
210 # Usage: get_color_type <file>
211 # Returns the color type name of the png file. Here is the list of names
212 # for each color type codes.
218 # See http://en.wikipedia.org/wiki/Portable_Network_Graphics#Color_depth
219 # for details about the color type code.
220 function get_color_type {
# Parse the color-type token out of file(1)'s comma-separated
# description: take the 3rd field, then its 2nd word (e.g. "RGBA").
222 echo $(file $file | awk -F, '{print $3}' | awk '{print $2}')
225 # Usage: optimize_size <file>
226 # Performs png file optimization.
# NOTE(review): several interior lines (else/fi/done, a local file=$1,
# and likely a huffman_blocks call) are missing from this chunk; the
# comments below annotate only the visible statements.
227 function optimize_size {
228 # Print filename, trimmed to ensure it + status don't take more than 1 line
229 local filename_length=${#file}
230 local -i allowed_length=$COLUMNS-11
231 local -i trimmed_length=$filename_length-$COLUMNS+14
232 if [ "$filename_length" -lt "$allowed_length" ]; then
233 info -n "$file|........"
# Presumably the else branch: show a trimmed filename with a leading
# ellipsis. The else/fi lines themselves are not visible here.
235 info -n "...${file:$trimmed_length}|........"
# First pass: recompress the existing zlib stream.
240 advdef -q -z -4 $file
# Probe with a forced pngout -c0 (grayscale) conversion: if the output
# file exists, the image can be treated as grayscale.
242 pngout -q -s4 -c0 -force $file $file.tmp.png
243 if [ -f $file.tmp.png ]; then
245 process_grayscale $file
246 process_grayscale_alpha $file
# Same probe with -c4 (grayscale + alpha).
248 pngout -q -s4 -c4 -force $file $file.tmp.png
249 if [ -f $file.tmp.png ]; then
251 process_grayscale_alpha $file
# Filter-type sweep with optipng, written to a temp file first.
257 info -ne "\b\b\b\b\b\b\b\bfilter.."
258 local old_color_type=$(get_color_type $file)
259 optipng -q -zc9 -zm8 -zs0-3 -f0-5 $file -out $file.tmp.png
260 local new_color_type=$(get_color_type $file.tmp.png)
261 # optipng may corrupt a png file when reducing the color type
262 # to grayscale/grayscale+alpha. Just skip such cases until
263 # the bug is fixed. See crbug.com/174505, crbug.com/174084.
264 # The issue is reported in
265 # https://sourceforge.net/tracker/?func=detail&aid=3603630&group_id=151404&atid=780913
266 if [[ $old_color_type == "RGBA" && $new_color_type == gray* ]] ; then
# NOTE(review): branch bodies around here are incomplete; the mv below
# presumably runs only when the optipng output is safe to keep.
269 mv $file.tmp.png $file
271 pngout -q -k1 -s1 $file
275 # TODO(oshima): Experiment with strategy 1.
276 info -ne "\b\b\b\b\b\b\b\bstrategy"
# Level 2 sweeps pngout strategies (-s$i; its loop header is not
# visible); otherwise only strategy 1 is run.
277 if [ $OPTIMIZE_LEVEL == 2 ]; then
279 pngout -q -k1 -ks -s$i $file
282 pngout -q -k1 -ks -s1 $file
# Random huffman-table trials are level-2 only (expensive).
285 if [ $OPTIMIZE_LEVEL == 2 ]; then
286 random_huffman_table_trial $file
289 final_compression $file
292 # Usage: process_file <file>
# Runs pngcrush into $TMP_DIR, then the full optimization pipeline.
293 function process_file {
295 local name=$(basename $file)
296 # -rem alla removes all ancillary chunks except for tRNS
# pngcrush writes its output to $TMP_DIR/$name; its chatter is
# discarded.
297 pngcrush -d $TMP_DIR -brute -reduce -rem alla $file > /dev/null 2>&1
# Only run the heavier size optimization when pngcrush produced output
# and the user asked for more than level 0.
299 if [ -f $TMP_DIR/$name -a $OPTIMIZE_LEVEL != 0 ]; then
300 optimize_size $TMP_DIR/$name
304 # Usage: optimize_file <file>
# Optimizes one png and copies it back only if it got smaller, updating
# the global statistics counters.
305 function optimize_file {
# On cygwin, convert to a Windows-style path for the native tools.
307 if $using_cygwin ; then
308 file=$(cygpath -w $file)
311 local name=$(basename $file)
312 local old=$(stat -c%s $file)
313 local tmp_file=$TMP_DIR/$name
# A missing temp file means the toolchain failed on this png: count it
# and skip. NOTE(review): echo without -e prints the trailing '\n'
# literally here — looks unintentional; confirm before fixing.
318 if [ ! -e $tmp_file ] ; then
319 let CORRUPTED_FILE+=1
320 echo "$file may be corrupted; skipping\n"
# Compute the savings as a whole-number percentage of the old size.
324 local new=$(stat -c%s $tmp_file)
326 let percent=$diff*100
327 let percent=$percent/$old
# Keep the optimized copy only if it is actually smaller.
329 if [ $new -lt $old ]; then
330 info "$file: $old => $new ($diff bytes: $percent%)"
331 cp "$tmp_file" "$file"
332 let TOTAL_OLD_BYTES+=$old
333 let TOTAL_NEW_BYTES+=$new
334 let PROCESSED_FILE+=1
# Presumably the else path: report why nothing was written back (the
# else/fi lines are not visible in this chunk).
336 if [ $OPTIMIZE_LEVEL == 0 ]; then
337 info "$file: Skipped"
339 info "$file: Unable to reduce size"
# Usage: optimize_dir <dir>
# Optimizes every png file found under the given directory.
345 function optimize_dir {
# On cygwin, convert to a Windows-style path for the native tools.
347 if $using_cygwin ; then
348 dir=$(cygpath -w $dir)
# NOTE(review): unquoted $(find ...) word-splits on whitespace in
# paths; the loop body and terminators are not visible in this chunk.
351 for f in $(find $dir -name "*.png"); do
# Usage: install_if_not_installed <program> <package>
# Checks that <program> is on PATH; if not, offers to install <package>
# (apt-get on Linux) or tells cygwin users how to get it.
356 function install_if_not_installed {
# Probe for the program; a non-zero status means it was not found.
359 which $program > /dev/null 2>&1
360 if [ "$?" != "0" ]; then
361 if $using_cygwin ; then
362 echo "Couldn't find $program. " \
363 "Please run cygwin's setup.exe and install the $package package."
# On Linux, offer an apt-get install; exit on any answer but 'y'.
366 read -p "Couldn't find $program. Do you want to install? (y/n)"
367 [ "$REPLY" == "y" ] && sudo apt-get install $package
368 [ "$REPLY" == "y" ] || exit
# Usage: fail_if_not_installed <program> <url>
# Tells the user where to download <program> if it is not on PATH.
# NOTE(review): the exit/fi lines are not visible in this chunk.
373 function fail_if_not_installed {
376 which $program > /dev/null 2>&1
378 echo "Couldn't find $program. Please download and install it from $url ."
384 local program=$(basename $0)
386 "Usage: $program [options] <dir> ...
388 $program is a utility to reduce the size of png files by removing
389 unnecessary chunks and compressing the image.
392 -o<optimize_level> Specify optimization level: (default is 1)
393 0 Just run pngcrush. It removes unnecessary chunks and perform basic
394 optimization on the encoded data.
395 1 Optimize png files using pngout/optipng and advdef. This can further
396 reduce addtional 5~30%. This is the default level.
397 2 Aggressively optimize the size of png files. This may produce
398 addtional 1%~5% reduction. Warning: this is *VERY*
399 slow and can take hours to process all files.
400 -r<revision> If this is specified, the script processes only png files
401 changed since this revision. The <dir> options will be used
402 to narrow down the files under specific directories.
403 -v Shows optimization process for each file.
404 -h Print this help text."
# ---- Top-level script ----------------------------------------------------
# NOTE(review): many interior lines (done/fi, case body of getopts, the
# TMP_DIR=$(mktemp ...) line, etc.) are missing from this chunk; the
# comments below annotate only the visible statements.
# Must run from src/ (the directory whose parent holds .gclient).
408 if [ ! -e ../.gclient ]; then
409 echo "$0 must be run in src directory"
# Detect cygwin from uname (presumably sets using_cygwin).
413 if [ "$(expr substr $(uname -s) 1 6)" == "CYGWIN" ]; then
419 # The -i in the shebang line should result in $COLUMNS being set on newer
420 # versions of bash. If it's not set yet, attempt to set it.
421 if [ -z $COLUMNS ]; then
422 which tput > /dev/null 2>&1
423 if [ "$?" == "0" ]; then
426 # No tput either... give up and just guess 80 columns.
# Parse command line flags: -o<level>, -r<revision>, -h, -v.
434 while getopts o:r:h:v opts
# Map the given svn revision to the corresponding git commit.
438 COMMIT=$(git svn find-rev r$OPTARG | tail -1) || exit
439 if [ -z "$COMMIT" ] ; then
440 echo "Revision $OPTARG not found"
# Only optimization levels 0, 1 and 2 are valid.
445 if [[ "$OPTARG" != 0 && "$OPTARG" != 1 && "$OPTARG" != 2 ]] ; then
448 OPTIMIZE_LEVEL=$OPTARG
458 # Remove options from argument list.
459 shift $(($OPTIND -1))
461 # Make sure we have all necessary commands installed.
462 install_if_not_installed pngcrush pngcrush
463 if [ $OPTIMIZE_LEVEL -ge 1 ]; then
464 install_if_not_installed optipng optipng
# advdef: cygwin users must install AdvanceCOMP manually.
466 if $using_cygwin ; then
467 fail_if_not_installed advdef "http://advancemame.sourceforge.net/comp-readme.html"
469 install_if_not_installed advdef advancecomp
# pngout has no distro package; point at the platform's download page.
472 if $using_cygwin ; then
473 pngout_url="http://www.advsys.net/ken/utils.htm"
475 pngout_url="http://www.jonof.id.au/kenutils"
477 fail_if_not_installed pngout $pngout_url
480 # Create tmp directory for crushed png file.
482 if $using_cygwin ; then
483 TMP_DIR=$(cygpath -w $TMP_DIR)
486 # Make sure we cleanup temp dir
487 #trap "rm -rf $TMP_DIR" EXIT
489 # If no directories are specified, optimize all directories.
491 set ${DIRS:=$ALL_DIRS}
493 info "Optimize level=$OPTIMIZE_LEVEL"
# With -r, only process png files changed since the given commit.
495 if [ -n "$COMMIT" ] ; then
496 ALL_FILES=$(git diff --name-only $COMMIT HEAD $DIRS | grep "png$")
497 ALL_FILES_LIST=( $ALL_FILES )
498 echo "Processing ${#ALL_FILES_LIST[*]} files"
499 for f in $ALL_FILES; do
# A file in the diff may have been deleted since; skip it.
503 echo "Skipping deleted file: $f";
509 info "Optimizing png files in $d"
512 elif [ -f $d ] ; then
515 echo "Not a file or directory: $d";
# Summary: number of files optimized and total elapsed time.
521 echo "Optimized $PROCESSED_FILE/$TOTAL_FILE files in" \
522 "$(date -d "0 + $SECONDS sec" +%Ts)"
523 if [ $PROCESSED_FILE != 0 ]; then
524 let diff=$TOTAL_OLD_BYTES-$TOTAL_NEW_BYTES
525 let percent=$diff*100/$TOTAL_OLD_BYTES
526 echo "Result: $TOTAL_OLD_BYTES => $TOTAL_NEW_BYTES bytes" \
527 "($diff bytes: $percent%)"
529 if [ $CORRUPTED_FILE != 0 ]; then
530 echo "Warning: corrupted files found: $CORRUPTED_FILE"
531 echo "Please contact the author of the CL that landed corrupted png files"