#!/usr/bin/env sh
# This script downloads the CIFAR10 (binary version) data and unzips it.
-DIR="$(readlink -f $(dirname "$0"))"
+DIR="$( cd "$(dirname "$0")" ; pwd -P )"
cd $DIR
echo "Downloading..."
# - synset ids and words
# - the training splits with labels
-DIR="$(readlink -f $(dirname "$0"))"
+DIR="$( cd "$(dirname "$0")" ; pwd -P )"
cd $DIR
echo "Downloading..."
#!/usr/bin/env sh
# This script downloads the MNIST data and unzips it.
-DIR="$(readlink -f $(dirname "$0"))"
+DIR="$( cd "$(dirname "$0")" ; pwd -P )"
cd $DIR
echo "Downloading..."
if [[ $BRANCH = 'master' ]]; then
# Find the docs dir, no matter where the script is called
- DIR="$(readlink -f $(dirname "$0"))"
+ DIR="$( cd "$(dirname "$0")" ; pwd -P )"
DOCS_SITE_DIR=$DIR/../docs/_site
# Make sure that docs/_site tracks remote:gh-pages.
#!/bin/bash
-# Usage parse_log.sh caffe.log
+# Usage parse_log.sh caffe.log
# It creates two files one caffe.log.test that contains the loss and test accuracy of the test and
# another one caffe.log.loss that contains the loss computed during the training
-#get the dirname of the script
-DIR="$(readlink -f $(dirname "$0"))"
+# get the dirname of the script
+DIR="$( cd "$(dirname "$0")" ; pwd -P )"
if [ "$#" -lt 1 ]
then