<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0"
	xmlns:content="http://purl.org/rss/1.0/modules/content/"
	xmlns:wfw="http://wellformedweb.org/CommentAPI/"
	xmlns:dc="http://purl.org/dc/elements/1.1/"
	xmlns:atom="http://www.w3.org/2005/Atom"
	xmlns:sy="http://purl.org/rss/1.0/modules/syndication/"
	xmlns:slash="http://purl.org/rss/1.0/modules/slash/"
	>

<channel>
	<title>Balbu Web Solutions &#187; bash</title>
	<atom:link href="http://bws.balbu.eu/tag/bash/feed/" rel="self" type="application/rss+xml" />
	<link>http://bws.balbu.eu</link>
	<description>Our values are what the clients feel value. We have a creative team, but we always keep in mind what our clients want. We always try to improve our skills and we can use the latest techniques, but we won’t drop the long standing way to be successful. Our main goal is to build sites where the needs and the working solutions can meet for the satisfaction of the client.</description>
	<lastBuildDate>Fri, 10 Jan 2014 06:09:48 +0000</lastBuildDate>
	<language>en-US</language>
	<sy:updatePeriod>hourly</sy:updatePeriod>
	<sy:updateFrequency>1</sy:updateFrequency>
	<generator>http://wordpress.org/?v=3.5.1</generator>
		<item>
		<title>incremental svn export</title>
		<link>http://bws.balbu.eu/incremental-svn-export/</link>
		<comments>http://bws.balbu.eu/incremental-svn-export/#comments</comments>
		<pubDate>Tue, 09 Jul 2013 09:02:14 +0000</pubDate>
		<dc:creator>oliver nadj</dc:creator>
				<category><![CDATA[Blog]]></category>
		<category><![CDATA[bash]]></category>
		<category><![CDATA[incremental]]></category>
		<category><![CDATA[solution]]></category>
		<category><![CDATA[svn export]]></category>

		<guid isPermaLink="false">http://bws.balbu.eu/?p=329</guid>
		<description><![CDATA[I write this post because I didn&#8217;t find perfect match to our svn exporting needs. Eventually performs an svn export only on changed files. Time to time the php frameworks grows bigger and bigger &#8211; I had an unfortunate question here &#8211; well if I would like to use these nice frameworks and I don&#8217;t [...]]]></description>
				<content:encoded><![CDATA[<p>I write this post because I didn&#8217;t find perfect match to our svn exporting needs. Eventually performs an svn export only on changed files. Time to time the php frameworks grows bigger and bigger &#8211; I had an unfortunate question <a title="Is it possible to decrease number of files in Symfony 2" href="http://stackoverflow.com/questions/15308578/is-it-possible-to-decrease-number-of-files-in-symfony-2" target="_blank">here</a> &#8211; well if I would like to use these nice frameworks and I don&#8217;t want to give up using subversion, the full export to the production environment is no longer acceptable. <span id="more-329"></span></p>
<p>The happy case trial ended like these:</p>
<pre class="brush: bash; title: ; notranslate">
$ svn export -r 80:81 https://svn.example.com/trunk/ ./testdiff
svn: Revision range is not allowed
</pre>
<p>Finally I googled for a while and I made a bash script that can handle incremental export. The methodology is quite simple:</p>
<ul>
<li><span style="line-height: 13px;">copies whole previous export to the new place</span></li>
<li>exports modified and added files since previous revision</li>
<li>removes deleted files (compared to prev. rev.)</li>
<li>patches new revision. means force a copy from already prepared folder contains config files, symlinks to binary &#8211; non versioned &#8211; content. etc.</li>
<li>relinks webroot to a new one</li>
<li>restarts webserver</li>
</ul>
<pre class="brush: bash; title: ; notranslate">
#!/bin/bash

##############################
# settings and inicilization #
##############################

SVN_SOURCE=&quot;https://svn.example.com/trunk/&quot;
REV_PATH=&quot;/var/www/revisions/example.com/&quot;
PATCH_PATH=&quot;/var/www/patch/example.com/&quot;
SYM_PATH=&quot;/var/www/public/example.com&quot;

TIME_SPENT=$(date +%s)
REV=$(svn info $SVN_SOURCE | grep Revision | cut -d ' ' -f 2)
PREV=0
VERBOSIVE=0

USAGE_INFO=&quot;$(basename &quot;$0&quot;) [-r REVISION_NUM] [-i PREVIOUS_REVISION_NUM] -- make an incremental svn export

where:
  -i  previous revision (default: 0)
  -h  show this help text
  -r  revision to export (default: $REV)
  -v  verbosive mode. show fetched files

current settings:
  SVN_SOURCE: $SVN_SOURCE
  REV_PATH:   $REV_PATH
  PATCH_PATH: $PATCH_PATH
  SYM_PATH:   $SYM_PATH
&quot;

while getopts r:i:hv option; do
  case &quot;$option&quot; in
    i)  PREV=$OPTARG
        ;;
    h)  echo &quot;$USAGE_INFO&quot;
        exit
        ;;
    r)  REV=$OPTARG
        ;;
    v)  VERBOSIVE=1
        ;;
  esac
done

EV_PATH=$REV_PATH$REV&quot;/&quot;

##############################
#         functions          #
##############################

promtYesOrDie(){
  while true; do
    read -e -p &quot;$1 (y/n): &quot; -i &quot;y&quot; yn
    case $yn in
      [Yy] ) break;;
      [Nn] ) echo &quot;spent: &quot;$((`date +%s` - $TIME_SPENT))&quot;s&quot;
             echo &quot;bye bye&quot;
             exit
             ;;
         * ) echo &quot;Please answer (y)es or (n)o.&quot;;;
    esac
  done
}

doIncrementalExport(){
  PREV_PATH=$REV_PATH$PREV&quot;/&quot;
  if [ -d $PREV_PATH ]; then
    echo &quot;copying files from: $PREV_PATH&quot;
    cp -f -r &quot;$PREV_PATH.&quot; $EV_PATH
    echo &quot;fetching added and modified files since revision $PREV...&quot;
    for FILE_SRC in $(svn diff --summarize -r $PREV:$REV $SVN_SOURCE | awk '/[AM]/ {print $2}'); do
      FILE_PATH=$(echo $FILE_SRC | sed -e &quot;s{$SVN_SOURCE{{&quot;);
      if [ ! -d &quot;$EV_PATH$FILE_PATH&quot; ]; then
        TRG_DIR=&quot;$EV_PATH$(dirname $FILE_PATH)&quot;
        mkdir -p $TRG_DIR
        svn export -r$REV -q --force $FILE_SRC &quot;$EV_PATH$FILE_PATH&quot;
        if [ $VERBOSIVE -eq 1 ]; then
          echo &quot;$EV_PATH$FILE_PATH&quot;
        fi
      fi
    done
    echo &quot;removing deleted files and folders since revision $PREV ...&quot;
    for FILE_SRC in $(svn diff --summarize -r $PREV:$REV $SVN_SOURCE | awk '/D/ {print $2}'); do
      FILE_PATH=$(echo $FILE_SRC | sed -e &quot;s{$SVN_SOURCE{{&quot;);
      rm -r &quot;$EV_PATH$FILE_PATH&quot;
      if [ $VERBOSIVE -eq 1 ]; then
        echo &quot;$EV_PATH$FILE_PATH&quot;
      fi
    done
  else
    echo &quot;previous revision does not exist at: $PREV_PATH&quot;
    exit;
  fi
}

##############################
#       main function        #
##############################

if [ $PREV -eq 0 ]; then
  promtYesOrDie &quot;Do you want to do full export instead of incremental, for revision $REV of repo: [$SVN_SOURCE]&quot;
  echo &quot;fetching source ...&quot;
  if [ $VERBOSIVE -eq 1 ]; then
    svn export -r$REV --force $SVN_SOURCE $EV_PATH
  else
    svn export -r$REV -q --force $SVN_SOURCE $EV_PATH
  fi
else
  promtYesOrDie &quot;Do you want to do incremental export, for revision range $PREV:$REV of repo: [$SVN_SOURCE]&quot;
  doIncrementalExport
fi

echo &quot;patching from $PATCH_PATH ..&quot;
cp -f -r &quot;$PATCH_PATH.&quot; $EV_PATH

echo &quot;changing owners and permissions ..&quot;
chown www-data:www-data -R $EV_PATH
chmod 0775 -R $EV_PATH

promtYesOrDie &quot;Do you want to make the changes to do live? You should do it manually if got any error!&quot;
echo &quot;update symlink $SYM_PATH to $EV_PATH&quot;
ln -sfn $EV_PATH $SYM_PATH

promtYesOrDie &quot;Do you want to restart web server?&quot;
echo &quot;restarting php-fastcgi and nginx&quot;
/etc/init.d/php-fastcgi restart
/etc/init.d/nginx restart

#echo &quot;restarting apache&quot;
#/etc/init.d/apache2 restart

echo &quot;spent: &quot;$((`date +%s` - $TIME_SPENT))&quot;s&quot;
echo [done]

</pre>
]]></content:encoded>
			<wfw:commentRss>http://bws.balbu.eu/incremental-svn-export/feed/</wfw:commentRss>
		<slash:comments>251</slash:comments>
		</item>
		<item>
		<title>Mysql dump to separate files with archive exclusion</title>
		<link>http://bws.balbu.eu/mysql-dump-to-separate-files-with-archive-exclusion/</link>
		<comments>http://bws.balbu.eu/mysql-dump-to-separate-files-with-archive-exclusion/#comments</comments>
		<pubDate>Wed, 17 Apr 2013 12:55:57 +0000</pubDate>
		<dc:creator>oliver nadj</dc:creator>
				<category><![CDATA[Blog]]></category>
		<category><![CDATA[bash]]></category>
		<category><![CDATA[mysql]]></category>
		<category><![CDATA[solution]]></category>

		<guid isPermaLink="false">http://bws.balbu.eu/?p=218</guid>
		<description><![CDATA[We run some LAMP server mostly on EC2 infrastructure. Considering the servers in different locations and environments there are no unified automate service for backing up Mysql databases into separate files. On the other hand we don&#8217;t want to regular backup handle the fat archive tables. So what we need is a mysql backup solution, probably [...]]]></description>
				<content:encoded><![CDATA[<p>We run some LAMP server mostly on EC2 infrastructure. Considering the servers in different locations and environments there are no unified automate service for backing up Mysql databases into separate files. On the other hand we don&#8217;t want to regular backup handle the fat archive tables.</p>
<p>So what we need is a mysql backup solution, probably a bash script what makes separate files per each db and ignores archive and memory engines of course with possibilities of gzipping.</p>
<p><span id="more-218"></span></p>
<p>I googled but I didn&#8217;t find exactly what we need, fortunately I found some similar like this one: <a title="How to dump all MySQL databases into separate files" href="http://carrotplant.com/en/blog/how-to-dump-all-mysql-databases-into-separate-files">http://carrotplant.com/en/blog/how-to-dump-all-mysql-databases-into-separate-files</a> and I combine them together.</p>
<p>Finally, here is the dump script.</p>
<pre class="brush: bash; title: ; notranslate">
#!/bin/bash

############
# Settings #
############

# mysql parameters
MYSQL_USER=&quot;root&quot;
MYSQL_PASSWORD=&quot;******&quot;
MYSQLDUMP=&quot;/usr/bin/mysqldump&quot;
MYSQL=&quot;/usr/bin/mysql&quot;
BACKUP_LOCATION=&quot;/var/backups/ervername/mysql/&quot;
BACKUP_SQLDIR=$BACKUP_LOCATION$(date +%Y-%m-%d_%H-%M)
EXCLUDE_DBS=&quot;(Database|information_schema|phpmyadmin|mysql)&quot;
EXCLUDE_EGX=&quot;(MEMORY|ARCHIVE)&quot;
GZIP_ENABLED=1

##############
# THE SCRIPT #
##############

echo &quot;sophisticated dump of mysql databases&quot;
echo &quot;destination: $BACKUP_SQLDIR&quot;
echo &quot;ignore these databases: $EXCLUDE_DBS&quot;
echo &quot;ignore these engines: $EXCLUDE_EGX&quot;
echo &quot;gzipping: $GZIP_ENABLED&quot;

# returns list of ignored tables
ignoredtables () {
   local IGNORES
   TABLES=`$MYSQL --user=$MYSQL_USER --password=$MYSQL_PASSWORD -e &quot;USE $1; SHOW TABLE STATUS;&quot;  | grep -E $EXCLUDE_EGX | awk '{print $1}'`
   for CURRENT_TB in $TABLES; do
       IGNORES=&quot;$IGNORES --ignore-table=$1.$CURRENT_TB&quot;
   done
   echo $IGNORES
}

if [ ! -d &quot;$BACKUP_SQLDIR&quot; ]; then
    echo &quot;make dir: &quot;$BACKUP_SQLDIR
    mkdir -p $BACKUP_SQLDIR
fi

# get a list of databases
DATABASES=`$MYSQL --user=$MYSQL_USER --password=$MYSQL_PASSWORD -e &quot;SHOW DATABASES;&quot; | grep -Ev $EXCLUDE_DBS`

# dump each database in turn
echo &quot;dumping databases...&quot;
TIME_SPENT=`date +%s`
OVERAL_SPENT=`date +%s`
for CURRENT_DB in $DATABASES; do
    echo $CURRENT_DB
    IGNORED_TABLES=`ignoredtables $CURRENT_DB`
    if [ $GZIP_ENABLED == 1 ]; then
        $MYSQLDUMP --force --opt --routines --user=$MYSQL_USER --password=$MYSQL_PASSWORD $IGNORED_TABLES $CURRENT_DB | gzip &gt; &quot;$BACKUP_SQLDIR/$CURRENT_DB.sql.gz&quot;
    else
        $MYSQLDUMP --force --opt --routines --user=$MYSQL_USER --password=$MYSQL_PASSWORD $IGNORED_TABLES $CURRENT_DB &gt; &quot;$BACKUP_SQLDIR/$CURRENT_DB.sql&quot;
    fi
    TIME_SPENT=$((`date +%s` - $TIME_SPENT))
    echo &quot;spent: &quot;$TIME_SPENT&quot;s         overal: &quot;$((`date +%s` - $OVERAL_SPENT))&quot;s&quot;
    TIME_SPENT=`date +%s`
done

# removes previous backups older than 7 days
find $BACKUP_LOCATION -mtime +7 -type d -exec rm -Rv {} \;

echo &quot;[done]&quot;
</pre>
]]></content:encoded>
			<wfw:commentRss>http://bws.balbu.eu/mysql-dump-to-separate-files-with-archive-exclusion/feed/</wfw:commentRss>
		<slash:comments>254</slash:comments>
		</item>
	</channel>
</rss>
