# locale setup
sudo locale-gen "en_US.UTF-8"
sudo locale-gen "ko_KR.UTF-8"
# .bashrc
export LANG="ko_KR.UTF-8"
export LC_ALL="ko_KR.UTF-8"
# find with multiple filename patterns
find /home/mulder/Downloads/phone_0 -name "*.jpg" -o -name "*.png"
# kill a process by name via grep
ps -ef | grep skess | grep -v grep | awk '{print $2}' | xargs kill
#!/bin/bash
# Clean a directory tree of CVS directories/files.
# http://snippets.dzone.com/posts/show/934
find "$1" -name 'CVS' -exec rm -rf '{}' \; -print
# Generate a fake file of a specific size (useful for testing uploads)
head -c 3000000 /dev/urandom > upload_test_3MB.zip
# Count hits per page name in an Apache access_log file
awk -F'[ "]+' '{ pagecount[$7]++ } END { for (i in pagecount) { printf "%d - %15s\n", pagecount[i], i } }' access_log
# Get only the response headers of a web page
curl -s -D - -o /dev/null "http://www.google.it"
# Fetch a web page, returning only its body
wget -qO- "http://www.google.it"
# Find files modified within the last 2 days
find . -mtime -2
# Copy the last command together with its output.
# Usage:
#   $ <COMMAND> | pbcopy
#   $ pbcopylast
function pbcopylast {
  last=$(history 2 | head -1)
  (
    echo -n "$last" | sed "s/^[ ]*[0-9]*[ ]*/\$ /g" | sed "s/[ ]*\|[ ]*pbcopy$//g"
    echo ""
    pbpaste
  ) | pbcopy
}
#--------------------------------------------------------------------
# make a tarball of the 500 most recently modified image files
#
ls -tlr *.{jpg,png} | tail -500 > ls.txt
#
rm -rf updated
mkdir -p updated
# columns 47-80 of the long listing hold the filename; adjust for your ls output
cut -c47-80 ls.txt | awk '{print "cp " $1 " updated" }' > cp-shell
sh cp-shell
tar czf updated.tgz updated
#--------------------------------------------------------------------
# create a user with a home directory and bash shell, then set a password
sudo useradd -m -c "goodUser" -s /bin/bash mulder
sudo passwd mulder
# run a user-level program at boot from /etc/rc.local
su - mulder -s /bin/sh -c "cd /opt/jarvis/webapp && nohup sh run > /opt/jarvis/webapp/jv.log 2>&1 &"
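A minimal sketch of how the line above could be placed in /etc/rc.local (the paths and user name are taken from the snippet; whether rc.local runs at all depends on the distribution, and the file must be executable and end with "exit 0"):
#!/bin/sh -e
# start the webapp as user mulder at boot
su - mulder -s /bin/sh -c "cd /opt/jarvis/webapp && nohup sh run > /opt/jarvis/webapp/jv.log 2>&1 &"
exit 0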
#!/bin/bash
# make new label files from a source label list: replace the first field with 0, keep fields 2-5
mkdir -p label2
k=1
input="list-01"   # text file listing one label file per line
while IFS= read -r var
do
  echo "$var"
  awk '{print 0 " " $2 " " $3 " " $4 " " $5}' "$var" > label2/"$var"
  (( k = k + 1 ))
done < "$input"
#!/bin/bash
# make new label files from a source label list, remapping the leading digit (0,1,2,3,4,5,6,7,8,9) => (0,1):
# lines starting with 0-4 get 0, lines starting with 5-9 get 1
mkdir -p label2
k=1
input="img.zzz"   # text file listing one label file per line
while IFS= read -r var
do
  echo "$var"
  sed -e 's/^[0-4]/0/' -e 's/^[5-9]/1/' "$var" > label2/"$var"
  (( k = k + 1 ))
done < "$input"
# copy the files listed in a text file to a destination folder
cat /zzz/test_files.data | xargs cp -t dest_folder/test_files
# BASH DATE
DATE=$(date +%Y-%m-%d)
DATE=$(date '+%Y-%m-%d %H:%M:%S')
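# usage example (the log filename is only illustrative):
echo "run started" >> "run-${DATE}.log"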
# make a tarball of the 500 most recently modified files in a directory
# using ls -tlr, tail, cut and awk -- see the sketch below
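A sketch of that recipe for an arbitrary directory (it parallels the image-specific block above; the cut column range and the "last500" names are assumptions that depend on the local ls output):
ls -tlr | tail -500 > ls.txt
mkdir -p last500
cut -c47-80 ls.txt | awk '{print "cp " $1 " last500"}' > cp-shell
sh cp-shell
tar czf last500.tgz last500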