nl | bash |
---|---|
Check that the master SSH control socket "my-ctrl-socket" to "[email protected]" is running | ssh -S my-ctrl-socket -O check [email protected] |
check the type of files in the folder /usr/bin | find /usr/bin | xargs file |
Check whether "$path_in_question" is a mount point | df $path_in_question | grep " $path_in_question$" |
Check whether "/full/path" is a mount point with no output and using the exit code | df /full/path | grep -q /full/path |
Recursively remove all Subversion ".svn" directories from the current directory tree | find . -type d -name ".svn" -print | xargs rm -rf |
Clear the in-memory history | history -c |
Clear the in-memory history and read from the current history file | history -cr |
Clear the shell's command history | history -c |
Clears terminal screen. | echo `clear` |
Clears the terminal screen. | clear |
Close the master SSH control socket "my-ctrl-socket" to "[email protected]" | ssh -S my-ctrl-socket -O exit [email protected] |
Display files modified in the last 15 minutes, listing only non-hidden files, i.e. files whose names start with a . (period) are excluded from the output | find . -mmin -15 \( ! -regex ".*/\..*" \) |
Find all directories (-type d) in the $LOGDIR directory wherein a file's data was last modified more than 24 hours ago (-mtime +0) and compress their contents recursively (compress -r {}) to save disk space | find $LOGDIR -type d -mtime +0 -exec compress -r {} \; |
Compare "$source_file" and "$dest_file" line by line | diff "$source_file" "$dest_file" |
Compare "current.log" and "previous.log" line by line and print lines containing regex pattern ">\|<" | diff current.log previous.log | grep ">\|<" #comparring users lists |
Compare "fastcgi_params" and "fastcgi.conf" line by line, output 3 lines of unified context, and print the C function the change is in | diff -up fastcgi_params fastcgi.conf |
Compare "file1" and "file2" line by line with 3 lines of unified context | diff -u file1 file2 |
Compare *.csv files in the current directory tree with their analogs stored in /some/other/path/ | find . -name "*.csv" -exec diff {} /some/other/path/{} ";" -print |
Compares two listings 'ls' and 'ls *Music*', showing only lines unique to the first listing. | comm -23 <(ls) <(ls *Music*) |
Compare the contents of "/bin" and "/usr/bin" line by line | diff <(ls /bin) <(ls /usr/bin) |
Compare the contents of gzip-compressed files "file1.gz" and "file2.gz" | diff <(zcat file1.gz) <(zcat file2.gz) |
Compare each .xml file under the current directory with a file of the same name in "/destination/dir/2" | find . -name '*.xml' -exec diff {} /destination/dir/2/{} \; |
compare each C header file in or below the current directory with the file /tmp/master | find . -name '*.h' -execdir diff -u '{}' /tmp/master ';' |
Compare each file in "repos1/" and "repos2/", treat absent files as empty, ignore differences in whitespace and tab expansions, and print 3 lines of unified context | diff -ENwbur repos1/ repos2/ |
Compare files "A1" and "A2" with 3 lines of unified context and print lines beginning with "+" | diff -u A1 A2 | grep -E "^\+" |
Compare files 'abc' and 'def' and print in three columns the lines unique to the first file, unique to the second file, and common to both | comm abc def |
Compare files in "/tmp/dir1" and "/tmp/dir2", treat absent files as empty and all files as text, and print 3 lines of unified context | diff -Naur dir1/ dir2 |
Compare the files in 'FOLDER1' and 'FOLDER2' and show which ones are identical and which ones differ | find FOLDER1 -type f -print0 | xargs -0 -I % find FOLDER2 -type f -exec diff -qs --from-file="%" '{}' \+ |
Compare numerically sorted files 'f1.txt' and 'f2.txt' and print in three columns the lines unique to the first file, unique to the second file, and common to both | comm <(sort -n f1.txt) <(sort -n f2.txt) |
Compare sorted files 'f1.txt' and 'f2.txt' and print in three columns the lines unique to the first file, unique to the second file, and common to both | comm <(sort f1.txt) <(sort f2.txt) |
Compare text "hello" and "goodbye" line by line | diff <(echo hello) <(echo goodbye) |
Compose filepath as folder path where file $SRC is located, and lowercase filename of $SRC file, and save it in 'DST' variable | DST=`dirname "${SRC}"`/`basename "${SRC}" | tr '[A-Z]' '[a-z]'` |
Compose filepath as folder path where file $f is located, and lowercase filename of $f file, and save it in 'g' variable | g=`dirname "$f"`/`basename "$f" | tr '[A-Z]' '[a-z]'` |
Composes the full process tree with process ID numbers, and prints only the lines that contain 'git'. | pstree -p | grep git |
Compress "archive.tar" | gzip archive.tar |
Compress "hello world" and save to variable "hey" | hey=$(echo "hello world" | gzip -cf) |
Compress "my_large_file" with gzip and split the result into files of size 1024 MiB with prefix "myfile_split.gz_" | gzip -c my_large_file | split -b 1024MiB - myfile_split.gz_ |
Compress $file file using gzip | gzip "$file" |
Compress a file named '{}' in the current directory | gzip "{}" |
Compress all ".txt" files in all sub directories with gzip | gzip */*.txt |
Compress all ".txt" files in the current directory tree with gzip | find . -type f -name "*.txt" -exec gzip {} \; |
Compress all *.img files using bzip2 | find ./ -name "*.img" -exec bzip2 -v {} \; |
Recursively compress all directories found in $LOGDIR wherein a file's data was last modified more than 24 hours ago | find $LOGDIR -type d -mtime +0 -exec compress -r {} \; |
Compress all directories found in directory tree $LOGDIR that have been modified within the last 24 hours | find $LOGDIR -type d -mtime -1 -exec compress -r {} \; |
Compress all files in the "$FILE" directory tree that were last modified 30 days ago | find $FILE -type f -mtime 30 -exec gzip {} \; |
Compress all files in the "$FILE" directory tree that were last modified 30 days ago and have not already been compressed with gzip | find $FILE -type f -not -name '*.gz' -mtime 30 -exec gzip {} \; |
Copy all files in the current directory tree to /backup (depth-first), creating directories as needed, overwriting existing files unconditionally, and preserving modification times | find . -depth -print | cpio -dump /backup |
Compresses all files given as positional parameters ($*), running in the background. | compress $* & |
Compress all files under the /source directory tree using gzip with the best compression level (-9), running $CORES compressions in parallel | find /source -type f -print0 | xargs -0 -n 1 -P $CORES gzip -9 |
Compress all files under current directory tree with gzip | find . -type f -print0 | xargs -0r gzip |
Compress all files with a '.txt' extension in the current directory using gzip with the best compression level | echo *.txt | xargs gzip -9 |
Compress and display the gzip compression ratio of every file on the system that is greater than 100000 bytes and ends in ".log" | sudo find / -xdev -type f -size +100000c -name "*.log" -exec gzip -v {} \; |
Compress and display the original filename of every file on the system that is greater than 100000 bytes and ends in ".log" | sudo find / -xdev -type f -size +100000c -name "*.log" -exec gzip {} \; -exec echo {} \; |
Compress every file in the current directory that matches "*cache.html" and keep the original file | gzip -k *cache.html |
Compress every file in the current directory tree that matches "*cache.html" and keep the original file | find . -type f -name "*cache.html" -exec gzip -k {} \; |
Compress every file in the 'folder' directory tree with gzip at the best compression level and keep the original file names | find folder -type f -exec gzip -9 {} \; -exec mv {}.gz {} \; |
Compresses file 'example.log' keeping original file in place. | bzip2 -k example.log |
Compress the file 'file' with 'bzip2' and append all output to the file 'logfile' and stdout | bzip2 file | tee -a logfile |
Compress files excluding *.Z files | find . \! -name "*.Z" -exec compress -f {} \; |
Compress from standard input and print the byte count preceded with 'gzip.' | echo gzip. $( gzip | wc -c ) |
Compress from standard input with gzip | gzip |
Compress in parallel regular files in the current directory tree that were last modified more than 7 days ago | find . -type f -mtime +7 | tee compressedP.list | xargs -I{} -P10 compress {} & |
Concatenate all files under the current directory and below that contain "test" in their names | find . -iname '*test*' -exec cat {} \; |
Concatenate files containing `test' in their names | find . -name '*test*' -exec cat {} \; |
Join every pair of consecutive lines in "input.txt" with a space | paste -s -d' \n' input.txt |
Connect as ssh user specified by variable USER to host whose IP address or host name is specified by HOST, and copy remote file specified by variable SRC to location on local host specified by variable DEST, disabling progress info but enabling debug info. | scp -qv $USER@$HOST:$SRC $DEST |
Connect to "$USER_AT_HOST" using connection sharing on "$SSHSOCKET" and request the master to exit | ssh -S "$SSHSOCKET" -O exit "$USER_AT_HOST" |
Connect to host "$USER_AT_HOST" in master mode in the background without executing any commands and set the ControlPath to "$SSHSOCKET" | ssh -M -f -N -o ControlPath="$SSHSOCKET" "$USER_AT_HOST" |
Connect to host "server_b" as ssh user "user" and copy local file "/my_folder/my_file.xml" to server_b's directory "/my_new_folder/". | scp -v /my_folder/my_file.xml user@server_b:/my_new_folder/ |
Connect to port 1234 of specified IP address or hostname as ssh user "user", and copy all visible files in /var/www/mywebsite/dumps/ on this host to local directory /myNewPathOnCurrentLocalMachine - this directory must already exist on local host. | scp -P 1234 user@[ip address or host name]:/var/www/mywebsite/dumps/* /var/www/myNewPathOnCurrentLocalMachine |
Connect to port 2222 of example.com as ssh user "user", and copy local file "/absolute_path/source-folder/some-file" to remote directory "/absolute_path/destination-folder" | scp -P 2222 /absolute_path/source-folder/some-file user@example.com:/absolute_path/destination-folder |
Continuously send "y" to all prompts of command "rm" | yes | rm |
Convert "1199092913" to dotted decimal IPv4 address | ping -c1 1199092913 | head -n1 | grep -Eow "[0-9]+[.][0-9]+[.][0-9]+[.][0-9]+" |
Convert ";" separated list "luke;yoda;leila" to new line separated list | echo "luke;yoda;leila" | tr ";" "\n" |
Convert "abc" to a string of hexadecimal bytes | echo abc | od -A n -v -t x1 | tr -d ' \n' |
Convert the contents of 'var1' variable to lowercase | var1=`echo $var1 | tr '[A-Z]' '[a-z]'` |
Convert the content of the 'CLEAN' variable to lowercase | CLEAN=`echo -n $CLEAN | tr A-Z a-z` |
Convert relative path "/x/y/../../a/b/z/../c/d" into absolute path with resolved symbolic links | readlink -f /x/y/../../a/b/z/../c/d |
Convert relative symbolic link "$link" to absolute symbolic link | ln -sf "$(readlink -f "$link")" "$link" |
Convert standard input into a dump of octal bytes without the first 8 bytes of address and count the unique results | od | cut -b 8- | xargs -n 1 | sort | uniq | wc -l |
Copies ""$project_dir"/iTunesArtwork", to the 'Payload/iTunesArtwork', rewriting files if necessary. | cp -f "$project_dir"/iTunesArtwork Payload/iTunesArtwork |
Copy "./export" recursively to "/path/to/webroot" preserving permissions | rsync -pr ./export /path/to/webroot |
Copy "/Users/username/path/on/machine/" to "[email protected]:/home/username/path/on/server/" and convert encoding from UTF-8-MAC to UTF-8 | rsync --iconv=UTF-8-MAC,UTF-8 /Users/username/path/on/machine/ '[email protected]:/home/username/path/on/server/' |
Copy "/home/username/path/on/server/" to "[email protected]:/Users/username/path/on/machine/" and convert encoding from UTF-8 to UTF-8-MAC | rsync --iconv=UTF-8,UTF-8-MAC /home/username/path/on/server/ '[email protected]:/Users/username/path/on/machine/' |
Copy "/new/x/y/z/" over the network to "user@remote:/pre_existing/dir/" preserving the directory hierarchy | rsync -a --relative /new/x/y/z/ user@remote:/pre_existing/dir/ |
Copy "/path/to/source" to '/path/to/dest' in remote "username@computer" | rsync -r /path/to/source username@computer:/path/to/dest |
Copy "6.3.3/6.3.3/macosx/bin/mybinary" to "~/work/binaries/macosx/6.3.3/" and create directory "~/work/binaries/macosx/6.3.3/" if "~/work/binaries/macosx/" exists | rsync 6.3.3/6.3.3/macosx/bin/mybinary ~/work/binaries/macosx/6.3.3/ |
Copy "fileName.txt" to all directories listed in "allFolders.txt" - names may not contain spaces. | cat allFolders.txt | xargs -n 1 cp fileName.txt |
Copy "some_file_name" to "destination_directory" and change ownership to "someuser:somegroup" | echo 'some_file_name' | cpio -p --owner someuser:somegroup destination_directory |
Copy "source" recursively to "destination" excluding "path1/to/exclude" and "path2/to/exclude" | rsync -av --exclude='path1/to/exclude' --exclude='path2/to/exclude' source destination |
Copy "src" to "dest" if "src" is newer than "dest" | rsync -u src dest |
Copy "src/prog.js" and "images/icon.jpg" to "/tmp/package" keeping relative path names | rsync -R src/prog.js images/icon.jpg /tmp/package |
Copy '/path/to/source' from remote "username@computer" to local "/path/to/dest" | rsync -r username@computer:/path/to/source /path/to/dest |
Copies 'libgtest_main.so' and 'libgtest.so' to '/usr/lib/', preserving all attributes and copying symlinks as symlinks rather than following them. | sudo cp -a libgtest_main.so libgtest.so /usr/lib/ |
Copies 'src' to 'dest' without overwriting existing files. | cp -n src dest |
Copy *.mp3 files to /tmp/MusicFiles | find . -type f -name "*.mp3" -exec cp {} /tmp/MusicFiles \; |
Copy *.txt files from the dir/ directory tree to dir_txt/ along with their parent directories | find dir/ -name '*.txt' | xargs cp -a --target-directory=dir_txt/ --parents |
Copy /my/configfile to all empty directories of the $somedir directory tree | find "$somedir" -type d -empty -exec cp /my/configfile {} \; |
Copy the 3 specified files to /tmp/package, preserving/creating directory structure of each file as specified on command line. | cp --parents src/prog.js images/icon.jpg /tmp/package |