#!/bin/bash
while [ true ]
do
    echo "$(top -b -o +%MEM -n 1 | head -n +8 | tail -n 1 | awk '{ print $12";"$10 }');$(date +'%H:%M:%S')" >> RAM.csv
    if [ -z "$1" ]; then
        sleep 1m
    else
        sleep "$1"
    fi
done
ERROR OUTPUT: date: extra operand '%H:%M:%S'
What is wrong with this?
Below you'll find a mildly improved version of your script:
#!/usr/bin/env bash
# All output is appended to RAM.csv
exec >> RAM.csv
# Do an infinite loop
while :; do
    # Get command with most used memory, print it in CSV format, append time
    date "+$(ps -eo ucmd -o '%mem' --sort=-%mem h | awk -v OFS=";" '($1=$1);{exit}');%T"
    # sleep $1 seconds, if not defined 1m
    sleep "${1:-1m}"
done
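To see the format string that date ends up with, you can run the inner command substitution on its own; the values below are only illustrative and depend on your system:
ps -eo ucmd -o '%mem' --sort=-%mem h | awk -v OFS=";" '($1=$1);{exit}'
# prints something like: firefox;3.4
# date then effectively runs date "+firefox;3.4;%T" and appends a line such as
# firefox;3.4;12:34:56 to RAM.csv.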
I have a txt file with more than 30000 records.
The records are one per line, and each one is an IP, like this:
192.168.0.1
192.168.0.2
192.168.0.3
192.168.0.4
192.168.0.5
192.168.0.6
192.168.0.7
192.168.0.8
192.168.0.9
192.168.0.10
I read each row in a bash script, and for each one I need to run a curl command like this:
while IFS= read -r line || [[ -n "$line" ]]; do
    #check_site "$line"
    resp=$(curl -i -m1 http://$line 2>&1)
    echo "$resp" | grep -Eo "$ok" > /dev/null
    if [ $? -ne 0 ]; then
        #echo -e "failed: $line" >> "${logfile}"
        echo -e "Command: curl -i -m1 http://$line 2>&1" >> "${outfile}"
        echo -e "failed: $line:\n\n \"$resp\"\n\n" >> "${outfile}"
        echo "$line" >> "${faillog}"
    fi
done < "${FILE}"
Is there a way to process multiple lines of the file simultaneously to reduce the execution time?
I solved the multiprocessing part this way:
# export variables to be used inside the function
export outlog="/tmp/out.log"
export faillog="/tmp/fail.log"
export ok="(curl: \(7\) Failed to connect to)" # acceptable responses
# create the function:
check_site() {
    ip=$1
    resp=$(curl -i -m1 http://$ip 2>&1)
    echo "$resp" | grep -Eo "$ok" > /dev/null
    if [ $? -ne 0 ]; then
        echo -e "Command: curl -i -m1 http://$ip 2>&1" >> "${outlog}"
        echo -e "Block failed: $ip:\n\n \"$resp\"\n\n" >> "${outlog}"
        echo "$ip" >> "${faillog}"
    fi
}
# export and call the function:
export -f check_site
parallel -j 252 -a "${FILE}" check_site
xargs will do the trick too; see the Wikipedia article on xargs.
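For example, here is a sketch using GNU xargs; it assumes the check_site function and its variables have been exported (export -f check_site, etc.) as in the answer above, and the 50 parallel jobs are an arbitrary choice:
# Read one IP per line from $FILE and run check_site on up to 50 of them in parallel.
xargs -a "${FILE}" -P 50 -I {} bash -c 'check_site "$1"' _ {}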
This article describes an approach to parallel execution in Bash; it may help you:
Parallel execution in Bash
Example from the article:
#!/bin/bash
RANDOM=10
JOBS_COUNTER=0
MAX_CHILDREN=10
MY_PID=$$
for i in {1..100}
do
    echo Cycle counter: $i
    JOBS_COUNTER=$((`ps ax -Ao ppid | grep $MY_PID | wc -l`))
    while [ $JOBS_COUNTER -ge $MAX_CHILDREN ]
    do
        JOBS_COUNTER=$((`ps ax -Ao ppid | grep $MY_PID | wc -l`))
        echo Jobs counter: $JOBS_COUNTER
        sleep 1
    done
    sleep $(($RANDOM % 30)) &
done
echo Finishing children ...
# wait for children here
while [ $JOBS_COUNTER -gt 1 ]
do
    JOBS_COUNTER=$((`ps ax -Ao ppid | grep $MY_PID | wc -l`))
    echo Jobs counter: $JOBS_COUNTER
    sleep 1
done
echo Done
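The same throttling idea can be written more compactly with bash's own job control. Here is a sketch (not from the article; it requires bash 4.3 or newer for wait -n):
#!/bin/bash
MAX_CHILDREN=10
for i in {1..100}; do
    echo "Cycle counter: $i"
    # Block while the maximum number of background jobs is already running.
    while (( $(jobs -rp | wc -l) >= MAX_CHILDREN )); do
        wait -n    # returns as soon as any one child exits
    done
    sleep $(( RANDOM % 30 )) &
done
wait    # wait for the remaining children
echo Done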
I'm trying to make a bash script that counts the newlines in an input. The first if statement (the $# check) works fine, but the problem I'm having is getting it to read the word count of a file given as a command-line argument.
e.g.
~$ ./script.sh
1
2
3
4
(User presses CTRL+D)
display word count here # answer is 5 - works fine
e.g.
~$ ./script1.sh < script1.sh
WC here - (5)
~$
This successfully redirects stdin from a file, but:
e.g.
~$ ./script1.sh script1.sh script2.sh
WC displayed here for script1.sh
WC displayed here for script2.sh
NOTHING
~$
The problem, I believe, is the second if statement: instead of processing the files given on the command line, the script waits for user input and never gets to the echo statement.
Any help would be greatly appreciated, since I cannot figure out why it won't work without the < redirection operator.
#!/bin/bash
#!/bin/sh
read filename ## read provided filename
USAGE="Usage: $0 $1 $2..." ## switch statement
if [ "$#" == "0" ]; then
    declare -i lines=0 words=0 chars=0
    while IFS= read -r line; do
        ((lines++))
        array=($line)
        ((words += ${#array[@]}))
        ((chars += ${#line} + 1)) # add 1 for the newline
    done < /dev/stdin
fi
echo "$lines $words $chars $filename" ## filename doesn't print, just filler
### problem if statement ###
if [ "$#" != "0" ]; then # space between [ ] IS VERY IMPORTANT
    declare -i lines=0 words=0 chars=0
    while IFS= read -r line; do
        lines=$( grep -c '\n' < "filename") ## should use grep -c to count only the newlines in the filename, assign to variable lines
        words=$( grep -c '\n' < "filename")
        chars=$( grep -c '\n' < "filename")
        echo "$lines $words $chars"
        # lets user press CTRL+D to end script and count the WC
    done
fi
#!/bin/sh
set -e
if [ -t 0 ]; then
    # We are *not* reading stdin from a pipe or a redirection.
    # Get the counts from the files specified on the cmdline.
    if [ "$#" -eq 0 ]; then
        echo "no files specified" >&2
        exit 1
    fi
    cat "$@" | wc
else
    # stdin is attached to a pipe or redirected from a file
    wc
fi | { read lines words chars; echo "lines=$lines words=$words chars=$chars"; }
The variables from the read command only exist within the braces, because the shell (some shells, anyway) runs the commands of a pipeline in subshells. Typically, the solution for that is to redirect from a process substitution (bash/ksh).
This can be squashed down to
#!/bin/bash
[[ -t 0 ]] && files=true || files=false
read lines words chars < <({ ! $files && cat || cat "$@"; } | wc)
echo "lines=$lines words=$words chars=$chars"
A very quick demo of cmd | read x versus read x < <(cmd):
$ x=foo; echo bar | read x; echo $x
foo
$ x=foo; read x < <(echo bar); echo $x
bar
Use wc.
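A minimal sketch of that idea, covering both the stdin case and file arguments:
#!/bin/bash
# Let wc do all the counting.
if [ "$#" -eq 0 ]; then
    wc          # read stdin; finish interactive input with Ctrl+D
else
    wc "$@"     # one line of counts per file, plus a total line
fi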
Maybe the simplest fix is to replace the second if block with a for loop.
$: cat tst
#! /bin/env bash
declare -i lines=0 words=0 chars=0
case "$#" in
0) wc ;;
*) for file in $*
do read lines words chars x <<< "$( wc $file )"
echo "$lines $words $chars $file"
done ;;
esac
$: cat foo
hello
world
and
goodbye cruel world!
$: tst < foo
6 6 40
$: tst foo tst
6 6 40 foo
9 38 206 tst
Hi, I'm making a script to do some rsync processing. The sys admin has created the rsync script, and when it runs it asks you to select options, so I want to create a wrapper script that passes those answers to it and runs it from cron.
The list of directories to rsync is taken from a file:
filelist=$(cat filelist.txt)
for i in $filelist; do
    echo -e "3\nY" | ./rsync.sh $i
    # This will create an rsync log file
Then I check a value in that log file; if it is empty I move on to the next file. If it is not empty, I have to start the real rsync process as below, which takes more than 2 hours:
if [ a != 0 ];then
echo -e "3\nN" | ./rsync.sh $i
The above rsync process needs to be sent to the background so the loop can move on to the next file. I tried the screen command, but screen is not working on the server. I also need to measure how long each run takes and write that to the log; when I use the time command I am unable to pipe in the echoed answers. So I need to send this to the background and take the next file. I'd appreciate any suggestions for getting this task done.
Questions
1. How do I pass the piped answers together with the time command?
echo -e "3\nY" | time ./rsync.sh $i
The above does not work.
2. How do I send this to the background and move on to the next file while the previous rsync process is still running?
Full Code
#!/bin/bash
filelist=$(cat filelist.txt)
Lpath=/opt/sas/sas_control/scripts/Logs/rsync_logs
date=$(date +"%m-%d-%Y")
timelog="time_result/rsync_time.log-$date"
for i in $filelist; do
    #echo $i
    b_i=$(basename $i)
    echo $b_i
    echo -e "3\nY" | ./rsync.sh $i
    f=$(cat $Lpath/$(ls -tr $Lpath | grep rsync-dry-run-$b_i | tail -1) | grep 'transferred:' | cut -d':' -f2)
    echo $f
    if [ $f != 0 ]; then
        #date=$(date +"%D : %r")
        start_time=`date +%s`
        echo "$b_i-start:$start_time" >> $timelog
        #time ./rsync.sh $i < echo -e "3\nY" 2> "./time_result/$b_i-$date" &
        time { echo -e "3\nY" | ./rsync.sh $i; } 2> "./time_result/$b_i-$date"
        end_time=`date +%s`
        s_time=$(cat $timelog | grep "$b_i-start" | cut -d ':' -f2)
        duration=$(($end_time-$s_time))
        echo "$b_i duration:$duration" >> $timelog
    fi
done
Your question is not very clear, but I'll try:
(1) If I understand you correctly, you want to time the rsync.
My first attempt would be to use echo xxxx | time rsync. In my bash, however, this was broken (or not supposed to work?). I normally use Zsh instead of bash, and in zsh this indeed runs fine.
If it is important for you to use bash, an alternative (since the time for the echo can likely be neglected) would be to time the whole pipe, i.e. time (echo xxxx | time rsync), or even simpler time rsync <(echo xxxx)
(2) To send a process to the background, add an & to the line. However, the time command of course produces output (that's its purpose), and you don't want to receive output from a program in the background. The solution is to redirect the output:
(time rsync <(echo xxxx) >output.txt 2>error.txt) &
If you want to time something, you can use:
time sleep 3
If you want to time two things, you can use a compound statement like this (note the semicolon after the second sleep):
time { sleep 3; sleep 4; }
So, you can do this to time your echo (which will take no time at all) and your rsync:
time { echo "something" | rsync something ; }
If you want to do that in the background:
time { echo "something" | rsync something ; } &
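Putting that together with the loop from your question, a sketch could look like this (filelist, date and the "3\nY" answers are taken from your code; note that this starts an rsync for every file at once, which may be more parallelism than you want):
for i in $filelist; do
    b_i=$(basename "$i")
    # Time each rsync, capture the timing report in a per-file log, and run it in the background.
    { time { echo -e "3\nY" | ./rsync.sh "$i"; }; } 2> "./time_result/$b_i-$date" &
done
wait    # block here until all background rsyncs have finished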
I am trying to run a few bash scripts continually while I am logged in to my Linux Mint install. Adding them to Startup Applications doesn't appear to work, because they are not always running when I check. I also don't want to create multiple instances of the scripts, so adding them to my .bashrc or a cron job seems to be out. Any other suggestions?
An example script (warns me when my battery is below 30%):
#!/bin/bash
while :
do
    #echo "starting script: $(date)">>battery_log
    percent=$(upower -i /org/freedesktop/UPower/devices/battery_BAT0 | grep -E "percentage" | grep -o '[0-9]\+')
    cpu_temp=$(cat /sys/class/thermal/thermal_zone0/temp | awk '{print "deg C: "$1/1000}')
    discharge=$(upower -i /org/freedesktop/UPower/devices/battery_BAT0 | grep -E "state")
    is_discharging=$(echo $discharge | grep -c discharging)
    #echo "$percent"
    #echo "$cpu_temp"
    #echo hello | grep -c he
    #if [echo $discharge | grep -c discharging -gt 0 ]; then
    #echo "success"
    #fi
    #echo "$discharge"
    if [ "$is_discharging" -gt 0 ]; then
        echo "---discharging: $(date)" >> battery_log
        if [ "$percent" -lt 30 ]; then
            #exec 2>>/home/king/Scripts/battery_log.txt
            export DISPLAY=:0
            #export XAUTHORITY=~otheruser/.Xauthority
            #kdialog --msgbox "$(upower -i /org/freedesktop/UPower/devices/battery_BAT0| grep -E "state|to\ full|percentage") \n cpu temp: $(cat /sys/class/thermal/thermal_zone0/temp | awk '{print "deg C: "$1/1000}')"
            kdialog --title "Low Battery" --passivepopup "$(upower -i /org/freedesktop/UPower/devices/battery_BAT0| grep -E "state|to\ full|percentage") \n cpu temp: $(cat /sys/class/thermal/thermal_zone0/temp | awk '{print "deg C: "$1/1000}')" 20
        fi
    fi
    sleep 300 # 5 min
done
Before you run the script, check whether an instance of it is already running:
pgrep script.sh
If it's already running you can exit; otherwise continue the script.
pgrep script.sh && exit should do the trick.
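If you prefer to build the guard into the script itself, a file lock avoids any ambiguity in matching the process name; here is a minimal sketch (the lock file path is arbitrary):
#!/bin/bash
# Single-instance guard: take an exclusive, non-blocking lock on a file.
# If another copy of the script already holds the lock, exit quietly.
exec 9>/tmp/battery_monitor.lock
flock -n 9 || exit 0
while :; do
    # ... the battery check from the question goes here ...
    sleep 300
done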
I have this script:
#! /bin/bash
cd ~
hostname=`hostname`
cat /opt/ip.txt | while read line
do
    # do something with $line here
    RES=`ping -c 2 -q $line | grep "packet loss"`
    echo "---" >> /opt/os-$hostname.txt
    echo "---"
    echo "$line $RES" >> /opt/os-$hostname.txt
    echo "$line $RES"
done
How can I make the script multi-threaded? I would like to speed up its performance.
You can use the <(...) notation for starting a subprocess and then cat all the outputs together:
myping() {
    ping -c 2 -q "$1" | grep "packet loss"
}
cat <(myping hostname1) <(myping hostname2) ...
To use a loop for this, you will need to build the command first:
cat /opt/ip.txt | {
    command='cat'
    while read line
    do
        command="$command "'<'"(myping $line)"
    done
    eval "$command"
}
If you really want the delimiting --- of your original, I propose adding an echo "---" inside myping.
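For instance, the same function with the separator added:
myping() {
    echo "---"
    ping -c 2 -q "$1" | grep "packet loss"
}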
If you want to append the output to a file as well, use tee:
eval "$command" | tee -a /opt/os-$hostname.txt