check_patton: format code

check_simpana [both versions]: added report number as argument
parent 1f7700d7
@@ -78,10 +78,10 @@ Commands (supplied with -l argument):
So, for example, if you want to know the total number of processed calls, the parameter will be "all-total".
If instead you want to know only the currently connected calls for the first isdn, the parameter will be "1-connected".
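As a rough sketch only (not check_patton's actual code; the helper name parse_calls_param is made up), an "<isdn>-<metric>" style parameter could be split like this:

# Hypothetical sketch: split an "all-total" / "1-connected" style check parameter.
def parse_calls_param(param):
    idx, metric = param.split('-', 1)   # e.g. "1-connected" -> ("1", "connected")
    if idx != 'all':
        idx = int(idx)                  # a specific isdn index
    return idx, metric

print(parse_calls_param('all-total'))    # ('all', 'total')
print(parse_calls_param('1-connected'))  # (1, 'connected')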
interface
Retrieve the status of the attached port (ethernet, isdn, etc.)
This command accepts the interface number (index) as the check parameter. If the "S" param is omitted,
the script will output a summary of all interfaces found with their corresponding index, so you can
easily determine which interface you want to monitor.
This command DOES NOT accept "W" and "C" thresholds, as it doesn't monitor values, only the current status; however, it returns
UNKNOWN, OKAY, WARNING and CRITICAL based on the interface status.
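For reference, a minimal sketch of mapping an interface status string to the Nagios exit codes named above; the status strings and the mapping itself are assumptions for illustration, not check_patton's own table:

import sys

# Hypothetical status -> Nagios exit code mapping (assumed values, illustration only)
STATUS_TO_EXIT = {'up': 0, 'dormant': 1, 'down': 2}
LABELS = {0: 'OK', 1: 'WARNING', 2: 'CRITICAL', 3: 'UNKNOWN'}

def report(status):
    code = STATUS_TO_EXIT.get(status.lower(), 3)  # anything unrecognized -> UNKNOWN
    print('%s - interface status: %s' % (LABELS[code], status))
    sys.exit(code)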
......
@@ -31,6 +31,7 @@ parser.add_argument('--U', metavar='user', help='(required) NTLM Username', requ
parser.add_argument('--P', metavar='pass', help='(required) NTLM Password', required=True)
parser.add_argument('--p', metavar='port', default='80', help='(optional) Webserver Port (default 80)')
parser.add_argument('--c', metavar='client', help='(required) Client Name to lookup status', required=True)
parser.add_argument('--d', metavar='datasetid', help='(required) DataSet ID (retrieved by intercepting xhr)', required=True)
parser.add_argument('--t', metavar='runtime', help='(optional) Max Running time (minutes) for a Job')
parser.add_argument('--f', metavar='timeframe', default='24', help='(optional) Get Backups data from the past XXX hours (default 24)')
parser.add_argument('--l', metavar='limit', default='50', help='(optional) Limit results to N (default 50)')
@@ -40,7 +41,7 @@ args = parser.parse_args()
hostname = args.H+':'+args.p
loginUrl = 'http://'+hostname+'/webconsole/login/'
ssoUrl = 'http://'+hostname+'/webconsole/ssoLogin.do'
dataUrl = 'http://'+hostname+'/webconsole/proxy/cr/reportsplusengine/datasets/1924/data/'
dataUrl = 'http://'+hostname+'/webconsole/proxy/cr/reportsplusengine/datasets/'+args.d+'/data/'
username = args.U
password = args.P
clientName = args.c.lower()
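For illustration only, assuming a made-up host 'backupsrv' on the default port and a hypothetical dataset ID '2048' passed via --d, the dataUrl assembled above expands as follows:

# Illustration with made-up values ('backupsrv', '80', '2048'); not part of the script.
hostname = 'backupsrv' + ':' + '80'
dataUrl = 'http://' + hostname + '/webconsole/proxy/cr/reportsplusengine/datasets/' + '2048' + '/data/'
print(dataUrl)  # http://backupsrv:80/webconsole/proxy/cr/reportsplusengine/datasets/2048/data/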
@@ -81,38 +82,42 @@ data_s1 = [
]
ssoRes = session.post(ssoUrl, headers=headers_s1, cookies=cookies_s1, data=data_s1)
login_cookie = ssoRes.cookies['LOGIN_COOKIE']
cookies_s2 = {
'JSESSIONID': jsess_cookie,
'csrf': csrf_cookie,
'LOGIN_COOKIE': login_cookie
}
if ssoRes.status_code == 200:
login_cookie = ssoRes.cookies['LOGIN_COOKIE']
headers_s2 = {
'Connection': 'keep-alive',
'Pragma': 'no-cache',
'Cache-Control': 'no-cache',
'Upgrade-Insecure-Requests': '1',
'DNT': '1',
'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
'Accept-Encoding': 'gzip, deflate',
'Accept-Language': 'en-US;q=0.9,en;q=0.8',
}
cookies_s2 = {
'JSESSIONID': jsess_cookie,
'csrf': csrf_cookie,
'LOGIN_COOKIE': login_cookie
}
headers_s2 = {
'Connection': 'keep-alive',
'Pragma': 'no-cache',
'Cache-Control': 'no-cache',
'Upgrade-Insecure-Requests': '1',
'DNT': '1',
'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
'Accept-Encoding': 'gzip, deflate',
'Accept-Language': 'en-US;q=0.9,en;q=0.8',
}
params_s2 = (
('fields', '[JobId] AS [JobId],[Client] AS [Client],[Duration(mins)] AS [Durationmins],[Job Status] AS [JobStatus], [Start Time] AS [StartTime],[End Time] AS [EndTime]'),
('orderby', '[JobId] Desc'),
('componentName', 'Job Details'),
('parameter.timeframe', '-PT'+timeframe+'H P0D'),
('parameter.useCSTimeZone', '1'),
('parameter.WindowStartTime', '00:00:00'),
('limit', joblimit)
)
dataRes = session.get(dataUrl, headers=headers_s2, params=params_s2, cookies=cookies_s2)
if dataRes.status_code == 200:
params_s2 = (
('fields', '[JobId] AS [JobId],[Client] AS [Client],[Duration(mins)] AS [Durationmins],[Job Status] AS [JobStatus], [Start Time] AS [StartTime],[End Time] AS [EndTime]'),
('orderby', '[JobId] Desc'),
('componentName', 'Job Details'),
('parameter.timeframe', '-PT'+timeframe+'H P0D'),
('parameter.useCSTimeZone', '1'),
('parameter.WindowStartTime', '00:00:00'),
('limit', joblimit)
)
dataRes = session.get(dataUrl, headers=headers_s2, params=params_s2, cookies=cookies_s2)
if 'errorCode' in dataRes.text:
print 'CRITICAL - Error querying data from the Backup Server'
sys.exit(2)
try:
xmlTree = ElementTree.fromstring(dataRes.content)
@@ -141,9 +146,9 @@ if dataRes.status_code == 200:
except :
print 'WARNING - Error retrieving data from the Backup Server'
exit(1)
else:
print 'CRITICAL - Error response from the Backup Server, http status code: ' + str(dataRes.status_code)
exit(2)
print 'UNKNOWN - No Backup Job found for client in the last ' + timeframe + 'H'
exit(3)
\ No newline at end of file
print 'UNKNOWN - No Backup Job found for client in the last ' + timeframe + 'H'
exit(3)
else:
print 'CRITICAL - Cannot login: the supplied credentials may be invalid'
sys.exit(2)
\ No newline at end of file
@@ -31,6 +31,7 @@ parser.add_argument('--U', metavar='user', help='(required) NTLM Username', requ
parser.add_argument('--P', metavar='pass', help='(required) NTLM Password', required=True)
parser.add_argument('--p', metavar='port', default='80', help='(optional) Webserver Port (default 80)')
parser.add_argument('--c', metavar='client', help='(required) Client Name to lookup status', required=True)
parser.add_argument('--d', metavar='datasetid', help='(required) DataSet ID (retrieved by intercepting xhr)', required=True)
parser.add_argument('--t', metavar='runtime', help='(optional) Max Running time (minutes) for a Job')
parser.add_argument('--f', metavar='timeframe', default='24', help='(optional) Get Backups data from the past XXX hours (default 24)')
parser.add_argument('--l', metavar='limit', default='50', help='(optional) Limit results to N (default 50)')
@@ -40,7 +41,7 @@ args = parser.parse_args()
hostname = args.H+':'+args.p
loginUrl = 'http://'+hostname+'/webconsole/login/'
ssoUrl = 'http://'+hostname+'/webconsole/ssoLogin.do'
dataUrl = 'http://'+hostname+'/webconsole/proxy/cr/reportsplusengine/datasets/1924/data/'
dataUrl = 'http://'+hostname+'/webconsole/proxy/cr/reportsplusengine/datasets/'+args.d+'/data/'
username = args.U
password = args.P
clientName = args.c.lower()
@@ -81,61 +82,69 @@ data_s1 = [
]
ssoRes = session.post(ssoUrl, headers=headers_s1, cookies=cookies_s1, data=data_s1)
login_cookie = ssoRes.cookies['LOGIN_COOKIE']
cookies_s2 = {
'JSESSIONID': jsess_cookie,
'csrf': csrf_cookie,
'LOGIN_COOKIE': login_cookie
}
headers_s2 = {
'Connection': 'keep-alive',
'Pragma': 'no-cache',
'Cache-Control': 'no-cache',
'Host': args.H,
'Upgrade-Insecure-Requests': '1',
'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36',
'Accept': 'application/json, text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
'Accept-Encoding': 'gzip, deflate',
'Accept-Language': 'en-US;q=0.9,en;q=0.8',
'X-CSRF-Token': csrf_cookie
}
params_s2 = (
('fields', '[JobId] AS [JobId],[Client] AS [Client],[Duration(mins)] AS [Durationmins],[Job Status] AS [JobStatus], [Start Time] AS [StartTime],[End Time] AS [EndTime]'),
('orderby', '[JobId] Desc'),
('componentName', 'Job Details'),
('parameter.timeframe', '-PT'+timeframe+'H P0D'),
('parameter.useCSTimeZone', '1'),
('parameter.WindowStartTime', '00:00:00'),
('limit', joblimit)
)
dataRes = session.get(dataUrl, headers=headers_s2, params=params_s2, cookies=cookies_s2).json()
for job in dataRes['records']:
name = job[1]
runtime = job[2]
result = job[3]
endtime = job[5]
if (name.lower() == clientName):
if (result == 'Completed'):
print 'OK - Result: ' + result + ', RunTime: ' + str(runtime) + 'm, EndTime: ' + str(endtime) + ' UTC'
exit(0)
elif (result == 'Failed'):
print 'CRITICAL - Job Failed, Result: ' + result + ', RunTime: ' + str(runtime) + 'm, EndTime: ' + str(endtime) + ' UTC'
exit(2)
elif (result == 'Running'):
if (args.t and int(runtime)>int(args.t)):
print 'WARNING - Job runtime is over max, Result: ' + result + ', RunTime: ' + str(runtime) + 'm, EndTime: ' + str(endtime) + ' UTC'
exit(1)
if ssoRes.status_code == 200:
login_cookie = ssoRes.cookies['LOGIN_COOKIE']
cookies_s2 = {
'JSESSIONID': jsess_cookie,
'csrf': csrf_cookie,
'LOGIN_COOKIE': login_cookie
}
headers_s2 = {
'Connection': 'keep-alive',
'Pragma': 'no-cache',
'Cache-Control': 'no-cache',
'Host': args.H,
'Upgrade-Insecure-Requests': '1',
'User-Agent': 'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36',
'Accept': 'application/json, text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
'Accept-Encoding': 'gzip, deflate',
'Accept-Language': 'en-US;q=0.9,en;q=0.8',
'X-CSRF-Token': csrf_cookie
}
params_s2 = (
('fields', '[JobId] AS [JobId],[Client] AS [Client],[Duration(mins)] AS [Durationmins],[Job Status] AS [JobStatus], [Start Time] AS [StartTime],[End Time] AS [EndTime]'),
('orderby', '[JobId] Desc'),
('componentName', 'Job Details'),
('parameter.timeframe', '-PT'+timeframe+'H P0D'),
('parameter.useCSTimeZone', '1'),
('parameter.WindowStartTime', '00:00:00'),
('limit', joblimit)
)
dataRes = session.get(dataUrl, headers=headers_s2, params=params_s2, cookies=cookies_s2).json()
if 'errorCode' in dataRes:
print 'CRITICAL - Error querying data: '+dataRes['errorMessage']
sys.exit(2)
for job in dataRes['records']:
name = job[1]
runtime = job[2]
result = job[3]
endtime = job[5]
if (name.lower() == clientName):
if (result == 'Completed'):
print 'OK - Result: ' + result + ', RunTime: ' + str(runtime) + 'm, EndTime: ' + str(endtime) + ' UTC'
exit(0)
elif (result == 'Failed'):
print 'CRITICAL - Job Failed, Result: ' + result + ', RunTime: ' + str(runtime) + 'm, EndTime: ' + str(endtime) + ' UTC'
exit(2)
elif (result == 'Running'):
if (args.t and int(runtime)>int(args.t)):
print 'WARNING - Job runtime is over max, Result: ' + result + ', RunTime: ' + str(runtime) + 'm, EndTime: ' + str(endtime) + ' UTC'
exit(1)
else:
print 'UNKNOWN - Job still running, Result: ' + result + ', RunTime: ' + str(runtime) + 'm, EndTime: ' + str(endtime) + ' UTC'
exit(3)
else:
print 'UNKNOWN - Job still running, Result: ' + result + ', RunTime: ' + str(runtime) + 'm, EndTime: ' + str(endtime) + ' UTC'
exit(3)
else:
print 'WARNING - Unhandled job status, Result: ' + result + ', RunTime: ' + str(runtime) + 'm, EndTime: ' + str(endtime) + ' UTC'
exit(1)
print 'UNKNOWN - No Backup Job found for client in the last ' + str(timeframe) + 'H'
sys.exit(3)
\ No newline at end of file
print 'WARNING - Unhandled job status, Result: ' + result + ', RunTime: ' + str(runtime) + 'm, EndTime: ' + str(endtime) + ' UTC'
exit(1)
print 'UNKNOWN - No Backup Job found for client in the last ' + str(timeframe) + 'H'
sys.exit(3)
else:
print 'CRITICAL - Cannot login: the supplied credentials may be invalid'
sys.exit(2)
\ No newline at end of file
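For reference, the record indexes used in the loop above (job[1], job[2], job[3], job[5]) follow the column order requested via the 'fields' parameter; a small sketch with made-up values:

# Sketch only: a dataset row in the order requested via 'fields' (values are made up).
record = [123456, 'fileserver01', 42, 'Completed', '2018-07-01 01:00', '2018-07-01 01:42']
job_id, client, runtime, status, start_time, end_time = record
print('%s %s %sm %s' % (client, status, runtime, end_time))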