Category: Python

Apr 12

python-scan-text-block-reverse

An example of finding a block of text in a file: locate a start string, reverse-search upward to the next marker string, and then replace (or insert) a line inside that block.

Python example:

#!/usr/bin/python
#v0.9.6

fileName = "listener_test.yml"
codeDir = "./unregister"
variable = "bar"
block_start = 'CodeUri: ' + codeDir
block_end = 'AWS::Serverless::Function'
rtext = '      AutoPublishCodeSha256: ' + variable + '\n'

with open(fileName) as ofile:
  lines = ofile.readlines()

# Scan bottom-up: remember the CodeUri line (block start) and any existing
# AutoPublishCodeSha256 line, and stop once the AWS::Serverless::Function
# line above the block has been reached.
i = len(lines) - 1
AWSFound = False
CodeUriFound = False
AutoFound = False
unum = 0
while i >= 0 and not AWSFound:
  if block_start in lines[i]:
    CodeUriFound = True
    unum = i
  if "AutoPublishCodeSha256:" in lines[i]:
    AutoFound = True
    unum = i
  if block_end in lines[i] and CodeUriFound:
    AWSFound = True
  i -= 1

# Replace the existing AutoPublishCodeSha256 line, or insert one into the
# block if it was not found.
if AutoFound:
  lines[unum] = rtext
else:
  lines.insert(unum - 1, rtext)

with open('listener_test_new.yml', 'w') as outfile:
  outfile.write("".join(lines))
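
Roughly the same idea as a reusable helper, in case it is useful elsewhere (a minimal sketch; the function name and arguments are mine, not part of the original script):

def replace_in_block(lines, block_start, block_end, marker, replacement):
  # Scan bottom-up, remembering where block_start and any existing marker
  # line sit, and stop once block_end is seen above a found block_start.
  i = len(lines) - 1
  start_found = False
  marker_found = False
  unum = 0
  while i >= 0:
    if block_start in lines[i]:
      start_found = True
      unum = i
    if marker in lines[i]:
      marker_found = True
      unum = i
    if block_end in lines[i] and start_found:
      break
    i -= 1
  # Replace the marker line if present, otherwise insert into the block
  if marker_found:
    lines[unum] = replacement
  else:
    lines.insert(unum - 1, replacement)
  return lines

# Example call with the same strings as above:
# lines = replace_in_block(lines, 'CodeUri: ./unregister', 'AWS::Serverless::Function',
#                          'AutoPublishCodeSha256:', '      AutoPublishCodeSha256: bar\n')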


Mar 19

Quick Backup and Purge

I highly recommend using restic instead of what I am talking about here.

Mostly I am just documenting this for my own reference; this is not a great backup solution by any means. Also note:

  1. This script creates backups locally; the idea would be to adapt it to use NFS or, better, object storage.
  2. This is just a starting point, for example if you would like to back up very small datasets (like /etc) and also purge older backups.
  3. Adapt it for your own policies; I have roughly used a gold policy here (7 daily, 4 weekly, 12 monthly, 5 yearly).
  4. Purging should perhaps rather be done by actual file dates and not by counting; see the sketch after the script below.
#!/usr/bin/python
#
#: Script Name  : tarBak.py
#: Author       : Riaan Rossouw
#: Date Created : March 13, 2019
#: Date Updated : March 13, 2019
#: Description  : Python Script to manage tar backups
#: Examples     : tarBak.py -t target -f folders -c
#:              : tarBak.py --target <backup folder> --folders <folders> --create

import optparse, os, glob, sys, re, datetime
import tarfile
import socket

__version__ = '0.9.1'
optdesc = 'This script is used to manage tar backups of files'

parser = optparse.OptionParser(description=optdesc,version=os.path.basename(__file__) + ' ' + __version__)
parser.formatter.max_help_position = 50
parser.add_option('-t', '--target', help='Specify Target', dest='target', action='append')
parser.add_option('-f', '--folders', help='Specify Folders', dest='folders', action='append')
parser.add_option('-c', '--create', help='Create a new backup', dest='create', action='store_true',default=False)
parser.add_option('-p', '--purge', help='Purge older backups per policy', dest='purge', action='store_true',default=False)
parser.add_option('-g', '--group', help='Policy group', dest='group', action='append')
parser.add_option('-l', '--list', help='List backups', dest='listall', action='store_true',default=False)
opts, args = parser.parse_args()

def make_tarfile(output_filename, source_dirs):
  with tarfile.open(output_filename, "w:gz") as tar:
    for source_dir in source_dirs:
      tar.add(source_dir, arcname=os.path.basename(source_dir))

def getBackupType(backup_time_created):
  utc,mt = str(backup_time_created).split('.')
  d = datetime.datetime.strptime(utc, '%Y-%m-%d %H:%M:%S').date()
  dt = d.strftime('%a %d %B %Y')

  # Check the most specific case first, otherwise YEARLY/MONTHLY can never match
  if ( (d.day == 1) and (d.month == 1) ):
    backup_t = 'YEARLY'
  elif d.day == 1:
    backup_t = 'MONTHLY'
  elif d.weekday() == 6:
    backup_t = 'WEEKLY'
  else:
    backup_t = 'DAILY'

  return (backup_t,dt)

def listBackups(target):
  print ("Listing backup files..")

  files = glob.glob(target + "*DAILY*")
  files.sort(key=os.path.getmtime, reverse=True)

  for file in files:
    print(file)
  
def purgeBackups(target, group):
  print ("Purging backup files..this needs testing and more logic for SILVER and BRONZE policies?")

  files = glob.glob(target + "*.tgz*")
  files.sort(key=os.path.getmtime, reverse=True)
  daily = 0
  weekly = 0
  monthly = 0
  yearly = 0
 
  for file in files:
    comment = ""
    if ( ("DAILY" in file) or ("WEEKLY" in file) or ("MONTHLY" in file) or ("YEARLY" in file) ):
      #t = file.split("-")[0]
      sub = re.search('files-(.+?)-2019', file)
      #print sub
      t = sub.group(1)
    else:
      t = "MANUAL"

    if t == "DAILY":
      comment = "DAILY"
      daily = daily + 1
      if daily > 7:
        comment = comment + " this one is more than 7 deleting"
        os.remove(file)
    elif t == "WEEKLY":
      comment = "Sun"
      weekly = weekly + 1
      if weekly > 4:
        comment = comment + " this one is more than 4 deleting"
        os.remove(file)
    elif t  == "MONTHLY":
      comment = "01"
      monthly = monthly + 1
      if monthly > 12:
       comment = comment + " this one is more than 12 deleting"
       os.remove(file)
    elif t  == "YEARLY":
      comment = "01"
      yearly = yearly + 1
      if yearly > 5:
       comment = comment + " this one is more than 5 deleting"
       os.remove(file)
    else:
      comment = " manual snapshot not purging"
      
    if  "this one " in comment:
      print ('DELETE: {:25}: {:25}'.format(file, comment) )

def createBackup(target, folders, group):
  print ("creating backup of " + str(folders))
  hostname = socket.gethostname()
  creationDate = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.0")
  t,ds = getBackupType(creationDate)
  BackupName = target + "/" + hostname + '-files-' + t + "-" + datetime.datetime.now().strftime("%Y%m%d-%H%MCST") + '.tgz'

  # Only take the snapshot when the policy group calls for one on this day
  proceed = "SNAPSHOT NOT NEEDED AT THIS TIME PER THE POLICY"
  if ( group == "BRONZE" ) and ( (t == "MONTHLY") or (t == "YEARLY") ):
    proceed = "CLEAR TO SNAP"
  elif ( group == "SILVER" ) and ( (t == "WEEKLY") or (t == "MONTHLY") or (t == "YEARLY") ):
    proceed = "CLEAR TO SNAP"
  elif group == "GOLD":
    proceed = "CLEAR TO SNAP"

  if proceed == "CLEAR TO SNAP":
    make_tarfile(BackupName, folders)
  else:
    print (proceed)

def main():
  if opts.target:
    target = opts.target[0]
  else:
    print ("\n\n must specify target folder")
    exit(0)

  # Default to the GOLD policy when no --group is given (the cron example below omits it)
  group = opts.group[0] if opts.group else "GOLD"

  if opts.listall:
    listBackups(target)
  else:
    if opts.create:
      if opts.folders:
        folders = opts.folders[0].split(',')
      else:
        print ("\n\n must specify folders")
        exit(0)
      createBackup(target, folders, group)

    if opts.purge:
      purgeBackups(target, group)

if __name__ == '__main__':
  main()
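
As mentioned in note 4 above, purging could instead be driven by actual file age rather than by counting. A minimal sketch of that approach (not part of the script above; it assumes the same target folder and naming, and simply removes DAILY backups older than a cutoff):

import glob, os, time

def purgeByAge(target, days=7, pattern="*DAILY*.tgz"):
  # Delete backups whose modification time is older than the cutoff
  cutoff = time.time() - days * 86400
  for f in glob.glob(target + pattern):
    if os.path.getmtime(f) < cutoff:
      print ("DELETE: " + f)
      os.remove(f)

# purgeByAge("/tmp/MyBackups/", days=7)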

Example cron entry. Use root's crontab if you need to back up files only accessible as root.

$ crontab -l | tail -1
0 5 * * * cd /Src/tarBak/ ; python tarBak.py -t /tmp/MyBackups/ -f '/home/rrosso,/var/spool/syslog' -c 2>&1


Feb 14

Python3 and pip

I am converting some scripts to python3 and noticed that the pip modules in use under python2 also need to be installed for python3. I am not using virtualenv, so below is my fix on Ubuntu 17.10.

Missing module oci.

$ python3 OCI_Details.py -t ocid1.tenancy.oc1..aa...mn55ca
Traceback (most recent call last):
  File "OCI_Details.py", line 14, in <module>
    import oci,optparse,os
ModuleNotFoundError: No module named 'oci'

Python2 module is there.

$ pip list --format=columns | grep oci
oci             1.3.14 

Ubuntu has python3-pip

$ sudo apt install python3-pip
$ pip3 install oci
$ pip3 list --format=columns | grep oci
oci                   1.3.14   

Check my converted script.

$ python3 OCI_Details.py -t ocid1.tenancy.oc1..aaaaaa...5ca
OCI Details: 0.9.7
..


Oct 14

DynamoDB Test

Boto3 and AWS DynamoDB usage...

http://boto3.readthedocs.io/en/latest/reference/services/dynamodb.html

$ cat dynamodbTest.py 
import boto3

#dynamodb = boto3.resource('dynamodb')
# Hard coding credentials is not recommended. Use config files or the AWS_ACCESS_KEY_ID / AWS_SECRET_ACCESS_KEY environment variables instead.
dynamodb = boto3.resource(
    'dynamodb',
    aws_access_key_id='KEY_ID_REMOVED',
    aws_secret_access_key='ACCESS_KEY_REMOVED',
    region_name = 'us-east-1'
)

def create_table(tableName):
  table = dynamodb.create_table(
    TableName=tableName,
    KeySchema=[
        {
            'AttributeName': 'username', 
            'KeyType': 'HASH'
        },
        {
            'AttributeName': 'last_name', 
            'KeyType': 'RANGE'
        }
    ], 
    AttributeDefinitions=[
        {
            'AttributeName': 'username', 
            'AttributeType': 'S'
        }, 
        {
            'AttributeName': 'last_name', 
            'AttributeType': 'S'
        }, 
    ], 
    ProvisionedThroughput={
        'ReadCapacityUnits': 1, 
        'WriteCapacityUnits': 1
    }
  )

  table.meta.client.get_waiter('table_exists').wait(TableName=tableName)
  print('Table item count: {}'.format(table.item_count))

def delete_table(tableName):
  table = dynamodb.Table(tableName)
  table.delete()

def put_item(tableName):
  table = dynamodb.Table(tableName)

  response = table.put_item(
   Item={
        'username': 'jdoe',
        'first_name': 'jane',
        'last_name': 'doe',
        'age': 20,
        'account_type': 'librarian',
    }
  )

  print(response)

def get_item(tableName):
  table = dynamodb.Table(tableName)

  response = table.get_item(
   Key={
        'username': 'jdoe',
        'last_name': 'doe'
    }
  )

  item = response['Item']
  name = item['first_name']

  print(item)
  print("Hello, {}" .format(name))

def update_item(tableName):
  table = dynamodb.Table(tableName)

  table.update_item(
    Key={
        'username': 'jdoe',
        'last_name': 'doe'
    },
    UpdateExpression='SET age = :val1',
    ExpressionAttributeValues={
        ':val1': 23
    }
  )

def delete_item(tableName):
  table = dynamodb.Table(tableName)

  table.delete_item(
    Key={
        'username': 'jdoe',
        'last_name': 'doe'
    }
  )

def batch_write(tableName):
  table = dynamodb.Table(tableName)

  with table.batch_writer() as batch:
    batch.put_item(
        Item={
            'account_type': 'end_user',
            'username': 'bbob',
            'first_name': 'billy',
            'last_name': 'bob',
            'age': 20,
            'address': {
                'road': '1 fake street',
                'city': 'Houston',
                'state': 'TX',
                'country': 'USA'
            }
        }
    )
    batch.put_item(
        Item={
            'account_type': 'librarian',
            'username': 'user1',
            'first_name': 'user1 first name',
            'last_name': 'user1 last name',
            'age': 20,
            'address': {
                'road': '10 fake street',
                'city': 'Dallas',
                'state': 'TX',
                'country': 'USA'
            }
        }
    )
    batch.put_item(
        Item={
            'account_type': 'end_user',
            'username': 'user2',
            'first_name': 'user2 first name',
            'last_name': 'user2 last name',
            'age': 23,
            'address': {
                'road': '12 fake street',
                'city': 'Austin',
                'state': 'TX',
                'country': 'USA'
            }
        }
    )

def create_multiple_items(tableName,itemCount):

  table = dynamodb.Table(tableName)

  with table.batch_writer() as batch:
    for i in range(itemCount):
        batch.put_item(
            Item={
                'account_type': 'anonymous',
                'username': 'user-' + str(i),
                'first_name': 'unknown',
                'last_name': 'unknown'
            }
        )


def query(tableName):
  from boto3.dynamodb.conditions import Key, Attr
  table = dynamodb.Table(tableName)

  response = table.query(
    KeyConditionExpression=Key('username').eq('user2')
  )

  items = response['Items']
  print(items)

def scan(tableName):
  from boto3.dynamodb.conditions import Key, Attr

  table = dynamodb.Table(tableName)

  response = table.scan(
    FilterExpression=Attr('age').gt(23)
  )

  items = response['Items']
  print(items)

  # Print just the usernames from the scan results
  for x in range(len(items)):
    print(items[x]['username'])

def query_filter(tableName):
  from boto3.dynamodb.conditions import Key, Attr

  table = dynamodb.Table(tableName)

  response = table.scan(
    FilterExpression=Attr('first_name').begins_with('r') & Attr('account_type').eq('librarian')
  )

  items = response['Items']
  print(items)


# Comment/uncomment below to play with the different functions
#create_table('staff')

#put_item('staff')
#get_item('staff')
#update_item('staff')
#delete_item('staff')

#batch_write('staff')

#create_multiple_items('staff', 100)

#query('staff')
#scan('staff')
#query_filter('staff')

#delete_table('staff')
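
Note that scan and query only return up to 1 MB of data per call, so on larger tables you need to follow LastEvaluatedKey; the functions above do not do that. A rough sketch of a paginated scan using the same table resource (the function name is mine):

def scan_all(tableName):
  from boto3.dynamodb.conditions import Attr
  table = dynamodb.Table(tableName)

  # Keep scanning until DynamoDB stops returning a LastEvaluatedKey
  items = []
  response = table.scan(FilterExpression=Attr('age').gt(20))
  items.extend(response['Items'])
  while 'LastEvaluatedKey' in response:
    response = table.scan(FilterExpression=Attr('age').gt(20),
                          ExclusiveStartKey=response['LastEvaluatedKey'])
    items.extend(response['Items'])
  return items

#print(scan_all('staff'))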


Jan 23

Date strings with inconsistent spaces

I frequently bump into manipulating very large log files where the date strings are formatted poorly.

A couple of problems for me here:

1. Input is like "Sat Feb  6 03:25:01 2016". You can see there is a double space in front of the 6; a "06" would have been more useful. The extra space gives python's strptime fits, so I have to do something like the snippet below.
2. Sorting on "Sat Feb ..." is not ideal, so reformatting it to something like "2016-02-06 ..." may work better down the line, maybe in Excel or Calc.

import datetime

d = 'Sat Feb  6 03:25:01 2016'
#d = 'Sat Feb 19 03:25:01 2016'

if d[8:9] == ' ':
  new = list(d)
  new[8] = '0'
  d=''.join(new)

print "Useful date is: {dt}".format(dt=datetime.datetime.strptime(d,'%a %b %d %H:%M:%S %Y').strftime('%Y-%m-%d %H:%M:%S'))


Jan 22

AWS API and Python Boto

Quick note on connecting to EC2 to list instances.

- Ensure IAM user permissions. In my case I tried EC2FullAccess.
- Ensure you have your access and secret key handy.
- This example just cycles through the regions and lists any instances and volumes.

import argparse
import boto.ec2

access_key = ''
secret_key = ''

def get_ec2_instances(region):
    ec2_conn = boto.ec2.connect_to_region(region,
                aws_access_key_id=access_key,
                aws_secret_access_key=secret_key)
    reservations = ec2_conn.get_all_reservations()
    for reservation in reservations:    
        print region+':',reservation.instances

    for vol in ec2_conn.get_all_volumes():
        print region+':',vol.id

def main():
    regions = ['us-east-1','us-west-1','us-west-2','eu-west-1','sa-east-1',
                'ap-southeast-1','ap-southeast-2','ap-northeast-1']
    parser = argparse.ArgumentParser()
    parser.add_argument('access_key', help='Access Key');
    parser.add_argument('secret_key', help='Secret Key');
    args = parser.parse_args()
    global access_key
    global secret_key
    access_key = args.access_key
    secret_key = args.secret_key
    
    for region in regions: get_ec2_instances(region)

if __name__ =='__main__':main()

Example:

$ python list.py myaccess_key mysecret_key
us-east-1: [Instance:i-1aac5699]
us-east-1: vol-d121290e
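
This uses the older boto library; with boto3 the equivalent would look roughly like this (a sketch only, keeping the same region loop and credential handling; the function name is mine):

import boto3

def get_ec2_instances_boto3(region, access_key, secret_key):
    # Same idea as above, via the boto3 client API
    ec2 = boto3.client('ec2', region_name=region,
                       aws_access_key_id=access_key,
                       aws_secret_access_key=secret_key)
    for reservation in ec2.describe_instances()['Reservations']:
        for instance in reservation['Instances']:
            print(region + ': ' + instance['InstanceId'])
    for vol in ec2.describe_volumes()['Volumes']:
        print(region + ': ' + vol['VolumeId'])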


Jan 22

Python Dict for Arrays

Some more examples of general Python dict and array use.

import pprint

###  Example of a python dict used as an associative multi-dimensional array
###  Example is using a unique key
pp = pprint.PrettyPrinter(indent=2)

table = {}
table[1]={'LastName':'Sable','name':'Sam'}
table[2]={'LastName':'Sable','name':'Samantha'}
table[3]={'LastName':'Sable','name':'Stevie'}

pp.pprint(table)

print table[2]
print table[3]['name']
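
A quick sketch of iterating over a dict of dicts like this one (standard dict methods only, nothing specific to the snippet above):

# Loop over the keyed rows and pull out individual fields
for key, row in sorted(table.items()):
  print('{0}: {1} {2}'.format(key, row['name'], row['LastName']))

# Collect one column into a list
names = [row['name'] for row in table.values()]
print(names)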


Jan 05

Oracle OVM rest api example

If you hate the Oracle OVM CLI and having to use expect scripts, you may want to look into the web services API. Note that it looks like SOAP will be decommissioned soon, so use REST.

$ cat ovm_using_rest.py 
import requests

s=requests.Session()
s.auth=('admin','pwd_removed')
s.verify=False #disables SSL certificate verification

s.headers.update({'Accept': 'application/json', 'Content-Type': 'application/json'})

baseUri='https://ovmm:7002/ovm/core/wsapi/rest'

print "\nServer List:"
print "##############"
r=s.get(baseUri+'/Server')
for i in r.json():
  # do something with the content
  print '{:20} {:20}'.format(i['serverRunState'],i['name'])

print "\nVM List:"
print "########"
r=s.get(baseUri+'/Vm')
for i in r.json():
  # do something with the content
  print '{:20} {:20}'.format(i['vmRunState'],i['name'])
  #print '{name} '.format(name=i['name'])

Output:

$ python ovm_using_rest.py 

Server List:
##############
/usr/lib/python2.7/dist-packages/urllib3/connectionpool.py:794: InsecureRequestWarning: Unverified HTTPS request is being made. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.org/en/latest/security.html
  InsecureRequestWarning)
RUNNING              ovms2            
RUNNING              ovms3             
RUNNING              ovms1             

VM List:
########
TEMPLATE             EXALYTICS_BASE_LINUX_OEL5_GUEST_VM_TEMPLATE_2.2.0.0.0.el5
TEMPLATE             EXALYTICS_BASE_LINUX_OEL6_GUEST_VM_TEMPLATE_2.2.0.0.0.el6
RUNNING              OBIEXA3           
STOPPED              obiexa4           
[..]
STOPPED              EXALYTICS_BASE_LINUX_OEL5_GUEST_VM_TEMPLATE_2.0.1.4.0
TEMPLATE             OBIEE12C_TEMPLATE.0 
RUNNING              OBIEXA01         
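
The InsecureRequestWarning in the output is expected because verify=False skips certificate verification. If you accept that risk, the warning can be silenced, roughly like this (assuming the system urllib3 shown in the warning path):

import urllib3
# Only do this if you are OK with unverified HTTPS to the OVM manager
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)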


Dec 10

Nagios Downtime using a ServiceGroup

This is not a complete script. I was only interested in scheduling downtime for a SERVICEGROUP through the Nagios command file (NAGIOSCMD). The reason is obvious if you have ever used Nagios in large environments: it is painful to schedule and cancel downtimes.

Since I wanted to be able to delete multiple downtime entries, I used a feature added somewhere in 3.x called DEL_DOWNTIME_BY_START_TIME_COMMENT. The cancellation of downtime is done by providing a start time and a comment.

import sys, argparse, time
from datetime import datetime, timedelta

##  EXAMPLES
## # python nagios_downtime.py --action add --Servicegroup PRDVIPS --begin "2016-12-10 8:36" --duration 10 --author rrosso --comment "Test Scheduling Downtime in Nagios"
## [1481387741.0] SCHEDULE_SERVICEGROUP_SVC_DOWNTIME;PRDVIPs;1481387760.0;1481388360.0;1;0;600;rrosso;Test Scheduling Downtime in Nagios
## # python nagios_downtime.py --action cancel --begin "2016-12-10 8:36" --comment="Test Scheduling Downtime in Nagios"
## [1481387769.0] DEL_DOWNTIME_BY_START_TIME_COMMENT;1481387760.0;Test Scheduling Downtime in Nagios
##

VERSION = "0.4"
VERDATE = "2016-12-10"

NAGIOSCMD =  "/usr/local/nagios/var/rw/nagios.cmd"
now = datetime.now()
cmd = '[' + str(time.mktime(now.timetuple())) + '] '
execline = ''

parser = argparse.ArgumentParser(description='Nagios Downtime Scheduler.')
parser.add_argument('--action', dest='action', help='use add or cancel as action for downtime entries', required=True)
parser.add_argument('--Servicegroup', dest='servicegroup', help ='Schedule Downtime a specific ServiceGroup')
parser.add_argument('--duration', dest='duration', help='Duration of downtime, in minutes.')
parser.add_argument('--begin', dest='begin', help='Beginning of Downtime. ex: 2016-12-10 18:10', required=True)
parser.add_argument('--author', dest='author', default='admin', help='Author: Who is scheduling the downtime?')
parser.add_argument('--comment', dest='comment', help='Comment: Reason for scheduling the downtime.', required=True)
parser.add_argument('--dryrun', action='store_true', help='Dry run.  Do not do anything but show action.')

args = parser.parse_args()

## need some argument checking here.  what is required what conflicts etc..
if (args.action not in ['add','cancel']):
  sys.exit(1)

if (args.begin != None):
  #check for proper format here...
  #beginTime = datetime.datetime(2016,12,8,13,0).strftime('%s')
  beginTime = datetime.strptime(args.begin,'%Y-%m-%d %H:%M')

if (args.action == 'add'):
  if (args.servicegroup):
    cmd = cmd + 'SCHEDULE_SERVICEGROUP_SVC_DOWNTIME;'
    endTime = beginTime + timedelta(minutes=int(args.duration))
    execline=cmd + args.servicegroup + ';' + str(time.mktime(beginTime.timetuple())) + ';' + str(time.mktime(endTime.timetuple())) + ';1;0;' + '600' + ';' + args.author + ';' + args.comment + '\n'

if (args.action == 'cancel'):
  cmd = cmd + 'DEL_DOWNTIME_BY_START_TIME_COMMENT;'
  execline=cmd + str(time.mktime(beginTime.timetuple())) + ';' + args.comment + '\n'

print 'Nagios CMD interaction will be: ' + execline

if (args.dryrun):
  print "Note that this is a dry run ie so not committing transaction"
else:
  print "Note that this is not a dry run ie --dryrun was not used so committing transaction"
  with open(NAGIOSCMD,'w') as f:
    f.write(execline)


Jun 23

Solaris Find Process Id tied to IP Address

Recently I needed to find out who was connecting to an Oracle database, and at the same time I wanted to see the load each specific connection added to the CPU. In short, I needed the IP address and port tied to a Unix PID.

I wrote this quick and dirty python script.

#!/usr/bin/python
import subprocess

## No doubt you would want to exclude some non-local or expected IP addresses
excludeIPs="10.2.16.86|10.2.16.62|10.2.16.83|\*.\*"

# Remote ends of connections to the Oracle listener port, minus the excluded addresses
p = subprocess.Popen("/usr/bin/netstat -an | grep 1521 | awk '{print $2}' | egrep -v '" + excludeIPs + "'", stdout=subprocess.PIPE, shell=True)
nonlocals = p.stdout.readlines()

if nonlocals:
  # Grab the pfiles output for every process once, then search it per connection
  p = subprocess.Popen("pfiles `ls /proc` 2>/dev/null", stdout=subprocess.PIPE, shell=True)
  pfiles, errs = p.communicate()

  for line in nonlocals:
    line = line.strip()
    (IP, port) = line.rsplit('.', 1)
    print ("Going to find PID for connection with IP %s and port %s" % (IP, port))

    # pfiles output starts each process section with an unindented "pid: command" line;
    # remember it and print it when we hit the matching port line
    for pline in pfiles.splitlines():
      if pline[:1].strip() != '':
        pid = pline
      if "port: " + port in pline:
        print pid
I plan to enhance this script a little bit but for now it did exactly what I needed.
