How Python Saves My Time: Automating Repetitive Tasks in Everyday Work

SRLSEC ๐Ÿ‡ฎ๐Ÿ‡ณ
7 min read · Nov 28, 2024

--

Hello friends,

Today, I have a new and different topic to discuss: Python programming. I currently work as a network engineer at an ISP and do bug hunting part-time. Python saves me a lot of time because my repetitive daily tasks can be tedious and time-consuming. I am sure there have been plenty of times when you wished for a simpler way to approach and accomplish a mundane task. Repetitive tasks can be a drag and can hamper your creativity and productivity in other areas of your work and life.

Why I Choose Python for Automation

Python is a popular programming language choice among software developers because of its easy-to-understand syntax. Python code mirrors the English language a lot more closely than many other programming languages. Additionally, Python code tends to be more compact and requires fewer lines of code than other languages to accomplish the same tasks. Another important feature of Python is its use in multiple fields, such as network automation and web scraping.

1. Internet Speed tester

As mentioned above, I work for an ISP, so I must run a speed test every morning. This script's primary purpose is to automate internet speed tests for multiple internet service providers (ISPs) and save the results to a CSV file for analysis.

from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver import Chrome, ChromeOptions
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.chrome.service import Service
import time
import urllib.parse
import pandas as pd
import warnings
import platform
import requests

# Determine the host OS so we can pick the matching chromedriver binary path.
os_type = platform.system()

warnings.filterwarnings('ignore')

chrome_options = Options()
chrome_options.add_argument("--log-level=3")
chrome_options.add_argument("--start-maximized")
chrome_options.add_argument("--ignore-certificate-errors-spki-list")

if os_type == "Windows":
    service = Service(executable_path='C:/chromedriver/chromedriver.exe')
elif os_type == "Linux":
    service = Service(executable_path='/usr/local/bin/chromedriver')
else:
    # BUG FIX: previously `service` was left undefined on any other OS,
    # causing a NameError below; fail fast with a clear message instead.
    raise RuntimeError(f"Unsupported OS for chromedriver: {os_type}")

# (The duplicate --ignore-certificate-errors-spki-list argument was removed;
# it was previously added twice.)
browser = webdriver.Chrome(service=service, options=chrome_options)

# Speedtest.net server display names for each ISP we benchmark.
airtel_server = 'Bharti Airtel Ltd'
youfiber_server = 'YouFibre'
kvbl_server = 'Keralavision Broadband Ltd'

# Accumulates one result row per ISP; written to CSV at the end of the script.
tablelist = []

def speed_test(server, connection_type):
    """Run a speedtest.net test against the given server and return the speeds.

    Parameters
    ----------
    server : str
        Display name of the speedtest server (e.g. 'Bharti Airtel Ltd').
    connection_type : str
        Either 'multi' or 'single', selecting the connection mode on the page.

    Returns
    -------
    tuple[str, str]
        (download_speed, upload_speed) as the raw text shown on the page,
        presumably in Mbps — the caller appends the unit.

    NOTE(review): relies on absolute XPaths and fixed sleeps, so it is
    brittle against speedtest.net layout changes; uses the module-level
    `browser` WebDriver.
    """
    # Open the site; the fixed sleep gives the page time to render.
    browser.get('https://www.speedtest.net/')
    browser.maximize_window()
    time.sleep(5)

    # Select connection mode (multi- vs single-connection test).
    if connection_type == 'multi':
        browser.find_element('xpath', "/html/body/div[3]/div/div[3]/div/div/div/div[2]/div[3]/div[3]/div/div[5]/div/div/a[1]").click()
    if connection_type == 'single':
        browser.find_element('xpath', "/html/body/div[3]/div/div[3]/div/div/div/div[2]/div[3]/div[3]/div/div[5]/div/div/a[3]").click()

    # Open the server picker and type the server name into the search box.
    browser.find_element('xpath', "/html/body/div[3]/div/div[3]/div/div/div/div[2]/div[3]/div[3]/div/div[4]/div/div[3]/div/div/div[4]/a").click()
    search_box = browser.find_element("xpath", "/html/body/div[3]/div/div[3]/div/div/div/div[2]/div[3]/div[3]/div/div[7]/div/div/div/div[3]/form/div/input")
    search_box.send_keys(server)
    time.sleep(2)

    if server == 'Keralavision Broadband Ltd':
        # Prefer the second result for this ISP; fall back to the first one.
        # BUG FIX: the bare `except:` was narrowed so Ctrl-C is not swallowed.
        try:
            browser.find_element('xpath', "/html/body/div[3]/div/div[3]/div/div/div/div[2]/div[3]/div[3]/div/div[7]/div/div/div/div[3]/div/div/ul/li[2]/a").click()
        except Exception:
            browser.find_element('xpath', "/html/body/div[3]/div/div[3]/div/div/div/div[2]/div[3]/div[3]/div/div[7]/div/div/div/div[3]/div/div/ul/li[1]/a").click()
        time.sleep(2)
    else:
        browser.find_element('xpath', "/html/body/div[3]/div/div[3]/div/div/div/div[2]/div[3]/div[3]/div/div[7]/div/div/div/div[3]/div/div/ul/li[1]/a").click()
        time.sleep(2)

    # Click the GO button and wait for the test to finish (~50s worst case).
    browser.find_element('xpath', "/html/body/div[3]/div/div[3]/div/div/div/div[2]/div[3]/div[1]/a/span[4]").click()
    time.sleep(50)

    # Read the result figures from the page.
    download_speed = browser.find_element('class name', 'download-speed').text
    upload_speed = browser.find_element('class name', 'upload-speed').text
    time.sleep(1)

    print(download_speed)
    print(upload_speed)

    return download_speed, upload_speed


def get_public_ip():
    """Return this machine's public IP address as a string, or None on failure.

    Queries https://httpbin.org/ip, whose JSON body contains an 'origin' key.
    """
    # BUG FIX: a timeout keeps the script from hanging forever on a dead
    # network; a timeout now raises instead of blocking indefinitely.
    response = requests.get('https://httpbin.org/ip', timeout=10)
    if response.status_code == 200:
        return response.json().get('origin')
    return None


public_ip = get_public_ip()
if public_ip:
    print(f"Your public IP is: {public_ip}")
else:
    print("Could not retrieve public IP")

# Run a multi- and a single-connection test for each ISP server and collect
# one result row per ISP.  (This replaces three copy-pasted stanzas.)
# NOTE(review): the 'mutil_*' column names look like a typo for 'multi_*';
# kept as-is so existing consumers of the CSV columns keep working.
for isp_server in (kvbl_server, airtel_server, youfiber_server):
    multi_download, multi_upload = speed_test(isp_server, connection_type='multi')
    single_download, single_upload = speed_test(isp_server, connection_type='single')

    tablelist.append({
        "mutil_download": f'{multi_download} Mbps',
        "mutil_upload": f'{multi_upload} Mbps',
        "single_download": f'{single_download} Mbps',
        "single_upload": f'{single_upload} Mbps',
    })

df = pd.DataFrame(tablelist)
df.to_csv('output.csv')

2. Ticket creator

I create around 30 tickets daily and submit work reports. On average, it takes 45 minutes to complete this task manually. This is the most time-consuming and boring part of my work. This Python script automates the process of creating customer tickets and scraping their details from a web application using Selenium for browser automation and openpyxl for Excel file manipulation.

from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.chrome.service import Service
import time
import pandas as pd
import warnings
import json
from openpyxl import Workbook, load_workbook
import time
from datetime import datetime
import sys
import platform, os

# Determine the host OS so we can pick the matching chromedriver binary path.
os_type = platform.system()

warnings.filterwarnings('ignore')

chrome_options = Options()
chrome_options.add_argument("--log-level=3")
chrome_options.add_argument("--start-maximized")

# chrome_options.add_argument("--headless")  # enable for headless runs

if os_type == "Windows":
    service = Service(executable_path='C:/chromedriver/chromedriver.exe')
elif os_type == "Linux":
    service = Service(executable_path='/usr/local/bin/chromedriver')
else:
    # BUG FIX: previously `service` was left undefined on any other OS,
    # causing a NameError below; fail fast with a clear message instead.
    raise RuntimeError(f"Unsupported OS for chromedriver: {os_type}")

chrome_options.add_argument("--ignore-certificate-errors-spki-list")
browser = webdriver.Chrome(service=service, options=chrome_options)

# Portal credentials.  NOTE(review): consider loading these from environment
# variables instead of hardcoding them in the script.
user = 'username'
pwd = 'password'

def login():
    """Sign in to the ticketing portal using the module-level credentials."""
    browser.get('https://redacted.com/staff/engineer/login')

    email_field = browser.find_element('name', 'email')
    email_field.send_keys(user)
    password_field = browser.find_element('name', 'password')
    password_field.send_keys(pwd)

    login_button = browser.find_element('class name', 'btn-login')
    login_button.click()

    # Give the post-login redirect a moment to complete.
    time.sleep(2)
    print("\n============================== Login Success ==============================\n")


def customer_ticket_create(user_id, issue_category, issue_type, description, ticket_priority, ticket_status, ticket_cause):
    """Create a customer ticket in the web portal and return its URL.

    Parameters
    ----------
    user_id : str
        Customer identifier typed into the existing-customer search box.
    issue_category, issue_type, description, ticket_status, ticket_cause : str
        Values selected/typed into the corresponding form controls.
    ticket_priority : str
        Accepted for interface compatibility but currently unused —
        NOTE(review): confirm whether the form has a priority field to fill.

    Returns
    -------
    str
        The created ticket's URL, or the sentinel 'not_created' if any step
        of the form automation failed.
    """
    browser.get('https://redacted.com/create-ticket')

    # BUG FIX: bare `except:` clauses narrowed to `except Exception` so that
    # KeyboardInterrupt/SystemExit are no longer swallowed.
    try:
        # Ticket is for a "Customer" (vs. other entity types).
        browser.find_element("xpath", "//*[@id='select2-ticket_for-container']").click()
        issue = browser.find_element("xpath", "//*[@class='select2-search__field']")
        issue.send_keys("Customer")
        browser.find_element("xpath", "//ul[@id='select2-ticket_for-results']/li[text()='Customer']").click()
        browser.find_element("xpath", '//*[@id="azTab1"]/div/div/div/div/div/div/div/div[3]/div/div/button').click()
        time.sleep(4)

        # Category, then look up the existing customer by ID.
        browser.find_element("xpath", f"//select[@name='issue_category']/option[text()='{issue_category}']").click()
        time.sleep(4)
        customer_search = browser.find_element("xpath", "//*[@id='existing_customer_container']/div/div[1]/div/input")
        customer_search.send_keys(user_id)
        browser.find_element("xpath", "//*[@id='existing_customer_container']/div/div[2]/button").click()
        time.sleep(7)

        # Issue type (select2 widget: open, type, pick the exact match).
        browser.find_element("xpath", "//*[@id='select2-issue_type-container']").click()
        time.sleep(3)
        issue = browser.find_element("xpath", "//*[@class='select2-search__field']")
        issue.send_keys(issue_type)
        time.sleep(3)
        browser.find_element("xpath", f"//ul[@id='select2-issue_type-results']/li[text()='{issue_type}']").click()
        time.sleep(2)

        # Assign the ticket to the field-operations team.
        browser.find_element("xpath", "//*[@id='select2-team_id-container']").click()
        time.sleep(2)
        issue = browser.find_element("xpath", "//*[@class='select2-search__field']")
        issue.send_keys("TEAM_FOP KERALA")
        time.sleep(1)
        browser.find_element("xpath", "//ul[@id='select2-team_id-results']/li[text()='TEAM_FOP KERALA']").click()

        # Description and status.
        description_box = browser.find_element("xpath", "//*[@id='text_area']")
        description_box.send_keys(description)
        time.sleep(2)
        browser.find_element("xpath", f"//*[@name='ticket_status']/option[text()='{ticket_status}']").click()
        time.sleep(2)

        # Close-cause is only present for some statuses; best-effort fill.
        try:
            browser.find_element("xpath", "//*[@id='select2-close_cause_id-container']").click()
            time.sleep(2)
            issue = browser.find_element("xpath", "//*[@class='select2-search__field']")
            issue.send_keys(f"{ticket_cause}")
            time.sleep(1)
            browser.find_element("xpath", f"//ul[@id='select2-close_cause_id-results']/li[text()='{ticket_cause}']").click()
            time.sleep(2)
        except Exception:
            print("Error in Ticket close cause")

        browser.find_element("xpath", "//button[contains(@class, 'btn-primary') and text()='Submit']").click()
        time.sleep(3)

        # The success button links to the freshly created ticket.
        button = browser.find_element('class name', 'btn.btn-success')
        ticket_link = button.get_attribute('href')

    except Exception:
        ticket_link = 'not_created'

    return ticket_link

def ticket_scraper(ticket_link):
    """Open *ticket_link* in the browser and scrape ticket details.

    Returns
    -------
    tuple[str, str, str, str]
        (ticket_id, created_date, completed_date, lco_id).
    """
    browser.get(ticket_link)
    time.sleep(5)

    # NOTE(review): description/uname/issue_type are scraped but never
    # returned; kept so a missing element still fails loudly here.
    description = browser.find_element('xpath', '/html/body/div[4]/div[1]/div/div[2]/div/div[2]/div/div[1]/div[2]/div/div[1]/div[2]/div/div[1]/div[2]/p').text
    uname = browser.find_element('xpath', '/html/body/div[4]/div[1]/div/div[2]/div/div[2]/div/div[1]/div[2]/div/div[1]/div[1]/div[2]/div/div/p[1]/a').text
    created_date = browser.find_element('xpath', '/html/body/div[4]/div[1]/div/div[2]/div/div[2]/div/div[2]/div[2]/div/div/div/div/table/tbody/tr[23]/td[2]').text
    completed_date = browser.find_element('xpath', '/html/body/div[4]/div[1]/div/div[2]/div/div[2]/div/div[2]/div[2]/div/div/div/div/table/tbody/tr[25]/td[2]').text
    issue_type = browser.find_element('xpath', '/html/body/div[4]/div[1]/div/div[2]/div/div[2]/div/div[2]/div[2]/div/div/div/div/table/tbody/tr[7]/td[2]').text
    lco = browser.find_element('class name', 'msg-list-div01').text
    lco_id = lco[:8]

    # BUG FIX: `ticket_id` was returned without ever being assigned, which
    # raised a NameError on every call.  Derive it from the last path segment
    # of the ticket URL — TODO confirm this matches the portal's ticket
    # numbering; scrape it from the page instead if it does not.
    ticket_id = ticket_link.rstrip('/').rsplit('/', 1)[-1]

    return ticket_id, created_date, completed_date, lco_id


def main():
    """Create a ticket for every row of input.xlsx and write output.xlsx.

    Expected input columns (1-7): user_id, issue_type, description,
    ticket_cause, issue_category, ticket_priority, ticket_status.
    The first row is treated as a header and skipped.
    """
    workbook = load_workbook(filename="input.xlsx")
    # BUG FIX: the sheet was selected three times in a row (via .active and
    # two redundant assignments); a single explicit lookup is equivalent.
    sheet = workbook['Sheet1']

    tablelist = []
    # Skip the header row, iterate to the last populated row inclusive.
    for i in range(sheet.min_row + 1, sheet.max_row + 1):
        user_id = sheet.cell(row=i, column=1).value
        issue_type = sheet.cell(row=i, column=2).value
        description = sheet.cell(row=i, column=3).value
        ticket_cause = sheet.cell(row=i, column=4).value
        issue_category = sheet.cell(row=i, column=5).value
        ticket_priority = sheet.cell(row=i, column=6).value
        ticket_status = sheet.cell(row=i, column=7).value

        ticket_link = customer_ticket_create(user_id, issue_category, issue_type, description, ticket_priority, ticket_status, ticket_cause)

        if ticket_link == 'not_created':
            # Creation failed: emit an empty row so the report stays aligned.
            ticket_id = ''
            created_date = ''
            completed_date = ''
            lco_id = ''
        else:
            ticket_id, created_date, completed_date, lco_id = ticket_scraper(ticket_link)

        # NOTE(review): created_date is scraped but not written to the report.
        data = {
            "user_id": user_id,
            "lco_id": lco_id,
            "issue_type": issue_type,
            "blank": "",
            "description": description,
            "t_no": ticket_id,
            "completed_date": completed_date,
        }

        print(f"{user_id} - {ticket_id}")
        tablelist.append(data)

    df = pd.DataFrame(tablelist)
    df.to_excel('output.xlsx', header=True, index=False)

if __name__ == "__main__":
    try:
        # Time the whole run so the completion banner can report elapsed time.
        start = time.time()

        login()
        main()

        now = datetime.now()
        elapsed = time.time() - start
        hours, rem = divmod(elapsed, 3600)
        minutes, seconds = divmod(rem, 60)

        print(now.strftime('\n' + "=============== COMPLETED - %d/%m/%Y %H:%M:%S") + " {:0>2}:{:0>2}:{:05.2f}".format(int(hours), int(minutes), seconds) + ' ===============' + '\n')

    except KeyboardInterrupt:
        # Conventional exit code for SIGINT.
        print(f'\nKeyboard Interrupt.\n')
        sys.exit(130)

3. Customer count finder

This is another script for automating data scraping from a website using Selenium, capturing customer counts for different statuses (Active, Inactive, Suspended) based on input LCO IDs from a text file.

from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.chrome.service import Service
import time
import urllib.parse
import sys
import pandas as pd
import warnings
import platform

# Determine the host OS so we can pick the matching chromedriver binary path.
os_type = platform.system()

warnings.filterwarnings('ignore')

# Portal credentials.  NOTE(review): consider loading these from environment
# variables instead of hardcoding them in the script.
username = 'username'
password = 'password'

if os_type == "Windows":
    service = Service(executable_path='C:/chromedriver/chromedriver.exe')
elif os_type == "Linux":
    service = Service(executable_path='/usr/local/bin/chromedriver')
else:
    # BUG FIX: previously `service` was left undefined on any other OS,
    # causing a NameError below; fail fast with a clear message instead.
    raise RuntimeError(f"Unsupported OS for chromedriver: {os_type}")

chrome_options = Options()
chrome_options.add_argument("--log-level=3")
chrome_options.add_argument("--ignore-certificate-errors-spki-list")
browser = webdriver.Chrome(service=service, options=chrome_options)

def login():
    """Log in to the billing portal with the module-level credentials."""
    browser.get('https://redacted.com')
    # FIX: send_keys() returns None, so binding its result to names like
    # `username_input` was misleading dead code; call it directly instead.
    browser.find_element('name', 'txtUserName').send_keys(username)
    browser.find_element('name', 'txtPassword').send_keys(password)

    save_button = browser.find_element('id', 'save')
    save_button.click()

    # Give the post-login page a moment to load.
    time.sleep(2)

def customercount_finder():
    """Scrape Active/Inactive/Suspended customer counts for each LCO ID.

    Reads one LCO ID per line from input_lcoids.txt, searches the portal's
    Accounts page for each, and writes the per-status totals to a timestamped
    CSV under lco/.
    """
    # FIX: use a context manager so the input file is always closed.
    with open('input_lcoids.txt', 'r') as id_file:
        lines = id_file.readlines()

    tablelist = []

    print("========================= STARTED =========================\n")

    for line in lines:
        lcoid = line.strip()

        browser.get('https://redacted.com/Accounts.aspx')

        # Search mode: look the account up by its Code.
        browser.find_element("xpath", "//select[@name='ctl00$ContentPlaceHolder1$ddl_list']/option[text()='Code']").click()
        time.sleep(3)

        # FIX: the local was named `username`, shadowing the module-level
        # credential; renamed for clarity.
        browser.find_element('name', 'ctl00$ContentPlaceHolder1$txtserch').clear()
        search_box = browser.find_element('name', 'ctl00$ContentPlaceHolder1$txtserch')
        search_box.send_keys(lcoid)
        browser.find_element('id', 'ContentPlaceHolder1_btnserch').click()

        # The portal is slow to aggregate totals — long fixed wait.
        time.sleep(34)

        active_total = 0
        suspended_total = 0
        inactive_total = 0

        # The totals table renders up to 10 status rows with indexed IDs;
        # probe each index and record the statuses we care about.
        for i in range(10):
            try:
                status_name = browser.find_element('id', f'ContentPlaceHolder1_dl_total_lblnameT_{i}').text
                # [2:] strips the 2-character prefix in front of the count.
                if status_name == 'Active':
                    active_total = str(browser.find_element('id', f'ContentPlaceHolder1_dl_total_lblTotalT_{i}').text)[2:]
                if status_name == 'Inactive':
                    inactive_total = str(browser.find_element('id', f'ContentPlaceHolder1_dl_total_lblTotalT_{i}').text)[2:]
                if status_name == 'Suspended':
                    suspended_total = str(browser.find_element('id', f'ContentPlaceHolder1_dl_total_lblTotalT_{i}').text)[2:]
                # BUG FIX: removed `list.append(i)` — it called .append on the
                # builtin `list` type, raising a TypeError that the bare
                # except silently swallowed on every iteration.
            except Exception:
                # Row i is absent on the page; skip it.
                pass

        data = {
            "lco_id": lcoid,
            "Active": active_total,
            "Inactive": inactive_total,
            "Suspended": suspended_total,
        }
        print(data)
        tablelist.append(data)

    timestr = time.strftime("%Y%m%d-%H%M%S")

    df = pd.DataFrame(tablelist)
    df.to_csv(f'lco/{timestr}.csv')

    print("\n======================== COMPLETED ======================== \n")

# Guard the entry point so importing this module does not launch a scrape.
if __name__ == "__main__":
    login()
    customercount_finder()

4. Switch configuration auto backup

This Python script automates retrieving the current configuration from multiple network devices (e.g., switches or routers) using SSH and saving the configurations to local files. The script establishes SSH connections through a jump host to securely access each network device and fetch its configuration, then writes the retrieved configurations to a text file.

import time
import warnings
import paramiko
from datetime import datetime

warnings.filterwarnings('ignore')

# Jump-host (bastion) credentials used to tunnel to the switches.
j_user = 'JUMP_HOST_USERNAME'
j_pass = 'JUMP_HOST_PASSWORD'
j_host = 'JUMP_HOST_IP'


def _device(ip):
    """Build the SSH credential record for one switch (shared user/pass)."""
    return {'ip': ip, 'username': 'user', 'password': 'pass123'}


SW1 = _device('SWITCH_1_IP')
SW2 = _device('SWITCH_2_IP')
SW3 = _device('SWITCH_3_IP')

all_devices = [SW1, SW2, SW3]

# Today's date in 'YYYY-MM-DD' form, embedded in each backup file name.
current_date = datetime.now().strftime("%Y-%m-%d")

# Back up each switch's running configuration via the jump host.
for device in all_devices:
    sw_user = device.get('username')
    sw_pass = device.get('password')
    sw_ip = device.get('ip')
    print(f"Connecting to {sw_ip}")

    # BUG FIX: cleanup used `'vm' in locals()` inside the except branch, which
    # on later loop iterations referenced the *previous* device's (already
    # closed) clients.  Initialise per-iteration and close in `finally` so the
    # handles are always released, on success and failure alike.
    vm = None
    jhost = None
    try:
        # Connect to the jump host and open a TCP tunnel to the switch.
        vm = paramiko.SSHClient()
        vm.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        vm.connect(j_host, username=j_user, password=j_pass)
        vm_transport = vm.get_transport()
        dest_addr = (sw_ip, 22)
        local_addr = (j_host, 22)
        vm_channel = vm_transport.open_channel("direct-tcpip", dest_addr, local_addr)

        # SSH to the network device through the tunnel.
        jhost = paramiko.SSHClient()
        jhost.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        jhost.connect(sw_ip, username=sw_user, password=sw_pass, sock=vm_channel)

        # Interactive shell: the device CLI expects a typed command.
        commands = jhost.invoke_shell()
        print(f"Connected to {sw_ip}")

        commands.send("display current-configuration\n")

        # Fixed wait for the CLI to finish paging out the config.
        # NOTE(review): a single recv(65535) may truncate long configs —
        # consider looping on recv() until the prompt reappears.
        time.sleep(10)
        output = commands.recv(65535)
        output = output.decode("utf-8")

        # File name embeds switch IP and today's date, e.g. 10.0.0.1_2024-11-28.txt
        filename = f"{sw_ip}_{current_date}.txt"

        with open(filename, 'w') as f:
            print(output, file=f)

    except Exception as e:
        print(f"Failed to connect or retrieve config from {sw_ip}: {e}")
    finally:
        if jhost is not None:
            jhost.close()
        if vm is not None:
            vm.close()

Thanks for reading. Do clap and share if you like.

Sign up to discover human stories that deepen your understanding of the world.

Free

Distraction-free reading. No ads.

Organize your knowledge with lists and highlights.

Tell your story. Find your audience.

Membership

Read member-only stories

Support writers you read most

Earn money for your writing

Listen to audio narrations

Read offline with the Medium app

--

--

SRLSEC ๐Ÿ‡ฎ๐Ÿ‡ณ
SRLSEC ๐Ÿ‡ฎ๐Ÿ‡ณ

No responses yet

Write a response