import paramiko, time, socks, re, os
from getpass import getpass
import pandas as pd
import openpyxl
from tqdm import tqdm
from concurrent.futures import as_completed
from openpyxl.utils.dataframe import dataframe_to_rows
from concurrent.futures import ThreadPoolExecutor, as_completed
import threading
def juniper_parse_fixed(df, up_t):
    """Write the device uptime into row 0 of the "uptime" column.

    `up_t` is a one-element list holding the uptime string; only the first
    row is stamped so the value appears once per device in the sheet.
    """
    uptime_value = up_t[0]
    df.loc[0, "uptime"] = uptime_value
    return df
def parse_fix(df1):
    """Group each FPC row with its power supplies via a temporary "Pair" column.

    Every FPC row receives a pair number, and the next two Power Supply rows
    (or the single remaining one) receive the same number, so a later sort
    keeps each FPC adjacent to its PSUs.  Rows belonging to neither category
    are left as None.
    """
    items = df1["item"].astype(str)
    fpc_indices = df1[items.str.startswith("FPC")].index
    remaining_ps = list(df1[items.str.startswith("Power Supply")].index)
    df1["Pair"] = None
    for pair_no, fpc_idx in enumerate(fpc_indices, start=1):
        df1.loc[fpc_idx, "Pair"] = pair_no
        # take two PSUs per FPC while at least two remain, otherwise one
        take = 2 if len(remaining_ps) >= 2 else 1
        for ps_idx in remaining_ps[:take]:
            df1.loc[ps_idx, "Pair"] = pair_no
        remaining_ps = remaining_ps[take:]
    return df1
def re_ver(data):
    """Return the Junos version string parsed from command output.

    Legacy EX platforms (ex2200 / ex8208 / ex4200) print the version in a
    "JUNOS Base OS boot [x.y.z]" banner, so that pattern is preferred for
    them; all other platforms prefer the "Junos: x.y.z" line.  Falls back
    to the other pattern, and returns None when neither is present
    (the original raised AttributeError on a missing match).
    """
    # Dots are escaped here — the original pattern used bare '.', which
    # matched any character, not just a literal dot.
    boot_re = re.compile(r"JUNOS Base OS boot \[(\w{1,2}\.\w{1,4}\.\w{1,2})\]")
    junos_re = re.compile(r"Junos: (\S+)")
    legacy = bool(re.search(r"ex2200|ex8208|ex4200", data))
    primary, secondary = (boot_re, junos_re) if legacy else (junos_re, boot_re)
    for pattern in (primary, secondary):
        m = pattern.search(data)
        if m:
            return m.group(1)
    return None
def intersperse_dfs(df1, df2, key):
    """Merge two DataFrames on *key*, preferring df1's non-NA values.

    Values missing in df1 are filled from df2; the result is sorted by the
    key and returned with a fresh integer index.  Unlike the original, the
    inputs are no longer mutated (set_index is not done in place).
    """
    left = df1.set_index(key)
    right = df2.set_index(key)
    return left.combine_first(right).sort_index().reset_index()
def keyword_box(data, fpc, power):
    """Parse 'show chassis hardware' style output for fixed-form switches.

    Scans each line for the *fpc* / *power* keywords and collects the
    positional fields that follow them.  A preliminary pass locates the
    virtual-chassis Master line so the matching FPC row can be tagged
    "Master" in its last column.

    Returns (fpc_rows, power_rows) as lists of tuples.
    """
    lines = data.split("\n")
    fp = []
    po = []
    # "(<model> <slot>) ... Master" line from 'show virtual-chassis'
    vir = re.compile(r"\((\S+\s+\S+)\)\s+\S+\s+\S+\s+\S+\s+\S+\s+(Master)")
    v1, v5 = "", ""
    for line in lines:
        vir_m = vir.search(line)
        if vir_m:
            v1, v5 = vir_m.group(1), vir_m.group(2)
            break
    for line in lines:
        words = line.split()
        if fpc in words:
            idx = words.index(fpc)
            if len(words) >= idx + 7:
                com1 = words[idx] + " " + words[idx + 1]
                com2 = words[idx + 2] + " " + words[idx + 3]
                fo = words[idx + 4]
                fi = words[idx + 5]
                si = words[idx + 6]
                # tag the virtual-chassis master FPC
                se = v5 if com1 == v1 else ""
                fp.append((com1, com2, fo, fi, si, se))
        if power in words:
            idx = words.index(power)
            # fields idx..idx+7 are read, so len must be >= idx+8
            # (the original checked >= idx+7 and could raise IndexError)
            if len(words) >= idx + 8:
                com1 = words[idx] + " " + words[idx + 1] + " " + words[idx + 2]
                com2 = words[idx + 3] + " " + words[idx + 4]
                fi = words[idx + 5]
                si = words[idx + 6]
                se = words[idx + 7]
                if se == "PS" and len(words) >= idx + 10:
                    se = words[idx + 7] + " " + words[idx + 8] + " " + words[idx + 9]
                po.append((com1, com2, fi, si, se))
    return fp, po
def keyword_q5000(data):
    """Parse chassis-hardware output for QFX51xx/5200 fixed-form switches.

    Returns (fan_df, fpc_df, power_df) DataFrames with the standard
    item/rev_ver/part_num/serial/model columns (fpc_df also carries an
    empty "VC" column for sheet-layout compatibility).
    """
    lines = data.split("\n")
    q5000_f = []
    q5000_p = []
    q5000_r = []
    fpc = "FPC"
    power = "Power"
    # "Fan Tray <n>," followed by an optional six-word description
    fan = re.compile(r"(Fan Tray\s+\S+,)\s+(\S+\s+\S+\s+\S+\s+\S+\s+\S+\s+\S+)?")
    for line in lines:
        words = line.split()
        if power in words:
            idx = words.index(power)
            # fields idx..idx+7 are read, so len must be >= idx+8
            # (the original checked >= idx+7 and could raise IndexError)
            if len(words) >= idx + 8:
                pcom1 = words[idx] + " " + words[idx + 1] + " " + words[idx + 2]
                pcom2 = words[idx + 3] + " " + words[idx + 4]
                pfi = words[idx + 5]
                psi = words[idx + 6]
                pse = words[idx + 7]
                q5000_p.append((pcom1, pcom2, pfi, psi, pse))
        if fpc in words:
            idx = words.index(fpc)
            # fields idx..idx+6 are read, so len must be >= idx+7
            # (the original checked >= idx+6 and could raise IndexError)
            if len(words) >= idx + 7:
                fcom1 = words[idx] + " " + words[idx + 1]
                fcom2 = words[idx + 2] + " " + words[idx + 3]
                ffo = words[idx + 4]
                ffi = words[idx + 5]
                ffe = words[idx + 6]
                fff = ""  # placeholder for the VC column
                q5000_f.append((fcom1, fcom2, ffo, ffi, ffe, fff))
        fan_m = fan.search(line)
        if fan_m:
            # only the item name and trailing description are available
            q5000_r.append((fan_m.group(1), "", "", "", fan_m.group(2)))
    df_q5000_r = pd.DataFrame(q5000_r, columns=["item", "rev_ver", "part_num", "serial", "model"])
    df_q5000_f = pd.DataFrame(q5000_f, columns=["item", "rev_ver", "part_num", "serial", "model", "VC"])
    df_q5000_p = pd.DataFrame(q5000_p, columns=["item", "rev_ver", "part_num", "serial", "model"])
    return df_q5000_r, df_q5000_f, df_q5000_p
def keyword_q10000(data):
    """Parse chassis-hardware output for the QFX10008 chassis platform.

    Returns (chassis_df, cb_df, fpc_df, power_df); fpc_df carries an empty
    "VC" column for sheet-layout compatibility.
    """
    lines = data.split("\n")
    q10000_c = []
    q10000_f = []
    q10000_p = []
    q10000_r = []
    fpc = "FPC"
    power = "Power"
    cb = "CB"
    # "Chassis <serial> [<model>]" line
    cha = re.compile(r"(Chassis)\s+(\S+)\s+(\S+)?")
    for line in lines:
        words = line.split()
        if power in words:
            idx = words.index(power)
            # fields idx..idx+7 are read, so len must be >= idx+8
            # (the original checked >= idx+7 and could raise IndexError)
            if len(words) >= idx + 8:
                pcom1 = words[idx] + " " + words[idx + 1] + " " + words[idx + 2]
                pcom2 = words[idx + 3] + " " + words[idx + 4]
                pfi = words[idx + 5]
                psi = words[idx + 6]
                pse = words[idx + 7]
                q10000_p.append((pcom1, pcom2, pfi, psi, pse))
        if fpc in words:
            idx = words.index(fpc)
            # fields idx..idx+6 are read, so len must be >= idx+7
            # (the original checked >= idx+6 and could raise IndexError)
            if len(words) >= idx + 7:
                fcom1 = words[idx] + " " + words[idx + 1]
                fcom2 = words[idx + 2] + " " + words[idx + 3]
                ffo = words[idx + 4]
                ffi = words[idx + 5]
                ffe = words[idx + 6]
                fff = ""  # placeholder for the VC column
                q10000_f.append((fcom1, fcom2, ffo, ffi, ffe, fff))
        if cb in words:
            idx = words.index(cb)
            if len(words) >= idx + 7:
                rcom1 = words[idx] + " " + words[idx + 1]
                rcom2 = words[idx + 2] + " " + words[idx + 3]
                rfo = words[idx + 4]
                rfi = words[idx + 5]
                rfe = words[idx + 6]
                q10000_r.append((rcom1, rcom2, rfo, rfi, rfe))
        cha_m = cha.search(line)
        if cha_m:
            # chassis line has no rev/part fields; pad with spaces
            q10000_c.append((cha_m.group(1), " ", " ", cha_m.group(2), cha_m.group(3)))
    df_q10000_c = pd.DataFrame(q10000_c, columns=["item", "rev_ver", "part_num", "serial", "model"])
    df_q10000_r = pd.DataFrame(q10000_r, columns=["item", "rev_ver", "part_num", "serial", "model"])
    df_q10000_f = pd.DataFrame(q10000_f, columns=["item", "rev_ver", "part_num", "serial", "model", "VC"])
    df_q10000_p = pd.DataFrame(q10000_p, columns=["item", "rev_ver", "part_num", "serial", "model"])
    return df_q10000_c, df_q10000_r, df_q10000_f, df_q10000_p
def keyword_ex8200(data):
    """Parse chassis-hardware output for the EX8208 chassis platform.

    Returns (chassis_df, cb_df, fpc_df, power_df); fpc_df carries an empty
    "VC" column for sheet-layout compatibility.  Power supplies appear as
    "PSU" on this platform.
    """
    lines = data.split("\n")
    e8200_c = []
    e8200_f = []
    e8200_p = []
    e8200_r = []
    fpc = "FPC"
    power = "PSU"
    cb = "CB"
    # "Chassis <word word> <serial> [<model>]" line
    cha = re.compile(r"(Chassis)\s+(\S+\s+\S+)\s+(\S+)\s+(\S+)?")
    for line in lines:
        words = line.split()
        if power in words:
            idx = words.index(power)
            if len(words) >= idx + 7:
                pcom1 = words[idx] + " " + words[idx + 1]
                pcom2 = words[idx + 2] + " " + words[idx + 3]
                pfi = words[idx + 4]
                psi = words[idx + 5]
                pse = words[idx + 6]
                e8200_p.append((pcom1, pcom2, pfi, psi, pse))
        if fpc in words:
            idx = words.index(fpc)
            # fields idx..idx+6 are read, so len must be >= idx+7
            # (the original checked >= idx+6 and could raise IndexError)
            if len(words) >= idx + 7:
                fcom1 = words[idx] + " " + words[idx + 1]
                fcom2 = words[idx + 2] + " " + words[idx + 3]
                ffo = words[idx + 4]
                ffi = words[idx + 5]
                ffe = words[idx + 6]
                fff = ""  # placeholder for the VC column
                e8200_f.append((fcom1, fcom2, ffo, ffi, ffe, fff))
        if cb in words:
            idx = words.index(cb)
            if len(words) >= idx + 7:
                rcom1 = words[idx] + " " + words[idx + 1]
                rcom2 = words[idx + 2] + " " + words[idx + 3]
                rfo = words[idx + 4]
                rfi = words[idx + 5]
                rfe = words[idx + 6]
                e8200_r.append((rcom1, rcom2, rfo, rfi, rfe))
        cha_m = cha.search(line)
        if cha_m:
            # chassis line has no part-number field; pad with a space
            e8200_c.append((cha_m.group(1), cha_m.group(2), " ", cha_m.group(3), cha_m.group(4)))
    df_e8200_c = pd.DataFrame(e8200_c, columns=["item", "rev_ver", "part_num", "serial", "model"])
    df_e8200_r = pd.DataFrame(e8200_r, columns=["item", "rev_ver", "part_num", "serial", "model"])
    df_e8200_f = pd.DataFrame(e8200_f, columns=["item", "rev_ver", "part_num", "serial", "model", "VC"])
    df_e8200_p = pd.DataFrame(e8200_p, columns=["item", "rev_ver", "part_num", "serial", "model"])
    return df_e8200_c, df_e8200_r, df_e8200_f, df_e8200_p
def keyword_ex9200(data):
    """Parse chassis-hardware output for EX92xx chassis platforms.

    Returns (chassis_df, routing_engine_df, fpc_df, power_df, cb_df);
    fpc_df carries an empty "VC" column for sheet-layout compatibility.
    Power supplies appear as "PEM" on this platform.
    """
    lines = data.split("\n")
    e9200_cb = []
    e9200_c = []
    e9200_f = []
    e9200_p = []
    e9200_r = []
    fpc = "FPC"
    power = "PEM"
    routing = "Routing"
    # "Chassis <serial> [<model>]" and "CB <n> <rev rev> <part> <serial> [<model>]"
    cha = re.compile(r"(Chassis)\s+(\S+)\s+(\S+)?")
    cb = re.compile(r"(CB \S+)\s+(\S+\s+\S+)\s+(\S+)\s+(\S+)\s+(\S+)?")
    for line in lines:
        words = line.split()
        if power in words:
            idx = words.index(power)
            if len(words) >= idx + 7:
                pcom1 = words[idx] + " " + words[idx + 1]
                pcom2 = words[idx + 2] + " " + words[idx + 3]
                pfi = words[idx + 4]
                psi = words[idx + 5]
                pse = words[idx + 6]
                e9200_p.append((pcom1, pcom2, pfi, psi, pse))
        if fpc in words:
            idx = words.index(fpc)
            # fields idx..idx+6 are read, so len must be >= idx+7
            # (the original checked >= idx+6 and could raise IndexError)
            if len(words) >= idx + 7:
                fcom1 = words[idx] + " " + words[idx + 1]
                fcom2 = words[idx + 2] + " " + words[idx + 3]
                ffo = words[idx + 4]
                ffi = words[idx + 5]
                ffe = words[idx + 6]
                fff = ""  # placeholder for the VC column
                e9200_f.append((fcom1, fcom2, ffo, ffi, ffe, fff))
        if routing in words:
            idx = words.index(routing)
            # fields idx..idx+7 are read, so len must be >= idx+8
            # (the original checked >= idx+7 and could raise IndexError)
            if len(words) >= idx + 8:
                rcom1 = words[idx] + " " + words[idx + 1] + " " + words[idx + 2]
                rcom2 = words[idx + 3] + " " + words[idx + 4]
                rfo = words[idx + 5]
                rfi = words[idx + 6]
                rfe = words[idx + 7]
                e9200_r.append((rcom1, rcom2, rfo, rfi, rfe))
        cha_m = cha.search(line)
        if cha_m:
            # chassis line has no rev/part fields; pad with spaces
            e9200_c.append((cha_m.group(1), " ", " ", cha_m.group(2), cha_m.group(3)))
        cb_m = cb.search(line)
        if cb_m:
            e9200_cb.append((cb_m.group(1), cb_m.group(2), cb_m.group(3), cb_m.group(4), cb_m.group(5)))
    df_e9200_cb = pd.DataFrame(e9200_cb, columns=["item", "rev_ver", "part_num", "serial", "model"])
    df_e9200_c = pd.DataFrame(e9200_c, columns=["item", "rev_ver", "part_num", "serial", "model"])
    df_e9200_r = pd.DataFrame(e9200_r, columns=["item", "rev_ver", "part_num", "serial", "model"])
    df_e9200_f = pd.DataFrame(e9200_f, columns=["item", "rev_ver", "part_num", "serial", "model", "VC"])
    df_e9200_p = pd.DataFrame(e9200_p, columns=["item", "rev_ver", "part_num", "serial", "model"])
    return df_e9200_c, df_e9200_r, df_e9200_f, df_e9200_p, df_e9200_cb
def juniper_parse(ip_a, u_id, u_pas, lock, ho, ve, fp, po):
    """Collect asset data from one Juniper device and append it to juniper_test.xlsx.

    Connects through a SOCKS5 proxy (netmiko has no SOCKS support, which is
    why paramiko is used), runs the version/uptime/chassis commands, parses
    them with the per-platform keyword_* helpers, and writes the rows under
    *lock* so concurrent workers do not corrupt the workbook.  Raw command
    output is appended to juniper_rawdata.txt for cross-checking.

    ho/ve/fp/po are accumulator lists kept for interface compatibility with
    the caller; they are not used here.
    """
    # --- SOCKS5 proxy settings: placeholders, fill in for your environment.
    # (The original source had the literal text "proxy Port" here, which was
    # a syntax error; the port must be an int.)
    proxy_addr = "Proxy IP"
    proxy_port = 1080  # TODO: set the real proxy port
    proxy_user = "Proxy ID"
    proxy_pass = "Proxy Password"
    try:
        model = re.compile(r"Model: (\S+)")
        # socks config
        sock = socks.socksocket()
        sock.set_proxy(
            proxy_type=socks.SOCKS5,
            addr=proxy_addr,
            port=proxy_port,
            username=proxy_user,
            password=proxy_pass)
        # SSH session tunneled through the proxy socket
        sock.connect((ip_a, 22))
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(ip_a, username=u_id, password=u_pas, sock=sock)
        stdin, stdout, stderr = ssh.exec_command("show version | no-more")
        show_version = stdout.read().decode().strip()
        stdin, stdout, stderr = ssh.exec_command("show system uptime | no-more")
        show_uptime = stdout.read().decode().strip()
        data = show_uptime + show_version
        m = model.search(data).group(1)
        # EX92xx chassis need the "hardware models" variant of the command
        if m == "ex9208" or m == "ex9214":
            stdin, stdout, stderr = ssh.exec_command("show chassis hardware models | no-more")
            show_chassis_model = stdout.read().decode().strip()
            stdin, stdout, stderr = ssh.exec_command("show chassis hardware | match chassis")
            show_chassis = stdout.read().decode().strip()
            data2 = show_chassis_model + show_chassis
        else:
            stdin, stdout, stderr = ssh.exec_command("show chassis hardware | no-more")
            show_chassis_hard = stdout.read().decode().strip()
            stdin, stdout, stderr = ssh.exec_command("show virtual-chassis")
            show_virtual = stdout.read().decode().strip()
            data2 = show_chassis_hard + show_virtual
        data = data + data2
        ssh.close()
        # keep the raw output so the parsed sheet can be cross-checked later
        with open("juniper_rawdata.txt", 'a', newline="") as f:
            f.write(str(data))
        host = re.compile(r"Hostname: (\S+)")
        up = re.compile(r"System booted: \d{1,4}-\d{1,2}-\d{1,2} \d{1,2}:\d{1,2}:\d{1,2} \w{1,3} \((\w{1,6} \d{1,2}:\d{1,2} ago)\)")
        fpc_kw = "FPC"
        power_kw = "Power"
        h = host.search(data).group(1)
        m = model.search(data).group(1)
        v = re_ver(data)
        # pick the per-platform parser; all return the same column layout
        if m == "ex8208":
            df_c, df_router_data, df_fp, df_po = keyword_ex8200(data)
            df_cb = None
        elif "ex92" in m:
            df_c, df_router_data, df_fp, df_po, df_cb = keyword_ex9200(data)
        elif m == "qfx10008":
            df_c, df_router_data, df_fp, df_po = keyword_q10000(data)
            df_cb = None
        elif "qfx51" in m or "qfx5200" in m:
            df_router_data, df_fp, df_po = keyword_q5000(data)
            df_c = None
            df_cb = None
        else:
            # generic fixed-form switch (original rebound the fp/po parameters here)
            fp_rows, po_rows = keyword_box(data, fpc_kw, power_kw)
            df_router_data = None
            df_c = None
            df_cb = None
            df_fp = pd.DataFrame(fp_rows, columns=["item", "rev_ver", "part_num", "serial", "model", "VC"])
            df_po = pd.DataFrame(po_rows, columns=["item", "rev_ver", "part_num", "serial", "model"])
        up_t = [up.search(data).group(1)]
        raw_data1 = {"hostname": [], "ipaddr": [], "version": [], "item": [], "rev_ver": [],
                     "part_num": [], "serial": [], "model": [], "uptime": [], "VC": []}

        def _append_rows(df_part, vc_from_row=False, vc_fill=""):
            # Flatten one parsed DataFrame into the raw_data1 columns.
            # Replaces four copy-pasted loops in the original.
            if df_part is None:
                return
            for _, row in df_part.iterrows():
                raw_data1["hostname"].append(h)
                raw_data1["version"].append(v)
                raw_data1["ipaddr"].append(ip_a)
                raw_data1["item"].append(row["item"])
                raw_data1["rev_ver"].append(row["rev_ver"])
                raw_data1["part_num"].append(row["part_num"])
                raw_data1["serial"].append(row["serial"])
                raw_data1["model"].append(row["model"])
                raw_data1["uptime"].append("")
                raw_data1["VC"].append(row["VC"] if vc_from_row else vc_fill)

        _append_rows(df_c)
        _append_rows(df_fp, vc_from_row=True)
        _append_rows(df_po, vc_fill=" ")  # original used a single space here
        _append_rows(df_router_data)
        _append_rows(df_cb)
        df1 = pd.DataFrame(raw_data1)
        df1 = juniper_parse_fixed(df1, up_t)  # stamp uptime on the first row
        df2 = parse_fix(df1)                  # pair each FPC with its PSUs
        # paired rows first (sorted by pair then item), unpaired rows after
        df_none = df2[df2["Pair"].isna()]
        df_not_none = df2[df2["Pair"].notna()].sort_values(by=["Pair", "item"])
        sorted_df = pd.concat([df_not_none, df_none]).drop("Pair", axis=1)
        # chassis rows always come first in the sheet
        df_chassis = sorted_df[sorted_df["item"] == "Chassis"]
        df_reset = sorted_df[sorted_df["item"] != "Chassis"]
        df = pd.concat([df_chassis, df_reset])
        # serialize workbook access across worker threads
        with lock:
            wb = openpyxl.load_workbook("juniper_test.xlsx")
            ws = wb["Sheet1"]
            rows = dataframe_to_rows(df, index=False, header=False)
            for r_idx, row in enumerate(rows, ws.max_row + 1):
                for c_idx, value in enumerate(row, 1):
                    ws.cell(row=r_idx, column=c_idx, value=value)
            wb.save("juniper_test.xlsx")
    except paramiko.AuthenticationException:
        print("###### Invalid Username or Password ######\n")
    except paramiko.ssh_exception.SSHException as e:
        print("something is wrong with SSH : {}".format(e))
    except Exception as ex:
        print("연결오류", ex)
def main():
    """Read switch_list.txt (one "IP ID PASSWORD" per line) and collect
    asset data from every device in parallel.

    Removes any stale juniper_rawdata.txt first so each run starts with a
    clean raw dump, then fans the devices out over a thread pool (I/O-bound,
    so threads suffice) with a shared lock for the Excel workbook.
    """
    with open("switch_list.txt") as f:
        content = [line.split() for line in f.read().splitlines()]
    file = "juniper_rawdata.txt"
    if os.path.isfile(file):
        os.remove(file)
        print("RAW data file removed")
    else:
        print("raw data file not found keep going")
    # accumulator lists passed through to juniper_parse (interface compatibility)
    ho = []
    ve = []
    fp = []
    po = []
    max_threads = 20
    lock = threading.Lock()
    # context manager guarantees the pool is shut down even on error
    # (the original never shut the executor down)
    with ThreadPoolExecutor(max_workers=max_threads) as thread_pool:
        futures = [
            thread_pool.submit(juniper_parse, device[0], device[1], device[2],
                               lock, ho, ve, fp, po)
            for device in content
            if len(device) >= 3  # skip blank/malformed lines instead of raising IndexError
        ]
        for future in tqdm(as_completed(futures), total=len(futures), desc="Processing devices"):
            future.result()
# Script entry point: announce start, then fan out to every device in switch_list.txt.
if __name__ == "__main__":
    print ("we are ready to start")
    main()
주니퍼 네트워크장비 자산정보 파싱 코드
기존 invoke_shell을 사용했던 코드에서 exec_command 사용으로 동작 시간이 줄었으며, 진행률 표시줄(tqdm) 적용으로 작업의 진행 상황을 확인할 수 있다
switch_list.txt 에 IP ID PASS 순으로 한줄에 입력하면 공백을 기준으로 3개로 분리하여 IP ID PASS 로 구분하여 접속 시도를 한다
여전히 juniper_test.xlsx 파일은 코드가 존재하는 파일과 동일 폴더에 존재하여야 한다 juniper_rawdata.txt에는 커맨드 입력 후 나온 출력값이 그대로 들어가 있어 편집된 데이터와 비교를 가능하게 해준다
VC인 장비를 불러오게되면
FPC
power supply 0
power supply 1
형태로 정리 하게 되며 각 FPC 별로 Power supply 2개를 셋트로 묶어서 출력하기 때문에 VC 장비를 구분하기 쉬우며 Master FPC에는 UP time 열 옆에 Master라고 표기 되며
VC 장비 정리 때문인지 QFX 10008 역시 같은 형태로 표기된다는 단점이 존재한다 이부분은 좀 더 공부해서 Fix 하여야 할 부분이다
이 코드가 사용 가능한 장비로는 EX9208 9214 8208, QFX 10008 5120 5200 5100, EX 4300 4200 3400 2200에서 정상적으로 자산정보가 파싱됨을 확인하였다
다만 코드 사용시 모든 junos는 15버전 이상이여야 한다 15버전 이하일경우 정상 파싱 되는 기종은 2200과 8208뿐이다
해당 코드가 파라미코로 작성된 이유는 ssh Proxy인 Socks를 넷미코가 지원하지 않기때문이다.
'파이썬 코드' 카테고리의 다른 글
API 수집 예시(시스코 넥서스) (0) | 2024.08.21 |
---|---|
파이썬 자동화 -6- (0) | 2023.03.20 |
network automation -5- (0) | 2023.03.14 |
Network Automation -4- (0) | 2023.02.24 |
networkautomation -3- (0) | 2023.02.23 |