cisco_fmc_api_via_excel_operations
/
fmc_main_script.py
1629 строк · 71.3 Кб
1#!/usr/bin/python3
2'''
3Add/Delete/Modify netobjects, rangeobjects, hostobjects to different domains
4Add/Delete/Modify groups with objects
5'''
6from get_func import get_all_domains_data, get_all_objects_with_domains, get_all_objects_for_domain, get_all_groups_info, get_all_devices, get_domain_uuid, get_object_data, check_if_object_already_exist, get_all_detailed_groups_for_domain, get_all_objects_for_domain_no_check_ids
7from put_func import post_network_objects, post_range_objects, post_host_objects, post_groups_objects, del_groups, put_networkgroups, del_objects, put_object, post_url_objects, put_urlgroups, post_urlgroups_objects, del_urlgroups
8from fmc_create_xlsx_obj_groups import create_xlsx_and_sheets, write_group_objects_to_xlsx, write_hosts_network_objects_to_xlsx, write_urlgrps_to_xlsx
9from fmc_excel_diff_input_output import create_diff_excel_file
10import pandas as pd
11import json
12import cfg
13from openpyxl import Workbook
14from openpyxl import load_workbook
15import logging
16import re
17import copy
18import os
19from datetime import datetime
20
21
22
# Root logger setup: thread name + logger name + level prefix, INFO and above.
logging.basicConfig(format='%(threadName)s %(name)s %(levelname)s: %(message)s',level=logging.INFO)
24
25
def check_parent_group_non_override(obj, domain_name):
    """Return the non-overridable network groups that contain *obj*.

    Scans the cached detailed network groups for *domain_name* and, for every
    group whose ``overridable`` flag is False, records the index of *obj*
    inside the group's ``objects`` list.

    :param obj: object name to look for
    :param domain_name: FMC domain whose cached groups are inspected
    :return: dict mapping parent group name -> index of *obj* in that group
    """
    all_detailed_networkgroups = cfg.all_detailed_networkgroups
    parent_groups_dict = dict()
    for networkgroup, networkgroup_data in all_detailed_networkgroups[domain_name].items():
        if not networkgroup_data['overridable']:
            try:
                # 'member' avoids shadowing the builtin name 'object'.
                for index, member in enumerate(networkgroup_data['objects']):
                    if member['name'] == obj:
                        parent_groups_dict.update({networkgroup: index})
            except KeyError as error:
                # Some groups carry no 'objects' key (e.g. literal-only
                # groups); log it and keep scanning the remaining groups.
                # One timestamp so the log line and the file entry agree.
                timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
                logging.info(f'{timestamp} networkgroup_data objects of {networkgroup_data} do not exist')
                errors_filename = 'outputs/errors.txt'
                with open(errors_filename, "a") as f:
                    f.write(f'{timestamp} networkgroup_data objects of {networkgroup_data} do not exist\n Error: {error}\n')
    return parent_groups_dict
42
43
def check_parent_urlgroup(obj, domain_name):
    """Return the non-overridable url groups that contain *obj*.

    :param obj: url object name to look for
    :param domain_name: FMC domain whose cached urlgroups are inspected
    :return: dict mapping urlgroup name -> index of *obj* in that group
    """
    parent_groups_dict = dict()
    for urlgroup, urlgroup_data in cfg.all_obj_domain.get(domain_name).get('urlgroups').items():
        if not urlgroup_data['overridable']:
            try:
                # 'member' avoids shadowing the builtin name 'object'.
                for index, member in enumerate(urlgroup_data['objects']):
                    if member['name'] == obj:
                        parent_groups_dict.update({urlgroup: index})
            except KeyError as error:
                # Groups without an 'objects' key are logged and skipped.
                # One timestamp so the log line and the file entry agree.
                timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
                logging.info(
                    f'{timestamp} urlgroup_data objects of {urlgroup_data} do not exist')
                errors_filename = 'outputs/errors.txt'
                with open(errors_filename, "a") as f:
                    f.write(
                        f'{timestamp} urlgroup_data objects of {urlgroup_data} do not exist\n Error: {error}\n')
    return parent_groups_dict
61
62
def create_del_json_non_override(parent_group_name, obj_del_index, obj, domain_name):
    """Build the PUT payload for *parent_group_name* with one member removed.

    Removes the member at *obj_del_index* from the cached group data in
    place (so later lookups against the cache see the deletion too) and
    returns a stripped json carrying only the fields the PUT call needs.

    :param parent_group_name: name of the group to rebuild
    :param obj_del_index: position of the member to drop in 'objects'
    :param obj: object name being deleted (kept for interface compatibility)
    :param domain_name: domain holding the group
    :return: dict with id/overridable/description/objects/name/type
    """
    group_json = cfg.all_detailed_networkgroups[domain_name][parent_group_name]
    # In-place removal keeps the shared cache current for subsequent rows.
    del group_json['objects'][obj_del_index]

    wanted_keys = ('id', 'overridable', 'description', 'objects', 'name', 'type')
    return {key: group_json[key] for key in wanted_keys}
81
82
def create_del_json_urlgrp_non_override(parent_group_name, obj_del_index, obj, domain_name):
    """Build the PUT payload for a url group with one member removed.

    Removes the member at *obj_del_index* from the cached urlgroup data in
    place and returns a stripped json with only the fields needed for PUT.

    :param parent_group_name: name of the urlgroup to rebuild
    :param obj_del_index: position of the member to drop in 'objects'
    :param obj: object name being deleted (kept for interface compatibility)
    :param domain_name: domain holding the urlgroup
    :return: dict with id/overridable/description/objects/name/type
    """
    group_json = cfg.all_obj_domain[domain_name]['urlgroups'][parent_group_name]
    # In-place removal keeps the shared cache current for subsequent rows.
    del group_json['objects'][obj_del_index]

    wanted_keys = ('id', 'overridable', 'description', 'objects', 'name', 'type')
    return {key: group_json[key] for key in wanted_keys}
100
101
def check_if_object_value_changed(object_name, obj_value, domain_name):
    """Report whether the cached FMC value of *object_name* differs from *obj_value*.

    System objects are never treated as changed.  Url objects store their
    value under the 'url' key, every other object type under 'value'.

    :param object_name: name of the object to look up in the cache
    :param obj_value: value supplied by the Excel sheet
    :param domain_name: domain whose cache is consulted
    :return: True if the cached value differs from *obj_value*, else False
    """
    all_obj_domain = cfg.all_obj_domain
    system_objects = cfg.system_objects

    if object_name in system_objects:
        return False

    if all_obj_domain:
        for object_type in cfg.object_types:
            # 'or {}' guards against object types absent from this domain's
            # cache -- the original chained .get() raised AttributeError.
            objects_of_type = all_obj_domain[domain_name].get(object_type) or {}
            object_entry = objects_of_type.get(object_name)
            if object_entry:
                value_key = 'url' if object_type == 'urls' else 'value'
                if object_entry.get(value_key) != obj_value:
                    return True
    # Explicit False instead of the original implicit None fall-through
    # (backward compatible: both are falsy).
    return False
119
120
def del_group_from_parent_group(domain_name, ws):
    """
    del_group_from_parent_group delete group from parent group.
    1st: check whether the group (to be deleted) is within any group, if no - just delete it
    2nd: if in group, get group elements (method: all_detailed_networkgroups = get_all_groups_info())
    3rd: create json of the parent group without the group (to be deleted)
    4th: PUT via API the created json via request to the parent group ID
    5th: in case the parent group consists from only one element(which is to be deleted), then delete the whole parent group

    :param domain_name: domain name
    :type domain_name: str()
    :param ws: sheet which has groups and objects listed
    :type ws: openpyxl class object
    """
    try:
        uuid = get_domain_uuid(domain_name)['uuid']
    except TypeError:
        # get_domain_uuid returned a non-subscriptable value for an unknown
        # domain; record it and fall through (processing continues below).
        errors_filename = 'outputs/errors.txt'
        with open(errors_filename, "a") as f:
            f.write(f'Domain {domain_name} do not exist\n')

    ''' get size of xlsx sheet objects'''
    max_row = ws.max_row
    max_column = ws.max_column

    ''' check for empty lines in Excel '''
    # Walk upward from the bottom until the first non-empty cell in column 1;
    # trailing blank rows are trimmed from max_row.
    max_value = copy.deepcopy(max_row)
    for i in reversed(range(1, max_value+1)):
        some_data = ws.cell(row=i, column=1).value
        if some_data:
            max_row = i
            break
        elif not some_data:
            max_row -= 1


    add_modify_non_override_payload = dict()
    add_delete_payload = dict()

    # Row layout: col 1 = parent group name, col 2 = member group, col 3 = action.
    for i in range(2, max_row+1):
        group_name = ws.cell(row=i, column=1).value
        obj = ws.cell(row=i, column=2).value
        action = ws.cell(row=i, column=3).value

        if group_name:
            group_name = group_name.strip()
        if obj:
            obj = obj.strip()
        if action:
            action = action.strip()

        if action:
            del_group_in_non_override_group = [
                obj,
                action == 'delete']

            if all(del_group_in_non_override_group):
                # if obj.startswith(OBJECT_GROUP_NAME_START):
                parent_group = check_parent_group_non_override(
                    obj, domain_name)
                if parent_group:
                    for parent_group_name, obj_del_index in parent_group.items():
                        if parent_group_name == group_name:
                            # Rebuild the parent group json without the member;
                            # this also updates the shared cache in place.
                            new_parent_group_json = create_del_json_non_override(
                                parent_group_name, obj_del_index, obj, domain_name)
                            # Non-empty groups get PUT; emptied groups are deleted.
                            if len(new_parent_group_json['objects']) > 0:
                                add_modify_non_override_payload.update({new_parent_group_json['name']: new_parent_group_json})
                            else:
                                add_delete_payload.update({new_parent_group_json['name']: new_parent_group_json})

    if add_modify_non_override_payload:
        for group_name, group_data in add_modify_non_override_payload.items():
            if len(group_data['objects']) > 0:
                try:
                    put_networkgroups(group_data, domain_name)
                except KeyError:
                    logging.info(
                        f'domain = {domain_name} do not has Group objects to modify')
    if add_delete_payload:
        for group_name, group_data in add_delete_payload.items():
            try:
                del_groups(group_data, domain_name)
            except KeyError:
                logging.info(
                    f'domain = {domain_name} do not has Group objects to modify')
    # logging.info(f"\n{50*'#'}"
    # f'\n{datetime.now().strftime("%Y-%m-%d %H:%M:%S")} Finished checking DEL request to groups in {domain_name}\n'
    # f"{50*'#'}")
    if any([add_modify_non_override_payload, add_delete_payload]):
        ''' update all_objects cause '''
        # Refresh caches so later calls see the modified/deleted groups.
        update_all_objects(domain_name)
        update_all_networkgroups(domain_name)
213
214
def do_ask_user_input(user_prompt):
    """
    Default action - No
    Return y or n
    """
    answer = 'user input'
    while answer not in ('y', 'n'):
        answer = input(user_prompt).lower()
        if not answer:
            # Bare Enter selects the default answer: No.
            answer = 'n'
    return answer
227
228
def remove_space_obj(obj):
    """Return *obj* with all whitespace removed.

    The previous implementation used a lazy-group regex that required at
    least one character on each side of the whitespace run, so strings with
    leading or trailing whitespace were returned unchanged -- and callers
    that loop ``while match_space(obj)`` would never terminate.  A single
    substitution removes every whitespace run and fixes both problems.

    :param obj: object name possibly containing whitespace
    :return: *obj* without any whitespace characters
    """
    return re.sub(r'\s+', '', obj)
237
238
def get_group_data_from_detailed_networgroups(group_names, domain_name):
    """Map each name in *group_names* to its sorted member names.

    Reads the cached detailed network groups for *domain_name* and returns
    {group name: sorted list of member object names}.
    """
    detailed_groups = cfg.all_detailed_networkgroups.get(domain_name)
    result = dict()
    for name in group_names:
        members = detailed_groups.get(name).get('objects')
        result[name] = sorted(member.get('name') for member in members)
    return result
248
249
def compare_group_data_to_avoid_uneeded_puts(ws, domain_name):
    """
    Open excel sheet data as pandas DataFrame. Remove first row out of the DataFrame as data row. Use first row as columns name for the DataFrame.
    Returns the list of 'add'-action groups whose Excel membership differs
    from the cached FMC membership, so unchanged groups are not re-PUT.
    """
    frame = pd.DataFrame(ws.values)
    header_row = frame.iloc[0]
    frame = frame[1:]
    frame = frame.rename(columns=header_row)

    # Per group marked 'add', collect the sorted list of its object names.
    dict_sorted_groups_for_comparison = dict()
    grouped = frame.groupby(['object_group_name', 'action'])
    for key, _rows in grouped:
        if key[1] != 'add':
            continue
        records = grouped.get_group(key).to_dict('records')
        member_names = [record.get('object') for record in records]
        dict_sorted_groups_for_comparison[key[0]] = sorted(member_names)

    cached_membership = get_group_data_from_detailed_networgroups(
        list(dict_sorted_groups_for_comparison), domain_name)

    # A group is "different" when the sheet's membership does not match
    # the cached FMC membership (or the group is absent from the cache).
    return [
        group
        for group, members in dict_sorted_groups_for_comparison.items()
        if members != cached_membership.get(group)
    ]
282
283
def change_hosts_nets_in_parent_group(domain_name, ws):
    """
    del_hosts_nets_from_parent_group delete group from parent group.
    1st: check whether the group (to be deleted) is within any group, if no - just delete it
    2nd: if in group, get group elements (method: all_detailed_networkgroups = get_all_groups_info())
    3rd: create json of the parent group without the group (to be deleted)
    4th: PUT via API the created json via request to the parent group ID
    5th: in case the parent group consists from only one element(which is to be deleted), then delete the whole parent group

    :param domain_name: domain name
    :type domain_name: str()
    :param ws: sheet which has groups and objects listed
    :type ws: openpyxl class object
    """
    try:
        uuid = get_domain_uuid(domain_name)['uuid']
    except TypeError as error:
        # Unknown domain: log, record, and fall through (processing continues).
        logging.info(f'Domain {domain_name} do not exist either on FMC, either in Excel sheet')
        errors_filename = 'outputs/errors.txt'
        with open(errors_filename, "a") as f:
            f.write(f'Domain {domain_name} do not exist either on FMC, either in Excel sheet\n Error: {error}\n')

    ''' get size of xlsx sheet objects'''
    max_row = ws.max_row
    max_column = ws.max_column

    ''' check for empty lines in Excel '''
    # Trim trailing blank rows by scanning column 1 upward from the bottom.
    max_value = copy.deepcopy(max_row)
    for i in reversed(range(1, max_value+1)):
        some_data = ws.cell(row=i, column=1).value
        if some_data:
            max_row = i
            break
        elif not some_data:
            max_row -= 1

    add_modify_non_override_payload = dict()
    add_delete_payload = dict()

    # Row layout: col 1 = parent group, col 2 = host/net/range object, col 3 = action.
    for i in range(2, max_row+1):
        group_name = ws.cell(row=i, column=1).value
        obj = ws.cell(row=i, column=2).value
        action = ws.cell(row=i, column=3).value

        if group_name:
            group_name = group_name.strip()
        if obj:
            obj = obj.strip()
        if action:
            action = action.strip()
        if action:
            del_obj_in_non_override_group = [
                obj,
                action == 'delete']
            if all(del_obj_in_non_override_group):
                # if any([obj.startswith(OBJECT_HOST_NAME_START), obj.startswith(OBJECT_SUBNET_NAME_START), obj.startswith(OBJECT_RANGE_NAME_START)]):
                parent_group = check_parent_group_non_override(
                    obj, domain_name)
                if parent_group:
                    for parent_group_name, obj_del_index in parent_group.items():
                        if parent_group_name == group_name:
                            # Rebuild the group json without the member; this
                            # also updates the shared cache in place.
                            new_parent_group_json = create_del_json_non_override(
                                parent_group_name, obj_del_index, obj, domain_name)
                            # Non-empty groups get PUT; emptied groups are deleted.
                            if len(new_parent_group_json['objects']) > 0:
                                add_modify_non_override_payload.update(
                                    {new_parent_group_json['name']: new_parent_group_json})
                            else:
                                add_delete_payload.update(
                                    {new_parent_group_json['name']: new_parent_group_json})
    if add_modify_non_override_payload:
        for group_name, group_data in add_modify_non_override_payload.items():
            if len(group_data['objects']) > 0:
                try:
                    put_networkgroups(group_data, domain_name)
                except KeyError:
                    logging.info(
                        f'domain = {domain_name} do not has Group objects to modify')
    if add_delete_payload:
        for group_name, group_data in add_delete_payload.items():
            try:
                del_groups(group_data, domain_name)
            except KeyError:
                logging.info(
                    f'domain = {domain_name} do not has Group objects to modify')
    if any([add_modify_non_override_payload, add_delete_payload]):
        ''' update all_objects cause new hosts have been just added '''
        # Refresh caches so later calls see the modified/deleted groups.
        update_all_objects(domain_name)
        update_all_networkgroups(domain_name)
372
373
def change_urls_in_group(domain_name, ws):
    """Remove url objects (rows with action 'delete') from their url groups.

    For each sheet row marked 'delete', find the non-overridable url group
    that contains the object, rebuild the group json without it, and PUT the
    result.  A group left with no members is deleted entirely.  Afterwards
    the object cache is refreshed.

    :param domain_name: domain name
    :type domain_name: str()
    :param ws: sheet which has groups and objects listed
    :type ws: openpyxl class object
    """
    try:
        uuid = get_domain_uuid(domain_name)['uuid']
    except TypeError as error:
        # Unknown domain: log, record, and fall through.
        logging.info(
            f'Domain {domain_name} do not exist either on FMC, either in Excel sheet')
        errors_filename = 'outputs/errors.txt'
        with open(errors_filename, "a") as f:
            f.write(
                f'Domain {domain_name} do not exist either on FMC, either in Excel sheet\n Error: {error}\n')

    ''' get size of xlsx sheet objects'''
    max_row = ws.max_row
    max_column = ws.max_column

    ''' check for empty lines in Excel '''
    # Trim trailing blank rows by scanning column 1 upward from the bottom.
    max_value = copy.deepcopy(max_row)
    for i in reversed(range(1, max_value+1)):
        some_data = ws.cell(row=i, column=1).value
        if some_data:
            max_row = i
            break
        elif not some_data:
            max_row -= 1

    add_modify_non_override_payload = dict()
    add_delete_payload = dict()

    # Row layout: col 1 = urlgroup, col 2 = url object, col 3 = action.
    for i in range(2, max_row+1):
        group_name = ws.cell(row=i, column=1).value
        obj = ws.cell(row=i, column=2).value
        action = ws.cell(row=i, column=3).value

        if group_name:
            group_name = group_name.strip()
        if obj:
            obj = obj.strip()
        if action:
            action = action.strip()
        if action:
            del_obj_in_non_override_group = [
                obj,
                action == 'delete']
            if all(del_obj_in_non_override_group):
                parent_group = check_parent_urlgroup(
                    obj, domain_name)
                if parent_group:
                    for parent_group_name, obj_del_index in parent_group.items():
                        if parent_group_name == group_name:
                            # Rebuild the urlgroup json without the member;
                            # this also updates the shared cache in place.
                            new_parent_group_json = create_del_json_urlgrp_non_override(
                                parent_group_name, obj_del_index, obj, domain_name)
                            # Non-empty groups get PUT; emptied groups are deleted.
                            if len(new_parent_group_json['objects']) > 0:
                                add_modify_non_override_payload.update(
                                    {new_parent_group_json['name']: new_parent_group_json})
                            else:
                                add_delete_payload.update(
                                    {new_parent_group_json['name']: new_parent_group_json})
    if add_modify_non_override_payload:
        for group_name, group_data in add_modify_non_override_payload.items():
            if len(group_data['objects']) > 0:
                try:
                    put_urlgroups(group_data, domain_name)
                except KeyError:
                    logging.info(
                        f'domain = {domain_name} do not has Group objects to modify')
    if add_delete_payload:
        for group_name, group_data in add_delete_payload.items():
            try:
                del_urlgroups(group_data, domain_name)
            except KeyError:
                logging.info(
                    f'domain = {domain_name} do not has Group objects to modify')
    # Bug fix: also refresh when only whole-group deletions happened.  The
    # original gated this on add_modify_non_override_payload alone, so the
    # cached urlgroup data went stale after a group was deleted (the sibling
    # network-group functions check both payloads).
    if any([add_modify_non_override_payload, add_delete_payload]):
        ''' update all_objects cause new objects were added '''
        update_all_objects(domain_name)
463
464
465
def del_hosts_nets_for_obj_sheet(domain_name, ws):
    """Delete objects listed with action 'delete' on the objects sheet.

    An object still referenced by a non-overridable group is skipped with a
    warning (it must be removed from the group membership first); every
    other listed object that exists in the domain is deleted via the FMC
    API, after which the object caches are refreshed.

    :param domain_name: domain name
    :type domain_name: str()
    :param ws: sheet which has objects listed
    :type ws: openpyxl class object
    """

    try:
        uuid = get_domain_uuid(domain_name)['uuid']
    except TypeError as error:
        # Unknown domain: log, record, and fall through.
        logging.info(f'Domain {domain_name} do not exist either on FMC, either in Excel sheet')
        errors_filename = 'outputs/errors.txt'
        with open(errors_filename, "a") as f:
            f.write(f'Domain {domain_name} do not exist either on FMC, either in Excel sheet\n Error: {error}\n')

    add_delete_obj_payload = dict()

    """
    Open excel sheet data as pandas DataFrame. Remove first row out of the DataFrame as data row. Use first row as columns name for the DataFrame.
    """
    df = pd.DataFrame(ws.values)
    new_header = df.iloc[0]
    df = df[1:]
    df = df.rename(columns=new_header)

    """ temp list to include all data exported out of the DataFrame """
    list_sorted_groups_Del = list()

    df_groups_sort = df.groupby(['object_name', 'action'])

    for group, group_data1 in df_groups_sort:
        if (group[1] == 'delete'):
            temp_group = df_groups_sort.get_group(group)
            temp = temp_group.to_dict('records')
            list_sorted_groups_Del.append(temp)

    for groups_desc in list_sorted_groups_Del:
        check_object_name = groups_desc[0].get('object_name')
        parent_group = check_parent_group_non_override(check_object_name, domain_name)
        if parent_group:
            # Refuse to delete an object still referenced by a group; the
            # user must detach it from the group membership first.
            logging.info(f'Warning! Object {check_object_name} is binded with group {parent_group}. Please remove it from the group membership first!')
            errors_filename = 'outputs/errors.txt'
            with open(errors_filename, "a") as f:
                # Trailing newline added so consecutive warnings in the
                # error file do not run together on one line.
                f.write(f'Warning! Object {check_object_name} is binded with group {parent_group}. Please remove it from the group membership first!\n')
            continue
        try:
            if check_if_object_already_exist(check_object_name, domain_name):
                object_name, object_data = get_object_data(check_object_name, domain_name)
                add_delete_obj_payload.update({object_name: object_data})
        except KeyError as error:
            logging.info(f'Object {check_object_name} do not exist')
            errors_filename = 'outputs/errors.txt'
            with open(errors_filename, "a") as f:
                f.write(f'Object {check_object_name} do not exist\n Error: {error}\n')

    if add_delete_obj_payload:
        for obj_name, obj_data in add_delete_obj_payload.items():
            try:
                del_objects(obj_data, domain_name)
            except KeyError:
                logging.info(
                    f'domain = {domain_name} do not has Group objects to modify')

    # Bug fix: the original gated the cache refresh on two payload dicts
    # that were never populated in this function (dead code removed above),
    # so the caches were never refreshed after deletions.  Refresh whenever
    # objects were actually queued for deletion.
    if add_delete_obj_payload:
        ''' update all_objects cause objects have just been deleted '''
        update_all_objects(domain_name)
        update_all_networkgroups(domain_name)
575
576
def match_space(object_name):
    """Return True when *object_name* contains any whitespace, else False."""
    return re.search(r'\s+', object_name) is not None
585
586
587def object_change_function(domain_name, ws):
588"""
589object_change_function function to add, delete objects
590
591:param domain_name: domain name of the domain to change object
592:type domain_name: str
593:param ws: Excel sheet where objects are provided
594:type ws: openpyxl class
595"""
596all_obj_domain = cfg.all_obj_domain
597
598domains_add_hostobj = dict()
599domains_add_netobj = dict()
600domains_add_rangeobj = dict()
601domains_add_urlobj = dict()
602domains_modify_hostobj = dict()
603domains_modify_netobj = dict()
604domains_modify_rangeobj = dict()
605domains_modify_urlobj = dict()
606
607max_row = ws.max_row
608max_column = ws.max_column
609
610''' check for empty lines in Excel '''
611max_value = copy.deepcopy(max_row)
612for i in reversed(range(1, max_value+1)):
613some_data = ws.cell(row=i, column=1).value
614if some_data:
615max_row = i
616break
617elif not some_data:
618max_row -= 1
619
620add_hostobject_payload = list()
621add_netobject_payload = list()
622add_rangeobject_payload = list()
623add_urlobject_payload = list()
624modify_hostobject_payload = list()
625modify_netobject_payload = list()
626modify_rangeobject_payload = list()
627modify_urlobject_payload = list()
628
629for i in range(2, max_row+1):
630object_name = ws.cell(row=i, column=1).value
631if object_name:
632object_name = object_name.strip()
633if match_space(object_name):
634print(f'SPACE is present in object_name name {object_name}')
635remove_space = do_ask_user_input('Remove SPACE from the object_name [y/N] ') == 'y'
636if remove_space:
637while (match_space(object_name)):
638object_name = remove_space_obj(object_name)
639
640obj = ws.cell(row=i, column=2).value
641object_action = ws.cell(row=i, column=3).value
642object_type = ws.cell(row=i, column=4).value
643if object_action:
644object_action = object_action.lower()
645if object_type:
646object_type = object_type.lower()
647if object_name:
648object_name = object_name.strip()
649# object_type = check_object_type(object_name)
650if obj:
651obj = obj.strip()
652host_add_non_exist = all(
653[
654# check_object_type(object_name) == 'host',
655object_type == 'host',
656not check_if_object_already_exist(object_name, domain_name),
657object_action == 'add'])
658network_add_non_exist = all(
659[
660# check_object_type(object_name) == 'network',
661object_type == 'network',
662not check_if_object_already_exist(object_name, domain_name),
663object_action == 'add'])
664range_add_non_exist = all(
665[
666# check_object_type(object_name) == 'range',
667object_type == 'range',
668not check_if_object_already_exist(object_name, domain_name),
669object_action == 'add'])
670url_add_non_exist = all(
671[
672# check_object_type(object_name) == 'range',
673object_type == 'url',
674not check_if_object_already_exist(object_name, domain_name),
675object_action == 'add'])
676host_add_exist = all(
677[
678# check_object_type(object_name) == 'host',
679object_type == 'host',
680check_if_object_already_exist(object_name, domain_name),
681object_action == 'modify'])
682network_add_exist = all(
683[
684# check_object_type(object_name) == 'network',
685object_type == 'network',
686check_if_object_already_exist(object_name, domain_name),
687object_action == 'modify'])
688range_add_exist = all(
689[
690# check_object_type(object_name) == 'range',
691object_type == 'range',
692check_if_object_already_exist(object_name, domain_name),
693object_action == 'modify'])
694url_add_exist = all(
695[
696# check_object_type(object_name) == 'range',
697object_type == 'url',
698check_if_object_already_exist(object_name, domain_name),
699object_action == 'modify'])
700
701if host_add_non_exist:
702hostObject = f'{{"name": "{object_name}","value": "{obj}","type": "{object_type}"}}'
703add_hostobject_payload.append(json.loads(hostObject))
704
705elif network_add_non_exist:
706netObject = f'{{"name": "{object_name}","value": "{obj}","type": "{object_type}"}}'
707add_netobject_payload.append(json.loads(netObject))
708
709elif range_add_non_exist:
710rangeObject = f'{{"name": "{object_name}","value": "{obj}","type": "{object_type}"}}'
711add_rangeobject_payload.append(json.loads(rangeObject))
712
713elif url_add_non_exist:
714urlObject = f'{{"name": "{object_name}","url": "{obj}","type": "{object_type}"}}'
715add_urlobject_payload.append(json.loads(urlObject))
716
717elif host_add_exist:
718if check_if_object_value_changed(object_name, obj, domain_name):
719hostObject = f'{{"name": "{object_name}","value": "{obj}","type": "{object_type}","id": "{all_obj_domain[domain_name][f"{object_type}s"].get(object_name).get("id")}"}}'
720modify_hostobject_payload.append(json.loads(hostObject))
721
722elif network_add_exist:
723if check_if_object_value_changed(object_name, obj, domain_name):
724netObject = f'{{"name": "{object_name}","value": "{obj}","type": "{object_type}","id": "{all_obj_domain[domain_name][f"{object_type}s"].get(object_name).get("id")}"}}'
725modify_netobject_payload.append(json.loads(netObject))
726
727elif range_add_exist:
728if check_if_object_value_changed(object_name, obj, domain_name):
729rangeObject = f'{{"name": "{object_name}","value": "{obj}","type": "{object_type}","id": "{all_obj_domain[domain_name][f"{object_type}s"].get(object_name).get("id")}"}}'
730modify_rangeobject_payload.append(json.loads(rangeObject))
731
732elif url_add_exist:
733if check_if_object_value_changed(object_name, obj, domain_name):
734urlObject = f'{{"name": "{object_name}","url": "{obj}","type": "{object_type}","id": "{all_obj_domain[domain_name][f"{object_type}s"].get(object_name).get("id")}"}}'
735modify_urlobject_payload.append(json.loads(urlObject))
736
737if add_hostobject_payload:
738domains_add_hostobj.update({domain_name: {'hostobject': add_hostobject_payload}})
739if add_netobject_payload:
740domains_add_netobj.update({domain_name: {'netobject': add_netobject_payload}})
741if add_rangeobject_payload:
742domains_add_rangeobj.update({domain_name: {'rangeobject': add_rangeobject_payload}})
743if add_urlobject_payload:
744domains_add_urlobj.update({domain_name: {'urlobject': add_urlobject_payload}})
745
746if modify_hostobject_payload:
747domains_modify_hostobj.update({domain_name: {'hostobject': modify_hostobject_payload}})
748if modify_netobject_payload:
749domains_modify_netobj.update({domain_name: {'netobject': modify_netobject_payload}})
750if modify_rangeobject_payload:
751domains_modify_rangeobj.update({domain_name: {'rangeobject': modify_rangeobject_payload}})
752if modify_urlobject_payload:
753domains_modify_urlobj.update({domain_name: {'urlobject': modify_urlobject_payload}})
754
755''' now to POST our list of network objects '''
756for domain, domain_data in domains_add_netobj.items():
757try:
758if domain_data:
759if len(domain_data['netobject']) < 1000:
760logging.info(f'Starting to add {len(domain_data["netobject"])} network objects')
761post_network_objects(domain_data['netobject'], get_domain_uuid(domain)['uuid'])
762elif len(domain_data['netobject']) >= 1000:
763max_chunks = int(len(domain_data['netobject'])/1000 + 1)
764for item in range(max_chunks):
765globals()['net_chunk_list%s' % item] = list()
766chunk_size_counter = 0
767chunk_counter = 0
768for index, item in enumerate(domain_data['netobject']):
769if index == 1000 + chunk_size_counter:
770chunk_size_counter += 1000
771chunk_counter += 1
772if index < 1000 + chunk_size_counter:
773globals()['net_chunk_list%s' % chunk_counter].append(item)
774for chunk in range(max_chunks):
775logging.info(f'Starting to add {len(globals()["net_chunk_list%s" % chunk])} network objects')
776post_network_objects(globals()['net_chunk_list%s' % chunk], get_domain_uuid(domain)['uuid'])
777
778except KeyError:
779logging.info(
780f'domain = {domain} do not has Network objects to add')
781pass
782''' now to POST our list of range objects '''
783for domain, domain_data in domains_add_rangeobj.items():
784try:
785if domain_data:
786if len(domain_data['rangeobject']) < 1000:
787logging.info(f'Starting to add {len(domain_data["rangeobject"])} range objects')
788post_range_objects(domain_data['rangeobject'], get_domain_uuid(domain)['uuid'])
789elif len(domain_data['rangeobject']) >= 1000:
790max_chunks = int(len(domain_data['rangeobject'])/1000 + 1)
791for item in range(max_chunks):
792globals()['range_chunk_list%s' % item] = list()
793chunk_size_counter = 0
794chunk_counter = 0
795for index, item in enumerate(domain_data['rangeobject']):
796if index == 1000 + chunk_size_counter:
797chunk_size_counter += 1000
798chunk_counter += 1
799if index < 1000 + chunk_size_counter:
800globals()['range_chunk_list%s' % chunk_counter].append(item)
801for chunk in range(max_chunks):
802logging.info(f'Starting to add {len(globals()["range_chunk_list%s" % chunk])} range objects')
803post_range_objects(globals()['range_chunk_list%s' % chunk], get_domain_uuid(domain)['uuid'])
804except KeyError:
805logging.info(f'domain = {domain} do not has Range objects to add')
806pass
807''' now to POST our list of host objects '''
808for domain, domain_data in domains_add_hostobj.items():
809try:
810if domain_data:
811if len(domain_data['hostobject']) < 1000:
812logging.info(f'Starting to add {len(domain_data["hostobject"])} host objects')
813post_host_objects(domain_data['hostobject'], get_domain_uuid(domain)['uuid'])
814elif len(domain_data['hostobject']) >= 1000:
815max_chunks = int(len(domain_data['hostobject'])/1000 + 1)
816for item in range(max_chunks):
817globals()['host_chunk_list%s' % item] = list()
818chunk_size_counter = 0
819chunk_counter = 0
820for index, item in enumerate(domain_data['hostobject']):
821if index == 1000 + chunk_size_counter:
822chunk_size_counter += 1000
823chunk_counter += 1
824if index < 1000 + chunk_size_counter:
825globals()['host_chunk_list%s' % chunk_counter].append(item)
826for chunk in range(max_chunks):
827logging.info(f'Starting to add {len(globals()["host_chunk_list%s" % chunk])} host objects')
828post_host_objects(globals()['host_chunk_list%s' % chunk], get_domain_uuid(domain)['uuid'])
829except KeyError:
830logging.info(f'domain = {domain} do not has Host objects to add')
831''' now to POST our list of url objects '''
832for domain, domain_data in domains_add_urlobj.items():
833try:
834if domain_data:
835if len(domain_data['urlobject']) < 1000:
836logging.info(
837f'Starting to add {len(domain_data["urlobject"])} url objects')
838post_url_objects(
839domain_data['urlobject'], get_domain_uuid(domain)['uuid'])
840elif len(domain_data['urlobject']) >= 1000:
841max_chunks = int(len(domain_data['urlobject'])/1000 + 1)
842for item in range(max_chunks):
843globals()['url_chunk_list%s' % item] = list()
844chunk_size_counter = 0
845chunk_counter = 0
846for index, item in enumerate(domain_data['urlobject']):
847if index == 1000 + chunk_size_counter:
848chunk_size_counter += 1000
849chunk_counter += 1
850if index < 1000 + chunk_size_counter:
851globals()['url_chunk_list%s' %
852chunk_counter].append(item)
853for chunk in range(max_chunks):
854logging.info(
855f'Starting to add {len(globals()["url_chunk_list%s" % chunk])} url objects')
856post_url_objects(
857globals()['url_chunk_list%s' % chunk], get_domain_uuid(domain)['uuid'])
858
859except KeyError:
860logging.info(
861f'domain = {domain} do not has Network objects to add')
862pass
863''' now to PUT our list of network objects '''
864for domain, domain_data in domains_modify_netobj.items():
865try:
866if domain_data:
867for item in domain_data.get('netobject'):
868put_object(item,get_domain_uuid(domain)['uuid'])
869except KeyError:
870logging.info(
871f'domain = {domain} do not has Network objects to modify')
872pass
873for domain, domain_data in domains_modify_hostobj.items():
874try:
875if domain_data:
876for item in domain_data.get('hostobject'):
877put_object(item,get_domain_uuid(domain)['uuid'])
878except KeyError:
879logging.info(
880f'domain = {domain} do not has Host objects to modify')
881pass
882for domain, domain_data in domains_modify_rangeobj.items():
883try:
884if domain_data:
885for item in domain_data.get('rangeobject'):
886put_object(item,get_domain_uuid(domain)['uuid'])
887except KeyError:
888logging.info(
889f'domain = {domain} do not has Range objects to modify')
890pass
891for domain, domain_data in domains_modify_urlobj.items():
892try:
893if domain_data:
894for item in domain_data.get('urlobject'):
895put_object(item, get_domain_uuid(domain)['uuid'])
896except KeyError:
897logging.info(
898f'domain = {domain} do not has Url objects to modify')
899pass
900
901if any([add_netobject_payload, add_rangeobject_payload, add_hostobject_payload, add_urlobject_payload]):
902''' update all_objects cause new hosts have been just added '''
903update_all_objects(domain_name)
904update_all_networkgroups(domain_name)
905
906
def create_json_put_obj(group_data, group_object_data, override_object=False):
    """Build the request body for a PUT on an existing NetworkGroup.

    The new members in *group_object_data* are combined with the members the
    group already has in the cached FMC state
    (``cfg.all_detailed_networkgroups``), so a PUT appends rather than
    replaces.  When *override_object* is True no "objects" key is emitted.

    :param group_data: dict with at least 'name' and 'domain_name'
    :param group_object_data: iterable of member dicts to add
    :param override_object: skip building the "objects" list when True
    :return: dict ready to be serialized as the PUT payload
    """
    payload = {}
    if not override_object:
        combined_members = list(group_object_data)
        # append the members currently known for this group on the FMC
        for existing in cfg.all_detailed_networkgroups[group_data['domain_name']][group_data['name']]['objects']:
            combined_members.append(existing)
        payload["objects"] = combined_members
    payload["type"] = "NetworkGroup"
    payload["name"] = group_data['name']
    _, object_data = get_object_data(group_data['name'], group_data['domain_name'])
    payload["id"] = object_data['id']
    return payload
923
924
def create_json_url_put_obj(group_data, group_object_data, override_object=False):
    """Build the request body for a PUT on an existing UrlGroup.

    New members are merged with the members cached for this url group in
    ``cfg.all_obj_domain`` so the PUT appends to the group.  When
    *override_object* is True no "objects" key is emitted.

    :param group_data: dict with at least 'name' and 'domain_name'
    :param group_object_data: iterable of member dicts to add
    :param override_object: skip building the "objects" list when True
    :return: dict ready to be serialized as the PUT payload
    """
    payload = {}
    if not override_object:
        combined_members = list(group_object_data)
        # append the members currently cached for this url group
        for existing in cfg.all_obj_domain[group_data['domain_name']]['urlgroups'][group_data['name']]['objects']:
            combined_members.append(existing)
        payload["objects"] = combined_members
    payload["type"] = "UrlGroup"
    payload["name"] = group_data['name']
    _, object_data = get_object_data(group_data['name'], group_data['domain_name'])
    payload["id"] = object_data['id']
    return payload
942
def create_json_group_obj(group_data, group_object_data, override_object=False):
    """Build the request body for a POST creating a new NetworkGroup.

    Returns None when either argument is empty/falsy, so callers can skip
    groups with no resolvable members.

    :param group_data: dict with at least 'name'
    :param group_object_data: list of member dicts ({'id','type','name'})
    :param override_object: unused, kept for signature compatibility
    :return: payload dict or None
    """
    if not (group_data and group_object_data):
        return None
    return {
        "objects": group_object_data,
        "type": "NetworkGroup",
        "name": group_data['name'],
    }
950
951
def create_json_urlgroup_obj(group_data, group_object_data, override_object=False):
    """Build the request body for a POST creating a new UrlGroup.

    Returns None when either argument is empty/falsy, so callers can skip
    groups with no resolvable members.

    :param group_data: dict with at least 'name'
    :param group_object_data: list of member dicts ({'id','type','name'})
    :param override_object: unused, kept for signature compatibility
    :return: payload dict or None
    """
    if not (group_data and group_object_data):
        return None
    return {
        "objects": group_object_data,
        "type": "UrlGroup",
        "name": group_data['name'],
    }
959
def remove_space_from_name(group_name):
    """Normalize an object/group name taken from an Excel cell.

    Strips surrounding whitespace and, if inner spaces remain, prints a
    warning and removes them one by one via ``remove_space_obj`` until
    ``match_space`` no longer matches.  Falsy input (None / empty string)
    is returned unchanged.

    :param group_name: raw name from the spreadsheet, may be None
    :return: cleaned name (or the original falsy value)
    """
    if not group_name:
        return group_name
    group_name = group_name.strip()
    if match_space(group_name):
        print(f'SPACE is present in group name {group_name}')
    while match_space(group_name):
        group_name = remove_space_obj(group_name)
    return group_name
972
973
def get_objects_data_for_group(object_data):
    """Reduce a full FMC object record to the fields a group member needs.

    :param object_data: full object dict as cached from the FMC, may be falsy
    :return: {'id', 'type', 'name'} subset, or {} for falsy input
    """
    if not object_data:
        return {}
    return {
        'id': object_data.get('id'),
        'type': object_data.get('type'),
        'name': object_data.get('name'),
    }
979
980
def objects_non_override_to_group(domain_name, ws):
    """
    Read one Excel "groups" sheet and create/extend network groups on the FMC.

    Rows with action == 'add' are grouped by ``object_group_name``:

    * groups that do not yet exist in the domain are collected and POSTed
      as new NetworkGroups;
    * groups that already exist are PUT with the new members appended to
      the members currently cached for the group.

    Members that cannot be resolved in the domain are logged to
    outputs/errors.txt and skipped.  After any change the global caches are
    refreshed.

    :param domain_name: domain name for each domain in FMC
    :type domain_name: str
    :param ws: Excel sheet name (should be linked with domain name)
    :type ws: openpyxl class object
    """
    domains_add_groupobj = dict()
    domains_del_groupobj = dict()
    domains_add_override_group = dict()

    # Validate that the sheet maps to a real FMC domain; log but do not abort.
    try:
        uuid = get_domain_uuid(domain_name)['uuid']
    except TypeError as error:
        logging.info(f'Domain {domain_name} do not exist either on FMC, either in Excel sheet')
        errors_filename = 'outputs/errors.txt'
        with open(errors_filename, "a") as f:
            f.write(f'Domain {domain_name} do not exist either on FMC, either in Excel sheet\n Error: {error}\n')

    add_group_payload = list()
    add_modify_group_payload = list()
    del_group_payload = list()

    # Open excel sheet data as a pandas DataFrame; promote the first row to
    # column names.
    df = pd.DataFrame(ws.values)
    new_header = df.iloc[0]
    df = df[1:]
    df = df.rename(columns=new_header)

    # One entry per (group, 'add') pair: the list of row dicts of that group.
    list_sorted_groups_False_Add = list()

    df_groups_sort = df.groupby(['object_group_name', 'action'])

    for group, group_data1 in df_groups_sort:
        if (group[1] == 'add'):
            temp_group = df_groups_sort.get_group(group)
            temp = temp_group.to_dict('records')
            list_sorted_groups_False_Add.append(temp)

    for groups_desc in list_sorted_groups_False_Add:

        check_group_name = groups_desc[0].get('object_group_name')
        group_data = dict()
        group_object_data = list()

        if not check_if_object_already_exist(check_group_name, domain_name):
            # Group does not exist yet: collect its members for a POST.
            for obj in groups_desc:
                group_name = remove_space_from_name(obj.get('object_group_name'))
                obj_name = remove_space_from_name(obj.get('object'))

                if not check_if_object_already_exist(obj_name, domain_name):
                    logging.info(
                        f'Error: object {obj_name} for group {check_group_name} do NOT exist!')
                    errors_filename = 'outputs/errors.txt'
                    with open(errors_filename, "a") as f:
                        f.write(
                            f'Object {obj_name} do not exist\n')
                    continue

                try:
                    object_name, object_data = get_object_data(obj_name, domain_name)
                    group_object_data.append(get_objects_data_for_group(object_data))
                except KeyError as error:
                    logging.info(f'Object {obj_name} do not exist')
                    errors_filename = 'outputs/errors.txt'
                    with open(errors_filename, "a") as f:
                        f.write(f'Object {obj_name} do not exist\n Error: {error}\n')

                group_data.update({'name': group_name})
                group_data.update({'domain_name': domain_name})
                group_data.update({'objects': group_object_data})

            if group_object_data:
                # json-like structure (dict) for the API request
                group_objects_json = create_json_group_obj(
                    group_data,
                    group_object_data)
                # queue for the POST run below
                add_group_payload.append(
                    {group_name: group_objects_json})

        elif check_if_object_already_exist(check_group_name, domain_name):
            # Group already exists: collect members for a PUT (append).
            for obj in groups_desc:
                group_name = remove_space_from_name(obj.get('object_group_name'))
                obj_name = remove_space_from_name(obj.get('object'))

                if not check_if_object_already_exist(obj_name, domain_name):
                    # BUG FIX: this message was a bare f-string expression and
                    # was never emitted; now logged like the branch above.
                    logging.info(
                        f'Error: object {obj_name} for group {check_group_name} do NOT exist!')
                    errors_filename = 'outputs/errors.txt'
                    with open(errors_filename, "a") as f:
                        f.write(
                            f'Object {obj_name} DO not exist\n')
                    continue

                try:
                    object_name, object_data = get_object_data(obj_name, domain_name)
                    group_object_data.append(get_objects_data_for_group(object_data))
                except KeyError as error:
                    logging.info(f'Object {obj_name} do not exist')
                    errors_filename = 'outputs/errors.txt'
                    with open(errors_filename, "a") as f:
                        f.write(f'Object {obj_name} do not exist\n Error: {error}\n')

                group_data.update({'name': group_name})
                group_data.update({'domain_name': domain_name})
                group_data.update({'objects': group_object_data})

            if group_object_data:
                # PUT payload merges new members with the cached ones
                group_objects_json = create_json_put_obj(
                    group_data,
                    group_object_data)
                add_modify_group_payload.append(
                    {group_name: group_objects_json})

    if add_group_payload:
        # Merge entries that target the same group name into one values list.
        all_groups_dict = dict()
        for group_data in add_group_payload:
            lst1 = list()
            for group_name, group_value in group_data.items():
                if group_name in all_groups_dict:
                    for element in all_groups_dict[group_name]:
                        lst1.append(element)
                    lst1.append(group_data[group_name])
                    all_groups_dict.update({group_name: lst1})
                else:
                    lst1.append(group_data[group_name])
                    all_groups_dict.update({group_name: lst1})
        # Collapse each group's payload list into a single dict with the
        # concatenated member list.
        all_in_one_dict = dict()
        for group, group_lists in all_groups_dict.items():
            group_objects = list()
            group_data_dict = dict()
            for item in group_lists:
                group_objects += item.get('objects')
                for key, value in item.items():
                    group_data_dict.update({key: value})
            group_data_dict.update({'objects': group_objects})
            all_in_one_dict.update({group: group_data_dict})

        payload_list = []
        for group, group_data in all_in_one_dict.items():
            payload_list.append(group_data)

        domains_add_groupobj.update(
            {domain_name: {'groupObject': payload_list}})
    if del_group_payload:
        domains_del_groupobj.update(
            {domain_name: {'groupObject': del_group_payload}})

    if add_modify_group_payload:
        ''' now to PUT our list of group objects '''
        for item in add_modify_group_payload:
            for group, group_data in item.items():
                try:
                    if group_data:
                        put_networkgroups(group_data, domain_name)
                except KeyError:
                    logging.info(
                        f'domain = {domain_name} do not has Group objects to add')
    if domains_add_groupobj:
        ''' now to POST our list of group objects '''
        for domain, domain_data in domains_add_groupobj.items():
            try:
                if domain_data:
                    post_groups_objects(
                        domain_data['groupObject'],
                        get_domain_uuid(domain)['uuid'])
            except KeyError:
                logging.info(
                    f'domain = {domain} do not has Group objects to add')

    if any([domains_add_groupobj, domains_add_override_group]):
        # refresh global caches because new groups were just created
        update_all_objects(domain_name)
        update_all_networkgroups(domain_name)
1203
1204
def url_objects_groups(domain_name, ws):
    """
    Read one Excel "urlgrps" sheet and create/extend url groups on the FMC.

    Rows with action == 'add' are grouped by ``url_group_name``:

    * groups that do not yet exist in the domain are collected and POSTed
      as new UrlGroups;
    * groups that already exist are PUT with the new members appended to
      the members currently cached for the group.

    Members that cannot be resolved in the domain are logged to
    outputs/errors.txt and skipped.  After any change the object cache is
    refreshed.

    :param domain_name: domain name for each domain in FMC
    :type domain_name: str
    :param ws: Excel sheet name (should be linked with domain name)
    :type ws: openpyxl class object
    """
    domains_add_groupobj = dict()
    domains_del_groupobj = dict()

    # Validate that the sheet maps to a real FMC domain; log but do not abort.
    try:
        uuid = get_domain_uuid(domain_name)['uuid']
    except TypeError as error:
        logging.info(
            f'Domain {domain_name} do not exist either on FMC, either in Excel sheet')
        errors_filename = 'outputs/errors.txt'
        with open(errors_filename, "a") as f:
            f.write(
                f'Domain {domain_name} do not exist either on FMC, either in Excel sheet\n Error: {error}\n')

    add_group_payload = list()
    add_modify_group_payload = list()
    del_group_payload = list()

    # Open excel sheet data as a pandas DataFrame; promote the first row to
    # column names.
    df = pd.DataFrame(ws.values)
    new_header = df.iloc[0]
    df = df[1:]
    df = df.rename(columns=new_header)

    # One entry per (group, 'add') pair: the list of row dicts of that group.
    list_sorted_groups_False_Add = list()

    df_groups_sort = df.groupby(['url_group_name', 'action'])

    for group, group_data1 in df_groups_sort:
        if (group[1] == 'add'):
            temp_group = df_groups_sort.get_group(group)
            temp = temp_group.to_dict('records')
            list_sorted_groups_False_Add.append(temp)

    for groups_desc in list_sorted_groups_False_Add:

        check_group_name = groups_desc[0].get('url_group_name')
        group_data = dict()
        group_object_data = list()

        if not check_if_object_already_exist(check_group_name, domain_name):
            # Group does not exist yet: collect its members for a POST.
            for obj in groups_desc:
                group_name = remove_space_from_name(obj.get('url_group_name'))
                obj_name = remove_space_from_name(obj.get('url'))

                if not check_if_object_already_exist(obj_name, domain_name):
                    logging.info(f'Error: url {obj_name} do NOT exist!')
                    errors_filename = 'outputs/errors.txt'
                    with open(errors_filename, "a") as f:
                        f.write(
                            f'Object {obj_name} do not exist\n')
                    continue

                try:
                    object_name, object_data = get_object_data(
                        obj_name, domain_name)
                    group_object_data.append(
                        get_objects_data_for_group(object_data))
                except KeyError as error:
                    logging.info(f'url {obj_name} do not exist')
                    errors_filename = 'outputs/errors.txt'
                    with open(errors_filename, "a") as f:
                        f.write(
                            f'Object {obj_name} do not exist\n Error: {error}\n')

                group_data.update({'name': group_name})
                group_data.update({'domain_name': domain_name})
                group_data.update({'objects': group_object_data})

            if group_object_data:
                # json-like structure (dict) for the API request
                group_objects_json = create_json_urlgroup_obj(
                    group_data,
                    group_object_data)
                # queue for the POST run below
                add_group_payload.append(
                    {group_name: group_objects_json})

        elif check_if_object_already_exist(check_group_name, domain_name):
            # Group already exists: collect members for a PUT (append).
            for obj in groups_desc:
                group_name = remove_space_from_name(obj.get('url_group_name'))
                obj_name = remove_space_from_name(obj.get('url'))

                if not check_if_object_already_exist(obj_name, domain_name):
                    # BUG FIX: this message was a bare f-string expression and
                    # was never emitted; now logged like the branch above.
                    logging.info(
                        f'Error: object {obj_name} for group {check_group_name} do NOT exist!')
                    errors_filename = 'outputs/errors.txt'
                    with open(errors_filename, "a") as f:
                        f.write(
                            f'Object {obj_name} DO not exist\n')
                    continue

                try:
                    object_name, object_data = get_object_data(
                        obj_name, domain_name)
                    group_object_data.append(
                        get_objects_data_for_group(object_data))
                except KeyError as error:
                    logging.info(f'Object {obj_name} do not exist')
                    errors_filename = 'outputs/errors.txt'
                    with open(errors_filename, "a") as f:
                        f.write(
                            f'Object {obj_name} do not exist\n Error: {error}\n')

                group_data.update({'name': group_name})
                group_data.update({'domain_name': domain_name})
                group_data.update({'objects': group_object_data})

            if group_object_data:
                # PUT payload merges new members with the cached ones
                group_objects_json = create_json_url_put_obj(
                    group_data,
                    group_object_data)
                add_modify_group_payload.append(
                    {group_name: group_objects_json})

    if add_group_payload:
        # Merge entries that target the same group name into one values list.
        all_groups_dict = dict()
        for group_data in add_group_payload:
            lst1 = list()
            for group_name, group_value in group_data.items():
                if group_name in all_groups_dict:
                    for element in all_groups_dict[group_name]:
                        lst1.append(element)
                    lst1.append(group_data[group_name])
                    all_groups_dict.update({group_name: lst1})
                else:
                    lst1.append(group_data[group_name])
                    all_groups_dict.update({group_name: lst1})
        # Collapse each group's payload list into a single dict with the
        # concatenated member list.
        all_in_one_dict = dict()
        for group, group_lists in all_groups_dict.items():
            group_objects = list()
            group_data_dict = dict()
            for item in group_lists:
                group_objects += item.get('objects')
                for key, value in item.items():
                    group_data_dict.update({key: value})
            group_data_dict.update({'objects': group_objects})
            all_in_one_dict.update({group: group_data_dict})

        payload_list = []
        for group, group_data in all_in_one_dict.items():
            payload_list.append(group_data)

        domains_add_groupobj.update(
            {domain_name: {'groupObject': payload_list}})
    if del_group_payload:
        domains_del_groupobj.update(
            {domain_name: {'groupObject': del_group_payload}})

    if add_modify_group_payload:
        ''' now to PUT our list of group objects '''
        for item in add_modify_group_payload:
            for group, group_data in item.items():
                try:
                    if group_data:
                        put_urlgroups(group_data, domain_name)
                except KeyError:
                    logging.info(
                        f'domain = {domain_name} do not has UrlGroup objects to add')
    if domains_add_groupobj:
        ''' now to POST our list of group objects '''
        for domain, domain_data in domains_add_groupobj.items():
            try:
                if domain_data:
                    post_urlgroups_objects(
                        domain_data['groupObject'],
                        get_domain_uuid(domain)['uuid'])
            except KeyError:
                logging.info(
                    f'domain = {domain} do not has UrlGroup objects to add')
    if any([domains_add_groupobj, add_modify_group_payload, add_group_payload]):
        # refresh the object cache because url groups were just changed
        update_all_objects(domain_name)
1425
1426
def check_xlsx_sheet_empty_lines(ws, max_row):
    """Return the effective last row of the sheet, ignoring trailing blanks.

    Scans column 1 bottom-up from *max_row* and returns the index of the
    deepest row whose first cell holds a truthy value; returns 0 when every
    checked row is empty.

    :param ws: openpyxl worksheet
    :param max_row: reported row count to start scanning from
    :return: last non-empty row index (1-based), or 0
    """
    for row in range(max_row, 0, -1):
        if ws.cell(row=row, column=1).value:
            return row
    return 0
1438
1439
def update_all_objects(domain_name):
    """
    Re-read the objects of *domain_name* from the FMC and merge them into the
    global caches ``cfg.all_obj_domain`` and ``cfg.all_ids_domain``.

    For every object category the freshly fetched mapping overrides the
    cached one key-by-key; categories with no fresh data keep the cached
    mapping unchanged.

    :param domain_name: domain name (function should be run for each domain)
    :type domain_name: str
    """
    # domain_obj_domain, domain_ids = get_all_objects_for_domain(domain_name)
    domain_obj_domain, domain_ids = get_all_objects_for_domain_no_check_ids(domain_name)

    # The six object categories were previously merged by six copy-pasted
    # if/else stanzas; one loop does the same merge for each of them.
    categories = ('hosts', 'ranges', 'networks', 'urls', 'networkgroups', 'urlgroups')
    merged_all_obj_domain = dict()
    for cached_domain, objects_data in cfg.all_obj_domain.items():
        fresh_data = domain_obj_domain.get(cached_domain) or {}
        merged_categories = dict()
        for category in categories:
            if fresh_data.get(category):
                # fresh entries win over cached ones for the same name
                merged_categories[category] = {**objects_data.get(category), **fresh_data.get(category)}
            else:
                merged_categories[category] = objects_data.get(category)
        merged_all_obj_domain.update({cached_domain: merged_categories})

    # Same merge for the name->id lookup tables.
    # NOTE(review): when domain_ids is empty, cfg.all_ids_domain is replaced
    # by an empty dict (all cached ids are dropped).  Preserved as-is, but
    # this looks suspicious and is worth confirming.
    merged_all_ids_domain = dict()
    if domain_ids:
        for cached_domain, object_id_dicts in cfg.all_ids_domain.items():
            tmp_dict = dict()
            if all([object_id_dicts, domain_ids.get(cached_domain)]):
                tmp_dict = {**object_id_dicts, **domain_ids.get(cached_domain)}
            elif all([object_id_dicts, not domain_ids.get(cached_domain)]):
                tmp_dict = object_id_dicts
            elif all([not object_id_dicts, domain_ids.get(cached_domain)]):
                tmp_dict = domain_ids.get(cached_domain)
            merged_all_ids_domain.update({cached_domain: tmp_dict})
    cfg.all_obj_domain = merged_all_obj_domain
    cfg.all_ids_domain = merged_all_ids_domain
1517
1518
def update_all_networkgroups(domain_name):
    """
    Refresh the detailed network-group cache ``cfg.all_detailed_networkgroups``
    with groups re-read from the FMC for *domain_name*.

    NOTE(review): the merge order here keeps CACHED entries over freshly
    fetched ones ({**fresh, **cached}), which is the opposite of
    update_all_objects — confirm this asymmetry is intended.

    :param domain_name: domain name (function should be run for each domain)
    :type domain_name: str
    """
    domain_detailed_networkgroups = get_all_detailed_groups_for_domain(domain_name)

    merged_detailed_networkgroups_domain = dict()
    for cached_domain, cached_groups in cfg.all_detailed_networkgroups.items():
        fresh_groups = domain_detailed_networkgroups.get(cached_domain)
        if fresh_groups:
            merged = {**fresh_groups, **cached_groups}
        else:
            merged = cfg.all_detailed_networkgroups.get(cached_domain)
        merged_detailed_networkgroups_domain.update({cached_domain: merged})

    cfg.all_detailed_networkgroups = merged_detailed_networkgroups_domain
1540
1541
def sort_sheets_by_creation(all_sheets):
    """
    Order sheet names so object sheets are processed before 'groups' sheets.

    Objects must exist on the FMC before they can be placed into groups, so
    any sheet whose dot-separated name contains a 'groups' component is moved
    to the end of the processing order; everything else (including 'urlgrps'
    sheets) keeps its relative position at the front.

    :param all_sheets: iterable of workbook sheet names
    :return: list of sheet names in processing order
    """
    membership_sheets = []
    object_sheets = []
    for sheet_name in all_sheets:
        if 'groups' in sheet_name.strip().split('.'):
            membership_sheets.append(sheet_name)
        else:
            object_sheets.append(sheet_name)
    return object_sheets + membership_sheets
1561
1562
if __name__ == "__main__":
    cfg.init()
    # Make sure the output directory exists and start a fresh error log.
    output_dir = 'outputs'
    os.makedirs(output_dir, exist_ok=True)
    errors_filename = 'outputs/errors.txt'
    with open(errors_filename, "w") as f:
        f.write('')

    # Prime the global caches with the current FMC state.
    cfg.all_domains_json = get_all_domains_data()
    cfg.all_obj_domain, cfg.all_ids_domain = get_all_objects_with_domains()
    cfg.all_devices = get_all_devices()
    cfg.all_detailed_networkgroups = get_all_groups_info()

    if cfg.check_if_file_exist(cfg.input_xlsx):
        ''' read and parse data out of the XLSX Commutation map '''
        # (the previous dead `wb = Workbook()` was removed — it was
        # immediately overwritten by load_workbook)
        wb = load_workbook(cfg.input_xlsx, read_only=False)
        all_sheets = wb.sheetnames

        # Objects first, then group-membership sheets.
        sorted_sheets = sort_sheets_by_creation(all_sheets)
        cfg.sorted_sheets = sorted_sheets

        # diff_before_filename = f'{cfg.diff_before_filename}-{datetime.now().strftime("%Y-%m-%d-%H-%M-%S")}.xlsx'
        # create_diff_excel_file(diff_before_filename)

        for sheet in sorted_sheets:
            ws = wb[sheet]
            names = sheet.strip().split('.')
            if not 'groups' in names and not 'urlgrps' in names:
                # plain object sheet: the sheet name encodes the domain path
                domain_name = sheet.replace('.', '/')
                object_change_function(domain_name, ws)
            elif 'groups' in names:
                if names[-1] == 'groups':
                    names.remove(names[-1])
                domain_name = '/'.join(names)
                del_group_from_parent_group(domain_name, ws)
                change_hosts_nets_in_parent_group(domain_name, ws)
                objects_non_override_to_group(domain_name, ws)
            elif 'urlgrps' in names:
                if names[-1] == 'urlgrps':
                    names.remove(names[-1])
                domain_name = '/'.join(names)
                change_urls_in_group(domain_name, ws)
                url_objects_groups(domain_name, ws)
        # Deletions run in a second pass, after all adds/modifications.
        for sheet in sorted_sheets:
            ws = wb[sheet]
            names = sheet.strip().split('.')
            if not 'groups' in names and not 'urlgrps' in names:
                domain_name = sheet.replace('.', '/')
                del_hosts_nets_for_obj_sheet(domain_name, ws)
        wb.close()

        create_xlsx_and_sheets(cfg.output_xlsx)
        write_hosts_network_objects_to_xlsx(cfg.output_xlsx)
        write_group_objects_to_xlsx(cfg.output_xlsx)
        write_urlgrps_to_xlsx(cfg.output_xlsx)
        # diff_after_filename = f'{cfg.diff_after_filename}-{datetime.now().strftime("%Y-%m-%d-%H-%M-%S")}.xlsx'
        # create_diff_excel_file(diff_after_filename)
        logging.info(f'\n {datetime.now().strftime("%Y-%m-%d %H:%M:%S")} Execution has been completed with no major exceptions. Done.\n')
    else:
        # No input workbook: dump the current FMC state as a template instead.
        create_xlsx_and_sheets(cfg.input_xlsx)
        write_hosts_network_objects_to_xlsx(cfg.input_xlsx)
        write_group_objects_to_xlsx(cfg.input_xlsx)
        write_urlgrps_to_xlsx(cfg.input_xlsx)