# cisco_fmc_api_via_excel_operations
# NOTE(review): scraped repository-page header converted to a comment.
# Original metadata (translated from Russian): Fork 0, 1629 lines, 71.3 KB.
1
#!/usr/bin/python3
2
'''
3
Add/Delete/Modify netobjects, rangeobjects, hostobjects to different domains
4
Add/Delete/Modify groups with objects
5
'''
6
from get_func import get_all_domains_data, get_all_objects_with_domains, get_all_objects_for_domain, get_all_groups_info, get_all_devices, get_domain_uuid, get_object_data, check_if_object_already_exist, get_all_detailed_groups_for_domain, get_all_objects_for_domain_no_check_ids
7
from put_func import post_network_objects, post_range_objects, post_host_objects, post_groups_objects, del_groups, put_networkgroups, del_objects, put_object, post_url_objects, put_urlgroups, post_urlgroups_objects, del_urlgroups
8
from fmc_create_xlsx_obj_groups import create_xlsx_and_sheets, write_group_objects_to_xlsx, write_hosts_network_objects_to_xlsx, write_urlgrps_to_xlsx
9
from fmc_excel_diff_input_output import create_diff_excel_file
10
import pandas as pd
11
import json
12
import cfg
13
from openpyxl import Workbook
14
from openpyxl import load_workbook
15
import logging
16
import re
17
import copy
18
import os
19
from datetime import datetime
20

21

22

23
# Module-wide logging: show thread name, logger name and level for every record.
logging.basicConfig(format='%(threadName)s %(name)s %(levelname)s: %(message)s',level=logging.INFO)
24

25

26
def check_parent_group_non_override(obj, domain_name):
    """
    Find every non-overridable network group in *domain_name* whose member
    list contains the object named *obj*.

    :param obj: object name to look up inside group memberships
    :type obj: str
    :param domain_name: FMC domain whose cached groups are searched
    :type domain_name: str
    :return: mapping of parent group name -> index of *obj* inside that
        group's ``objects`` list (the index is later used to delete the member)
    :rtype: dict
    """
    all_detailed_networkgroups = cfg.all_detailed_networkgroups
    parent_groups_dict = dict()
    for networkgroup, networkgroup_data in all_detailed_networkgroups[domain_name].items():
        # Only non-overridable groups are considered here.
        if not networkgroup_data['overridable']:
            try:
                # 'member' rather than 'object' to avoid shadowing the builtin.
                for index, member in enumerate(networkgroup_data['objects']):
                    if member['name'] == obj:
                        parent_groups_dict.update({networkgroup: index})
            except KeyError as error:
                # Group has no 'objects' key (e.g. literal-only group):
                # record the anomaly and keep scanning the remaining groups.
                logging.info(f'{datetime.now().strftime("%Y-%m-%d %H:%M:%S")} networkgroup_data objects of {networkgroup_data} do not exist')
                errors_filename = 'outputs/errors.txt'
                with open(errors_filename, "a") as f:
                    f.write(f'{datetime.now().strftime("%Y-%m-%d %H:%M:%S")} networkgroup_data objects of {networkgroup_data} do not exist\n Error: {error}\n')
    # logging.info(f'networkgroups = {parent_groups_dict}')
    return parent_groups_dict
42

43

44
def check_parent_urlgroup(obj, domain_name):
    """
    Find every non-overridable URL group in *domain_name* whose member list
    contains the object named *obj*.

    :param obj: object name to look up inside URL-group memberships
    :type obj: str
    :param domain_name: FMC domain whose cached URL groups are searched
    :type domain_name: str
    :return: mapping of parent group name -> index of *obj* inside that
        group's ``objects`` list
    :rtype: dict
    """
    parent_groups_dict = dict()
    for urlgroup, urlgroup_data in cfg.all_obj_domain.get(domain_name).get('urlgroups').items():
        # Only non-overridable groups are considered here.
        if not urlgroup_data['overridable']:
            try:
                # 'member' rather than 'object' to avoid shadowing the builtin.
                for index, member in enumerate(urlgroup_data['objects']):
                    if member['name'] == obj:
                        parent_groups_dict.update({urlgroup: index})
            except KeyError as error:
                # Group has no 'objects' key: record the anomaly and keep
                # scanning the remaining URL groups.
                logging.info(
                    f'{datetime.now().strftime("%Y-%m-%d %H:%M:%S")} urlgroup_data objects of {urlgroup_data} do not exist')
                errors_filename = 'outputs/errors.txt'
                with open(errors_filename, "a") as f:
                    f.write(
                        f'{datetime.now().strftime("%Y-%m-%d %H:%M:%S")} urlgroup_data objects of {urlgroup_data} do not exist\n Error: {error}\n')
    # logging.info(f'urlgroups = {parent_groups_dict}')
    return parent_groups_dict
61

62

63
def create_del_json_non_override(parent_group_name, obj_del_index, obj, domain_name):
    """
    Build the PUT payload for *parent_group_name* with the member at
    *obj_del_index* removed.

    NOTE: the member is deleted in place from the cached group data in
    ``cfg.all_detailed_networkgroups``; callers refresh that cache after the
    API calls complete.

    :param parent_group_name: name of the group being modified
    :param obj_del_index: index (within the group's ``objects`` list) of the member to drop
    :param obj: name of the member being removed (kept for interface compatibility; unused here)
    :param domain_name: FMC domain the group belongs to
    :return: stripped JSON dict containing only the fields the FMC PUT endpoint expects
    :rtype: dict
    """
    group_json = cfg.all_detailed_networkgroups[domain_name][parent_group_name]
    # Remove the member in place from the cached group definition.
    del group_json['objects'][obj_del_index]
    # Strip the payload down to the fields accepted by the PUT endpoint.
    return {
        'id': group_json['id'],
        'overridable': group_json['overridable'],
        'description': group_json['description'],
        'objects': group_json['objects'],
        'name': group_json['name'],
        'type': group_json['type'],
    }
81

82

83
def create_del_json_urlgrp_non_override(parent_group_name, obj_del_index, obj, domain_name):
    """
    Build the PUT payload for URL group *parent_group_name* with the member
    at *obj_del_index* removed.

    NOTE: the member is deleted in place from the cached group data in
    ``cfg.all_obj_domain``; callers refresh that cache after the API calls
    complete.

    :param parent_group_name: name of the URL group being modified
    :param obj_del_index: index (within the group's ``objects`` list) of the member to drop
    :param obj: name of the member being removed (kept for interface compatibility; unused here)
    :param domain_name: FMC domain the group belongs to
    :return: stripped JSON dict containing only the fields the FMC PUT endpoint expects
    :rtype: dict
    """
    group_json = cfg.all_obj_domain[domain_name]['urlgroups'][parent_group_name]
    # Remove the member in place from the cached group definition.
    del group_json['objects'][obj_del_index]
    # Strip the payload down to the fields accepted by the PUT endpoint.
    return {
        'id': group_json['id'],
        'overridable': group_json['overridable'],
        'description': group_json['description'],
        'objects': group_json['objects'],
        'name': group_json['name'],
        'type': group_json['type'],
    }
100

101

102
def check_if_object_value_changed(object_name, obj_value, domain_name):
    """
    Return True when the value cached for *object_name* on the FMC differs
    from *obj_value*, False otherwise.

    System-defined objects are never reported as changed. When the object is
    not found in the cached data the result is also False.

    :param object_name: name of the object to compare
    :param obj_value: desired value (from the Excel input)
    :param domain_name: FMC domain to look the object up in
    :rtype: bool
    """
    all_obj_domain = cfg.all_obj_domain
    system_objects = cfg.system_objects

    if object_name in system_objects:
        # Built-in FMC objects are read-only; never report them as changed.
        return False

    if all_obj_domain:
        # object_type = check_object_type(object_name)
        for object_type in cfg.object_types:
            if all_obj_domain[domain_name].get(object_type).get(object_name):
                # URL objects keep their value under 'url'; every other
                # object type stores it under 'value'.
                if object_type == 'urls':
                    object_value_in_fmc = all_obj_domain[domain_name][object_type][object_name].get('url')
                else:
                    object_value_in_fmc = all_obj_domain[domain_name][object_type][object_name].get('value')
                if object_value_in_fmc != obj_value:
                    return True
    # Previously this fell through returning None; make the negative case
    # explicit (None and False are both falsy for existing callers).
    return False
119
    
120

121
def del_group_from_parent_group(domain_name, ws):
    """
    del_group_from_parent_group delete group from parent group.
    1st: check whether the group (to be deleted) is within any group, if no - just delete it
    2nd: if in group, get group elements (method: all_detailed_networkgroups = get_all_groups_info())
    3rd: create json of the parent group without the group (to be deleted)
    4th: PUT via API the created json via request to the parent group ID
    5th: in case the parent group consists from only one element(which is to be deleted), then delete the whole parent group
    
    :param domain_name: domain name
    :type domain_name: str()
    :param ws: sheet which has groups and objects listed
    :type ws: openpyxl class object
    """
    try:
        # uuid is only fetched to verify the domain exists; the value itself
        # is not used below.
        uuid = get_domain_uuid(domain_name)['uuid']
    except TypeError:
        # NOTE(review): processing continues after logging the unknown
        # domain — confirm that is the intended behavior.
        errors_filename = 'outputs/errors.txt'
        with open(errors_filename, "a") as f:
            f.write(f'Domain {domain_name} do not exist\n')

    ''' get size of xlsx sheet objects'''
    max_row = ws.max_row
    max_column = ws.max_column
    
    ''' check for empty lines in Excel '''
    # Walk up from the bottom until the first non-empty column-A cell, so
    # trailing blank rows are not iterated below.
    max_value = copy.deepcopy(max_row)
    for i in reversed(range(1, max_value+1)):
        some_data = ws.cell(row=i, column=1).value
        if some_data:
            max_row = i
            break
        elif not some_data:
            max_row -= 1      
    
    
    # Groups that still have members after the removal -> PUT (modify);
    # groups left empty -> DELETE.
    add_modify_non_override_payload = dict()
    add_delete_payload = dict()
   
    # Row 1 holds headers; data starts at row 2.
    for i in range(2, max_row+1):
        group_name = ws.cell(row=i, column=1).value
        obj = ws.cell(row=i, column=2).value
        action = ws.cell(row=i, column=3).value
        
        if group_name:
            group_name = group_name.strip()
        if obj:
            obj = obj.strip()
        if action:
            action = action.strip()
        
        if action:
            # Only rows that name an object AND request 'delete' are handled.
            del_group_in_non_override_group = [
                obj, 
                action == 'delete']
            
            if all(del_group_in_non_override_group):
                # if obj.startswith(OBJECT_GROUP_NAME_START):
                parent_group = check_parent_group_non_override(
                    obj, domain_name)
                if parent_group:
                    for parent_group_name, obj_del_index in parent_group.items():
                        # Only remove the member from the group named in the
                        # same Excel row.
                        if parent_group_name == group_name:
                            new_parent_group_json = create_del_json_non_override(
                                parent_group_name, obj_del_index, obj, domain_name)
                            if len(new_parent_group_json['objects']) > 0:
                                add_modify_non_override_payload.update({new_parent_group_json['name']: new_parent_group_json})
                            else:
                                add_delete_payload.update({new_parent_group_json['name']: new_parent_group_json})

    # Push the modified (still non-empty) groups via PUT.
    if add_modify_non_override_payload:
        for group_name, group_data in add_modify_non_override_payload.items():
            if len(group_data['objects']) > 0:
                try:
                    put_networkgroups(group_data, domain_name)
                except KeyError:
                    logging.info(
                        f'domain = {domain_name} do not has Group objects to modify')
    # Delete the groups that would be left empty.
    if add_delete_payload:
        for group_name, group_data in add_delete_payload.items():
            try:
                del_groups(group_data, domain_name)
            except KeyError:
                logging.info(
                    f'domain = {domain_name} do not has Group objects to modify')
    # logging.info(f"\n{50*'#'}"
    #              f'\n{datetime.now().strftime("%Y-%m-%d %H:%M:%S")} Finished checking DEL request to groups in {domain_name}\n'
    #              f"{50*'#'}")
    if any([add_modify_non_override_payload, add_delete_payload]):
        ''' update all_objects cause '''
        # Refresh the cached object/group data after any API change.
        update_all_objects(domain_name)
        update_all_networkgroups(domain_name)
213

214

215
def do_ask_user_input(user_prompt):
    """
    Ask the operator a yes/no question until a valid answer arrives.

    Pressing Enter with no text counts as 'n' (the default action).

    :param user_prompt: text shown to the operator
    :return: 'y' or 'n'
    """
    while True:
        answer = input(user_prompt).lower()
        if not answer:
            # Bare Enter -> default action "no".
            answer = 'n'
        if answer in ('y', 'n'):
            return answer
227

228

229
def remove_space_obj(obj):
    """
    Collapse the first internal run of whitespace out of *obj*.

    Only one whitespace run (with at least one character on each side) is
    removed per call; callers loop with match_space() until clean.

    :param obj: object name possibly containing whitespace
    :return: the name with the first whitespace run removed, or *obj*
        unchanged when no such run exists
    """
    pattern = (
        r'(?P<begin>.+?(?=\s+))'
        r'\s+'
        r'(?P<end>.+)'
    )
    found = re.search(pattern, obj)
    if found:
        return found.group('begin') + found.group('end')
    return obj
237

238

239
def get_group_data_from_detailed_networgroups(group_names, domain_name):
    """
    Collect the sorted member-name lists of the given groups from the cached
    detailed network-group data.

    :param group_names: iterable of group names to look up
    :param domain_name: FMC domain the groups belong to
    :return: dict mapping group name -> sorted list of member object names
    """
    domain_groups = cfg.all_detailed_networkgroups.get(domain_name)
    result = dict()
    for group_name in group_names:
        members = domain_groups.get(group_name).get('objects')
        result[group_name] = sorted(member.get('name') for member in members)
    return result
248

249

250
def compare_group_data_to_avoid_uneeded_puts(ws, domain_name):
    """
    Return the names of groups whose desired membership (from the Excel
    sheet 'add' rows) differs from the cached FMC membership, so that only
    those groups need a PUT.

    Open excel sheet data as pandas DataFrame. Remove first row out of the
    DataFrame as data row. Use first row as columns name for the DataFrame.

    :param ws: openpyxl worksheet with object_group_name / object / action columns
    :param domain_name: FMC domain to compare against
    :return: list of group names that need updating
    """
    df = pd.DataFrame(ws.values)
    new_header = df.iloc[0]
    df = df[1:]
    df = df.rename(columns=new_header)

    """ temp list to include all data exported out of the DataFrame """
    list_sorted_groups_False_Add = list()
    dict_sorted_groups_for_comparison = dict()
    # Group rows by the (group name, action) pair.
    df_groups_sort = df.groupby(['object_group_name', 'action'])

    for group, group_data1 in df_groups_sort:
        # group is the (object_group_name, action) tuple; only 'add' rows
        # describe the desired membership.
        if (group[1] == 'add'):
            temp_group = df_groups_sort.get_group(group)
            temp = temp_group.to_dict('records')
            list_sorted_groups_False_Add.append(temp)
            # Desired membership: sorted member names for this group.
            tmp_list = [item.get('object') for item in temp]
            dict_sorted_groups_for_comparison.update({group[0]: sorted(tmp_list)})
    
    group_names = list(dict_sorted_groups_for_comparison)
    
    # Current membership on the FMC, also as sorted name lists, so the two
    # sides compare with a plain list equality.
    dict_group_data_from_detailed_networgroups = get_group_data_from_detailed_networgroups(group_names, domain_name)
    different_groups = list()
    for group, group_data in dict_sorted_groups_for_comparison.items():
        # Identical sorted member lists -> no PUT needed for this group.
        if group_data == dict_group_data_from_detailed_networgroups.get(group):
            continue
        else:
            different_groups.append(group)
    return different_groups    
282
        
283

284
def change_hosts_nets_in_parent_group(domain_name, ws):
    """
    del_hosts_nets_from_parent_group delete group from parent group.
    1st: check whether the group (to be deleted) is within any group, if no - just delete it
    2nd: if in group, get group elements (method: all_detailed_networkgroups = get_all_groups_info())
    3rd: create json of the parent group without the group (to be deleted)
    4th: PUT via API the created json via request to the parent group ID
    5th: in case the parent group consists from only one element(which is to be deleted), then delete the whole parent group
    
    :param domain_name: domain name
    :type domain_name: str()
    :param ws: sheet which has groups and objects listed
    :type ws: openpyxl class object
    """
    try:
        # uuid is only fetched to verify the domain exists; the value itself
        # is not used below.
        uuid = get_domain_uuid(domain_name)['uuid']
    except TypeError as error:
        # NOTE(review): processing continues after logging the unknown
        # domain — confirm that is the intended behavior.
        logging.info(f'Domain {domain_name} do not exist either on FMC, either in Excel sheet')
        errors_filename = 'outputs/errors.txt'
        with open(errors_filename, "a") as f:
            f.write(f'Domain {domain_name} do not exist either on FMC, either in Excel sheet\n Error: {error}\n')

    ''' get size of xlsx sheet objects'''
    max_row = ws.max_row
    max_column = ws.max_column

    ''' check for empty lines in Excel '''
    # Walk up from the bottom until the first non-empty column-A cell, so
    # trailing blank rows are not iterated below.
    max_value = copy.deepcopy(max_row)
    for i in reversed(range(1, max_value+1)):
        some_data = ws.cell(row=i, column=1).value
        if some_data:
            max_row = i
            break
        elif not some_data:
            max_row -= 1

    # Groups that still have members after the removal -> PUT (modify);
    # groups left empty -> DELETE.
    add_modify_non_override_payload = dict()
    add_delete_payload = dict()

    # Row 1 holds headers; data starts at row 2.
    for i in range(2, max_row+1):
        group_name = ws.cell(row=i, column=1).value
        obj = ws.cell(row=i, column=2).value
        action = ws.cell(row=i, column=3).value

        if group_name:
            group_name = group_name.strip()
        if obj:
            obj = obj.strip()
        if action:
            action = action.strip()
        if action:
            # Only rows that name an object AND request 'delete' are handled.
            del_obj_in_non_override_group = [
                obj,
                action == 'delete']
            if all(del_obj_in_non_override_group):
                # if any([obj.startswith(OBJECT_HOST_NAME_START), obj.startswith(OBJECT_SUBNET_NAME_START), obj.startswith(OBJECT_RANGE_NAME_START)]):
                parent_group = check_parent_group_non_override(
                    obj, domain_name)
                if parent_group:
                    for parent_group_name, obj_del_index in parent_group.items():
                        # Only remove the member from the group named in the
                        # same Excel row.
                        if parent_group_name == group_name:
                            new_parent_group_json = create_del_json_non_override(
                                parent_group_name, obj_del_index, obj, domain_name)
                            if len(new_parent_group_json['objects']) > 0:
                                add_modify_non_override_payload.update(
                                    {new_parent_group_json['name']: new_parent_group_json})
                            else:
                                add_delete_payload.update(
                                    {new_parent_group_json['name']: new_parent_group_json})
    # Push the modified (still non-empty) groups via PUT.
    if add_modify_non_override_payload:
        for group_name, group_data in add_modify_non_override_payload.items():
            if len(group_data['objects']) > 0:
                try:
                    put_networkgroups(group_data, domain_name)
                except KeyError:
                    logging.info(
                        f'domain = {domain_name} do not has Group objects to modify')
    # Delete the groups that would be left empty.
    if add_delete_payload:
        for group_name, group_data in add_delete_payload.items():
            try:
                del_groups(group_data, domain_name)
            except KeyError:
                logging.info(
                    f'domain = {domain_name} do not has Group objects to modify')
    if any([add_modify_non_override_payload, add_delete_payload]):
        ''' update all_objects cause new hosts have been just added '''
        # Refresh the cached object/group data after any API change.
        update_all_objects(domain_name)
        update_all_networkgroups(domain_name)
372

373

374
def change_urls_in_group(domain_name, ws):
    """
    change_urls_in_group delete group from parent group.
    1st: check whether the group (to be deleted) is within any group, if no - just delete it
    2nd: if in group, get group elements (method: all_detailed_networkgroups = get_all_groups_info())
    3rd: create json of the parent group without the group (to be deleted)
    4th: PUT via API the created json via request to the parent group ID
    5th: in case the parent group consists from only one element(which is to be deleted), then delete the whole parent group
    
    :param domain_name: domain name
    :type domain_name: str()
    :param ws: sheet which has groups and objects listed
    :type ws: openpyxl class object
    """
    try:
        # uuid is only fetched to verify the domain exists; the value itself
        # is not used below.
        uuid = get_domain_uuid(domain_name)['uuid']
    except TypeError as error:
        # NOTE(review): processing continues after logging the unknown
        # domain — confirm that is the intended behavior.
        logging.info(
            f'Domain {domain_name} do not exist either on FMC, either in Excel sheet')
        errors_filename = 'outputs/errors.txt'
        with open(errors_filename, "a") as f:
            f.write(
                f'Domain {domain_name} do not exist either on FMC, either in Excel sheet\n Error: {error}\n')

    ''' get size of xlsx sheet objects'''
    max_row = ws.max_row
    max_column = ws.max_column

    ''' check for empty lines in Excel '''
    # Walk up from the bottom until the first non-empty column-A cell, so
    # trailing blank rows are not iterated below.
    max_value = copy.deepcopy(max_row)
    for i in reversed(range(1, max_value+1)):
        some_data = ws.cell(row=i, column=1).value
        if some_data:
            max_row = i
            break
        elif not some_data:
            max_row -= 1

    # URL groups that still have members after the removal -> PUT (modify);
    # groups left empty -> DELETE.
    add_modify_non_override_payload = dict()
    add_delete_payload = dict()

    # Row 1 holds headers; data starts at row 2.
    for i in range(2, max_row+1):
        group_name = ws.cell(row=i, column=1).value
        obj = ws.cell(row=i, column=2).value
        action = ws.cell(row=i, column=3).value

        if group_name:
            group_name = group_name.strip()
        if obj:
            obj = obj.strip()
        if action:
            action = action.strip()
        if action:
            # Only rows that name an object AND request 'delete' are handled.
            del_obj_in_non_override_group = [
                obj,
                action == 'delete']
            if all(del_obj_in_non_override_group):
                # if any([obj.startswith(OBJECT_HOST_NAME_START), obj.startswith(OBJECT_SUBNET_NAME_START), obj.startswith(OBJECT_RANGE_NAME_START)]):
                parent_group = check_parent_urlgroup(
                    obj, domain_name)
                if parent_group:
                    for parent_group_name, obj_del_index in parent_group.items():
                        # Only remove the member from the group named in the
                        # same Excel row.
                        if parent_group_name == group_name:
                            new_parent_group_json = create_del_json_urlgrp_non_override(
                                parent_group_name, obj_del_index, obj, domain_name)
                            if len(new_parent_group_json['objects']) > 0:
                                add_modify_non_override_payload.update(
                                    {new_parent_group_json['name']: new_parent_group_json})
                            else:
                                add_delete_payload.update(
                                    {new_parent_group_json['name']: new_parent_group_json})
    # Push the modified (still non-empty) URL groups via PUT.
    if add_modify_non_override_payload:
        for group_name, group_data in add_modify_non_override_payload.items():
            if len(group_data['objects']) > 0:
                try:
                    put_urlgroups(group_data, domain_name)
                except KeyError:
                    logging.info(
                        f'domain = {domain_name} do not has Group objects to modify')
    # Delete the URL groups that would be left empty.
    if add_delete_payload:
        for group_name, group_data in add_delete_payload.items():
            try:
                del_urlgroups(group_data, domain_name)
            except KeyError:
                logging.info(
                    f'domain = {domain_name} do not has Group objects to modify')
    # NOTE(review): unlike the network-group siblings, the refresh below
    # checks only the modify payload (not add_delete_payload) — confirm
    # whether deletions should also trigger a cache refresh.
    if any([add_modify_non_override_payload]):
        ''' update all_objects cause new objects were added '''
        update_all_objects(domain_name)
464
        
465

466
def del_hosts_nets_for_obj_sheet(domain_name, ws):
    """
    Delete standalone host/network objects listed with action 'delete' on
    the objects sheet. An object still referenced by a non-overridable group
    is skipped with a warning; the operator must detach it first.

    :param domain_name: domain name
    :type domain_name: str()
    :param ws: sheet which has objects listed
    :type ws: openpyxl class object
    """
    
    try:
        # uuid is only fetched to verify the domain exists; the value itself
        # is not used below.
        uuid = get_domain_uuid(domain_name)['uuid']
    except TypeError as error:
        # NOTE(review): processing continues after logging the unknown
        # domain — confirm that is the intended behavior.
        logging.info(f'Domain {domain_name} do not exist either on FMC, either in Excel sheet')
        errors_filename = 'outputs/errors.txt'
        with open(errors_filename, "a") as f:
            f.write(f'Domain {domain_name} do not exist either on FMC, either in Excel sheet\n Error: {error}\n')

    ''' get size of xlsx sheet objects'''
    max_row = ws.max_row
    max_column = ws.max_column

    ''' check for empty lines in Excel '''
    # Walk up from the bottom until the first non-empty column-A cell, so
    # trailing blank rows are not iterated below.
    max_value = copy.deepcopy(max_row)
    for i in reversed(range(1, max_value+1)):
        some_data = ws.cell(row=i, column=1).value
        if some_data:
            max_row = i
            break
        elif not some_data:
            max_row -= 1
            
    # NOTE(review): the first two payload dicts are never populated in this
    # function (only add_delete_obj_payload is), so the PUT/DELETE-group
    # branches and the final cache refresh never fire here — verify.
    add_modify_non_override_payload = dict()
    add_delete_payload = dict()
    add_delete_obj_payload = dict()

    """
    Open excel sheet data as pandas DataFrame. Remove first row out of the DataFrame as data row. Use first row as columns name for the DataFrame.
    """    
    df = pd.DataFrame(ws.values)
    new_header = df.iloc[0]
    df = df[1:]
    df = df.rename(columns=new_header)
    
    """ temp list to include all data exported out of the DataFrame """
    list_sorted_groups_Del = list()
    
    # Group rows by the (object name, action) pair; only 'delete' rows are
    # collected for processing.
    df_groups_sort = df.groupby(['object_name', 'action'])
    
    for group, group_data1 in df_groups_sort:
        if (group[1] == 'delete'):
            temp_group = df_groups_sort.get_group(group)
            temp = temp_group.to_dict('records')
            list_sorted_groups_Del.append(temp)
           
    for groups_desc in list_sorted_groups_Del:
        check_object_name = groups_desc[0].get('object_name')
        # if any([check_object_name.startswith(OBJECT_HOST_NAME_START), check_object_name.startswith(OBJECT_SUBNET_NAME_START), check_object_name.startswith(OBJECT_RANGE_NAME_START)]):
        # Objects still attached to a non-overridable group cannot be
        # deleted; warn the operator and skip them.
        parent_group = check_parent_group_non_override(check_object_name, domain_name)
        if parent_group:
            logging.info(f'Warning! Object {check_object_name} is binded with group {parent_group}. Please remove it from the group membership first!')
            errors_filename = 'outputs/errors.txt'
            with open(errors_filename, "a") as f:
                f.write(f'Warning! Object {check_object_name} is binded with group {parent_group}. Please remove it from the group membership first!')
            continue
        else:
            try:
                # Collect the object's cached data so it can be deleted by ID.
                if check_if_object_already_exist(check_object_name, domain_name):
                    object_name, object_data = get_object_data(check_object_name, domain_name)
                    add_delete_obj_payload.update({object_name: object_data})
            except KeyError as error:
                logging.info(f'Object {check_object_name} do not exist')
                errors_filename = 'outputs/errors.txt'
                with open(errors_filename, "a") as f:
                    f.write(f'Object {check_object_name} do not exist\n Error: {error}\n')

    if add_modify_non_override_payload:
        for group_name, group_data in add_modify_non_override_payload.items():
            if len(group_data['objects']) > 0:
                try:
                    put_networkgroups(group_data, domain_name)
                except KeyError:
                    logging.info(
                        f'domain = {domain_name} do not has Group objects to modify')

    if add_delete_payload:
        for group_name, group_data in add_delete_payload.items():
            try:
                del_groups(group_data, domain_name)
            except KeyError:
                logging.info(
                    f'domain = {domain_name} do not has Group objects to modify')
    # Delete the collected standalone objects via the API.
    if add_delete_obj_payload:
        for obj_name, obj_data in add_delete_obj_payload.items():
            try:
                del_objects(obj_data, domain_name)
            except KeyError:
                logging.info(
                    f'domain = {domain_name} do not has Group objects to modify')

    if any([add_modify_non_override_payload, add_delete_payload]):
        ''' update all_objects cause new hosts have been just added '''
        # Refresh the cached object/group data after any API change.
        update_all_objects(domain_name)
        update_all_networkgroups(domain_name)
575

576

577
def match_space(object_name):
    """
    Report whether *object_name* contains any whitespace.

    :param object_name: name to inspect
    :return: True when at least one whitespace character is present,
        False otherwise
    """
    return re.search(
        r'\s+', object_name
    ) is not None
585

586

587
def _last_non_empty_row(ws):
    """Return the index of the last row whose first column is non-empty
    (0 when the sheet's first column is entirely empty)."""
    for row in reversed(range(1, ws.max_row + 1)):
        if ws.cell(row=row, column=1).value:
            return row
    return 0


def _chunked(items, size=1000):
    """Yield *items* in lists of at most *size* elements.

    Replaces the old ``globals()['..._chunk_list%s']`` splitter, which
    polluted the global namespace and POSTed an empty trailing chunk
    whenever ``len(items)`` was an exact multiple of 1000.
    """
    for start in range(0, len(items), size):
        yield items[start:start + size]


def object_change_function(domain_name, ws):
    """
    object_change_function function to add, delete objects

    Reads the sheet row by row (column 1 = object name, column 2 =
    value, column 3 = action 'add'/'modify', column 4 = type
    'host'/'network'/'range'/'url'), then POSTs new objects in chunks
    of 1000 and PUTs modified ones.

    :param domain_name: domain name of the domain to change object
    :type domain_name: str
    :param ws: Excel sheet where objects are provided
    :type ws: openpyxl class
    """
    all_obj_domain = cfg.all_obj_domain

    ''' check for empty lines in Excel '''
    max_row = _last_non_empty_row(ws)

    # payload lists keyed by object type; replaces eight parallel locals
    add_payloads = {'host': [], 'network': [], 'range': [], 'url': []}
    modify_payloads = {'host': [], 'network': [], 'range': [], 'url': []}

    for i in range(2, max_row + 1):
        object_name = ws.cell(row=i, column=1).value
        if object_name:
            object_name = object_name.strip()
            if match_space(object_name):
                print(f'SPACE is present in object_name name {object_name}')
                remove_space = do_ask_user_input(
                    'Remove SPACE from the object_name [y/N] ') == 'y'
                if remove_space:
                    while match_space(object_name):
                        object_name = remove_space_obj(object_name)

        obj = ws.cell(row=i, column=2).value
        object_action = ws.cell(row=i, column=3).value
        object_type = ws.cell(row=i, column=4).value
        if object_action:
            object_action = object_action.lower()
        if object_type:
            object_type = object_type.lower()
        # rows without a value, or with an unknown type, are ignored
        if not obj or object_type not in add_payloads:
            continue
        obj = obj.strip()

        exists = check_if_object_already_exist(object_name, domain_name)
        # url objects carry their value under "url", the rest under "value"
        value_key = 'url' if object_type == 'url' else 'value'

        if object_action == 'add' and not exists:
            # dicts are built directly instead of formatting a JSON
            # string and json.loads-ing it, so names/values containing
            # quotes no longer break the payload
            add_payloads[object_type].append(
                {'name': object_name, value_key: obj, 'type': object_type})
        elif object_action == 'modify' and exists:
            if check_if_object_value_changed(object_name, obj, domain_name):
                modify_payloads[object_type].append({
                    'name': object_name,
                    value_key: obj,
                    'type': object_type,
                    'id': all_obj_domain[domain_name][f'{object_type}s'].get(object_name).get('id'),
                })

    # dispatch table: one POST function per object type
    post_funcs = {
        'host': post_host_objects,
        'network': post_network_objects,
        'range': post_range_objects,
        'url': post_url_objects,
    }

    ''' now to POST our lists of new objects, at most 1000 per request '''
    for obj_type, payload in add_payloads.items():
        if not payload:
            continue
        try:
            domain_uuid = get_domain_uuid(domain_name)['uuid']
            for chunk in _chunked(payload):
                logging.info(f'Starting to add {len(chunk)} {obj_type} objects')
                post_funcs[obj_type](chunk, domain_uuid)
        except KeyError:
            logging.info(
                f'domain = {domain_name} do not has {obj_type.capitalize()} objects to add')

    ''' now to PUT our lists of modified objects '''
    for obj_type, payload in modify_payloads.items():
        if not payload:
            continue
        try:
            domain_uuid = get_domain_uuid(domain_name)['uuid']
            for item in payload:
                put_object(item, domain_uuid)
        except KeyError:
            logging.info(
                f'domain = {domain_name} do not has {obj_type.capitalize()} objects to modify')

    if any(add_payloads.values()):
        ''' update all_objects cause new hosts have been just added '''
        update_all_objects(domain_name)
        update_all_networkgroups(domain_name)
905
   
906

907
def create_json_put_obj(group_data, group_object_data, override_object=False):
    """
    Build the request body for a PUT of an existing (non-overridable)
    NetworkGroup: the new member objects first, followed by the members
    the group already has on the FMC.

    :param group_data: dict with at least 'name' and 'domain_name'
    :param group_object_data: iterable of member-object dicts to prepend
    :param override_object: override handling is not implemented; when
        True an empty dict is returned (kept for callers)
    :return: request body dict, or {} when override_object is True
    """
    if override_object:
        return {}

    group_name = group_data['name']
    domain_name = group_data['domain_name']

    # new members first, then the group's current members from the cache
    current_members = cfg.all_detailed_networkgroups[domain_name][group_name]['objects']
    merged_members = list(group_object_data) + list(current_members)

    object_name, object_data = get_object_data(group_name, domain_name)
    return {
        "objects": merged_members,
        "type": "NetworkGroup",
        "name": group_name,
        "id": object_data['id'],
    }
923

924

925
def create_json_url_put_obj(group_data, group_object_data, override_object=False):
    """
    Build the request body for a PUT of an existing UrlGroup: the new
    member objects first, followed by the members the group already has
    on the FMC.

    :param group_data: dict with at least 'name' and 'domain_name'
    :param group_object_data: iterable of member-object dicts to prepend
    :param override_object: override handling is not implemented; when
        True an empty dict is returned (kept for callers)
    :return: request body dict, or {} when override_object is True
    """
    if override_object:
        return {}

    group_name = group_data['name']
    domain_name = group_data['domain_name']

    # new members first, then the group's current members from the cache
    current_members = cfg.all_obj_domain[domain_name]['urlgroups'][group_name]['objects']
    merged_members = list(group_object_data) + list(current_members)

    object_name, object_data = get_object_data(group_name, domain_name)
    return {
        "objects": merged_members,
        "type": "UrlGroup",
        "name": group_name,
        "id": object_data['id'],
    }
942

943
def create_json_group_obj(group_data, group_object_data, override_object=False):
    """
    Build the POST body for a brand-new NetworkGroup.

    :param group_data: dict with at least a 'name' key
    :param group_object_data: list of member-object dicts
    :param override_object: accepted for signature parity; unused
    :return: request body dict, or None when either argument is empty
    """
    if not (group_data and group_object_data):
        return None
    return {
        "objects": group_object_data,
        "type": "NetworkGroup",
        "name": group_data['name'],
    }
950

951

952
def create_json_urlgroup_obj(group_data, group_object_data, override_object=False):
    """
    Build the POST body for a brand-new UrlGroup.

    :param group_data: dict with at least a 'name' key
    :param group_object_data: list of member-object dicts
    :param override_object: accepted for signature parity; unused
    :return: request body dict, or None when either argument is empty
    """
    if not (group_data and group_object_data):
        return None
    return {
        "objects": group_object_data,
        "type": "UrlGroup",
        "name": group_data['name'],
    }
959

960
def remove_space_from_name(group_name):
    """
    Strip a name read from the spreadsheet and silently remove any
    internal whitespace.

    :param group_name: raw name cell value (may be None/empty)
    :type group_name: str or None
    :return: cleaned name, or the original falsy value unchanged
    """
    if group_name:
        group_name = group_name.strip()
        if match_space(group_name):
            print(f'SPACE is present in group name {group_name}')
            # spaces are always removed; the interactive confirmation
            # prompt was retired (commented-out remnant deleted)
            while (match_space(group_name)):
                group_name = remove_space_obj(group_name)
    return group_name
972

973

974
def get_objects_data_for_group(object_data):
    """
    Reduce a full FMC object record to the three fields a group-member
    reference needs.

    :param object_data: object record dict (or any falsy value)
    :return: dict with 'id', 'type' and 'name' keys (missing source keys
        become None), or an empty dict for a falsy input
    """
    if not object_data:
        return {}
    return {field: object_data.get(field) for field in ('id', 'type', 'name')}
979

980

981
def _collect_group_data(groups_desc, domain_name, check_group_name):
    """Resolve the rows of one spreadsheet group into FMC member
    references; returns (group_name, group_data, group_object_data).

    Missing objects are logged and recorded in outputs/errors.txt and
    skipped (they do not abort the group).
    """
    errors_filename = 'outputs/errors.txt'
    group_name = check_group_name
    group_data = dict()
    group_object_data = list()
    for obj in groups_desc:
        group_name = remove_space_from_name(obj.get('object_group_name'))
        obj_name = remove_space_from_name(obj.get('object'))

        if not check_if_object_already_exist(obj_name, domain_name):
            # one original branch built this f-string without logging it
            # (a no-op expression statement); it is always logged now
            logging.info(
                f'Error: object {obj_name} for group {check_group_name} do NOT exist!')
            with open(errors_filename, "a") as f:
                f.write(f'Object {obj_name} do not exist\n')
            continue

        try:
            object_name, object_data = get_object_data(obj_name, domain_name)
            group_object_data.append(get_objects_data_for_group(object_data))
        except KeyError as error:
            logging.info(f'Object {obj_name} do not exist')
            with open(errors_filename, "a") as f:
                f.write(f'Object {obj_name} do not exist\n Error: {error}\n')

        group_data.update({'name': group_name,
                           'domain_name': domain_name,
                           'objects': group_object_data})
    return group_name, group_data, group_object_data


def objects_non_override_to_group(domain_name, ws):
    """
    read xlsx, create groups of objects

    New groups are collected for a POST; groups that already exist in
    the domain are merged with their current members and PUT.

    :param domain_name: domain name for each domain in FMC
    :type domain_name: str
    :param ws: Excel sheet name (should be linked with domain name)
    :type ws: openpyxl class object
    """
    domains_add_groupobj = dict()
    domains_del_groupobj = dict()
    domains_add_override_group = dict()

    try:
        uuid = get_domain_uuid(domain_name)['uuid']
    except TypeError as error:
        logging.info(
            f'Domain {domain_name} do not exist either on FMC, either in Excel sheet')
        errors_filename = 'outputs/errors.txt'
        with open(errors_filename, "a") as f:
            f.write(
                f'Domain {domain_name} do not exist either on FMC, either in Excel sheet\n Error: {error}\n')

    add_group_payload = list()
    add_modify_group_payload = list()
    del_group_payload = list()

    """
    Open excel sheet data as pandas DataFrame. Remove first row out of the DataFrame as data row. Use first row as columns name for the DataFrame.
    (The old max_row/empty-line scan was dead code here: rows are
    consumed through pandas, not through ws.cell.)
    """
    df = pd.DataFrame(ws.values)
    new_header = df.iloc[0]
    df = df[1:]
    df = df.rename(columns=new_header)

    """ keep only the 'add' rows, grouped per group name """
    list_sorted_groups_False_Add = list()
    df_groups_sort = df.groupby(['object_group_name', 'action'])
    for group, group_rows in df_groups_sort:
        if group[1] == 'add':
            list_sorted_groups_False_Add.append(
                df_groups_sort.get_group(group).to_dict('records'))

    for groups_desc in list_sorted_groups_False_Add:
        check_group_name = groups_desc[0].get('object_group_name')
        group_exists = check_if_object_already_exist(check_group_name, domain_name)

        group_name, group_data, group_object_data = _collect_group_data(
            groups_desc, domain_name, check_group_name)
        if not group_object_data:
            continue

        if not group_exists:
            '''Create json-like structure (using dict) for the POST request'''
            add_group_payload.append(
                {group_name: create_json_group_obj(group_data, group_object_data)})
        else:
            '''Group already exists: merge with current members for a PUT'''
            add_modify_group_payload.append(
                {group_name: create_json_put_obj(group_data, group_object_data)})

    if add_group_payload:
        '''Merge same group data from multiple dictionaries from add_group_payload list into one dictionary with groups values list '''
        all_groups_dict = dict()
        for group_entry in add_group_payload:
            for group_name, group_value in group_entry.items():
                all_groups_dict.setdefault(group_name, list()).append(group_value)

        ''' Merge same group data into one dictionary for API request '''
        all_in_one_dict = dict()
        for group, group_bodies in all_groups_dict.items():
            group_objects = list()
            group_data_dict = dict()
            for body in group_bodies:
                group_objects += body.get('objects')
                group_data_dict.update(body)
            group_data_dict.update({'objects': group_objects})
            all_in_one_dict.update({group: group_data_dict})

        domains_add_groupobj.update(
            {domain_name: {'groupObject': list(all_in_one_dict.values())}})
    if del_group_payload:
        domains_del_groupobj.update(
            {domain_name: {'groupObject': del_group_payload}})

    if add_modify_group_payload:
        ''' now to PUT our list of group objects '''
        for item in add_modify_group_payload:
            for group, group_data in item.items():
                try:
                    if group_data:
                        put_networkgroups(group_data, domain_name)
                except KeyError:
                    logging.info(
                        f'domain = {domain_name} do not has Group objects to add')
    if domains_add_groupobj:
        ''' now to POST our list of group objects '''
        for domain, domain_data in domains_add_groupobj.items():
            try:
                if domain_data:
                    post_groups_objects(
                        domain_data['groupObject'],
                        get_domain_uuid(domain)['uuid'])
            except KeyError:
                logging.info(
                    f'domain = {domain} do not has Group objects to add')

    if any([domains_add_groupobj, domains_add_override_group]):
        ''' refresh caches: objects/groups have just changed '''
        update_all_objects(domain_name)
        update_all_networkgroups(domain_name)
1203

1204

1205
def url_objects_groups(domain_name, ws):
    """
    Read the sheet and create/update URL groups with URL objects on the FMC.

    Rows with action == 'add' are grouped by 'url_group_name'.  A group that
    does not exist on the FMC yet is collected into a POST payload; an
    already existing group is collected into a PUT (modify) payload.  URLs
    that do not exist are reported to outputs/errors.txt and skipped.

    :param domain_name: domain name for each domain in FMC
    :type domain_name: str
    :param ws: Excel sheet name (should be linked with domain name)
    :type ws: openpyxl class object
    """
    domains_add_groupobj = dict()
    domains_del_groupobj = dict()

    # existence check only: the uuid is re-resolved by the POST call below
    try:
        uuid = get_domain_uuid(domain_name)['uuid']
    except TypeError as error:
        logging.info(
            f'Domain {domain_name} do not exist either on FMC, either in Excel sheet')
        errors_filename = 'outputs/errors.txt'
        with open(errors_filename, "a") as f:
            f.write(
                f'Domain {domain_name} do not exist either on FMC, either in Excel sheet\n Error: {error}\n')

    ''' get size of xlsx sheet objects'''
    max_row = ws.max_row
    max_column = ws.max_column

    ''' check for empty lines in Excel '''
    # NOTE(review): the trimmed max_row is not used afterwards (the DataFrame
    # below reads the whole sheet); kept to preserve the original behavior.
    max_value = copy.deepcopy(max_row)
    for i in reversed(range(1, max_value+1)):
        some_data = ws.cell(row=i, column=1).value
        if some_data:
            max_row = i
            break
        elif not some_data:
            max_row -= 1

    add_group_payload = list()
    add_modify_group_payload = list()
    del_group_payload = list()

    """
    Open excel sheet data as pandas DataFrame. Remove first row out of the
    DataFrame as data row. Use first row as columns name for the DataFrame.
    """
    df = pd.DataFrame(ws.values)
    new_header = df.iloc[0]
    df = df[1:]
    df = df.rename(columns=new_header)

    """ temp list to include all data exported out of the DataFrame """
    list_sorted_groups_False_Add = list()

    df_groups_sort = df.groupby(['url_group_name', 'action'])

    # keep only the rows whose action is 'add', as lists of row-dicts
    for group, group_data1 in df_groups_sort:
        if (group[1] == 'add'):
            temp_group = df_groups_sort.get_group(group)
            temp = temp_group.to_dict('records')
            list_sorted_groups_False_Add.append(temp)

    for groups_desc in list_sorted_groups_False_Add:

        check_group_name = groups_desc[0].get('url_group_name')
        group_data = dict()
        group_object_data = list()

        if not check_if_object_already_exist(check_group_name, domain_name):
            # the group is new on the FMC -> prepare a POST payload
            for obj in groups_desc:
                group_name = obj.get('url_group_name')
                group_name = remove_space_from_name(group_name)
                obj_name = obj.get('url')
                obj_name = remove_space_from_name(obj_name)

                if not check_if_object_already_exist(obj_name, domain_name):
                    logging.info(f'Error: url {obj_name} do NOT exist!')
                    errors_filename = 'outputs/errors.txt'
                    with open(errors_filename, "a") as f:
                        f.write(
                            f'Object {obj_name} do not exist\n')
                    continue

                # collect the object's reference data for the group payload
                try:
                    object_name, object_data = get_object_data(
                        obj_name, domain_name)
                    object_data_for_group = get_objects_data_for_group(
                        object_data)
                    group_object_data.append(object_data_for_group)
                except KeyError as error:
                    logging.info(f'url {obj_name} do not exist')
                    errors_filename = 'outputs/errors.txt'
                    with open(errors_filename, "a") as f:
                        f.write(
                            f'Object {obj_name} do not exist\n Error: {error}\n')

                group_data.update({'name': group_name})
                group_data.update({'domain_name': domain_name})
                group_data.update({'objects': group_object_data})

            if group_object_data:
                '''Create json-like structure (using dict) for API request'''
                group_objects_json = create_json_urlgroup_obj(
                    group_data,
                    group_object_data)

                '''add to common list which would be run by API request later'''
                add_group_payload.append(
                    {group_name: group_objects_json})

        else:
            # the group already exists -> prepare a PUT (modify) payload.
            # (was a redundant second check_if_object_already_exist() call)
            for obj in groups_desc:
                group_name = obj.get('url_group_name')
                group_name = remove_space_from_name(group_name)
                obj_name = obj.get('url')
                obj_name = remove_space_from_name(obj_name)

                if not check_if_object_already_exist(obj_name, domain_name):
                    # BUG FIX: this message was a bare (no-op) f-string
                    # expression and was never emitted; log it now.
                    logging.info(
                        f'Error: object {obj_name} for group {check_group_name} do NOT exist!')
                    errors_filename = 'outputs/errors.txt'
                    with open(errors_filename, "a") as f:
                        f.write(
                            f'Object {obj_name} DO not exist\n')
                    continue

                # collect the object's reference data for the group payload
                try:
                    object_name, object_data = get_object_data(
                        obj_name, domain_name)
                    object_data_for_group = get_objects_data_for_group(
                        object_data)
                    group_object_data.append(object_data_for_group)
                except KeyError as error:
                    logging.info(f'Object {obj_name} do not exist')
                    errors_filename = 'outputs/errors.txt'
                    with open(errors_filename, "a") as f:
                        f.write(
                            f'Object {obj_name} do not exist\n Error: {error}\n')

                group_data.update({'name': group_name})
                group_data.update({'domain_name': domain_name})
                group_data.update({'objects': group_object_data})

            if group_object_data:
                '''Create json-like structure (using dict) for API request'''
                group_objects_json = create_json_url_put_obj(
                    group_data,
                    group_object_data)

                '''add to common list which would be run by API request later'''
                add_modify_group_payload.append(
                    {group_name: group_objects_json})

    if add_group_payload:
        '''Merge same group data from multiple dictionaries from
        add_group_payload list into one dictionary with groups values list'''
        all_groups_dict = dict()
        for group_data in add_group_payload:
            lst1 = list()
            for group_name, group_value in group_data.items():
                if group_name in all_groups_dict:
                    for element in all_groups_dict[group_name]:
                        lst1.append(element)
                    lst1.append(group_data[group_name])
                    all_groups_dict.update({group_name: lst1})
                else:
                    lst1.append(group_data[group_name])
                    all_groups_dict.update({group_name: lst1})
        ''' Merge same group data into one dictionary for API request '''
        all_in_one_dict = dict()
        for group, group_lists in all_groups_dict.items():
            group_objects = list()
            group_data_dict = dict()
            for item in group_lists:
                group_objects += item.get('objects')
                for key, value in item.items():
                    group_data_dict.update({key: value})
            group_data_dict.update({'objects': group_objects})
            all_in_one_dict.update({group: group_data_dict})

        payload_list = []
        for group, group_data in all_in_one_dict.items():
            payload_list.append(group_data)

        domains_add_groupobj.update(
            {domain_name: {'groupObject': payload_list}})
    if del_group_payload:
        # NOTE(review): del_group_payload is never populated in this function;
        # branch kept for parity with the network-group variant.
        domains_del_groupobj.update(
            {domain_name: {'groupObject': del_group_payload}})

    if add_modify_group_payload:
        ''' now to PUT our list of group objects '''
        for item in add_modify_group_payload:
            for group, group_data in item.items():
                try:
                    if group_data:
                        put_urlgroups(group_data, domain_name)
                except KeyError:
                    logging.info(
                        f'domain = {domain_name} do not has UrlGroup objects to add')
    if domains_add_groupobj:
        ''' now to POST our list of group objects '''
        for domain, domain_data in domains_add_groupobj.items():
            try:
                if domain_data:
                    post_urlgroups_objects(
                        domain_data['groupObject'],
                        get_domain_uuid(domain)['uuid'])
            except KeyError:
                logging.info(
                    f'domain = {domain} do not has UrlGroup objects to add')
    if any([domains_add_groupobj, add_modify_group_payload, add_group_payload]):
        # refresh the cached object dictionaries after any change
        update_all_objects(domain_name)
def check_xlsx_sheet_empty_lines(ws, max_row):
    """
    Return the effective last data row of *ws*.

    openpyxl's ``max_row`` counts formatting-only rows, so trailing rows with
    an empty first column are trimmed by scanning upwards.

    :param ws: openpyxl worksheet to inspect
    :param max_row: reported sheet size (``ws.max_row``)
    :type max_row: int
    :return: index of the last row whose first column is non-empty,
             or 0 when every scanned row is empty
    :rtype: int
    """
    # scan bottom-up; the first non-empty first-column cell wins
    # (was: copy.deepcopy on an int and a manual decrement counter)
    for row in range(max_row, 0, -1):
        if ws.cell(row=row, column=1).value:
            return row
    return 0
def update_all_objects(domain_name):
    """
    Re-fetch all objects of *domain_name* from the FMC and merge them into
    the cached ``cfg.all_obj_domain`` / ``cfg.all_ids_domain`` dictionaries.
    Freshly fetched entries override cached ones on name collisions.

    :param domain_name: domain name (function should be run for each domain)
    :type domain_name: str
    """
    # the six object kinds share one identical merge pattern
    # (was: six copy-pasted if/else blocks, one per kind)
    object_kinds = ('hosts', 'ranges', 'networks', 'urls',
                    'networkgroups', 'urlgroups')

    # domain_obj_domain, domain_ids = get_all_objects_for_domain(domain_name)
    domain_obj_domain, domain_ids = get_all_objects_for_domain_no_check_ids(domain_name)

    merged_all_obj_domain = dict()
    for cached_domain, objects_data in cfg.all_obj_domain.items():
        fresh_objects = domain_obj_domain.get(cached_domain)
        merged_kinds = dict()
        for kind in object_kinds:
            if fresh_objects and fresh_objects.get(kind):
                # fresh data wins over the cache on duplicate object names
                merged_kinds[kind] = {**objects_data.get(kind),
                                      **fresh_objects.get(kind)}
            else:
                merged_kinds[kind] = objects_data.get(kind)
        merged_all_obj_domain[cached_domain] = merged_kinds

    # NOTE(review): when domain_ids is empty, the ids cache is replaced by an
    # empty dict — behavior kept from the original; confirm it is intended.
    merged_all_ids_domain = dict()
    if domain_ids:
        for cached_domain, cached_ids in cfg.all_ids_domain.items():
            fresh_ids = domain_ids.get(cached_domain)
            merged_ids = dict()
            if cached_ids and fresh_ids:
                merged_ids = {**cached_ids, **fresh_ids}
            elif cached_ids:
                merged_ids = cached_ids
            elif fresh_ids:
                merged_ids = fresh_ids
            merged_all_ids_domain[cached_domain] = merged_ids
    cfg.all_obj_domain = merged_all_obj_domain
    cfg.all_ids_domain = merged_all_ids_domain
def update_all_networkgroups(domain_name):
    """
    Refresh ``cfg.all_detailed_networkgroups`` with detailed group data
    freshly fetched for *domain_name*.

    :param domain_name: domain name (function should be run for each domain)
    :type domain_name: str
    """
    fresh_groups = get_all_detailed_groups_for_domain(domain_name)

    merged = dict()
    for cached_domain, cached_groups in cfg.all_detailed_networkgroups.items():
        fresh_for_domain = fresh_groups.get(cached_domain)
        if fresh_for_domain:
            # NOTE(review): cached entries win on key collisions here
            # (right-hand side of the **-merge) — confirm intended.
            merged[cached_domain] = {**fresh_for_domain, **cached_groups}
        else:
            merged[cached_domain] = cfg.all_detailed_networkgroups.get(cached_domain)

    cfg.all_detailed_networkgroups = merged
def sort_sheets_by_creation(all_sheets):
    """
    Order workbook sheet names for processing: plain object sheets first,
    then '...groups' sheets, so that objects exist before they are assigned
    into groups.  Relative order within each category is preserved.

    :param all_sheets: workbook sheet names
    :type all_sheets: list
    :return: sheet names, object sheets followed by group sheets
    :rtype: list
    """
    # a sheet belongs to the group phase when any dot-separated name
    # component is exactly 'groups'
    # (was: manual append loops plus an always-empty group_list variable)
    group_sheets = [s for s in all_sheets
                    if 'groups' in s.strip().split('.')]
    object_sheets = [s for s in all_sheets
                     if 'groups' not in s.strip().split('.')]
    return object_sheets + group_sheets
if __name__ == "__main__":
    cfg.init()
    output_dir = 'outputs'
    # create the output directory once; exist_ok avoids the former broad
    # `except OSError: pass`, which also swallowed real failures
    os.makedirs(output_dir, exist_ok=True)
    errors_filename = 'outputs/errors.txt'
    # truncate the error log from any previous run
    with open(errors_filename, "w") as f:
        f.write('')

    # prime the global caches with the current FMC state
    cfg.all_domains_json = get_all_domains_data()
    cfg.all_obj_domain, cfg.all_ids_domain = get_all_objects_with_domains()
    cfg.all_devices = get_all_devices()
    cfg.all_detailed_networkgroups = get_all_groups_info()

    if cfg.check_if_file_exist(cfg.input_xlsx):
        ''' read and parse data out of the XLSX Commutation map '''
        # (was: a throwaway `wb = Workbook()` immediately overwritten here)
        wb = load_workbook(cfg.input_xlsx, read_only=False)
        all_sheets = wb.sheetnames

        # objects must be created before group sheets are processed
        sorted_sheets = sort_sheets_by_creation(all_sheets)
        cfg.sorted_sheets = sorted_sheets

        # diff_before_filename = f'{cfg.diff_before_filename}-{datetime.now().strftime("%Y-%m-%d-%H-%M-%S")}.xlsx'
        # create_diff_excel_file(diff_before_filename)

        for sheet in sorted_sheets:
            ws = wb[sheet]
            names = sheet.strip().split('.')
            if not 'groups' in names and not 'urlgrps' in names:
                # plain object sheet: the sheet name encodes the domain path
                domain_name = sheet.replace('.', '/')
                object_change_function(domain_name, ws)
            elif 'groups' in names:
                # strip the trailing 'groups' marker to recover the domain
                if names[-1] == 'groups':
                    names.remove(names[-1])
                domain_name = '/'.join(names)
                del_group_from_parent_group(domain_name, ws)
                change_hosts_nets_in_parent_group(domain_name, ws)
                objects_non_override_to_group(domain_name, ws)
            elif 'urlgrps' in names:
                # strip the trailing 'urlgrps' marker to recover the domain
                if names[-1] == 'urlgrps':
                    names.remove(names[-1])
                domain_name = '/'.join(names)
                change_urls_in_group(domain_name, ws)
                url_objects_groups(domain_name, ws)

        # deletions run in a second pass, after all creations/updates
        for sheet in sorted_sheets:
            ws = wb[sheet]
            names = sheet.strip().split('.')
            if not 'groups' in names and not 'urlgrps' in names:
                domain_name = sheet.replace('.', '/')
                del_hosts_nets_for_obj_sheet(domain_name, ws)
        wb.close()

        # export the resulting FMC state back to Excel
        create_xlsx_and_sheets(cfg.output_xlsx)
        write_hosts_network_objects_to_xlsx(cfg.output_xlsx)
        write_group_objects_to_xlsx(cfg.output_xlsx)
        write_urlgrps_to_xlsx(cfg.output_xlsx)
        # diff_after_filename = f'{cfg.diff_after_filename}-{datetime.now().strftime("%Y-%m-%d-%H-%M-%S")}.xlsx'
        # create_diff_excel_file(diff_after_filename)
        logging.info(f'\n {datetime.now().strftime("%Y-%m-%d %H:%M:%S")} Execution has been completed with no major exceptions. Done.\n')
    else:
        # no input workbook: bootstrap one from the current FMC state
        create_xlsx_and_sheets(cfg.input_xlsx)
        write_hosts_network_objects_to_xlsx(cfg.input_xlsx)
        write_group_objects_to_xlsx(cfg.input_xlsx)
        write_urlgrps_to_xlsx(cfg.input_xlsx)

Использование cookies

Мы используем файлы cookie в соответствии с Политикой конфиденциальности и Политикой использования cookies.

Нажимая кнопку «Принимаю», Вы даете АО «СберТех» согласие на обработку Ваших персональных данных в целях совершенствования нашего веб-сайта и Сервиса GitVerse, а также повышения удобства их использования.

Запретить использование cookies Вы можете самостоятельно в настройках Вашего браузера.