feat(): add more logs and changes in the api_adapter and everfi files (#246)

Co-authored-by: Julio Cezar Moscon <jcmoscon@gmail.com>
This commit is contained in:
JCMOSCON1976 2024-07-25 11:50:36 -04:00 коммит произвёл GitHub
Родитель 2ac8f534bb
Коммит 8977256f26
Не найден ключ, соответствующий данной подписи
Идентификатор ключа GPG: B5690EEEBB952194
6 изменённых файлов: 387 добавлений и 180 удалений

Просмотреть файл

@ -28,9 +28,10 @@ def set_up_logging(level):
elif re.match("^crit", level, flags=re.IGNORECASE):
log_level = logging.CRITICAL
logging.basicConfig(
format='%(asctime)s:\t%(name)s.%(funcName)s()[%(filename)s:%(lineno)s]:\t%(levelname)s: %(message)s',
level=log_level,encoding='utf-8'
#format="[%(asctime)s] %(name)s [%(levelname)s]: %(message)s", level=log_level
format="%(asctime)s:\t%(name)s.%(funcName)s()[%(filename)s:%(lineno)s]:\t%(levelname)s: %(message)s",
level=log_level,
encoding="utf-8",
# format="[%(asctime)s] %(name)s [%(levelname)s]: %(message)s", level=log_level
)

Просмотреть файл

@ -1,2 +1,2 @@
from .api_adapter import APIAdaptor
from .api_adapter import APIAdaptor, APIAdaptorException
from .decorators import cache_pickle

Просмотреть файл

@ -57,11 +57,16 @@ class APIAdaptor:
try:
if response_json:
data_out = response.json()
if response.text != '':
data_out = response.json()
else:
data_out = ''
else:
data_out = response.text
except (ValueError, json.JSONDecodeError) as e:
raise APIAdaptorException("Bad JSON in response") from e
raise APIAdaptorException(f"Bad JSON in response." +
f"Response.text = {response.text} " +
f"Response.status_code = {response.status_code}") from e
is_success = 299 >= response.status_code >= 200 # 200 to 299 is OK
if is_success:
@ -80,9 +85,5 @@ class APIAdaptor:
def patch(self, endpoint: str, params: Dict = None, headers: str = None, data: Dict = None):
    """Send an HTTP PATCH to *endpoint* through the shared request helper.

    Query *params*, *headers* and the JSON *data* body are forwarded
    unchanged to ``self._request``.
    """
    return self._request(
        http_method="PATCH",
        endpoint=endpoint,
        params=params,
        data=data,
        headers=headers,
    )
def delete(
self, endpoint: str, params: Dict = None, data: Dict = None
):
return self._request(
http_method="DELETE", endpoint=endpoint, params=params, data=data
)
def delete(self, endpoint: str, params: Dict = None, headers: str = None, data: Dict = None):
    """Send an HTTP DELETE to *endpoint* through the shared request helper.

    Mirrors ``patch``/``post``: params, headers and body are passed straight
    to ``self._request``.
    """
    return self._request(
        http_method="DELETE",
        endpoint=endpoint,
        params=params,
        data=data,
        headers=headers,
    )

Просмотреть файл

@ -1,4 +1,4 @@
from api.util import APIAdaptor, cache_pickle
from api.util import APIAdaptor
import logging
import requests
from .secrets_everfi import config
@ -38,7 +38,7 @@ class EverfiAPI():
for rec in result.data.get('data',[]):
if rec['attributes']['name'] == category_name:
cat_id = rec['id']
break
break
if not cat_id:
raise EverfiAPIExceptionNoCategory(f"Category {category_name} not found.")
@ -48,6 +48,35 @@ class EverfiAPI():
return result
def get_category_label_user_id(self, id):
    """Fetch category_label_users records filtered by user *id*.

    Issues ``GET v1/admin/category_label_users/?filter[user_id]=<id>`` and
    returns the raw adapter response.
    """
    return self.api_adapter.get(
        endpoint='v1/admin/category_label_users/',
        headers=self.headers,
        params={'filter[user_id]': id},
    )
def delete_category_label_user(self, id):
    """Delete the category_label_users record identified by *id*.

    Returns the raw adapter response from the DELETE call.
    """
    return self.api_adapter.delete(
        endpoint=f'v1/admin/category_label_users/{id}',
        headers=self.headers,
    )
def bulk_clear_category_id(self, ids, category_id, category_label):
    """Submit a bulk category action for the given user *ids*.

    Builds a ``bulk_action_categories`` payload carrying *category_label*
    and *category_id* and POSTs it to the bulk-actions endpoint; returns
    the raw adapter response.
    """
    payload = {
        'data': {
            'type': 'bulk_action_categories',
            'attributes': {
                'user_ids': ids,
                'category_label': category_label,
                'category_id': category_id,
            },
        },
    }
    return self.api_adapter.post(
        endpoint='v1/admin/bulk_actions/category',
        headers=self.headers,
        data=payload,
    )
# =============================================================
# Hire Dates Category
# =============================================================
@ -75,36 +104,63 @@ class EverfiAPI():
},
}
return self.api_adapter.post(endpoint=endpoint, headers=self.headers,data=json_data)
# =============================================================
# USERS
# =============================================================
def get_users(self, fields,filter, locs, loc_map_table):
def search_user(self, fields, filter):
    """Page through ``v1/admin/users`` collecting records keyed by email.

    *filter* and *fields* dicts are merged into the query string (300
    records per page). Iteration stops — and the accumulated mapping is
    returned — as soon as the API yields an empty page.
    """
    query = {'page[per_page]': 300, **filter, **fields}
    found = {}
    page = 1
    while True:
        query['page[page]'] = page
        result = self.api_adapter.get(
            endpoint='v1/admin/users', params=query, headers=self.headers
        )
        records = result.data.get('data', [])
        if not records:
            return found
        for record in records:
            found[record.get('attributes', {}).get('email', '')] = record
        page += 1
def get_users(self, fields,filter, locs, loc_map_table,hire_dates_inv):
def fix_none(x):
return '' if not x else x
def build_comparison_string(rec, locs, loc_map_table):
def build_comparison_string(rec, locs, loc_map_table, hire_dates):
cc_learner = [x for x in rec.get('attributes',{}).get('user_rule_set_roles','[]')if x.get('rule_set','')=='cc_learner']
if not cc_learner:
is_manager ='non_supervisor'
else:
is_manager = fix_none(cc_learner[0].get('role',''))
if rec.get('relationships','').get('category_labels',''):
if len(rec.get('relationships','').get('category_labels','').get('data',''))>0:
hire_date_id = rec.get('relationships','').get('category_labels','').get('data','')[0].get('id')
hire_date = hire_dates_inv[hire_date_id]
else:
hire_date = ''
return fix_none(rec.get('attributes',{}).get('email','')) + "|"+\
fix_none(rec.get('attributes',{}).get('first_name','')) + "|"+\
fix_none(rec.get('attributes',{}).get('last_name','')) + "|"+\
fix_none(rec.get('attributes',{}).get('employee_id','')) + "|"+\
fix_none(str(rec.get('attributes',{}).get('location_id',''))) + "|"+\
is_manager
is_manager + "|"+\
hire_date
users_dict = {}
hire_dates_inv = {v: k for k, v in hire_dates_inv.items()}
comp = {}
curr_page = 1
params = {'page[per_page]': 100,
'filter[active]': 'true',
'fields[users]': 'email,first_name,last_name,sso_id,employee_id,student_id,location_id,active,user_rule_set_roles,category_labels'}
params = {'page[per_page]': 300,
**filter,
**fields}
while True:
params['page[page]'] = curr_page
result = self.api_adapter.get(endpoint='v1/admin/users', params=params,headers=self.headers)
@ -114,32 +170,38 @@ class EverfiAPI():
for rec in result.data.get('data',[]):
email = rec.get('attributes',{}).get('email','')
users_dict[email] = rec
comp[email] = build_comparison_string(rec, locs, loc_map_table)
comp[email] = build_comparison_string(rec, locs, loc_map_table,hire_dates_inv)
curr_page += 1
def deactivate_users(self, del_list,everfi_users):
def set_active(self, id, active: bool):
    """Toggle the ``user_rule_set`` active flag on registration set *id*.

    PATCHes a ``registration_sets`` payload; the adapter response is
    deliberately not returned (existing call sites ignore it).
    """
    payload = {
        'data': {
            'type': 'registration_sets',
            'id': id,
            'attributes': {
                'registrations': [
                    {
                        "rule_set": "user_rule_set",
                        'active': active,
                    }
                ],
            },
        },
    }
    self.api_adapter.patch(
        endpoint=f'v1/admin/registration_sets/{id}',
        headers=self.headers,
        data=payload,
    )
def deactivate_user(self, id):
    """Convenience wrapper: mark registration set *id* inactive."""
    self.set_active(id, active=False)
def deactivate_users(self, del_list,everfi_users):
for email in del_list:
id = everfi_users[email].get('id')
endpoint = f'v1/admin/registration_sets/{id}'
json_data = {
'data': {
'type': 'registration_sets',
'id': id,
'attributes': {
'registrations': [
{
"rule_set": "user_rule_set",
'active': False,
}
],
},
},
}
r = self.api_adapter.patch(endpoint=endpoint, headers=self.headers, data= json_data)
self.set_active(id, False)
def upd_user(self, id, json_data):
endpoint = f'v1/admin/registration_sets/{id}'
@ -149,7 +211,7 @@ class EverfiAPI():
def add_user(self, json_data):
    """Create a new registration set (user) from a prebuilt JSON payload.

    Returns the raw adapter response from the POST.
    """
    return self.api_adapter.post(
        endpoint='v1/admin/registration_sets',
        headers=self.headers,
        data=json_data,
    )
#def delete_label_user(self,)
def assign_label_user(self, user_id, category_label_id):
endpoint = 'v1/admin/category_label_users'
json_data = {
@ -168,15 +230,20 @@ class EverfiAPI():
# LOCATIONS
# =============================================================
def get_locations_mapping_table(self):
# Get all categories and find loc_map_table category
result = self.get_category('Locations Mapping Table')
# Get all categories and find loc_map_table category
try:
result = self.get_category('Locations Mapping Table')
except Exception as e:
raise Exception(e)
map = {}
for rec in result.data.get('included'):
fields = rec.get('attributes').get('name').split("|")
if len(fields)!=2:
continue
map[fields[0]] = fields[1]
if len(map) == 0:
raise Exception("Mapping table is empty")
return map
def get_locations(self, page_size=10000):

Просмотреть файл

@ -2,28 +2,10 @@ import logging
from .secrets_workday import config as wd_config
from api.util import APIAdaptor
import functools
logger = logging.getLogger(__name__)
def cache_pickle(func):
    """Memoize *func*'s result on disk, pickled to a file named after the function.

    The first call runs *func* and pickles the return value to a file called
    ``func.__name__`` in the current directory; subsequent calls (even in a
    new process) load and return the pickled value without calling *func*.

    NOTE(review): the cache key ignores ``*args``/``**kwargs`` — every
    argument combination shares one cache file. This matches the original
    behavior; confirm callers only decorate zero-argument fetchers.
    """
    @functools.wraps(func)
    def wrapper_cache_pickle(*args, **kwargs):
        import pickle
        import os.path

        cache_path = func.__name__
        if os.path.isfile(cache_path):
            # Cache hit: use a context manager so the handle is always
            # closed (the original leaked the open file object).
            with open(cache_path, 'rb') as file_pi:
                return pickle.load(file_pi)
        value = func(*args, **kwargs)
        with open(cache_path, 'wb') as file_pi:
            pickle.dump(value, file_pi)
        return value
    return wrapper_cache_pickle
class LocalConfig:
    """Attribute-style proxy over the module-level ``wd_config`` mapping.

    Any attribute access is looked up as a key in ``wd_config``; a missing
    key raises ``KeyError`` (not ``AttributeError``), as in the original.
    """

    def __getattr__(self, name):
        # Delegate every attribute read to the config dict.
        return wd_config[name]
@ -36,7 +18,7 @@ class WorkdayAPI:
self.api_adapter = APIAdaptor(host=everfi_integration["host"])
@cache_pickle
def get_datawarehouse_workers_csv(self):
everfi_integration = getattr(self._config, "everfi_integration")

Просмотреть файл

@ -1,17 +1,12 @@
# %%
#from workday_everfi.api import Workday as WorkdayAPI
from workday_everfi.api.Workday import WorkdayAPI
from api.util import Util, cache_pickle
from workday_everfi.api.Everfi import EverfiAPI
from api.util import Util, APIAdaptorException
import argparse
import logging
import sys
def cal_user_location(wd_user, locs,loc_map_table):
def cal_user_location(wd_user, locs, loc_map_table):
loc = ""
location_country = wd_user.get("location_country", "")
@ -20,46 +15,67 @@ def cal_user_location(wd_user, locs,loc_map_table):
if not loc:
loc = "Federal (Canada)"
elif location_country == "United States of America":
#if wd_user.get("location_state", "") == "New York":
# if wd_user.get("location_state", "") == "New York":
loc = loc_map_table.get(wd_user.get("location_state", ""), "")
if not loc:
loc = "United States"
else:
loc = "Default"
id = locs.get(loc)["id"]
if not id:
id = locs.get("Default")["Id"]
logger.debug(f"Location id={id} mapped for user {wd_user.get('primary_work_email','')} loc = {loc}")
logger.debug(
f"Location id={id} mapped for user {wd_user.get('primary_work_email','')} loc = {loc}"
)
return id
class Everfi():
class Everfi:
def __init__(self) -> None:
    """Create the Everfi API client and a logger named after this class."""
    self.logger = logging.getLogger(self.__class__.__name__)
    self.everfi_api = EverfiAPI()
# @cache_pickle
def get_everfi_users(self,locs, loc_map_table):
fields = "email,first_name,last_name,sso_id,employee_id,student_id,location_id,active,user_rule_set_roles,category_labels"
return self.everfi_api.get_users(fields, filter, locs, loc_map_table)
def get_everfi_users(self, locs, loc_map_table, hire_dates):
    """Fetch all active Everfi users.

    Builds the active-users filter and the sparse-fieldset params, then
    delegates to ``EverfiAPI.get_users``; *locs*, *loc_map_table* and
    *hire_dates* are passed through for comparison-string building.
    """
    # Local renamed from `filter` so it no longer shadows the builtin.
    active_filter = {'filter[active]': 'true'}
    fields = {'fields[users]': 'email,first_name,last_name,sso_id,employee_id,student_id,location_id,active,user_rule_set_roles,category_labels'}
    return self.everfi_api.get_users(fields, active_filter, locs, loc_map_table, hire_dates)
def get_locations_mapping_table(self):
    """Fetch the Everfi locations mapping table via the API client."""
    table = self.everfi_api.get_locations_mapping_table()
    return table
def upd_everfi_users(self, hire_date_category_id, hire_dates, locs, upd_list_keys, wd_users , everfi_users, loc_map_table):
errors_list = []
def deactivate_users(self, del_list, everfi_users):
    """Deactivate every Everfi user whose email appears in *del_list*.

    Each email is resolved to its record in *everfi_users* to obtain the
    registration-set id. Returns the number of users deactivated.
    """
    deactivated = 0
    for email in del_list:
        self.everfi_api.deactivate_user(everfi_users[email].get('id'))
        deactivated += 1
    return deactivated
def activate_user(self, id):
    """Re-activate the Everfi registration set identified by *id*."""
    self.everfi_api.set_active(id, active=True)
def get_locations_mapping_table(self):
return self.everfi_api.get_locations_mapping_table()
def upd_everfi_users(
self,
hire_date_category_id,
hire_dates,
locs,
upd_list_keys,
wd_users,
everfi_users,
loc_map_table,
):
errors_list = []
count_upd = 0
for email in upd_list_keys:
wd_user = wd_users[email][1]
loc_id = cal_user_location(wd_user, locs, loc_map_table)
self.logger.info(f"Updating user {email}")
loc_id = cal_user_location(wd_user, locs, loc_map_table)
json_data = {
"data": {
"type": "registration_sets",
"id": everfi_users[email]['id'],
"id": everfi_users[email]["id"],
"attributes": {
"registrations": [
{
@ -67,9 +83,8 @@ class Everfi():
"first_name": wd_user.get("preferred_first_name", ""),
"last_name": wd_user.get("preferred_last_name", ""),
"location_id": loc_id,
"employee_id": wd_user.get("employee_id", ""),
"sso_id": wd_user.get("employee_id", ""),
"employee_id": wd_user.get("employee_id", ""),
"sso_id": wd_user.get("employee_id", ""),
},
{
"rule_set": "cc_learner",
@ -82,26 +97,71 @@ class Everfi():
},
}
try:
r = self.everfi_api.upd_user(everfi_users[email]['id'], json_data)
r = self.everfi_api.upd_user(everfi_users[email]["id"], json_data)
except Exception as e:
self.logger.exception(e)
errors_list.append(e)
cat_label_user_id = self.get_category_label_user_id(everfi_users[email]["id"])
if cat_label_user_id:
self.delete_category_label_user(cat_label_user_id)
#wd_users[email][1]["hire_date"] = '2024-07-10'
hire_date_id = self.get_hire_date_id(
wd_users[email][1]["hire_date"], hire_date_category_id, hire_dates
)
try:
r = self.everfi_api.assign_label_user(
r.data.get("data").get("id"), hire_date_id
)
except Exception as e:
self.logger.exception(e)
errors_list.append(e)
count_upd += 1
return count_upd
def get_category_label_user_id(self, id):
    """Return the id of the first category-label record for user *id*.

    Returns ``None`` when the API reports no label assignments for the
    user (empty/falsy ``data`` in the response).
    """
    records = self.everfi_api.get_category_label_user_id(id).data.get('data', '')
    if not records:
        return None
    return records[0].get('id', '')
def delete_category_label_user(self, id):
    """Delete the category-label assignment *id* via the API client."""
    return self.everfi_api.delete_category_label_user(id)
def bulk_clear_category_id(self, ids, category_id, category_label):
    """Pass-through to the API client's bulk category-clear action."""
    result = self.everfi_api.bulk_clear_category_id(ids, category_id, category_label)
    return result
def get_hire_date_id(self, wd_hire_date, hire_date_category_id, hire_dates):
wd_hire_date = wd_hire_date.split('-')
wd_hire_date = wd_hire_date.split("-")
wd_hire_date = wd_hire_date[1] + "-" + wd_hire_date[0]
hire_date_id = hire_dates.get(wd_hire_date)
if not hire_date_id:
#add new hire date
r = self.everfi_api.add_hire_date(name=wd_hire_date, category_id=hire_date_category_id)
id = r.data.get('data').get('id')
# add new hire date
r = self.everfi_api.add_hire_date(
name=wd_hire_date, category_id=hire_date_category_id
)
id = r.data.get("data").get("id")
hire_dates[wd_hire_date] = id
return r.data.get('data').get('id')
return r.data.get("data").get("id")
return hire_date_id
def add_everfi_users(self, hire_date_category_id, hire_dates, locs, add_list_keys, wd_users,loc_map_table):
def add_everfi_users(
self,
hire_date_category_id,
hire_dates,
locs,
add_list_keys,
wd_users,
loc_map_table,
):
errors = []
count_add = 0
for email in add_list_keys:
wd_user = wd_users[email][1]
loc_id = cal_user_location(wd_user, locs, loc_map_table)
@ -117,7 +177,7 @@ class Everfi():
"email": wd_user.get("primary_work_email", ""),
"sso_id": wd_user.get("employee_id", ""),
"employee_id": wd_user.get("employee_id", ""),
"location_id": loc_id
"location_id": loc_id,
},
{
"rule_set": "cc_learner",
@ -133,43 +193,79 @@ class Everfi():
r = self.everfi_api.add_user(json_data)
except Exception as e:
self.logger.exception(e)
errors.append(e)
continue
self.logger.info("Trying to activate user and update ")
if (e.args[0][0].get('id','')=='user_rule_set'):
# try to active user
# find user by email and then update the user with current data
filter = {'filter[email]': wd_user.get("primary_work_email", "")}
fields = {'fields[users]': 'id,email'}
#find user id
user = self.everfi_api.search_user(fields, filter)
id = user.get(email,'').get('id', '')
if id:
#self.activate_user(id)
json_data['data']['id'] = id
json_data['data']['attributes']['registrations'][0]['active'] = True
#active user and update fields
r = self.everfi_api.upd_user(id, json_data)
#remove hire date custom field
#hd = wd_users[email][1]["hire_date"].split('-')
cat_label_user_id = self.get_category_label_user_id(id)
if cat_label_user_id:
self.delete_category_label_user(cat_label_user_id)
#self.bulk_clear_category_id([id], hire_date_category_id, hd[1] + '-' + hd[0])
else:
errors.append(e)
continue
logger.info(f"Setting hire data for user {email}")
hire_date_id = self.get_hire_date_id(wd_users[email][1]['hire_date'], hire_date_category_id, hire_dates)
#wd_users[email][1]["hire_date"] = '2024-07-10'
hire_date_id = self.get_hire_date_id(
wd_users[email][1]["hire_date"], hire_date_category_id, hire_dates
)
try:
self.everfi_api.assign_label_user(r.data.get('data').get('id'), hire_date_id)
r = self.everfi_api.assign_label_user(
r.data.get("data").get("id"), hire_date_id
)
except Exception as e:
self.logger.exception(e)
errors.append(e)
self.logger.info(f"New user { wd_user.get('primary_work_email','')} created.")
# r = everfi_api.api_adapter.post(endpoint=endpoint,headers=headers,data=json_data)
class Workday():
count_add += 1
def build_comparison_string(self,wd_row,locs,loc_map_table):
loc_id = cal_user_location(wd_row, locs,loc_map_table)
return count_add
class Workday:
def build_comparison_string(self, wd_row, locs, loc_map_table):
loc_id = cal_user_location(wd_row, locs, loc_map_table)
hire_date = wd_row['hire_date'].split('-')
is_manager = "supervisor" if wd_row.get("is_manager", "") else "non_supervisor"
return wd_row['primary_work_email'] + "|" +\
wd_row['preferred_first_name'] + "|" +\
wd_row['preferred_last_name'] + "|" +\
wd_row['employee_id'] + "|" +\
loc_id + "|" +\
is_manager
def get_wd_users(self,locs,loc_map_table):
return (
wd_row["primary_work_email"]
+ "|"
+ wd_row["preferred_first_name"]
+ "|"
+ wd_row["preferred_last_name"]
+ "|"
+ wd_row["employee_id"]
+ "|"
+ loc_id
+ "|"
+ is_manager
+ "|"
+ hire_date[1] + "-" + hire_date[0]
)
def get_wd_users(self, locs, loc_map_table):
import pandas as pd
import io
import io
# The API is not returning all fields in the json
# but the csv is, so we will use the csv version
#wd_users_csv = WorkdayAPI.get_datawarehouse_workers_csv()
# wd_users_csv = WorkdayAPI.get_datawarehouse_workers_csv()
workday_api = WorkdayAPI()
wd_users_csv = workday_api.get_datawarehouse_workers_csv()
df = pd.read_csv(io.StringIO(wd_users_csv), sep=",")
@ -177,18 +273,21 @@ class Workday():
(df["currently_active"] == True)
& (df["moco_or_mofo"] == "MoCo")
& (df["worker_type"] == "Employee")
]
#filtered = filtered[(filtered["primary_work_email"] == "daabel@mozilla.com")]
#filtered.to_csv('file1.csv')
comp = {x[1]['primary_work_email']:self.build_comparison_string(x[1],locs,loc_map_table) for x in filtered.iterrows()}
comp = {
x[1]["primary_work_email"]: self.build_comparison_string(
x[1], locs, loc_map_table
)
for x in filtered.iterrows()
}
return comp, {x[1]["primary_work_email"]: x for x in filtered.iterrows()}
class WorkdayEverfiIntegration():
class WorkdayEverfiIntegration:
def __init__(self) -> None:
self.workday = Workday()
self.everfi = Everfi()
self.everfi = Everfi()
self.logger = logging.getLogger(self.__class__.__name__)
def compare_users(self, wd_comp, everfi_comp, wd_users, everfi_users):
@ -202,62 +301,120 @@ class WorkdayEverfiIntegration():
add_list = np.setdiff1d(wd_users_emails, everfi_users_emails)
del_list = np.setdiff1d(everfi_users_emails, wd_users_emails)
intersect_list = np.intersect1d(wd_users_emails, everfi_users_emails)
for upd_email in intersect_list:
if everfi_comp[upd_email] != wd_comp[upd_email]:
upd_list.append(upd_email)
# TODO remove jmoscon(@mozilla.com")
del_list = np.delete(del_list, np.where(np.isin(del_list,["jmoscon@mozilla.com","jcmoscon@mozilla.com"])))
return add_list, del_list, upd_list
def run(self):
hire_date_category_id, hire_dates = self.everfi.everfi_api.get_hire_dates()
#========================================================
# Getting Everfi locations and locations mapping table ...
#========================================================
self.logger.info("Getting Everfi locations ...")
locs = self.everfi.everfi_api.get_locations()
loc_map_table = self.everfi.everfi_api.get_locations_mapping_table()
#========================================================
def run(self, force):
# ========================================================
# Getting Everfi hire dates, locations and locations mapping table ...
# ========================================================
try:
self.logger.info("Getting everfi hire dates")
hire_date_category_id, hire_dates = self.everfi.everfi_api.get_hire_dates()
self.logger.info(f"Number of hire dates: {len(hire_dates)}")
self.logger.info("Getting everfi locations")
locs = self.everfi.everfi_api.get_locations()
self.logger.info(f"Number of locations: {len(locs)}")
self.logger.info("Getting everfi mapping table")
loc_map_table = self.everfi.everfi_api.get_locations_mapping_table()
self.logger.info(f"Number of mappins: {len(loc_map_table)}")
except (APIAdaptorException, Exception) as e:
self.logger.error(str(e))
self.logger.critical("Failed while Getting Everfi hire dates,locations and locations mapping table ...")
sys.exit(1)
# ========================================================
# Getting Workday users...
#========================================================
# ========================================================
self.logger.info("Getting Workday users...")
wd_comp, wd_users = self.workday.get_wd_users(locs,loc_map_table)
try:
wd_comp, wd_users = self.workday.get_wd_users(locs, loc_map_table)
self.logger.info(f"Number of wd users: {len(wd_users)}")
except (APIAdaptorException, Exception) as e:
self.logger.error(str(e))
self.logger.critical("Failed while Getting Workday users...")
#========================================================
# ========================================================
# Getting Everfi users...
#========================================================
# ========================================================
self.logger.info("Getting Everfi users...")
everfi_comp, everfi_users = self.everfi.get_everfi_users(locs,loc_map_table)
try:
everfi_comp, everfi_users = self.everfi.get_everfi_users(locs, loc_map_table, hire_dates)
self.logger.info(f"Number of Everfi users: {len(everfi_users)}")
except (APIAdaptorException, Exception) as e:
self.logger.error(str(e))
self.logger.critical("Failed while Getting Everfi users...")
#========================================================
# ========================================================
# Comparing users...
#========================================================
# ========================================================
self.logger.info("Comparing users...")
add_list, del_list, upd_list = integration.compare_users(wd_comp,everfi_comp, wd_users, everfi_users)
try:
add_list, del_list, upd_list = integration.compare_users(
wd_comp, everfi_comp, wd_users, everfi_users
)
self.logger.info(f"Number of users to delete {len(del_list)}")
self.logger.info(f"Number of users to add {len(add_list)}")
self.logger.info(f"Number of users to update {len(upd_list)}")
except (Exception) as e:
self.logger.error(str(e))
self.logger.critical("Failed while Comparing users...")
#========================================================
# ========================================================
# Deleting Everfi users ...
#========================================================
self.logger.info("Deleting Everfi users ...")
self.everfi.everfi_api.deactivate_users(del_list, everfi_users)
#========================================================
# ========================================================
self.logger.info("Deleting Everfi users ...")
try:
count_dels = self.everfi.deactivate_users(del_list, everfi_users)
self.logger.info(f"Number of users deleted {count_dels}")
except (APIAdaptorException, Exception) as e:
self.logger.error(str(e))
self.logger.critical("Faile while Deleting Everfi users ...")
# ========================================================
# Adding Everfi users ...
#========================================================
self.logger.info("Adding Everfi users ...")
self.everfi.add_everfi_users(hire_date_category_id, hire_dates, locs, add_list, wd_users, loc_map_table)
#========================================================
# Updating Everfi users ...
#========================================================
self.logger.info("Updating Everfi users ...")
self.everfi.upd_everfi_users(hire_date_category_id, hire_dates, locs, upd_list, wd_users, everfi_users, loc_map_table)
# ========================================================
self.logger.info("Adding Everfi users ...")
try:
count_add = self.everfi.add_everfi_users(
hire_date_category_id, hire_dates, locs, add_list, wd_users, loc_map_table
)
self.logger.info(f"Number of users added {count_add}")
except (APIAdaptorException, Exception) as e:
self.logger.error(str(e))
self.logger.critical("Failed while Adding Everfi users ...")
# ========================================================
# Updating Everfi users ...
# ========================================================
self.logger.info("Updating Everfi users ...")
try:
count_upd = self.everfi.upd_everfi_users(
hire_date_category_id,
hire_dates,
locs,
upd_list,
wd_users,
everfi_users,
loc_map_table,
)
self.logger.info(f"Number of users updated {count_upd}")
except (APIAdaptorException, Exception) as e:
self.logger.error(str(e))
self.logger.critical("Failed while Updating Everfi users ...")
self.logger.info("End of integration")
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Sync up XMatters with Workday")
@ -280,12 +437,11 @@ if __name__ == "__main__":
args = parser.parse_args()
log_level = Util.set_up_logging(args.level)
logger = logging.getLogger(__name__)
logger.info("Starting...")
integration = WorkdayEverfiIntegration()
integration.run()
integration.run(args.force)