okta and workday scripts
29
okta_search_logs/000description_of_scripts.txt
Normal file
@@ -0,0 +1,29 @@
--------------------------------------------------------
Okta User AD Profile Viewer
show_values_from_ad.py:

This script fetches and displays the AD profile values for a specific user
from Okta. It takes a user ID or login as a command-line argument, resolves
the user, and prints the profile values synced from the configured AD
application assignment.

--------------------------------------------------------
Okta MFA Non-Enrolled Users Report
mfa_not_enrolled.py:

This script generates a CSV report of Okta users who are not enrolled in
Multi-Factor Authentication (MFA). It can filter users by status (active
only by default) and fetches each user's MFA factors to identify those
without any. The output is a CSV file containing details of these users.

--------------------------------------------------------
Okta User Detailed Activity and MFA Report
show_system_logs_mfa_factors_for_user.py:

This script provides a detailed interactive report for a single Okta user.
It fetches and displays the user's profile information, enrolled MFA
factors, and recent system log events. It highlights MFA enrollment/removal
events and suspicious activities, and saves the raw log data to a CSV file.

--------------------------------------------------------
Comprehensive Okta User Export Report
full_okta_users.py:

This script exports a comprehensive CSV report of Okta users. It combines
user profile information, lifecycle data, password expiry details
(calculated from Active Directory's 'ADpwdLastSet' attribute), and MFA
enrollment status. It can export all users or a single user and can filter
by status.

--------------------------------------------------------
Okta Deactivated Users Report
deactivated_users.py:

This script generates a CSV report of Okta users who are in a non-active
state (Staged, Deprovisioned, Provisioned, or Recovery). It fetches users
with these statuses and saves their profile information to a CSV file,
filtering to include only users with a job title.
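--------------------------------------------------------
Example invocations (quick reference, drawn from the scripts' usage text
and docstrings; all of them assume a .env with OKTA_DOMAIN, OKTA_API_TOKEN,
and, for show_values_from_ad.py, OKTA_APP_ID):

python3 show_values_from_ad.py jared.evans
python3 mfa_not_enrolled.py --out no_mfa_active.csv
python3 show_system_logs_mfa_factors_for_user.py    (prompts interactively)
python3 full_okta_users.py --all --only-active --out okta_active_users.csv
python3 deactivated_users.py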
159
okta_search_logs/deactivated_users.py
Normal file
@@ -0,0 +1,159 @@
#!/home/jevans/audit_reports/okta_system_logs/.venv/bin/python
import os
import re
import sys
import csv
import requests

# ---------------- .env loading (KEY=VALUE; quotes supported) ----------------
_ENV_LINE_RE = re.compile(r'^\s*([A-Za-z_][A-Za-z0-9_]*)\s*=\s*(.*)\s*$')


def _strip_quotes(val: str) -> str:
    val = val.strip()
    if len(val) >= 2 and (val[0] == val[-1]) and val[0] in ("'", '"'):
        return val[1:-1]
    return val


def load_env():
    """Load KEY=VALUE pairs from .env in script dir or cwd."""
    script_dir = os.path.dirname(os.path.abspath(__file__))
    candidates = [os.path.join(script_dir, ".env"), os.path.join(os.getcwd(), ".env")]
    for path in candidates:
        if os.path.exists(path):
            with open(path, "r", encoding="utf-8") as f:
                for raw in f:
                    line = raw.strip()
                    if not line or line.startswith("#"):
                        continue
                    m = _ENV_LINE_RE.match(line)
                    if not m:
                        continue
                    key, val = m.group(1), _strip_quotes(m.group(2))
                    if key and key not in os.environ:
                        os.environ[key] = val


load_env()

# ---------------- Config ----------------
OKTA_DOMAIN = os.getenv("OKTA_DOMAIN", "gallaudet.okta.com")
API_TOKEN = os.getenv("OKTA_API_TOKEN")

if not API_TOKEN:
    sys.stderr.write("ERROR: Missing OKTA_API_TOKEN in .env\n")
    sys.exit(1)

BASE_URL = f"https://{OKTA_DOMAIN}"
USERS_URL = f"{BASE_URL}/api/v1/users"

HEADERS = {
    "Authorization": f"SSWS {API_TOKEN}",
    "Accept": "application/json",
}

CSV_FILENAME = "okta_deprovisioned_users.csv"

COLUMNS = [
    "firstName","lastName","email","title","organization",
    "wdEmployeeRole","wdJobProfile","status","employeeStatus","wdHireDate",
    "wdTerminated","wdTerminationDate","wdHasAcademicAppointment","wdFutureHire",
    "InstructorKeepActiveTo","wdTerminatedWorkerKeepActiveTo","department",
    "wdJobFamilyGroup","wdEmployeeType","userRole","wdIsWorkerActiveStudent",
    "created","activated","statusChanged","lastLogin","lastUpdated",
    "passwordChanged","ADpwdLastSet","displayName","login","secondEmail"
]


# ---------------- Helpers ----------------
def get_all_users():
    """
    Return users whose status is one of STAGED, DEPROVISIONED, PROVISIONED, or RECOVERY.
    Handles Okta Link header pagination.
    """
    users = []
    url = USERS_URL
    params = {
        "limit": 200,
        "filter": 'status eq "STAGED" or status eq "DEPROVISIONED" or status eq "PROVISIONED" or status eq "RECOVERY"',
    }
    while url:
        if url == USERS_URL:
            resp = requests.get(url, headers=HEADERS, params=params, timeout=15)
        else:
            resp = requests.get(url, headers=HEADERS, timeout=15)
        if resp.status_code != 200:
            print(f"Failed to fetch users: {resp.status_code}")
            print(resp.text)
            break

        data = resp.json()
        if isinstance(data, list):
            users.extend(data)

        # Parse pagination Link header
        next_link = None
        link_hdr = resp.headers.get("link") or resp.headers.get("Link")
        if link_hdr:
            parts = [p.strip() for p in link_hdr.split(",")]
            for part in parts:
                if 'rel="next"' in part:
                    # format: <URL>; rel="next"
                    start = part.find("<") + 1
                    end = part.find(">")
                    if start > 0 and end > start:
                        next_link = part[start:end]
                    break
        url = next_link

    return users

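# Note: the manual Link-header parsing above works, but requests already
# exposes a parsed view of that header via Response.links. A minimal
# alternative sketch (same endpoint, same HEADERS; illustrative only, not
# wired into this script):
#
#     while url:
#         resp = requests.get(url, headers=HEADERS,
#                             params=params if url == USERS_URL else None,
#                             timeout=15)
#         resp.raise_for_status()
#         users.extend(resp.json())
#         url = resp.links.get("next", {}).get("url")
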
def get_user_by_login(user_login: str):
    """
    Return a list with the matching user (or an empty list if not found).
    """
    params = {"filter": f'profile.login eq "{user_login}"'}
    resp = requests.get(USERS_URL, headers=HEADERS, params=params, timeout=15)
    if resp.status_code != 200:
        print(f"Failed to fetch user: {resp.status_code}")
        print(resp.text)
        return []
    users = resp.json()
    if users:
        return users
    print(f"No user found with login: {user_login}")
    return []


def format_user(user: dict) -> dict:
    """
    Flatten a user object into the CSV field set: top-level lifecycle fields
    come from the user itself, everything else from its profile.
    """
    row = {}
    profile = user.get("profile", {}) or {}
    for field in COLUMNS:
        if field in {"status","created","activated","statusChanged","lastLogin","lastUpdated","passwordChanged"}:
            row[field] = user.get(field, "")
        else:
            row[field] = profile.get(field, "")
    return row


def save_to_csv(users: list, filename: str):
    with open(filename, "w", newline="", encoding="utf-8") as csvfile:
        writer = csv.DictWriter(csvfile, fieldnames=COLUMNS, extrasaction="ignore")
        writer.writeheader()
        for user in users:
            row = format_user(user)
            # Only write users with a non-empty "title"
            if row.get("title") and str(row.get("title")).strip():
                writer.writerow(row)
    print(f"User data saved to {filename}")


# ---------------- Main ----------------
if __name__ == "__main__":
    if len(sys.argv) > 1 and sys.argv[1].endswith("@gallaudet.edu"):
        user_login = sys.argv[1]
        users = get_user_by_login(user_login)
        out_file = f"okta_user_{user_login.replace('@','_at_').replace('.','_')}.csv"
        save_to_csv(users, out_file)
    else:
        users = get_all_users()
        save_to_csv(users, CSV_FILENAME)
71
okta_search_logs/example_output_ad_values_jared.evans.txt
Normal file
@@ -0,0 +1,71 @@
# python3 show_values_from_ad.py jared.evans

Resolved login 'jared.evans@gallaudet.edu' to Okta user id: 00u1b2ex4bG0fnCGe697
{
  "id": "00u1b2ex4bG0fnCGe697",
  "externalId": "ResZKdXkP0eMwykiPwJL5w==",
  "created": "2022-06-08T18:54:21.000Z",
  "lastUpdated": "2025-10-16T20:01:31.000Z",
  "scope": "USER",
  "status": "PROVISIONED",
  "statusChanged": "2022-06-08T19:09:23.000Z",
  "passwordChanged": null,
  "syncState": "SYNCHRONIZED",
  "lastSync": "2025-10-16T20:01:31.000Z",
  "credentials": {
    "userName": "jared.evans@gallaudet.edu"
  },
  "profile": {
    "lastName": "Evans",
    "preferredLanguage": null,
    "telephoneNumber": null,
    "city": null,
    "displayName": "Jared Evans",
    "socialSecurityNumber": "4321",
    "accountExpires": "0",
    "postalCode": null,
    "samAccountName": "jared.evans",
    "description": "Staff",
    "dn": "CN=jared.evans,OU=People,DC=ad,DC=gallaudet,DC=edu",
    "employeeID": "RRH9UJPCN",
    "title": "Information Security Officer",
    "division": null,
    "uid": [
      "0141868"
    ],
    "managerDn": null,
    "countryCode": null,
    "primaryGroupId": "513",
    "state": null,
    "department": "Relationship & Project Management",
    "email": "jared.evans@gallaudet.edu",
    "legalFirstName": "Jared",
    "oktaID": "1003181",
    "adCountryCode": 0,
    "dateOfBirth": "1973-10-26",
    "cn": "jared.evans",
    "facsimileTelephoneNumber": null,
    "co": null,
    "primaryID": "0141868",
    "proxyAddresses": [],
    "firstName": "Jared",
    "deliveryOffice": null,
    "msDS_UserPasswordExpiryTimeComputed": "2025-12-27T13:49:26+0000",
    "mobilePhone": null,
    "legalLastName": "Evans",
    "streetAddress": null,
    "departmentNumber": "SO_00005",
    "objectSid": "S-1-5-21-1340932589-1776855805-1520363230-56184",
    "middleName": null,
    "managerUpn": null,
    "pwdLastSet": "2025-06-30T13:49:26+0000"
  },
  "_links": {
    "app": {
      "href": "https://gallaudet.okta.com/api/v1/apps/0oa1b0suqs9iEEt6H697"
    },
    "user": {
      "href": "https://gallaudet.okta.com/api/v1/users/00u1b2ex4bG0fnCGe697"
    }
  }
}
372
okta_search_logs/full_okta_users.py
Normal file
@@ -0,0 +1,372 @@
#!/usr/bin/env python3
"""
Merged Okta users export:
  - Includes all profile columns from okta_users.py
  - Includes lifecycle/login/password-expiry (ADpwdLastSet-based)
  - Includes MFA enrollment + factors (unless --skip-mfa)
  - Loads OKTA_DOMAIN, OKTA_API_TOKEN, OKTA_APP_ID from .env

Examples:
  # All users (any status):
  python3 full_okta_users.py --all --out okta_all_users.csv

  # Only ACTIVE users:
  python3 full_okta_users.py --all --only-active --out okta_active_users.csv

  # Single user:
  python3 full_okta_users.py --user jared.evans

  # Skip MFA (faster for large runs):
  python3 full_okta_users.py --all --skip-mfa
"""

import os
import re
import sys
import csv
import math
import time
import argparse
from datetime import datetime, timedelta, timezone

import requests

# ---------------- .env loading ----------------
_ENV_LINE_RE = re.compile(r'^\s*([A-Za-z_][A-Za-z0-9_]*)\s*=\s*(.*)\s*$')


def _strip_quotes(val: str) -> str:
    val = val.strip()
    if len(val) >= 2 and (val[0] == val[-1]) and val[0] in ("'", '"'):
        return val[1:-1]
    return val


def load_env():
    """Load KEY=VALUE pairs from .env in script dir or cwd."""
    script_dir = os.path.dirname(os.path.abspath(__file__))
    candidates = [os.path.join(script_dir, ".env"), os.path.join(os.getcwd(), ".env")]
    for path in candidates:
        if os.path.exists(path):
            with open(path, "r", encoding="utf-8") as f:
                for raw in f:
                    line = raw.strip()
                    if not line or line.startswith("#"):
                        continue
                    m = _ENV_LINE_RE.match(line)
                    if not m:
                        continue
                    key, val = m.group(1), _strip_quotes(m.group(2))
                    if key and key not in os.environ:
                        os.environ[key] = val


load_env()

# ---------------- Configuration ----------------
OKTA_DOMAIN = os.getenv("OKTA_DOMAIN", "gallaudet.okta.com")
API_TOKEN = os.getenv("OKTA_API_TOKEN")
OKTA_APP_ID = os.getenv("OKTA_APP_ID", "")

BASE_URL = f"https://{OKTA_DOMAIN}"
USERS_URL = f"{BASE_URL}/api/v1/users"

DEFAULT_TIMEOUT = 15  # seconds
FIXED_MAX_AGE_DAYS = 180

# ---------------- HTTP session ----------------
SESSION = requests.Session()
if API_TOKEN:
    SESSION.headers.update({
        "Authorization": f"SSWS {API_TOKEN}",
        "Accept": "application/json",
    })
else:
    SESSION.headers.update({"Accept": "application/json"})


def require_token():
    if not API_TOKEN:
        sys.stderr.write(
            "ERROR: Missing OKTA_API_TOKEN. Create a .env with:\n"
            " OKTA_DOMAIN=\"gallaudet.okta.com\"\n"
            " OKTA_API_TOKEN=\"xxxxxxxx\"\n"
            " OKTA_APP_ID=\"0oa...\"  # optional\n"
        )
        sys.exit(1)


# ---------------- HTTP helpers ----------------
def retry_get(url, params=None, max_tries=5):
    params = dict(params or {})
    delay = 0.5
    for i in range(max_tries):
        r = SESSION.get(url, params=params, timeout=DEFAULT_TIMEOUT)
        if r.status_code in (429, 500, 502, 503, 504):
            rem = r.headers.get("X-Rate-Limit-Remaining")
            reset = r.headers.get("X-Rate-Limit-Reset")
            sys.stderr.write(f"[backoff] {r.status_code} remaining={rem} reset={reset} try={i+1}\n")
            if i == max_tries - 1:
                r.raise_for_status()
            time.sleep(delay)
            delay *= 1.7
            continue
        r.raise_for_status()
        return r
    raise RuntimeError("Unreachable")

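# Timing note on the backoff above: with max_tries=5 and delay *= 1.7, the
# sleeps between retries are 0.5s, 0.85s, ~1.45s, and ~2.46s, so an
# exhausted retry cycle waits roughly 5.3s in total before raising.
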
def get_with_pagination(url, params=None):
    params = dict(params or {})
    while True:
        r = retry_get(url, params=params)
        data = r.json()
        if isinstance(data, list):
            for item in data:
                yield item
        else:
            yield data
        nxt = r.links.get("next", {}).get("url")
        if not nxt:
            break
        url, params = nxt, {}


# ---------------- Date/time helpers ----------------
def iso_to_dt(iso_str):
    if not iso_str:
        return None
    try:
        s = str(iso_str)
        if s.endswith("Z"):
            s = s.replace("Z", "+00:00")
        return datetime.fromisoformat(s).astimezone(timezone.utc)
    except Exception:
        return None


def fmt_utc(dt_utc):
    return dt_utc.isoformat() if dt_utc else ""


def days_between(a_dt, b_dt):
    if not a_dt or not b_dt:
        return ""
    return math.floor((b_dt - a_dt).total_seconds() / 86400.0)


FILETIME_EPOCH = datetime(1601, 1, 1, tzinfo=timezone.utc)


def parse_adpwdlastset(value):
    if value is None or value == "":
        return None
    dt = iso_to_dt(value)
    if dt:
        return dt
    try:
        s = str(value).strip()
        if s.isdigit():
            n = int(s)
            if n > 10_000_000_000_000:  # FILETIME
                seconds = n / 10_000_000
                return FILETIME_EPOCH + timedelta(seconds=seconds)
            elif n > 10_000_000_000:  # ms epoch
                return datetime.fromtimestamp(n / 1000.0, tz=timezone.utc)
            else:  # sec epoch
                return datetime.fromtimestamp(n, tz=timezone.utc)
    except Exception:
        pass
    try:
        f = float(str(value).strip())
        if f > 0:
            if f > 10_000_000_000:  # ms
                return datetime.fromtimestamp(f / 1000.0, tz=timezone.utc)
            return datetime.fromtimestamp(f, tz=timezone.utc)
    except Exception:
        pass
    return None

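# Worked example for the FILETIME branch (a property of the FILETIME scale,
# not data from this repo): AD encodes pwdLastSet as 100-nanosecond ticks
# since 1601-01-01 UTC, so 116444736000000000 ticks / 10_000_000 =
# 11_644_473_600 seconds, which lands exactly on the Unix epoch:
#
#     >>> parse_adpwdlastset("116444736000000000")
#     datetime.datetime(1970, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)
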
def derive_password_expired_flag(user_obj):
    if user_obj.get("passwordExpired") is True:
        return True
    status = (user_obj.get("status") or "").upper()
    if status == "PASSWORD_EXPIRED":
        return True
    cred = user_obj.get("credentials") or {}
    pwd = cred.get("password") or {}
    if (pwd.get("status") or "").upper() == "EXPIRED":
        return True
    return False


# ---------------- Fetchers ----------------
def get_all_users(only_active=False):
    users = list(get_with_pagination(USERS_URL, params={"limit": 200}))
    if only_active:
        users = [u for u in users if (u.get("status") or "").upper() == "ACTIVE"]
    return users


def get_user_by_login(user_login):
    r = retry_get(USERS_URL, params={"filter": f'profile.login eq "{user_login}"'})
    data = r.json()
    if isinstance(data, list):
        return data
    return [data] if data else []


def get_user_factors(user_id):
    url = f"{USERS_URL}/{user_id}/factors"
    r = retry_get(url, params={"limit": 200})
    data = r.json()
    return data if isinstance(data, list) else []


def normalize_login(user_arg: str) -> str:
    if "@" in user_arg:
        return user_arg
    return f"{user_arg}@gallaudet.edu"


# ---------------- Column set ----------------
PROFILE_COLUMNS_FROM_FIRST = [
    "firstName","lastName","email","title","organization",
    "wdEmployeeRole","wdJobProfile","status","employeeStatus","wdHireDate",
    "wdTerminated","wdTerminationDate","wdHasAcademicAppointment","wdFutureHire",
    "InstructorKeepActiveTo","wdTerminatedWorkerKeepActiveTo","department",
    "wdJobFamilyGroup","wdEmployeeType","userRole","wdIsWorkerActiveStudent",
    "created","activated","statusChanged","lastLogin","lastUpdated",
    "passwordChanged","ADpwdLastSet","displayName","login",
    "usernameWithoutDomain","secondEmail"
]

ALL_COLUMNS = PROFILE_COLUMNS_FROM_FIRST + [
    "status_upper","is_locked_out",
    "created_utc","activated_utc","lastUpdated_utc","statusChanged_utc",
    "deprovisioned_date","lastLockedOut",
    "division","manager","costCenter","userType",
    "ADpwdLastSet_raw","ADpwdLastSet_utc","ADpwdLastSet_days_since",
    "password_last_set_utc","days_since_password_last_set",
    "maxAgeDays_hardcoded","estimated_expire_utc","days_until_expiry",
    "last_successful_signin_utc","days_since_last_successful_signin",
    "passwordExpired","id",
    "mfa_enrolled","mfa_factors"
]


# ---------------- Row builder ----------------
def build_row(user, now_utc, include_mfa: bool):
    profile = user.get("profile") or {}
    status = (user.get("status") or "")
    status_u = status.upper()

    row = {}
    for k in PROFILE_COLUMNS_FROM_FIRST:
        if k in ("status","created","activated","statusChanged","lastLogin","lastUpdated","passwordChanged"):
            row[k] = user.get(k, "")
        elif k == "usernameWithoutDomain":
            login = profile.get("login","")
            row[k] = login.split("@")[0] if "@" in login else login
        else:
            row[k] = profile.get(k, "")

    row["id"] = user.get("id","")
    row["status_upper"] = status_u

    created_dt = iso_to_dt(user.get("created"))
    activated_dt = iso_to_dt(user.get("activated"))
    last_updated_dt = iso_to_dt(user.get("lastUpdated"))
    status_changed_dt = iso_to_dt(user.get("statusChanged"))
    last_login_dt = iso_to_dt(user.get("lastLogin"))

    row["created_utc"] = fmt_utc(created_dt)
    row["activated_utc"] = fmt_utc(activated_dt)
    row["lastUpdated_utc"] = fmt_utc(last_updated_dt)
    row["statusChanged_utc"] = fmt_utc(status_changed_dt)

    row["is_locked_out"] = "Yes" if status_u == "LOCKED_OUT" else "No"
    row["lastLockedOut"] = row["statusChanged_utc"] if status_u == "LOCKED_OUT" else ""
    row["deprovisioned_date"] = row["statusChanged_utc"] if status_u == "DEPROVISIONED" else ""

    row["division"] = profile.get("division","")
    row["manager"] = profile.get("manager","")
    row["costCenter"] = profile.get("costCenter","")
    row["userType"] = profile.get("userType","")

    ad_raw = profile.get("ADpwdLastSet", "")
    ad_dt = parse_adpwdlastset(ad_raw)
    row["ADpwdLastSet_raw"] = ad_raw if ad_raw is not None else ""
    row["ADpwdLastSet"] = ad_raw if ad_raw is not None else ""
    row["ADpwdLastSet_utc"] = fmt_utc(ad_dt)
    row["ADpwdLastSet_days_since"] = days_between(ad_dt, now_utc)

    last_set_dt = ad_dt
    row["password_last_set_utc"] = fmt_utc(last_set_dt)
    row["days_since_password_last_set"] = days_between(last_set_dt, now_utc)

    if last_set_dt:
        expire_dt = last_set_dt + timedelta(days=FIXED_MAX_AGE_DAYS)
        row["maxAgeDays_hardcoded"] = FIXED_MAX_AGE_DAYS
        row["estimated_expire_utc"] = fmt_utc(expire_dt)
        row["days_until_expiry"] = days_between(now_utc, expire_dt)
    else:
        row["maxAgeDays_hardcoded"] = ""
        row["estimated_expire_utc"] = ""
        row["days_until_expiry"] = ""

    row["last_successful_signin_utc"] = fmt_utc(last_login_dt)
    row["days_since_last_successful_signin"] = days_between(last_login_dt, now_utc)
    row["passwordExpired"] = "True" if derive_password_expired_flag(user) else "False"

    if include_mfa:
        mfa_factors = []
        try:
            factors = get_user_factors(row["id"])
            for f in factors:
                ftype = (f or {}).get("factorType") or (f or {}).get("provider")
                if ftype:
                    mfa_factors.append(str(ftype).lower())
            mfa_factors = sorted(set(mfa_factors))
        except requests.HTTPError as e:
            sys.stderr.write(f"Warning: factors fetch failed for {row['id']}: {e}\n")
        row["mfa_enrolled"] = "Yes" if mfa_factors else "No"
        row["mfa_factors"] = ",".join(mfa_factors)
    else:
        row["mfa_enrolled"] = ""
        row["mfa_factors"] = ""

    return row


# ---------------- Main ----------------
def main():
    parser = argparse.ArgumentParser(
        description="Merged Okta users export (profiles + lifecycle/login + ADpwdLastSet-based expiry + MFA)."
    )
    mg = parser.add_mutually_exclusive_group(required=True)
    mg.add_argument("--user", help="Username (e.g., 'jared.evans') or full email")
    mg.add_argument("--all", action="store_true", help="Export all users")

    parser.add_argument("--only-active", action="store_true",
                        help="With --all, include only ACTIVE users")
    parser.add_argument("--skip-mfa", action="store_true",
                        help="Skip calling /factors for each user (faster for large exports)")
    parser.add_argument("--out", default="okta_users_merged.csv",
                        help="Output CSV path (default: okta_users_merged.csv)")
    args = parser.parse_args()

    require_token()

    if args.user:
        login = normalize_login(args.user)
        print(f"Fetching user {login}...")
        users = get_user_by_login(login)
        if not users:
            print(f"No user found with login: {login}")
            sys.exit(1)
    else:
        print(f"Fetching users (only_active={args.only_active})...")
        users = get_all_users(only_active=args.only_active)
        print(f"Found {len(users)} users.")

    now_utc = datetime.now(timezone.utc)
    rows = []
    for idx, u in enumerate(users, start=1):
        rows.append(build_row(u, now_utc, include_mfa=(not args.skip_mfa)))
        if idx % 200 == 0 and not args.user:
            print(f"Processed {idx}/{len(users)} users...")

    with open(args.out, "w", newline="", encoding="utf-8") as f:
        w = csv.DictWriter(f, fieldnames=ALL_COLUMNS)
        w.writeheader()
        for r in rows:
            w.writerow(r)

    print(f"Done. Wrote {args.out}")


if __name__ == "__main__":
    main()
212
okta_search_logs/mfa_not_enrolled.py
Normal file
@@ -0,0 +1,212 @@
#!/usr/bin/env python3
"""
Report: Users NOT enrolled in MFA

Examples:
  # Active users only (default):
  python3 mfa_not_enrolled.py --out no_mfa_active.csv

  # All statuses except DEPROVISIONED:
  python3 mfa_not_enrolled.py --all-statuses --out no_mfa_all.csv

  # Include DEPROVISIONED as well:
  python3 mfa_not_enrolled.py --all-statuses --include-deprovisioned --out no_mfa_all_incl_deprov.csv
"""

import os
import re
import sys
import csv
import time
import argparse
from datetime import datetime, timezone

import requests

# ---------------- .env loading (KEY=VALUE; quotes ok) ----------------
_ENV_LINE_RE = re.compile(r'^\s*([A-Za-z_][A-Za-z0-9_]*)\s*=\s*(.*)\s*$')


def _strip_quotes(v: str) -> str:
    v = v.strip()
    return v[1:-1] if len(v) >= 2 and v[0] == v[-1] and v[0] in ("'", '"') else v


def load_env():
    script_dir = os.path.dirname(os.path.abspath(__file__))
    for p in (os.path.join(script_dir, ".env"), os.path.join(os.getcwd(), ".env")):
        if os.path.exists(p):
            with open(p, "r", encoding="utf-8") as f:
                for raw in f:
                    s = raw.strip()
                    if not s or s.startswith("#"):
                        continue
                    m = _ENV_LINE_RE.match(s)
                    if not m:
                        continue
                    k, v = m.group(1), _strip_quotes(m.group(2))
                    if k and k not in os.environ:
                        os.environ[k] = v


load_env()

# ---------------- Config ----------------
OKTA_DOMAIN = os.getenv("OKTA_DOMAIN", "gallaudet.okta.com")
API_TOKEN = os.getenv("OKTA_API_TOKEN")

if not API_TOKEN:
    sys.stderr.write("ERROR: Missing OKTA_API_TOKEN in .env\n")
    sys.exit(1)

BASE_URL = f"https://{OKTA_DOMAIN}"
USERS_URL = f"{BASE_URL}/api/v1/users"

DEFAULT_TIMEOUT = 15

# ---------------- HTTP session with backoff ----------------
SESSION = requests.Session()
SESSION.headers.update({"Authorization": f"SSWS {API_TOKEN}", "Accept": "application/json"})


def retry_get(url, params=None, max_tries=5):
    params = dict(params or {})
    delay = 0.5
    for i in range(max_tries):
        r = SESSION.get(url, params=params, timeout=DEFAULT_TIMEOUT)
        if r.status_code in (429, 500, 502, 503, 504):
            rem = r.headers.get("X-Rate-Limit-Remaining")
            reset = r.headers.get("X-Rate-Limit-Reset")
            sys.stderr.write(f"[backoff] {r.status_code} remaining={rem} reset={reset} try={i+1}\n")
            if i == max_tries - 1:
                r.raise_for_status()
            time.sleep(delay)
            delay *= 1.7
            continue
        r.raise_for_status()
        return r
    raise RuntimeError("unreachable")


def get_with_pagination(url, params=None):
    params = dict(params or {})
    while True:
        r = retry_get(url, params=params)
        data = r.json()
        if isinstance(data, list):
            for item in data:
                yield item
        else:
            yield data
        nxt = r.links.get("next", {}).get("url")
        if not nxt:
            break
        url, params = nxt, {}


# ---------------- Helpers ----------------
def iso_to_dt(s):
    if not s:
        return None
    try:
        s = s.replace("Z", "+00:00") if s.endswith("Z") else s
        return datetime.fromisoformat(s).astimezone(timezone.utc)
    except Exception:
        return None


def fmt_utc(dt):
    return dt.isoformat() if dt else ""


def get_user_factors(user_id: str):
    url = f"{USERS_URL}/{user_id}/factors"
    r = retry_get(url, params={"limit": 200})
    data = r.json()
    return data if isinstance(data, list) else []


def list_users(active_only: bool, include_deprov: bool, all_statuses: bool):
    """
    Build an Okta filter for statuses:
      - default: ACTIVE only
      - --all-statuses: everything except DEPROVISIONED (unless --include-deprovisioned)
    """
    if active_only and not all_statuses:
        filter_str = 'status eq "ACTIVE"'
    else:
        # Okta supports OR filters. Include common states; optionally deprovisioned.
        statuses = ['"ACTIVE"', '"STAGED"', '"PROVISIONED"', '"RECOVERY"', '"PASSWORD_EXPIRED"', '"LOCKED_OUT"']
        if include_deprov:
            statuses.append('"DEPROVISIONED"')
        filter_str = " or ".join(f"status eq {s}" for s in statuses)

    params = {"limit": 200, "filter": filter_str}
    return list(get_with_pagination(USERS_URL, params=params))

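# Illustrative expansion: with --all-statuses and --include-deprovisioned,
# the filter built above is
#   status eq "ACTIVE" or status eq "STAGED" or status eq "PROVISIONED" or
#   status eq "RECOVERY" or status eq "PASSWORD_EXPIRED" or
#   status eq "LOCKED_OUT" or status eq "DEPROVISIONED"
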
def build_row(user, factors_list):
    profile = user.get("profile") or {}
    status = (user.get("status") or "")
    created = iso_to_dt(user.get("created"))
    last_login = iso_to_dt(user.get("lastLogin"))
    last_updated = iso_to_dt(user.get("lastUpdated"))

    # normalize factor names
    ftypes = []
    for f in factors_list:
        ftype = (f or {}).get("factorType") or (f or {}).get("provider")
        if ftype:
            ftypes.append(str(ftype).lower())
    ftypes = sorted(set(ftypes))
    mfa_enrolled = "Yes" if ftypes else "No"

    return {
        "id": user.get("id", ""),
        "login": profile.get("login", ""),
        "email": profile.get("email", ""),
        "status": status,
        "created_utc": fmt_utc(created),
        "lastLogin_utc": fmt_utc(last_login),
        "lastUpdated_utc": fmt_utc(last_updated),
        "mfa_enrolled": mfa_enrolled,
        "mfa_factors": ",".join(ftypes),
        "firstName": profile.get("firstName", ""),
        "lastName": profile.get("lastName", ""),
        "department": profile.get("department", ""),
        "title": profile.get("title", ""),
    }


# ---------------- Main ----------------
def main():
    ap = argparse.ArgumentParser(description="List accounts NOT enrolled in MFA.")
    ap.add_argument("--out", default="okta_users_without_mfa.csv", help="Output CSV file")
    ap.add_argument("--all-statuses", action="store_true",
                    help="Include all statuses (default is ACTIVE only).")
    ap.add_argument("--include-deprovisioned", action="store_true",
                    help="When --all-statuses is used, also include DEPROVISIONED.")
    args = ap.parse_args()

    active_only = not args.all_statuses
    users = list_users(active_only=active_only,
                       include_deprov=args.include_deprovisioned,
                       all_statuses=args.all_statuses)

    rows = []
    for idx, u in enumerate(users, 1):
        try:
            factors = get_user_factors(u.get("id",""))
        except requests.HTTPError as e:
            sys.stderr.write(f"Warning: factors fetch failed for {u.get('id')}: {e}\n")
            factors = []
        row = build_row(u, factors)
        if row["mfa_enrolled"] == "No":
            rows.append(row)
        if idx % 200 == 0:
            print(f"Processed {idx} users...")

    fieldnames = ["id","login","email","status","created_utc","lastLogin_utc","lastUpdated_utc",
                  "mfa_enrolled","mfa_factors","firstName","lastName","department","title"]

    with open(args.out, "w", newline="", encoding="utf-8") as f:
        w = csv.DictWriter(f, fieldnames=fieldnames)
        w.writeheader()
        for r in rows:
            w.writerow(r)

    print(f"Done. Found {len(rows)} user(s) without MFA. Wrote {args.out}")


if __name__ == "__main__":
    main()
2
okta_search_logs/okta_users_merged_jared.evans.csv
Normal file
@@ -0,0 +1,2 @@
firstName,lastName,email,title,organization,wdEmployeeRole,wdJobProfile,status,employeeStatus,wdHireDate,wdTerminated,wdTerminationDate,wdHasAcademicAppointment,wdFutureHire,InstructorKeepActiveTo,wdTerminatedWorkerKeepActiveTo,department,wdJobFamilyGroup,wdEmployeeType,userRole,wdIsWorkerActiveStudent,created,activated,statusChanged,lastLogin,lastUpdated,passwordChanged,ADpwdLastSet,displayName,login,usernameWithoutDomain,secondEmail,status_upper,is_locked_out,created_utc,activated_utc,lastUpdated_utc,statusChanged_utc,deprovisioned_date,lastLockedOut,division,manager,costCenter,userType,ADpwdLastSet_raw,ADpwdLastSet_utc,ADpwdLastSet_days_since,password_last_set_utc,days_since_password_last_set,maxAgeDays_hardcoded,estimated_expire_utc,days_until_expiry,last_successful_signin_utc,days_since_last_successful_signin,passwordExpired,id,mfa_enrolled,mfa_factors
Jared,Evans,jared.evans@gallaudet.edu,Information Security Officer,Gallaudet Technology Services,Gallaudet Staff,Senior Information Security Manager,ACTIVE,Active,2017-10-23,FALSE,,FALSE,FALSE,,,Relationship & Project Management,Administration,Regular,Staff,FALSE,2022-06-08T19:09:23.000Z,2022-07-26T20:10:06.000Z,2025-06-30T13:49:29.000Z,2025-11-10T19:34:48.000Z,2025-11-09T17:27:39.000Z,,2025-06-30T13:49:26+0000,Jared Evans,jared.evans@gallaudet.edu,jared.evans,jnevans@gmail.com,ACTIVE,No,2022-06-08T19:09:23+00:00,2022-07-26T20:10:06+00:00,2025-11-09T17:27:39+00:00,2025-06-30T13:49:29+00:00,,,,1003840,SO_00005,,2025-06-30T13:49:26+0000,2025-06-30T13:49:26+00:00,134,2025-06-30T13:49:26+00:00,134,180,2025-12-27T13:49:26+00:00,45,2025-11-10T19:34:48+00:00,0,False,00u1b2ex4bG0fnCGe697,Yes,"push,signed_nonce,sms,token:software:totp,webauthn"
365
okta_search_logs/show_system_logs_mfa_factors_for_user.py
Normal file
@@ -0,0 +1,365 @@
#!/home/jevans/audit_reports/okta_system_logs/.venv/bin/python
import os
import re
import sys
import csv
import json
from datetime import datetime, timedelta, timezone

import requests
import pytz

# ---------------- .env loading ----------------
_ENV_LINE_RE = re.compile(r'^\s*([A-Za-z_][A-Za-z0-9_]*)\s*=\s*(.*)\s*$')


def _strip_quotes(val: str) -> str:
    val = val.strip()
    if len(val) >= 2 and (val[0] == val[-1]) and val[0] in ("'", '"'):
        return val[1:-1]
    return val


def load_env():
    """Load KEY=VALUE pairs from .env in script dir or cwd."""
    script_dir = os.path.dirname(os.path.abspath(__file__))
    candidates = [os.path.join(script_dir, ".env"), os.path.join(os.getcwd(), ".env")]
    for path in candidates:
        if os.path.exists(path):
            with open(path, "r", encoding="utf-8") as f:
                for raw in f:
                    line = raw.strip()
                    if not line or line.startswith("#"):
                        continue
                    m = _ENV_LINE_RE.match(line)
                    if not m:
                        continue
                    key, val = m.group(1), _strip_quotes(m.group(2))
                    if key and key not in os.environ:
                        os.environ[key] = val


load_env()

# ---------------- Config ----------------
OKTA_DOMAIN = os.getenv("OKTA_DOMAIN", "gallaudet.okta.com")
API_TOKEN = os.getenv("OKTA_API_TOKEN")
LOGS_URL = f"https://{OKTA_DOMAIN}/api/v1/logs"
USER_URL = f"https://{OKTA_DOMAIN}/api/v1/users"

if not API_TOKEN:
    sys.stderr.write("ERROR: Missing OKTA_API_TOKEN in .env\n")
    sys.exit(1)

HEADERS = {
    "Authorization": f"SSWS {API_TOKEN}",
    "Accept": "application/json"
}

FIELD_ORDER = [
    "published","published_EST","actor.displayName","client.ipAddress",
    "client.geographicalContext.city","client.geographicalContext.state",
    "client.geographicalContext.country","client.geographicalContext.postalCode",
    "displayMessage","target.displayName","target.detailEntry",
    "eventType","outcome.result","outcome.reason",
    "client.userAgent.rawUserAgent","client.userAgent.os","client.userAgent.browser",
    "client.device","securityContext.asOrg","securityContext.isp","securityContext.domain",
]


# ---------------- Functions ----------------
def fetch_user_by_email(email):
    params = {"filter": f'profile.email eq "{email}"'}
    response = requests.get(USER_URL, headers=HEADERS, params=params)
    response.raise_for_status()
    users = response.json()
    return users[0] if users else None


def flatten_user_info(user):
    profile = user.get("profile", {})
    status = user.get("status", "")
    created = user.get("created", "")
    last_login = user.get("lastLogin", "")
    credentials = user.get("credentials", {})
    provider = credentials.get("provider", {}).get("type", "")
    return {
        "id": user.get("id", ""),
        "status": status,
        "created": created,
        "lastLogin": last_login,
        "login": profile.get("login", ""),
        "email": profile.get("email", ""),
        "firstName": profile.get("firstName", ""),
        "lastName": profile.get("lastName", ""),
        "displayName": profile.get("displayName", ""),
        "title": profile.get("title", ""),
        "department": profile.get("department", ""),
        "provider": provider,
    }


def print_user_info(user_flat):
    print("\nUser Information")
    print("-" * 60)
    for k, v in user_flat.items():
        print(f"{k}: {v}")
    print("-" * 60 + "\n")


def fetch_user_factors(user_id):
    url = f"{USER_URL}/{user_id}/factors"
    response = requests.get(url, headers=HEADERS)
    response.raise_for_status()
    return response.json()


def print_user_factors(factors):
    print("\nUser Enrolled Factors (MFA):")
    print("-" * 60)
    filtered = [f for f in factors if f.get('factorType') in ('sms', 'push')]
    if not filtered:
        print("No SMS or Push factors enrolled.")
    else:
        for f in filtered:
            factor_type = f.get('factorType')
            provider = f.get('provider')
            status = f.get('status')
            profile = f.get('profile', {})
            print(f"Type: {factor_type}, Provider: {provider}, Status: {status}")
            if profile:
                for k, v in profile.items():
                    if factor_type == 'push' and k == 'keys':
                        continue
                    print(f"  {k}: {v}")
            print()
    print("-" * 60)


def fetch_logs(actor_name, limit=100, since=None, until=None):
    filter_str = f'actor.displayName eq "{actor_name}"'
    params = {"limit": limit, "sortOrder": "DESCENDING", "filter": filter_str}
    if since:
        params["since"] = since
    if until:
        params["until"] = until

    logs = []
    url = LOGS_URL
    while True:
        response = requests.get(url, headers=HEADERS, params=params)
        response.raise_for_status()
        data = response.json()
        logs.extend(data)
        links = response.links
        if "next" in links:
            url = links["next"]["url"]
            params = {}
        else:
            break
        if len(logs) >= limit:
            break
    return logs[:limit]


def utc_to_est(utc_ts):
    if not utc_ts:
        return ""
    try:
        if utc_ts.endswith("Z"):
            utc_ts = utc_ts[:-1] + "+00:00"
        dt_utc = datetime.fromisoformat(utc_ts)
        eastern = pytz.timezone("US/Eastern")
        dt_est = dt_utc.astimezone(eastern)
        return dt_est.strftime("%Y-%m-%d %H:%M:%S %Z")
    except Exception as e:
        return f"Error: {e}"

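# Alternative (not used above): on Python 3.9+ the standard-library zoneinfo
# module can replace the pytz dependency here, e.g.:
#
#     from zoneinfo import ZoneInfo
#     dt_est = dt_utc.astimezone(ZoneInfo("US/Eastern"))
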
def flatten_log_event(event):
    actor = event.get("actor") or {}
    client = event.get("client") or {}
    user_agent = client.get("userAgent") or {}
    geo = client.get("geographicalContext") or {}
    outcome = event.get("outcome") or {}
    security = event.get("securityContext") or {}
    targets = event.get("target") or []
    published = event.get("published", "")
    published_est = utc_to_est(published)
    target_display_names = ", ".join([t.get("displayName", "") for t in targets])
    target_details = []
    for t in targets:
        if t.get("detailEntry"):
            target_details.append(json.dumps(t["detailEntry"], ensure_ascii=False))
        else:
            target_details.append("")
    target_detail_entries = " | ".join(target_details)
    return {
        "published": published,
        "published_EST": published_est,
        "actor.displayName": actor.get("displayName", ""),
        "client.ipAddress": client.get("ipAddress", ""),
        "client.geographicalContext.city": geo.get("city", ""),
        "client.geographicalContext.state": geo.get("state", ""),
        "client.geographicalContext.country": geo.get("country", ""),
        "client.geographicalContext.postalCode": geo.get("postalCode", ""),
        "displayMessage": event.get("displayMessage", ""),
        "target.displayName": target_display_names,
        "target.detailEntry": target_detail_entries,
        "eventType": event.get("eventType", ""),
        "outcome.result": outcome.get("result", ""),
        "outcome.reason": outcome.get("reason", ""),
        "client.userAgent.rawUserAgent": user_agent.get("rawUserAgent", ""),
        "client.userAgent.os": user_agent.get("os", ""),
        "client.userAgent.browser": user_agent.get("browser", ""),
        "client.device": client.get("device", ""),
        "securityContext.asOrg": security.get("asOrg", ""),
        "securityContext.isp": security.get("isp", ""),
        "securityContext.domain": security.get("domain", ""),
    }


def print_log_event(event):
    published = event.get("published", "N/A")
    published_est = utc_to_est(published)
    print(f"published: {published}")
    print(f"published_EST: {published_est}")
    actor = event.get("actor") or {}
    print(f"actor.displayName: {actor.get('displayName', 'N/A')}")
    client = event.get("client") or {}
    print(f"client.ipAddress: {client.get('ipAddress', 'N/A')}")
    geo = client.get("geographicalContext") or {}
    print(f"client.geographicalContext.city: {geo.get('city', 'N/A')}")
    print(f"client.geographicalContext.state: {geo.get('state', 'N/A')}")
    print(f"client.geographicalContext.country: {geo.get('country', 'N/A')}")
    print(f"client.geographicalContext.postalCode: {geo.get('postalCode', 'N/A')}")
    print(f"displayMessage: {event.get('displayMessage', 'N/A')}")
    targets = event.get("target") or []
    print(f"target.displayName: {', '.join([t.get('displayName', '') for t in targets])}")
    print(f"target.detailEntry: {' | '.join([json.dumps(t['detailEntry'], ensure_ascii=False) if t.get('detailEntry') else '' for t in targets])}")
    print(f"eventType: {event.get('eventType', 'N/A')}")
    outcome = event.get("outcome") or {}
    print(f"outcome.result: {outcome.get('result', 'N/A')}")
    print(f"outcome.reason: {outcome.get('reason', 'N/A')}")
    user_agent = client.get("userAgent") or {}
    print(f"client.userAgent.rawUserAgent: {user_agent.get('rawUserAgent', 'N/A')}")
    print(f"client.userAgent.os: {user_agent.get('os', 'N/A')}")
    print(f"client.userAgent.browser: {user_agent.get('browser', 'N/A')}")
    print(f"client.device: {client.get('device', 'N/A')}")
    security = event.get("securityContext") or {}
    print(f"securityContext.asOrg: {security.get('asOrg', 'N/A')}")
    print(f"securityContext.isp: {security.get('isp', 'N/A')}")
    print(f"securityContext.domain: {security.get('domain', 'N/A')}")
    print("-" * 60)


def print_mfa_enrollments_and_removals(logs):
    print("\nRecent MFA Enrollment and Removal Events (Last 90 Days):")
    print("-" * 60)
    found = False
    for event in logs:
        etype = event.get("eventType", "")
        if etype in ["user.mfa.factor.activate", "user.mfa.factor.deactivate"]:
            found = True
            published = event.get("published", "")
            published_est = utc_to_est(published)
            actor = event.get("actor", {}).get("displayName", "")
            display_message = event.get("displayMessage", "")
            target_names = ", ".join([t.get("displayName", "") for t in event.get("target", [])])
            print(f"{published_est} | {etype.replace('user.mfa.factor.','').capitalize()} | By: {actor} | Target: {target_names}")
            print(f"  {display_message}\n")
    if not found:
        print("No recent MFA enrollment/removal events found.")
    print("-" * 60)


def print_failed_and_suspicious_events(logs):
    print("\nFailed Logins and Suspicious Events (Last 90 Days):")
    print("-" * 60)
    suspicious_types = [
        "user.authentication.failed","user.account.lock","user.authentication.sso.failed",
        "user.mfa.verification.failed","security.policy.violation","user.session.impersonation.initiate"
    ]
    found = False
    for event in logs:
        etype = event.get("eventType", "")
        if etype in suspicious_types:
            found = True
            published = event.get("published", "")
            published_est = utc_to_est(published)
            actor = event.get("actor", {}).get("displayName", "")
            display_message = event.get("displayMessage", "")
            client = event.get("client", {}) or {}
            ip = client.get("ipAddress", "")
            geo = client.get("geographicalContext", {}) or {}
            city = geo.get("city", "")
            country = geo.get("country", "")
            user_agent = (client.get("userAgent", {}) or {}).get("rawUserAgent", "")
            print(f"{published_est} | {etype} | By: {actor} | IP: {ip} | {city}, {country}")
            print(f"  UA: {user_agent}")
            print(f"  {display_message}\n")
    if not found:
        print("No failed logins or suspicious events found.")
    print("-" * 60)


def sanitize_filename(s):
    return re.sub(r'[^A-Za-z0-9]+', '_', s.strip()) or "output"


# ---------------- Main ----------------
def main():
    username = input("Enter the user's name (e.g., jared.evans): ").strip()
    email = username if '@' in username else f"{username}@gallaudet.edu"
    print("Retrieving user info from Okta...")
    user_obj = fetch_user_by_email(email)
    if not user_obj:
        print(f"No user found for email '{email}'. Exiting.")
        sys.exit(1)

    user_flat = flatten_user_info(user_obj)
    display_name = user_obj.get("profile", {}).get("displayName", "")
    if not display_name:
        print("Could not find displayName for this user. Exiting.")
        sys.exit(1)

    # Prompt for the user-selected system log window
    while True:
        try:
            limit = int(input("How many results do you want to return? (1-1000): ").strip())
            if 1 <= limit <= 1000:
                break
            print("Please enter a number between 1 and 1000.")
        except ValueError:
            print("Please enter a valid integer.")

    while True:
        try:
            days = int(input("How many days back do you want to search for general events? (1-90): ").strip())
            if 1 <= days <= 90:
                break
            print("Please enter a number between 1 and 90.")
        except ValueError:
            print("Please enter a valid integer.")

    until_time = datetime.now(timezone.utc)
    since_time = until_time - timedelta(days=days)
    since_iso = since_time.isoformat().replace("+00:00", "Z")
    until_iso = until_time.isoformat().replace("+00:00", "Z")
    print(f"Fetching logs from {since_iso} to {until_iso} for actor.displayName eq \"{display_name}\"...")
    logs = fetch_logs(actor_name=display_name, limit=limit, since=since_iso, until=until_iso)
    print(f"Retrieved {len(logs)} log events:\n")

    flat_rows = []
    for event in logs:
        print_log_event(event)
        flat_rows.append(flatten_log_event(event))

    # Always fetch 90 days for special events
    special_until_time = datetime.now(timezone.utc)
    special_since_time = special_until_time - timedelta(days=90)
    logs_90d = fetch_logs(actor_name=display_name, limit=1000,
                          since=special_since_time.isoformat().replace("+00:00", "Z"),
                          until=special_until_time.isoformat().replace("+00:00", "Z"))

    print_user_info(user_flat)
    user_id = user_obj["id"]
    factors = fetch_user_factors(user_id)
    print_user_factors(factors)
    print_mfa_enrollments_and_removals(logs_90d)
    print_failed_and_suspicious_events(logs_90d)

    if logs:
        safe_actor = sanitize_filename(display_name)
        filename = f"{safe_actor}_results.csv"
        with open(filename, "w", newline='', encoding="utf-8") as f:
            writer = csv.DictWriter(f, fieldnames=FIELD_ORDER, extrasaction="ignore")
            writer.writeheader()
            for row in flat_rows:
                writer.writerow(row)
        print(f"\nResults saved to: {filename}")


if __name__ == "__main__":
    main()
130
okta_search_logs/show_values_from_ad.py
Normal file
@@ -0,0 +1,130 @@
#!/usr/bin/env python3
import os
import sys
import json
import requests
from urllib.parse import quote

# ---------------- .env loading (KEY=VALUE; quotes supported) ----------------
def _strip_quotes(val: str) -> str:
    val = val.strip()
    if len(val) >= 2 and (val[0] == val[-1]) and val[0] in ("'", '"'):
        return val[1:-1]
    return val


def _load_env_file(path: str) -> None:
    if not os.path.exists(path):
        return
    with open(path, "r", encoding="utf-8") as f:
        for raw in f:
            s = raw.strip()
            if not s or s.startswith("#"):
                continue
            if "=" not in s:
                continue
            k, v = s.split("=", 1)
            k = k.strip()
            v = _strip_quotes(v)
            if k and k not in os.environ:
                os.environ[k] = v


def load_env():
    """Load .env from script dir then CWD; no 'export' needed."""
    script_dir = os.path.dirname(os.path.abspath(__file__))
    for p in (os.path.join(script_dir, ".env"), os.path.join(os.getcwd(), ".env")):
        _load_env_file(p)


load_env()

# ---------------- Config ----------------
OKTA_DOMAIN = os.getenv("OKTA_DOMAIN")  # e.g., "gallaudet.okta.com"
API_TOKEN = os.getenv("OKTA_API_TOKEN")  # required
APP_ID = os.getenv("OKTA_APP_ID")  # required: the target Okta appId
DEFAULT_EMAIL_DOMAIN = os.getenv("DEFAULT_EMAIL_DOMAIN", "gallaudet.edu")

if not (OKTA_DOMAIN and API_TOKEN and APP_ID):
    sys.stderr.write(
        "ERROR: Missing required settings. Ensure your .env contains:\n"
        " OKTA_DOMAIN=\"gallaudet.okta.com\"\n"
        " OKTA_API_TOKEN=\"xxxxx\"\n"
        " OKTA_APP_ID=\"0oa...\"\n"
        "Optional:\n"
        " DEFAULT_EMAIL_DOMAIN=\"gallaudet.edu\"\n"
    )
    sys.exit(1)

BASE_URL = f"https://{OKTA_DOMAIN}"
USERS_URL = f"{BASE_URL}/api/v1/users"
APPS_URL = f"{BASE_URL}/api/v1/apps"


# ---------------- HTTP helper ----------------
def req(method, url, **kw):
    headers = kw.pop("headers", {})
    headers["Authorization"] = f"SSWS {API_TOKEN}"
    headers["Accept"] = "application/json"
    return requests.request(method, url, headers=headers, timeout=15, **kw)


# ---------------- Args ----------------
if len(sys.argv) != 2:
    print(f"Usage: {sys.argv[0]} <USER_ID_or_LOGIN>")
    print("Examples:")
    print(f" {sys.argv[0]} 00u1abcdE2FGHIJKL3p4")
    print(f" {sys.argv[0]} jared.evans@gallaudet.edu")
    print(f" {sys.argv[0]} jared.evans  # will append @{DEFAULT_EMAIL_DOMAIN}")
    sys.exit(1)

user_arg = sys.argv[1]


def normalize_login(s: str) -> str:
    return s if "@" in s else f"{s}@{DEFAULT_EMAIL_DOMAIN}"


# ---------------- Okta helpers ----------------
def find_user_id_by_login(login: str):
    r = req("GET", USERS_URL, params={"filter": f'profile.login eq "{login}"', "limit": "1"})
    if r.status_code != 200:
        raise RuntimeError(f"User lookup error {r.status_code}: {r.text}")
    data = r.json()
    if isinstance(data, list) and data:
        return data[0].get("id"), data[0]
    return None, None


def get_app_user_assignment(app_id: str, user_id: str) -> requests.Response:
    url = f"{APPS_URL}/{quote(app_id)}/users/{quote(user_id)}"
    return req("GET", url)


# ---------------- Main flow ----------------
# Resolve user id if needed
if user_arg.startswith("00u"):  # looks like an Okta user id
    user_id = user_arg
    resolved_login = None
else:
    login = normalize_login(user_arg)
    user_id, user_obj = find_user_id_by_login(login)
    if not user_id:
        print(f"User not found for login '{login}'.")
        sys.exit(1)
    resolved_login = login
    print(f"Resolved login '{login}' to Okta user id: {user_id}")

# Fetch assignment for the fixed app
resp = get_app_user_assignment(APP_ID, user_id)

if resp.status_code == 200:
    print(json.dumps(resp.json(), indent=2))
    sys.exit(0)

# Helpful diagnostics
if resp.status_code == 404:
    # 404 can mean: user not assigned to app, bad app id, or masked permission issue
    app_check = req("GET", f"{APPS_URL}/{quote(APP_ID)}")
    if app_check.status_code == 404:
        print(f"App not found: appId '{APP_ID}'.")
    elif app_check.status_code == 200:
        who = resolved_login or user_id
        print(f"User '{who}' (id {user_id}) is likely NOT assigned to app '{APP_ID}', or you lack permission.")
    else:
        print(f"Assignment 404; app check returned {app_check.status_code}: {app_check.text}")
    sys.exit(1)

print(f"Error {resp.status_code}: {resp.text}")
sys.exit(1)
Binary file not shown.
326
workday_monthly_admin_activities_audit/bkup-process.py
Normal file
@@ -0,0 +1,326 @@
# process.py
import os
import sys
import json
import re
import hashlib
from collections import Counter, defaultdict

import requests
import pandas as pd
import spacy
from openai import OpenAI
from tqdm import tqdm

# =========================
# Configuration
# =========================
# DEFAULT_LM_IP = "192.168.1.221"  # default LM Studio host (without /v1)
DEFAULT_LM_IP = "10.81.209.99"  # default LM Studio host (without /v1)
LLM_MODEL = "openai/gpt-oss-20b"
LLM_API_KEY = "not-needed"  # LM Studio typically doesn't require an API key

INPUT_CSV = "test.csv"
OUTPUT_CSV = "test_with_names.csv"
EVENT_LOG = "event_log.txt"

# Columns to process
SOURCE_COL_1 = "Instance that Changed"
TARGET_COL_1 = "Applied to"

SOURCE_COL_2 = "Added"
TARGET_COL_2 = "Added Applied to"

ENTERED_COL = "Entered On"
ENTERED_MMDD_COL = "Entered On (MM/DD)"

# Values to ignore entirely (case-insensitive)
AUTO_STRINGS = {"automatic complete"}

def is_auto(val) -> bool:
    return isinstance(val, str) and val.strip().lower() in AUTO_STRINGS

# Regex helpers
DELIM_SPLIT = re.compile(r"\s*[\/|\-–—]\s*")
KEEP_CHARS = re.compile(r"[^A-Za-zÀ-ÖØ-öø-ÿ' .\-]")

def clean_person(text: str) -> str:
    """Clean extracted name by removing job codes/fragments after dashes/slashes; keep name-ish chars."""
    if not text:
        return ""
    first = DELIM_SPLIT.split(text, maxsplit=1)[0]
    first = KEEP_CHARS.sub("", first).strip()
    return re.sub(r"\s{2,}", " ", first)

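# Illustrative behavior of clean_person (hypothetical inputs, not taken from the audit data):
#   clean_person("Alexander Leffers - P000621 Communication Manager")  ->  "Alexander Leffers"
#   clean_person("jcano / Josh Cano")                                  ->  "jcano"
# Caveat: the first dash or slash wins, so hyphenated surnames are truncated here.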

# =========================
# LM Studio reachability
# =========================
def check_lmstudio(ip: str) -> str:
    """
    Ensure LM Studio endpoint is reachable; if not, prompt for IP until it is.
    Returns the validated base URL like "http://<ip>:1234/v1".
    """
    def _ok(url: str) -> bool:
        try:
            r = requests.get(url.rstrip("/") + "/models", timeout=5)
            return r.status_code == 200
        except Exception:
            return False

    base_url = f"http://{ip}:1234/v1"
    if _ok(base_url):
        print(f"✅ LM Studio reachable at {base_url}")
        return base_url

    print(f"❌ Could not reach LM Studio at {base_url}")
    while True:
        new_ip = input("Enter LM Studio IP address (e.g. 192.168.1.221): ").strip()
        if not new_ip:
            print("Aborted: No IP provided.")
            sys.exit(1)
        base_url = f"http://{new_ip}:1234/v1"
        print(f"🔍 Retesting {base_url}...")
        if _ok(base_url):
            print(f"✅ LM Studio reachable at {base_url}")
            return base_url
        else:
            print("❌ Still unreachable. Try again or Ctrl+C to exit.")

# Perform reachability check BEFORE any processing
LLM_BASE_URL = check_lmstudio(DEFAULT_LM_IP)
client = OpenAI(base_url=LLM_BASE_URL, api_key=LLM_API_KEY)

# =========================
# spaCy model (Transformer)
# =========================
print("🔍 Loading spaCy transformer model: en_core_web_trf")
nlp = spacy.load(
    "en_core_web_trf",
    exclude=["parser", "tagger", "attribute_ruler", "lemmatizer", "morphologizer"],
)
print("✅ spaCy model loaded successfully.")

def extract_names(text: str) -> str:
    """Extract distinct PERSON names using spaCy Transformer model."""
    if not isinstance(text, str) or not text.strip():
        return ""
    doc = nlp(text)
    names, seen = [], set()
    for ent in doc.ents:
        if ent.label_ == "PERSON":
            cleaned = clean_person(ent.text)
            key = cleaned.lower()
            if cleaned and key not in seen:
                seen.add(key)
                names.append(cleaned)
    return ", ".join(names)

def insert_after(df: pd.DataFrame, after_col: str, new_col: str, values: pd.Series) -> None:
    """Insert new_col immediately after after_col (drop existing if present)."""
    if new_col in df.columns:
        df.drop(columns=[new_col], inplace=True)
    idx = df.columns.get_loc(after_col) + 1
    df.insert(idx, new_col, values)

def dataframe_to_compact_event(df: pd.DataFrame) -> str:
    """Compact JSON payload for a grouped event (keeps unique values per column)."""
    def uniq(col):
        return sorted([v for v in df[col].dropna().unique().tolist()]) if col in df else []
    payload = {
        "applied_to": uniq(TARGET_COL_1),
        "by_user": uniq("By User"),
        "in_transaction": uniq("In Transaction"),
        "entered_on": uniq(ENTERED_COL),
        "dates_mmdd": uniq(ENTERED_MMDD_COL),
        "instances": uniq(SOURCE_COL_1),
        "added": uniq(SOURCE_COL_2),
        "row_count": int(len(df)),
    }
    return json.dumps(payload, ensure_ascii=False, indent=2)

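# Example payload shape (illustrative values only, not real audit rows):
# {
#   "applied_to": ["Jane Doe"],
#   "by_user": ["1000000 / Sam Admin"],
#   "in_transaction": ["Assign Roles"],
#   "entered_on": ["11/03/25 09:15 AM"],
#   "dates_mmdd": ["11/03"],
#   "instances": ["P000001 Example Role - Jane Doe"],
#   "added": [],
#   "row_count": 2
# }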

# =========================
# Main flow
# =========================

# If processed CSV already exists, skip straight to summarization
if os.path.exists(OUTPUT_CSV):
    print(f"⚡ Skipping CSV processing — {OUTPUT_CSV} already exists.")
    df = pd.read_csv(OUTPUT_CSV)
    # Ensure MM/DD exists (for old CSVs)
    if ENTERED_MMDD_COL not in df.columns and ENTERED_COL in df.columns:
        ts = pd.to_datetime(df[ENTERED_COL], errors="coerce")
        df[ENTERED_MMDD_COL] = ts.dt.strftime("%m/%d").fillna("")
else:
    print("⚙️ Processing CSV to extract names and generate output...")

    # Load CSV
    df = pd.read_csv(INPUT_CSV)

    # Derive Entered On (MM/DD)
    if ENTERED_COL in df.columns:
        try:
            ts = pd.to_datetime(df[ENTERED_COL], format="mixed", errors="coerce")
        except TypeError:
            ts = pd.to_datetime(df[ENTERED_COL], errors="coerce")
        df[ENTERED_MMDD_COL] = ts.dt.strftime("%m/%d").fillna("")
    else:
        df[ENTERED_MMDD_COL] = ""

    # Live progress counters for names across both columns
    name_counter = Counter()

    def _process_series_with_progress(series: pd.Series, desc: str) -> pd.Series:
        """Iterate with progress, update name_counter, and return extracted names Series."""
        values = series.fillna("").astype(str).tolist()
        out = []
        total = len(values)
        if total == 0:
            return pd.Series([], dtype=object)
        step = max(10, total // 20)  # update ~every 5% (at least every 10 rows)
        pbar = tqdm(values, desc=f"NER: {desc}", leave=True)
        for i, text in enumerate(pbar, start=1):
            names = extract_names(text)
            # Update running totals (ignore "Automatic Complete")
            for n in [x.strip() for x in names.split(",") if x.strip()]:
                if n.lower() not in AUTO_STRINGS:
                    name_counter[n] += 1
            out.append(names)
            # Periodic status refresh
            if i % step == 0 or i == total:
                top = ", ".join(f"{n}:{c}" for n, c in name_counter.most_common(3))
                pbar.set_postfix_str(f"unique={len(name_counter)} top=[{top}]")
        return pd.Series(out, index=series.index, dtype=object)

    # 1) Extract from "Instance that Changed" -> "Applied to"
    if SOURCE_COL_1 in df.columns:
        applied_series = _process_series_with_progress(df[SOURCE_COL_1], SOURCE_COL_1)
        insert_after(df, SOURCE_COL_1, TARGET_COL_1, applied_series)
    else:
        df[TARGET_COL_1] = ""

    # 1a) Simplified quick-fill:
    #     If "Applied to" has a value, copy it into "Added Applied to" when that field is still empty
    if SOURCE_COL_2 in df.columns:
        if TARGET_COL_2 not in df.columns:
            df[TARGET_COL_2] = ""
        for i, row in df.iterrows():
            name = str(row.get(TARGET_COL_1, "")).strip()
            aat = str(row.get(TARGET_COL_2, "")).strip()
            if name and not aat:
                df.at[i, TARGET_COL_2] = name
    else:
        df[TARGET_COL_2] = ""

    # 2) Extract from "Added" -> "Added Applied to" (skip rows with value already set OR empty Added)
    if SOURCE_COL_2 in df.columns:
        mask_need = (df[TARGET_COL_2].fillna("").str.strip() == "") & (df[SOURCE_COL_2].fillna("").str.strip() != "")
        idxs = df.index[mask_need].tolist()
        if idxs:
            values = df.loc[idxs, SOURCE_COL_2]
            pbar = tqdm(values.tolist(), desc=f"NER: {SOURCE_COL_2} (remaining)", leave=True)
            extracted = []
            for text in pbar:
                names = extract_names(text)
                # update counter (ignore "Automatic Complete")
                for n in [x.strip() for x in names.split(",") if x.strip()]:
                    if n.lower() not in AUTO_STRINGS:
                        name_counter[n] += 1
                extracted.append(names)
            df.loc[idxs, TARGET_COL_2] = extracted

    # --- Remove any rows that are purely "Automatic Complete" in key fields ---
    for col in [SOURCE_COL_1, SOURCE_COL_2, "In Transaction"]:
        if col in df.columns:
            df = df[~df[col].apply(is_auto)]

    # --- Keep only selected columns (incl. MM/DD) ---
    keep_cols = [
        SOURCE_COL_1,
        TARGET_COL_1,
        "In Transaction",
        SOURCE_COL_2,
        TARGET_COL_2,
        "By User",
        ENTERED_COL,
        ENTERED_MMDD_COL,
    ]
    df = df[[c for c in keep_cols if c in df.columns]]

    # --- Filter rows: keep where Applied to == Added Applied to (case-insensitive) ---
    if TARGET_COL_1 in df.columns and TARGET_COL_2 in df.columns:
        df = df[
            df[TARGET_COL_1].fillna("").str.strip().str.lower()
            == df[TARGET_COL_2].fillna("").str.strip().str.lower()
        ]

    # --- Drop duplicates & save overall result ---
    df = df.drop_duplicates().reset_index(drop=True)
    df.to_csv(OUTPUT_CSV, index=False)
    print(f"✅ Saved {len(df)} unique matching rows to {OUTPUT_CSV}")

# =========================
# LM Studio event summary generation (group by By User, then date asc)
# =========================
if not df.empty:
    grouped = df.groupby([TARGET_COL_1, "By User", ENTERED_COL], dropna=False)
    summaries = []  # list of tuples (by_user, mmdd, sentence)

    for keys, gdf in grouped:
        applied_to, by_user, entered_on = keys
        if not applied_to or str(applied_to).strip() == "":
            continue

        mmdd_vals = gdf[ENTERED_MMDD_COL].dropna().astype(str)
        mmdd = next((v for v in mmdd_vals if v.strip()), "")

        payload = dataframe_to_compact_event(gdf)

        prompt = (
            "You are a compliance and information security analyst. "
            "Given the following grouped audit data, produce ONE clear and concise sentence summarizing the event. "
            "Include: (1) who performed the action (By User, include name and ID if available), "
            "(2) who the change applied to (Applied to), "
            "(3) the full list of role names that were assigned or added (from 'Instance that Changed' and 'Added'), "
            "and (4) the date of the event. "
            "Always mention the specific role titles exactly as shown in the data. "
            "If multiple roles were assigned, list them all in a natural phrase like "
            "'assigned the A, B, and C roles'. "
            "Do not include raw JSON, extra commentary, or line breaks. Return only one sentence.\n\n"
            f"Audit Data (JSON):\n{payload}"
        )

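        # Example one-liner the model is asked to return (illustrative, mirroring event_log.txt):
        #   "Sam Admin (1000000) assigned the Example Role to Jane Doe on 11/03/25."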
        try:
            resp = client.chat.completions.create(
                model=LLM_MODEL,
                messages=[
                    {"role": "system", "content": "You write terse, clear compliance summaries."},
                    {"role": "user", "content": prompt},
                ],
                temperature=0.2,
            )
            one_liner = (resp.choices[0].message.content or "").strip()
        except Exception as e:
            one_liner = f"[LLM ERROR] {e}"

        summaries.append((by_user or "Unknown User", mmdd, one_liner))

    # Group by By User, sort each user's entries by mm/dd asc, write file (OVERWRITE)
    grouped_summaries: dict[str, list[tuple[str, str]]] = defaultdict(list)
    for by_user, mmdd, line in summaries:
        grouped_summaries[by_user].append((mmdd, line))

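    # Assumption noted for review: zero-padded MM/DD strings sort lexicographically, which
    # matches chronological order within a calendar year; a Dec -> Jan boundary in the
    # data would sort out of order.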
    for user in grouped_summaries:
        grouped_summaries[user].sort(key=lambda x: x[0] or "")

    with open(EVENT_LOG, "w", encoding="utf-8") as f:
        for user in sorted(grouped_summaries.keys()):
            f.write(f"=== {user} ===\n")
            for mmdd, line in grouped_summaries[user]:
                prefix = f"{mmdd} - " if mmdd else ""
                f.write(f"{prefix}{line}\n")
            f.write("\n")

    total_events = sum(len(v) for v in grouped_summaries.values())
    print(f"📝 Overwrote {EVENT_LOG} with {total_events} grouped event summaries")
else:
    print("ℹ️ No matching rows found; nothing to summarize.")
91
workday_monthly_admin_activities_audit/event_log.txt
Normal file
@@ -0,0 +1,91 @@
=== 1000054 / Dennis Cregan ===
11/03 - By Dennis Cregan assigned the User-Based Group Change - Event Lite Type role to Carmen Huang on 11/3/25.
11/03 - By Dennis Cregan, applied to Carmen Huang and Oona Julien, assigned no roles on 11/3/25.
11/03 - By Dennis Cregan assigned the User-Based Group Change - Event Lite Type role to Oona Julien on 11/3/25.
11/05 - By Dennis Cregan applied the change to Catherine Wukitsch on 11/5/25.
11/05 - By Dennis Cregan, the PSID role was assigned to PSID on 11/5/25.
11/07 - By Dennis Cregan (1000054) applied to Mariana Arroyo Chavez, assigned the Edit Workday Account, Edit Workday Account (Default Definition), Edit Workday Account: Mariana Arroyo Chavez (Private), English (United States), and Mariana Arroyo Chavez (Private) roles on 11/07/25.
11/07 - By Dennis Cregan assigned the Edit Workday Account, Edit Workday Account (Default Definition), Edit Workday Account: pherzog-impl / Petra Herzog, Successfully Completed, and pherzog-impl / Petra Herzog roles to Petra Herzog on 11/07/25.
11/10 - By Dennis Cregan (1000054) assigned the Edit Workday Account, Edit Workday Account (Default Definition), and Preferences for Colton Crace roles to Colton Crace on 11/10/25.
11/11 - Dennis Cregan (ID 1000054) assigned Katie Peardon the Approval by Integration Administrator, Background Process Launch: Student Prospect Update Event (Default Definition) step b - Batch/Job, and Edit Other IDs roles on 11/11/25.
11/11 - By User 1000054 / Dennis Cregan, the PSID role was assigned to PSID on 11/11/25.
11/12 - By user 1000054 / Dennis Cregan, the event applied to Isabella Alberti on 11/12/25 with no roles assigned.
11/12 - By Dennis Cregan assigned the Edit Workday Account, Edit Workday Account (Default Definition), Edit Workday Account: Isabella Alberti, Isabella Alberti, and Successfully Completed roles to Isabella Alberti on 11/12/25.
11/12 - Dennis Cregan (1000054) performed an Edit Other IDs transaction on Isabella Alberti, assigning no roles, on 11/12/25.
11/12 - By Dennis Cregan, applied to Jody Williams, no roles were assigned on 11/12/25.
11/12 - By Dennis Cregan assigned the Edit Workday Account, Edit Workday Account (Default Definition), Edit Workday Account: Jody Williams, Jody Williams, Preferences for Jody Williams, Successfully Completed, and User Notification Settings For System User: Jody Williams roles to Jody Williams on 11/12/25.
11/18 - By Dennis Cregan applied the change to Brian Cheslik on 11/18/25.
11/18 - By user 1000054 /Dennis Cregan assigned the Edit Workday Account, Edit Workday Account (Default Definition), Edit Workday Account: Brian Cheslik, Preferences for Brian Cheslik, and User Notification Settings For System User: Brian Chesik roles to Brian Cheslik on 11/18/25.

=== 1003153 / Joseph DeSiervi ===
11/12 - By User Joseph DeSiervi (1003153) assigned the Deny and Financial Aid Administrator - UBSG - U roles to Deny on 11/12/25.
11/12 - By User Joseph DeSiervi (ID 1003153) applied to Deny, assigned the Deny role on 11/12/25.
11/13 - By User 1003153 / Joseph DeSiervi assigned the 1004304 / John Huang, GU Testing Proxy Access1, and User-Based Group Change - Event Lite Type on 11/13/2025, 6:06:23.595 AM roles to John Huang on 11/13/25.

=== 1003966 / Kristinn Bjarnason ===
11/06 - By User 1003966 / Kristinn Bjarnason assigned the Marketing Manager, Student Finance Student Worker, GU Student Worker, and SFS Student Assistant roles to Amanda Gerson on 11/6/25.
11/06 - By user 1003966 / Kristinn Bjarnason assigned the P007183 SFS Student Assistant role to Evan Stromberg on 11/6/25.
11/06 - Kristinn Bjarnason (ID 1003966) added roles to Kristinn Bjarnason, Kenneth Grazier's on 11/06/2025.
11/07 - By Kristinn Bjarnason (1003966) assigned the Dean of Faculty Academic Unit / Graduate and P000267 Graduate and Immigration Coordinator - B. Mutisya Nzyuko roles to B. Mutisya Nzyuko on 11/7/25 13:46.
11/07 - By Kristinn Bjarnason (1003966) assigned the Student Finance Administrator - UBSG role to John Huang on 11/7/25.
11/07 - By User 1003966 / Kristinn Bjarnason assigned the P000506 Associate Dean, Graduate Education and P007153 Assistant Dean - Mary Perrodin-Singh roles to Mary Perrodin on 11/7/25.
11/07 - By User 1003966 / Kristinn Bjarnason assigned the P000267 Immigration Compliance Coordinator role to Mutisya Nzyuko on 11/07/2025.
11/07 - By user 1003966 / Kristinn Bjarnason, the event applied to beth gibbons and assigned the P000506 Associate Dean, Graduate Education, Student Registration Assistant, and Dean of Faculty Academic Unit / Graduate roles on 11/7/25.
11/11 - By user 1003966 / Kristinn Bjarnason, the change applied to Alexander Leffers assigned him the P000155 Manager, Gallaudet Interpreting Services (Unfilled)(+6) - Accommodations Viewer (GIS) - Gallaudet University and P000621 Communication and Community Engagement Manager roles on 11/11/2025.
11/11 - By user 1003966 / Kristinn Bjarnason, the role P000621 Communication and Community Engagement Manager was assigned to Alexander Leffers on 11/11/25.
11/11 - By Kristinn Bjarnason assigned the Manager, Gallaudet Interpreting Services (Unfilled) and Accommodations Viewer (GIS) roles to Edwin Martinez on 11/11/2025.
11/11 - By Kristinn Bjarnason (1003966) assigned the P000155 Manager, Gallaudet Interpreting Services (Unfilled); P000243 Accommodations Coordinator – Edwin Martinez (Private); P000601 Director, Student Success – Jerri Dorminy; P000621 Communication and Community Engagement Manager – Alexander Leffers; P000888 Academic and Career Success Advisor – Riley Schultz; P002098 Manager, Operations – Gino Gouby; P002479 Accommodations Coordinator (Unfilled); and P003678 Supervisor, Operations – Vanessa Saperstein roles to Edwin Martinez, Jerri Dorminy, Alexander Leffers, Riley Schultz, Gino Gouby and Vanessa Saperstein on 11/11/25 at 12:14.
11/11 - By Kristinn Bjarnason, the change applied to Gino Gouby assigned the P002098 Manager, Operations and the P000155 Manager, Gallaudet Interpreting Services (Unfilled)(+6) - Accommodations Viewer (GIS) roles on 11/11/25.
11/11 - By Kristinn Bjarnason, the roles P000456 Accessibility Resources Manager – Jennifer Tuell (On Leave), P000621 Communication and Community Engagement Manager – Alexander Leffers, and P000652 Manager, Office of Students With Disabilities – Karen Terhune were assigned to Jennifer Tuell, Alexander Leffers and Karen Tuell on 11/11/25.
11/11 - By User 1003966 / Kristinn Bjarnason assigned the P000155 Manager, Gallaudet Interpreting Services (Unfilled)(+6) - Accommodations Viewer (GIS) - Gallaudet University role to Jerri Dorminy on 11/11/25.
11/11 - By user 1003966 / Kristinn Bjarnason, the event assigned Karen Terhune the Manager, Office of Students With Disabilities and Accommodations Recorder roles on 11/11/25.
11/11 - By Kristinn Bjarnason (1003966) assigned the P000652 Manager, Office of Students With Disabilities - Karen Terhune(+1) - Accommodations Recorder - Gallaudet University role to Karen Terhune on 11/11/25.
11/11 - The audit records show that on 11/11/25, user Kristinn Bjarnason (ID 1003966) removed the Accommodations Viewer (GIS) role from Kristinn Bjarnason and Karen Terhune.
11/11 - By User 1003966 / Kristinn Bjarnason applied changes to Kristinn Bjarnason, Karen Terhune on 11/11/25.
11/11 - By user 1003966 / Kristinn Bjarnason, the system applied changes to Kristinn Bjarnason and Karen Terhune, assigning no roles on 11/11/25.
11/11 - By User 1003966 / Kristinn Bjarnason assigned the P000155 Manager, Gallaudet Interpreting Services (Unfilled)(+6) - Accommodations Viewer (GIS) - Gallaudet University role to Riley Schultz on 11/11/25.
11/11 - By User 1003966 / Kristinn Bjarnason applied to Shane Dundas assigned the P000155 Manager, Gallaudet Interpreting Services (Unfilled)(+6) - Accommodations Viewer (GIS) - Gallaudet University and P007096 Coordinator DSC roles on 11/11/25.
11/11 - By user 1003966 / Kristinn Bjarnason, the system assigned the P000652 Manager, Office of Students With Disabilities - Karen Terhune(+1) - Accommodations Recorder - Gallaudet University and the P007096 Coordinator DSC - Shane Dundas roles to Shane Dundas on 11/11/25.
11/11 - By Kristinn Bjarnason assigned the Accommodations Reviewer- UBSG-U role to Shane Dundas on 11/11/25.
11/11 - By Kristinn Bjarnason (1003966) applied to Vanessa Saperstein assigned the P000155 Manager, Gallaudet Interpreting Services (Unfilled)(+6) - Accommodations Viewer (GIS) - Gallaudet University role on 11/11/25.
11/18 - By user 1003966 / Kristinn Bjarnason assigned the Marketing Manager, Student Finance Student Worker, and SFS Student Assistant roles to Amanda Gerson on 11/18/25.
11/18 - By User 1003966 / Kristinn Bjarnason applied to Bernadine Bertrand assigned the Director, Information Technology Operations and Student Finance Data Viewer roles on 11/18/25.
11/18 - By Kristinn Bjarnason, assigned the Director, Information Technology Operations and Student Finance Data Viewer roles to Bernadine Bertrand, and the Marketing Manager and Student Finance Student Worker roles to Amanda Gerson on 11/18/25.
11/18 - By user Kristinn Bjarnason (1003966) assigned the Accommodations Coordinator and Accommodations Recorder Only roles to Edwin Martinez on 11/18/25.
11/18 - By Kristinn Bjarnason, assigned the Accommodations Coordinator - Edwin Martinez (Private), Accommodations Coordinator (Unfilled), and Coordinator DSC roles to Edwin Martinez, Shane Dundas on 11/18/25.
11/18 - By User 1003966 / Kristinn Bjarnason assigned the P000228 Associate Registrar, P000231 Academic Classroom Coordinator, P000237 Transfer Articulation Coordinator, P000238 Curriculum Coordinator, P000254 Operations Coordinator, P000258 ASL Education Coordinator, P000267 Graduate and Immigration Coordinator, P000362 Manager, Operations, P000389 Director, Information Technology Operations, P000430 Executive Director, Product Management (Closed), P000506 Associate Dean, Graduate Education, P000563 Dean, Curriculum, Outreach, Resources, and Effectiveness, P000595 Registrar, P000601 Director, Student Success, P000625 Education Abroad Manager, P000669 Workday Manager, P000740 Residence Life Manager, P002052 Gallaudet Innovation and Entrepreneurship Institute Director, P002153 Workday Student Manager, P003626 Customer Experience Manager, P003894 Marketing Manager, P005663 Customer Representative Service, and P005664 Customer Service Representative to Laurie Miskovsky, Ericka Brown, Laura Willey, Kai Gagnon, Patrick Rolfe, Keith Grant, B. Mutisya Nzyuko, Rowena Winiarczyk, Bernadine Bertrand, Caroline Pezzarossi, Elice Patterson, Jerri Dorminy, Fiona Grugan, Joseph DeSiervi, Nikki Surber, Russell Stein, Kristinn Bjarnason, Corey Burton, Amanda Gerson, Anna Reyes, and Ama Penny on 11/18/25.
11/18 - By 1003966 / Kristinn Bjarnason, assigned the P002479 Accommodations Coordinator (Unfilled)(+2) - Accommodations Recorder Only role to Shane Dundas on 11/18/25.

=== 1004645 / Julie Longson ===
11/12 - By Julie Longson (1004645) assigned the Senior Financial Planning and Analysis Manager, International Billing Specialist, SF Developing Country, and Financial Services Specialist roles to Christopher Jappah on 11/12/25.
11/12 - By User 1004645 / Julie Longson assigned the P000394 Manager, Student Financial Services, P000447 Financial Services Specialist, and P001408 Senior Financial Planning and Analysis Manager roles to John Huang, Mekayla Walker, and Kathy Chen on 11/12/25.
11/12 - Julie Longson (ID 1004645) assigned Kathy Chen the Senior Financial Planning and Analysis Manager, International Billing Specialist – SF Developing Country, and Senior Financial Planning & Analysis (FP&A) Analyst-2 roles on 11/12/2025.
11/12 - By User 1004645 / Julie Longson applied to Kathy Chen assigned the P001408 Senior Financial Planning and Analysis Manager - Kathy Chen-Petrus(+3) - International Billing Specialist - SF Developing Country role on 11/12/25.
11/14 - By Julie Longson on 11/14/25, the system assigned the P000052 Director, Financial Aid; P000054 Financial Aid Counselor (Private); P000061 Technical Specialist; P000083 Academic and Career Success Advisor (Unfilled); P000148 Associate Accreditation Coordinator (Private); P000194 Dean, Academic and Career Success; P000254 Operations Coordinator (Private); P000258 ASL Education Coordinator; P000265 Graduate Admissions Coordinator; P000420 Lecturer II; P000563 Dean, Curriculum, Outreach, Resources, and Effectiveness; P000591 Director, Institutional Research; P006294 Assistant Director to Amanda Jackson, Dylan Westbury, Na Zhuo, Sydney Padgett, Robert Sanchez, Patrick Rolfe, Keith Grant, Zeshan Shafiq, Gemma Gabor, Caroline Pezzarossi, Lindsay Buchko, Caroline Finklea Vizzuto, Michael Tota, Khadijat Rashid, Emelia Beldon, Mercedes Olson, Corey Burton, Shelby Bean, Shanna Cooley, and Vicki Cheeseman.
11/14 - Julie Longson (1004645) assigned the Director, Financial Aid and Assistant Director roles to Amanda Jackson and Vicki Cheeseman on 11/14/25.
11/14 - By User Julie Longson (1004645) assigned the P000083 Academic and Career Success Advisor (Unfilled)(+20) - Unofficial Student Transcript Viewer - Gallaudet University role to Caroline Finklea Vizzuto on 11/14/25.
11/14 - By User 1004645 / Julie Longson assigned the P000083 Academic and Career Success Advisor (Unfilled)(+20) - Unofficial Student Transcript Viewer role to Caroline Pezzarossi on 11/14/25.
11/14 - By User 1004645 / Julie Longson assigned the Academic and Career Success Advisor (Unfilled)(+20) - Unofficial Student Transcript Viewer role to Corey Burton on 11/14/25.
11/14 - By User Julie Longson (ID 1004645) assigned the P000083 Academic and Career Success Advisor role to Dylan Westbury on 11/14/25.
11/14 - Julie Longson (ID 1004645) assigned the Financial Aid Counselor – Dylan Westbury (Private), Associate Financial Aid Counselor – Mercedes Olson, Financial Aid Counselor – Shelby Bean, Financial Aid Specialist – Shanna Cooley (Private), and Assistant Director – Vicki Cheeseman roles to Dylan Westbury, Mercedes Olson, Shelby Bean, Shanna Cooley and Vicki Cheeseman on 11/14/25.
11/14 - By Julie Longson (ID 1004645) assigned the Academic and Career Success Advisor (Unfilled)(+20) – Unofficial Student Transcript Viewer – Gallaudet University role to Emelia Beldon on 11/14/25.
11/14 - By User Julie Longson, applied to Gemma Gabor, assigned the P000083 Academic and Career Success Advisor (Unfilled) role on 11/14/25.
11/14 - By User 1004645 / Julie Longson assigned the Academic and Career Success Advisor (Unfilled)(+20) role to Keith Grant on 11/14/25.
11/14 - By User 1004645 / Julie Longson applied to Lindsay Buchko assigned the P000083 Academic and Career Success Advisor (Unfilled)(+20) - Unofficial Student Transcript Viewer role on 11/14/25.
11/14 - By User 1004645 / Julie Longson applied to Mercedes Olson, assigned the P002048 Associate Financial Aid Counselor role and added the Academic and Career Success Advisor (Unfilled) and Unofficial Student Transcript Viewer roles on 11/14/25.
11/14 - By User 1004645 / Julie Longson assigned the P000083 Academic and Career Success Advisor (Unfilled)(+20) - Unofficial Student Transcript Viewer role to Michael Tota on 11/14/25.
11/14 - By User 1004645 / Julie Longson applied to Na Zhuo assigned the P000083 Academic and Career Success Advisor (Unfilled)(+20) - Unofficial Student Transcript Viewer - Gallaudet University role on 11/14/25.
11/14 - By User 1004645 / Julie Longson assigned the Academic and Career Success Advisor role to Patrick Rolfe on 11/14/25.
11/14 - By User 1004645 / Julie Longson assigned the Academic and Career Success Advisor (Unfilled)(+20) role to Robert Sanchez on 11/14/25.
11/14 - By User 1004645 / Julie Longson applied to Shanna Cooley assigned the Academic and Career Success Advisor (Unfilled)(+20) - Unofficial Student Transcript Viewer - Gallaudet University role on 11/14/25.
11/14 - By User 1004645 / Julie Longson assigned the P000083 Academic and Career Success Advisor (Unfilled)(+20) - Unofficial Student Transcript Viewer role to Shelby Bean on 11/14/25.
11/14 - By user 1004645 / Julie Longson, the change applied to Shelby Bean, assigning the Financial Aid Counselor and Financial Aid Specialist roles on 11/14/25.
11/14 - By User 1004645 / Julie Longson applied to Sydney Padgett assigned the P000083 Academic and Career Success Advisor (Unfilled)(+20) - Unofficial Student Transcript Viewer - Gallaudet University role on 11/14/25.
11/14 - Julie Longson (ID 1004645) assigned the P006294 Assistant Director – Vicki Cheeseman(+1) – Financial Aid Management, P000083 Academic and Career Success Advisor (Unfilled)(+20) – Unofficial Student Transcript Viewer, and Financial Aid Administrator – UBSG – U roles to Vicki Cheeseman on 11/14/2025.
11/14 - By user 1004645 / Julie Longson assigned the GU Testing Proxy Access1 role to Vicki Cheeseman on 11/14/25.
11/14 - By User 1004645 / Julie Longson applied to Vicki Cheeseman assigned the P006294 Assistant Director, Financial Aid role on 11/14/25.
11/14 - By User 1004645 / Julie Longson applied to Zeshan Shafiq assigned the P000083 Academic and Career Success Advisor (Unfilled) role on 11/14/25.

=== mrakowski-impl / Margaret Rakowski ===
11/19 - By User Margaret Rakowski, applied to Domenique Meneses, assigned the Edit Workday Account, Edit Workday Account (Default Definition), and Edit Workday Account: dmeneses-impl / Domenique Meneses roles on 11/19/25.
161
workday_monthly_admin_activities_audit/event_log_bkup.txt
Normal file
@@ -0,0 +1,161 @@
=== 1000054 / Dennis Cregan ===
10/01 - By Dennis Cregan (ID 1000054) applied to no one, assigned the Edit Reference ID role on 10/1/25.
10/02 - By Dennis Cregan, applied to Jacquelin Kutz, assigned no roles on 10/02.
10/02 - Dennis Cregan applied the Activate Pending Security Policy Changes role to the system on 10/2/25.
10/02 - By Dennis Cregan assigned the Edit Workday Account role on 10/02/2025.
10/06 - By Dennis Cregan, applied to no one, assigned the ISSG SINT008 Student Term, Maintain Permissions for Security Group, Student Data: Personal Data, Student Data: Student Profile, and View Only roles on 10/06/2025.
10/06 - Dennis Cregan applied the Activate Pending Security Policy Changes role to the system on 10/6/25.
10/06 - By Dennis Cregan (ID 1000054) assigned the Edit Universal Id role, applied to no one, on 10/6/25.
10/06 - By Dennis Cregan, no user was specified as the target and he assigned the Edit Workday Account role on 10/6/25.
10/13 - By Dennis Cregan (ID 1000054) applied to no user, assigned the Edit Reference ID role on 10/13/25.
10/20 - Dennis Cregan (ID 1000054) applied the roles Audit: Academics Data, ISSG SINT058 OKTA Student Sync Outbound, Maintain Permissions for Security Group, Student Data: Program of Study, Student Data: Student Transcript, View Only, and View Only for Audit: Academics Data, View Only for Student Data: Program of Study, and View Only for Student Data: Student Transcript to the target on 10/20/25.
10/20 - By Dennis Cregan applied the Activate Pending Security Policy Changes role on 10/20/25.
10/22 - By Dennis Cregan assigned the Edit Workday Account role to Adrain Cookneuchan on 10/22/25.
10/22 - By Dennis Cregan (ID 1000054) applied to no specific user, assigned the Edit Reference ID role on 10/22/25.
10/22 - By Dennis Cregan, the Edit Workday Account role was assigned on 10/22/25.
10/23 - By Dennis Cregan, the Edit Workday Account was applied to Raven Taylor on 10/23/25.
10/23 - By Dennis Cregan, assigned the Create Universal Id, Edit Reference ID, and Edit Workday Account roles on 10/23/25.
10/24 - By Dennis Cregan applied to Michael Deninger on 10/24/25.
10/24 - By Dennis Cregan, applied to no one, assigned the English (United States), HCM Security Administrator, Maintain Assignable Roles, and Supervisory roles on 10/24/25.
10/24 - By Dennis Cregan, applied to Gallaudet Technology Services (HCM) – RBSG‑C, he assigned the Create Security Group, Edit Role‑Based Security Group (Constrained), Edit Segment‑Based Security Group, English (United States), Gallaudet Technology Services (HCM), Gallaudet Technology Services (HCM) – RBSG‑C, and Worker Home Email – SBSG roles on 10/24/25.
10/24 - By Dennis Cregan applied to no one assigned the Maintain Permissions for Security Group, Person Data: Home Contact Information, Person Data: Home Email, View Only, View Only for Person Data: Home Contact Information, and Worker Home Email - SBSG roles on 10/24/25.
10/24 - By Dennis Cregan (1000054) applied the Activate Pending Security Policy Changes role on 10/24/25.
10/24 - By Dennis Cregan (ID 1000054), assigned the Create Universal Id, Edit Reference ID, and Edit Workday Account roles on 10/24/25.
10/24 - By Dennis Cregan (ID 1000054) applied to no specific user, he assigned the Create Security Group, Edit Role-Based Security Group (Unconstrained), and Payroll Accountant – RBSG‑U roles on 10/24/25.
10/24 - By user Dennis Cregan (ID 1000054) applied the Activate Pending Security Policy Changes role to no one on 10/24/25.
10/27 - By Dennis Cregan, the Edit Workday Account was applied to Benjamin Jarashow on 10/27/25.
10/27 - By Dennis Cregan, applied to the system, assigned the Enable Domain Security Policy and Security Administrator Hub roles on 10/27/25.
10/27 - Dennis Cregan applied the Activate Pending Security Policy Changes role to no one on 10/27/25.
10/27 - By Dennis Cregan, the Edit Workday Account role was assigned on 10/27/25.
10/28 - By Dennis Cregan assigned the Edit Workday Account role to Cassidy Wainwright on 10/28/25.
10/28 - By Dennis Cregan (1000054) assigned no roles to Elnicky on 10/28/25.
10/28 - By Dennis Cregan, applied to Gail Levine, assigned the Edit Workday Account role on 10/28/25.
10/28 - By Dennis Cregan assigned the Edit Workday Account role to Gwennie O'Connell on 10/28/25.
10/28 - By user 1000054 / Dennis Cregan, the change applied to Hannah Kish on 10/28/25; no roles were assigned.
10/28 - By Dennis Cregan, applied to JoAnna Marker assigned the Edit Workday Account role on 10/28/25.
10/28 - By Dennis Cregan, applied to Kim White, assigned the Edit Workday Account role on 10/28/25.
10/28 - Dennis Cregan assigned the Edit Workday Account role to Kristen Rusnak on 10/28/25.
10/28 - Dennis Cregan (ID 1000054) assigned Lauren Wass the Edit Workday Account role on 10/28/25.
10/28 - By Dennis Cregan assigned the Edit Workday Account role to Oana Petrican on 10/28/25.
10/28 - Dennis Cregan (1000054) assigned the Edit Workday Account role to Petra Herzog on 10/28/25.
10/28 - By Dennis Cregan assigned the Edit Workday Account role to Prasadapu Srinivasa Rao on 10/28/25.
10/28 - By Dennis Cregan (1000054) applied changes to Ro Percy on 10/28/25.
10/28 - By Dennis Cregan assigned the Edit Workday Account role to Ron Salyers on 10/28/25.
10/28 - By Dennis Cregan (1000054) applied changes to Anushka Gupta (agupta), assigning no roles on 10/28/25.
10/28 - By Dennis Cregan (1000054) assigned the Edit Workday Account role to chuang, Carmen Huang on 10/28/25.
10/28 - By Dennis Cregan applied the Edit Workday Account to Cassidy Wainwright on 10/28/25.
10/28 - By Dennis Cregan, applied to Gwennie O'Connell, edited the Workday account on 10/28/25.
10/28 - By Dennis Cregan, the action was applied to Jon Bannan, editing his Workday account on 10/28/25.
10/28 - By user 1000054 / Dennis Cregan, the Edit Workday Account role was assigned to jcano, Josh Cano on 10/28/25.
10/28 - By Dennis Cregan, applied to jney, John Ney, assigned the Edit Workday Account role on 10/28/25.
10/28 - By Dennis Cregan, applied to Katie Lopez, assigned the Edit Workday Account role on 10/28/25.
10/28 - By Dennis Cregan (1000054) assigned no roles to Lara Munoz on 10/28/25.
10/28 - By Dennis Cregan, assigned the Edit Workday Account role to mfowler, Matthew Fowler on 10/28/25.
10/28 - By Dennis Cregan (1000054) on 10/28/25, the Edit Workday Account role was assigned to Megan Romack (mromack).
10/28 - By Dennis Cregan applied to Megan Seekford assigned the Edit Workday Account role on 10/28/25.
10/28 - By Dennis Cregan assigned the Edit Workday Account role to nalbert, Nico Albert on 10/28/25.
10/28 - By Dennis Cregan, the Edit Workday Account was applied to nmshelia on 10/28/25.
10/28 - By Dennis Cregan assigned no roles to Oliver Day on 10/28/25.
10/28 - Dennis Cregan (1000054) assigned the Edit Workday Account role to Oona Julien on 10/28/25.
10/28 - By Dennis Cregan assigned the Edit Workday Account role to viyer, Venu Iyer on 10/28/25.
10/28 - By Dennis Cregan, the Edit Workday Account role was assigned to the account on 10/28/25.
10/28 - By Dennis Cregan (ID 1000054) applied to no user, he assigned the Edit Workday Account role on 10/28/25.
10/28 - By Dennis Cregan, the Edit Workday Account role was assigned to no user on 10/28/25.
10/28 - By Dennis Cregan, assigned the Edit Workday Account, Edit Workday Account (Default Definition), Edit Workday Account: gkim.ey / Grace Kim, and Successfully Completed roles to Grace Kim on 10/28/25.
10/28 - By Dennis Cregan (ID 1000054) assigned the Edit Workday Account role on 10/28/25.
10/28 - By Dennis Cregan, the Edit Workday Account role was assigned to the account on 10/28/25.
10/28 - By Dennis Cregan (ID 1000054) assigned the Edit Workday Account role to the account on 10/28/25.
10/28 - By Dennis Cregan, the Edit Workday Account role was assigned to no one on 10/28/25.
10/28 - By User Dennis Cregan (ID 1000054) applied to no one, assigned the Edit Workday Account role on 10/28/25.
10/28 - By Dennis Cregan, applied to the account (none), assigned the Edit Workday Account role on 10/28/25.
10/28 - By Dennis Cregan, the Edit Workday Account role was assigned on 10/28/25.
10/28 - By Dennis Cregan, applied to none, assigned the Edit Workday Account, Edit Workday Account (Default Definition), Edit Workday Account: mricciardi-impl / Mike Ricciardi, and Successfully Completed roles on 10/28/25.
10/28 - By Dennis Cregan (ID 1000054) assigned the Edit Workday Account role to no user on 10/28/25.
10/28 - By user Dennis Cregan (ID 1000054) applied the Edit Workday Account role to no one on 10/28/25.
10/28 - By Dennis Cregan, the Edit Workday Account role was assigned on 10/28/25.
10/28 - By Dennis Cregan, applied to Prasadapu Srinivasa Rao’s Workday account, assigned the Edit Workday Account, Edit Workday Account (Default Definition), and Edit Workday Account: prao‑impl / Prasadapu Srinivasa Rao roles on 10/28/25.
10/28 - By Dennis Cregan, applied to no user, assigned the Edit Workday Account role on 10/28/25.
10/28 - By Dennis Cregan (ID 1000054) assigned the Edit Workday Account, Edit Workday Account (Default Definition), and Edit Workday Account: vmadhavan.ey / Vaishnavi Madhavan roles on 10/28/25.
10/28 - By Dennis Cregan, applied to no one, assigned the Edit Reference ID, Edit Universal Id, and Edit Workday Account roles on 10/28/25.
10/29 - By Dennis Cregan, applied to Grace Kim, assigned the Service Center Representative Status Change role on 10/29/25.
10/29 - By Dennis Cregan assigned no roles to Jennifer Suta on 10/29/25.
10/29 - By Dennis Cregan, applied to Jennifer Suta, inactivated the Service Center Representative role on 10/29/25.
10/29 - By Dennis Cregan (1000054) edited Kyle Blanco’s Workday account on 10/29/25.
10/29 - By Dennis Cregan applied to Kyle Blanco, inactivated the Service Center Representative role on 10/29/25.
10/29 - By Dennis Cregan assigned the Edit Workday Account role to Petra Herzog on 10/29/25.
10/29 - By Dennis Cregan assigned the Edit Workday Account role to Sanjana Bhaskar on 10/29/25.
10/29 - By User 1000054 / Dennis Cregan applied to Sanjana Bhaskar assigned the Service Center Representative role on 10/29/25.
10/29 - By Dennis Cregan, applied to Vaishnavi Madhavan, assigned the Service Center Representative role on 10/29/25.
10/29 - By Dennis Cregan (ID 1000054) on 10/29/25, the Edit Workday Account role was assigned to no one.
10/29 - By Dennis Cregan, a change was applied to no specific user, assigning the Edit Workday Account role on 10/29/25.
10/29 - By Dennis Cregan (ID 1000054) assigned the Edit Workday Account role to no user on 10/29/25.
10/29 - By Dennis Cregan, the roles Awaiting Action, Business Process Definition Error for Inactivate Service Center Representative, and Inactivate Service Center Representative were assigned on 10/29/25.
10/29 - Dennis Cregan applied the Inactivate Service Center Representative role on 10/29/25.
10/29 - By Dennis Cregan, he assigned the Inactivate HCM Validator and Inactivate Service Center Representative roles on 10/29/25.
10/29 - By Dennis Cregan, the FIN Validator and HCM Validator roles were assigned to the Service Center Representative on 10/29/25.

=== 1000998 / Thad Ferguson ===
10/01 - By Thad Ferguson (1000998) applied the Edit Reference ID role to the system on 10/1/25.

=== 1003153 / Joseph DeSiervi ===
10/06 - By User Joseph DeSiervi (1003153) applied to no specific user assigned the Assign Users to User-Based Security Group, Security Configurator, User-Based Group Change, and User-Based Group Change - Event Lite Type on 10/06/2025, 9:03:06.767 AM roles on 10/6/25.
10/06 - By Joseph DeSiervi, the system applied to no users and assigned the Assign Users to User-Based Security Group, Security Configurator, User-Based Group Change, and User-Based Group Change - Event Lite Type on 10/06/2025, 9:04:28.359 AM roles on 10/6/25.
10/24 - By User Joseph DeSiervi (ID 1003153) applied to no target, assigned the Assign Users to User-Based Security Group, Report Writer, User-Based Group Change, and User-Based Group Change - Event Lite Type on 10/24/2025, 12:14:56.013 PM roles on 10/24/25.
10/28 - By User 1003153 / Joseph DeSiervi assigned the Director, Strategic Sourcing; Expense Settlement Specialist - Gallaudet University; and Supervisor, Accounts Payable roles to Stephanie Johnson on 10/28/25.
10/28 - By Joseph DeSiervi (1003153) assigned the Enable Domain Security Policy and Process: Receivable Repayment roles on 10/28/25.
10/28 - By Joseph DeSiervi (1003153) applied the Enable Domain Security Policy and Process: Receivable Repayment - Cancel roles on 10/28/25.
10/28 - By Joseph DeSiervi (1003153) applied to no one, assigned the Enable Domain Security Policy and Process: Receivable Repayment - Core roles on 10/28/25.
10/28 - By User Joseph DeSiervi (ID 1003153) applied to no target, assigned the Enable Domain Security Policy and Process: Receivable Repayment - Reporting roles on 10/28/25.
10/28 - By Joseph DeSiervi (1003153) assigned the Expense Settlement Specialist, GMT‑05:00 Eastern Time (New York), Role‑Based Group Change, and Role‑Based Group Change – Event Lite Type on 10/27/2025, 9:00:00.000 PM roles to the target on 10/28/25 at 9:25.
10/28 - By Joseph DeSiervi (ID 1003153) applied the Activate Pending Security Policy Changes role on 10/28/25.
10/31 - By Joseph DeSiervi (ID 1003153) on 10/31/25, the Activate Pending Security Policy Changes role was assigned.

=== 1003966 / Kristinn Bjarnason ===
10/06 - By user 1003966 / Kristinn Bjarnason, the change applied to Dae and assigned the P000109 Assistant Professor - Dae-Kun Kim and P000109 Assistant Professor - Dae-Kun Kim - Program Coordinator (Undergraduate) - English Program roles on 10/6/25.
10/06 - By User Kristinn Bjarnason (ID 1003966) assigned the GMT‑05:00 Eastern Time (New York), Program Coordinator – Undergraduate – RBSG‑C, Program of Study Reviewer (Undergraduate) – RBSG‑U, Role‑Based Group Change, and Role‑Based Group Change – Event Lite Type on 10/05/2025, 9:00:00.000 PM roles to the target on 10/6/25 at 14:28.
10/16 - Kristinn Bjarnason (1003966) assigned the Faculty Advisor – AA ASL Connect Advisor, Academic Foundation Manager – ASL Connect Academic Unit / Continuing Education, Accommodations Viewer (CCE/ASLC) – ASL Connect Academic Unit, Admissions Administration – American Sign Language Department, Program Coordinator – ASL Connect Academic Unit, and Program of Study Manager – ASL Connect Academic Unit roles to Corey Burton on 10/16/25.
10/16 - By Kristinn Bjarnason (1003966) assigned the P000254 Operations Coordinator and P000254 Operations Specialist roles to Patrick Rolfe on 10/16/25.
10/16 - By Kristinn Bjarnason, applied to none, assigned the Academic Foundation Manager - RBSG-C, Accommodations Viewer (CCE/ASLC) - RBSG-C, Admissions Administration - RBSG - C, Faculty Advisor - RBSG-C and Faculty Advisor - RBSG-U, Program Coordinator (CE/ASL-C ) - RBSG-C, and Program of Study Manager - RBSG-C roles on 10/16/25.
10/17 - By User 1003966 / Kristinn Bjarnason applied the change to Kristinn Bjarnason on 10/17/25.
10/17 - By User 1003966 / Kristinn Bjarnason applied to Mary Perrodin, assigned the P007153 Assistant Dean role on 10/17/25.
10/17 - By User 1003966 / Kristinn Bjarnason assigned the Advising Administrator, Graduate Dean, and Unofficial Student Transcript Viewer roles to beth gibbons on 10/17/25.
10/17 - By Kristinn Bjarnason (1003966) assigned the Dean, Academics & Career Services; Dean, Academics & Career Success; Assistant Dean, Graduate Education; Advising Administrator – RBSG‑U; Graduate Dean – RBSG‑U; and Unofficial Student Transcript Viewer – RBSG‑C and U roles to the applicable users on 10/17/25.
10/24 - By user 1003966 / Kristinn Bjarnason assigned the P000621 Communication and Community Engagement Manager role to Alexander Leffers on 10/24/25.
10/24 - By user 1003966 / Kristinn Bjarnason, the change applied to Shane Dundas, assigning him the Accommodation Viewer (Housing) and Accommodations Recorder roles on 10/24/25.
10/24 - By User Kristinn Bjarnason (1003966) applied to P000155 Manager, Gallaudet Interpreting Services (Unfilled), assigned the Accommodations Recorder - RBSG-C, Accommodations Viewer (GIS) - RBSG-C, and Accommodations Viewer (Housing) - RBSG-C roles on 10/24/25.
10/24 - By Kristinn Bjarnason, the P000621 Communication and Community Engagement Manager was assigned the Accommodations Recorder - RBSG-C and Accommodations Viewer (GIS) - RBSG-C roles on 10/24/25 at 9:06.
10/28 - By Kristinn Bjarnason (1003966) assigned the Customer Experience Manager and Student Records Associate - American Sign Language Department roles to Corey Burton on 10/28/25.
10/28 - By User Kristinn Bjarnason (1003966) applied to the American Sign Language Department, assigned the Student Records Associate - RBSG-C role on 10/28/25.
10/31 - By user 1003966 / Kristinn Bjarnason, the change applied to Jeffrey Levitt assigned him the Education Abroad Assistant, Study Abroad Student Worker - Dean of Faculty Academic Unit / Graduate, and Study Abroad Student Worker - Dean of Faculty Academic Unit / Undergraduate roles on 10/31/25.
10/31 - By User Kristinn Bjarnason, the system assigned the Create Security Group, Edit Role-Based Security Group (Unconstrained), English (United States), General Education Director, and General Education Director – RBSG-U roles on 10/31/25 at 11:29.
10/31 - By user Kristinn Bjarnason (ID 1003966) applied the Activate Pending Security Policy Changes role to no one on 10/31/25.
10/31 - By User Kristinn Bjarnason (ID 1003966) assigned the Maintain Assignable Roles role to no user on 10/31/25.
10/31 - By User Kristinn Bjarnason (ID 1003966) applied to the system, assigned the Create Security Group, Edit Role-Based Security Group (Constrained), English (United States), Study Abroad Student Worker, and Study Abroad Student Worker – RBSG‑C roles on 10/31/25.
10/31 - By user Kristinn Bjarnason (ID 1003966) applied the Activate Pending Security Policy Changes role to no users on 10/31/25 at 14:15.
10/31 - By User Kristinn Bjarnason (ID 1003966) applied to no one, assigned the Create Security Group, English (United States), and International Student Data (Medium View Only) roles on 10/31/25.
10/31 - By User 1003966 / Kristinn Bjarnason assigned the Edit Segment-Based Security Group, International Student Data (Medium View Only), Student, Study Abroad Manager - RBSG-C, and Study Abroad Student Worker – RBSG-C roles on 10/31/25.
10/31 - By User 1003966 / Kristinn Bjarnason assigned the Activate Pending Security Policy Changes role on 10/31/25.
10/31 - By User Kristinn Bjarnason (ID 1003966) applied to no one, assigned the Create Security Group and Student Age Data (View only) roles on 10/31/25.
10/31 - By Kristinn Bjarnason (ID 1003966) assigned the Edit Segment-Based Security Group and Student Age Data (View only) roles to Study Abroad Student Worker – RBSG‑C on 10/31/25.
10/31 - By user Kristinn Bjarnason (ID 1003966) applied the Activate Pending Security Policy Changes role to the system on 10/31/25.
10/31 - By User Kristinn Bjarnason activated the Pending Security Policy Changes on 10/31/25.
10/31 - By Kristinn Bjarnason (1003966) assigned the GMT-05:00 Eastern Time (New York), Role-Based Group Change, Role-Based Group Change - Event Lite Type on 10/30/2025, 9:00:00.000 PM, and Study Abroad Student Worker – RBSG-C roles to the Dean of Faculty Academic Unit / Graduate and Dean of Faculty Academic Unit / Undergraduate on 10/31/25.

=== 1004645 / Julie Longson ===
10/03 - By User Julie Longson (ID 1004645) applied to no one, assigned the Advising Notes and Edit Student Note Security Segment roles on 10/3/25.
10/06 - Julie Longson applied the Activate Pending Security Policy Changes role to no users on 10/6/25.
10/14 - By User Julie Longson (1004645) applied to Ann Marie Divina assigned the Student Finance Associate and Student Financials Campus Engagement Administrator roles on 10/14/25.
10/14 - By Julie Longson (1004645) assigned the Student Financials Campus Engagement Administrator and Student Finance Associate roles to Fritz Ann Marie Divina on 10/14/25.
10/14 - Julie Longson (1004645) assigned Kelly Webster the ALLSTAR Student Assistant role on 10/14/25.
10/14 - By User 1004645 / Julie Longson assigned the Financial Aid Counselor, Financial Aid Student Assistant Level I, and Financial Aid Student Assistant Level II roles to Shelby Bean on 10/14/25.
10/14 - By Julie Longson (1004645) assigned the Financial Aid Student Assistant Level I - RBSG-C, Financial Aid Student Assistant Level II - RBSG-C, Financial Aid Student Assistant Level II - RBSG-U - DNU (Inactive), GMT-05:00 Eastern Time (New York), Role-Based Group Change, and Role-Based Group Change - Event Lite Type on 10/13/2025, 9:00:00.000 PM roles on 10/14/25.
10/14 - By Julie Longson (ID 1004645) applied to Fritz Ann Marie Divina, assigned the Student Finance Associate – RBSG‑U and Student Financials Campus Engagement Administrator – RBSG‑U roles on 10/14/25.
10/14 - By User Julie Longson (ID 1004645) applied the Student Finance Administrator - UBSG role to no user on 10/14/25.
10/16 - By user Julie Longson (ID 1004645) applied the Activate Pending Security Policy Changes role to no users on 10/16/2025.
10/22 - By User 1004645 / Julie Longson applied the Activate Pending Security Policy Changes role to no one on 10/22/25.

=== jsharp-impl / Jodi Sharp ===
10/29 - By Jodi Sharp, applied to Eric Vu assigned the Person Email Change - Event Lite Type for Eric Vu on 10/29/2025, 9:51:44.582 AM role on 10/29/25.
10/29 - By Jodi Sharp (jsharp-impl) applied to no one, assigned the Create Implementer role on 10/29/25.
10/29 - By Jodi Sharp (jsharp-impl) applied the Create Implementer, Person Email Change, Person Email Change - Event Lite Type for Alexa Grant on 10/29/2025, 9:56:49.540 AM, and agrant@hcg.com \n\nAlexa Grant roles to the system on 10/29/25.
121
workday_monthly_admin_activities_audit/event_log_long.txt
Normal file
File diff suppressed because one or more lines are too long
1356
workday_monthly_admin_activities_audit/final.csv
Normal file
File diff suppressed because it is too large
359
workday_monthly_admin_activities_audit/process.py
Normal file
@@ -0,0 +1,359 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import os
import sys
import re
import json
import logging
from collections import Counter, defaultdict
from typing import List, Dict, Any, Optional

import requests
import pandas as pd
import spacy
from openai import OpenAI
from tqdm import tqdm

# =========================
# Configuration
# =========================
# DEFAULT_LM_IP = "192.168.1.221"  # default LM Studio host (without /v1)
DEFAULT_LM_IP = "10.81.209.99"  # default LM Studio host (without /v1)
LLM_MODEL = "openai/gpt-oss-20b"
LLM_API_KEY = "not-needed"  # LM Studio typically doesn't require an API key

INPUT_CSV = "test.csv"
OUTPUT_CSV = "test_with_names.csv"
EVENT_LOG = "event_log.txt"
FINAL_SNAPSHOT = "final.csv"  # snapshot right before LM Studio summarization

# Columns to process
SOURCE_COL_1 = "Instance that Changed"
TARGET_COL_1 = "Applied to"

SOURCE_COL_2 = "Added"
TARGET_COL_2 = "Added Applied to"

ENTERED_COL = "Entered On"
ENTERED_MMDD_COL = "Entered On (MM/DD)"

# Values to ignore entirely (case-insensitive)
AUTO_STRINGS = {"automatic complete"}

def is_auto(val) -> bool:
    return isinstance(val, str) and val.strip().lower() in AUTO_STRINGS

# Regex helpers
DELIM_SPLIT = re.compile(r"\s*[\/|\-–—]\s*")
KEEP_CHARS = re.compile(r"[^A-Za-zÀ-ÖØ-öø-ÿ' .\-]")

def clean_person(text: str) -> str:
    """Clean extracted name by removing job codes/fragments after dashes/slashes; keep name-ish chars."""
    if not text:
        return ""
    first = DELIM_SPLIT.split(text, maxsplit=1)[0]
    first = KEEP_CHARS.sub("", first).strip()
    return re.sub(r"\s{2,}", " ", first)

# =========================
# LM Studio reachability
# =========================
def check_lmstudio(ip: str) -> str:
    """
    Ensure LM Studio endpoint is reachable; if not, prompt for IP until it is.
    Returns the validated base URL like "http://<ip>:1234/v1".
    """
    def _ok(url: str) -> bool:
        try:
            r = requests.get(url.rstrip("/") + "/models", timeout=5)
            return r.status_code == 200
        except Exception:
            return False

    base_url = f"http://{ip}:1234/v1"
    if _ok(base_url):
        print(f"✅ LM Studio reachable at {base_url}")
        return base_url

    print(f"❌ Could not reach LM Studio at {base_url}")
    while True:
        new_ip = input("Enter LM Studio IP address (e.g. 192.168.1.221): ").strip()
        if not new_ip:
            print("Aborted: No IP provided.")
            sys.exit(1)
        base_url = f"http://{new_ip}:1234/v1"
        print(f"🔍 Retesting {base_url}...")
        if _ok(base_url):
            print(f"✅ LM Studio reachable at {base_url}")
            return base_url
        else:
            print("❌ Still unreachable. Try again or Ctrl+C to exit.")

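# Manual equivalent of the _ok() probe above (illustrative; host and port are the
# defaults from this configuration):
#   curl http://10.81.209.99:1234/v1/models
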
# Perform reachability check BEFORE any processing
|
||||
LLM_BASE_URL = check_lmstudio(DEFAULT_LM_IP)
|
||||
client = OpenAI(base_url=LLM_BASE_URL, api_key=LLM_API_KEY)
|
||||
|
||||
# =========================
# spaCy model (Transformer)
# =========================
# Requires the transformer model to be installed once:
#   python -m spacy download en_core_web_trf
print("🔍 Loading spaCy transformer model: en_core_web_trf")
nlp = spacy.load(
    "en_core_web_trf",
    exclude=["parser", "tagger", "attribute_ruler", "lemmatizer", "morphologizer"],
)
print("✅ spaCy model loaded successfully.")

def extract_names(text: str) -> str:
    """Extract distinct PERSON names using spaCy Transformer model."""
    if not isinstance(text, str) or not text.strip():
        return ""
    doc = nlp(text)
    names, seen = [], set()
    for ent in doc.ents:
        if ent.label_ == "PERSON":
            cleaned = clean_person(ent.text)
            key = cleaned.lower()
            if cleaned and key not in seen:
                seen.add(key)
                names.append(cleaned)
    return ", ".join(names)

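# Illustrative call (hypothetical text; actual entities depend on the model):
#   extract_names("Role change entered by Jane Doe and approved by John Smith")
#   -> "Jane Doe, John Smith"   # order preserved, case-insensitive de-dupe
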
def insert_after(df: pd.DataFrame, after_col: str, new_col: str, values: pd.Series) -> None:
    """Insert new_col immediately after after_col (drop existing if present)."""
    if new_col in df.columns:
        df.drop(columns=[new_col], inplace=True)
    idx = df.columns.get_loc(after_col) + 1
    df.insert(idx, new_col, values)

def dataframe_to_compact_event(df: pd.DataFrame) -> str:
    """Compact JSON payload for a grouped event (keeps unique values per column)."""
    def uniq(col):
        return sorted([v for v in df[col].dropna().unique().tolist()]) if col in df else []

    payload = {
        "applied_to": uniq(TARGET_COL_1),
        "by_user": uniq("By User"),
        "in_transaction": uniq("In Transaction"),
        "entered_on": uniq(ENTERED_COL),
        "dates_mmdd": uniq(ENTERED_MMDD_COL),
        "instances": uniq(SOURCE_COL_1),
        "added": uniq(SOURCE_COL_2),
        "row_count": int(len(df)),
    }
    return json.dumps(payload, ensure_ascii=False, indent=2)

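# Shape of the payload this produces (field values here are hypothetical):
# {
#   "applied_to": ["Jane Doe"],
#   "by_user": ["John Smith (jsmith)"],
#   "in_transaction": ["Assign Roles"],
#   "entered_on": ["01/15/2024 09:30 AM"],
#   "dates_mmdd": ["01/15"],
#   "instances": ["Security Admin"],
#   "added": ["Security Admin"],
#   "row_count": 2
# }
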
# =========================
# Main flow
# =========================

# If processed CSV already exists, skip straight to summarization
if os.path.exists(OUTPUT_CSV):
    print(f"⚡ Skipping CSV processing — {OUTPUT_CSV} already exists.")
    df = pd.read_csv(OUTPUT_CSV)
    # Ensure MM/DD exists (for old CSVs)
    if ENTERED_MMDD_COL not in df.columns and ENTERED_COL in df.columns:
        ts = pd.to_datetime(df[ENTERED_COL], errors="coerce")
        df[ENTERED_MMDD_COL] = ts.dt.strftime("%m/%d").fillna("")
else:
    print("⚙️ Processing CSV to extract names and generate output...")

    # Load CSV
    df = pd.read_csv(INPUT_CSV)

    # Derive Entered On (MM/DD); format="mixed" requires pandas >= 2.0, so fall
    # back to plain coercion in environments where it isn't accepted
    if ENTERED_COL in df.columns:
        try:
            ts = pd.to_datetime(df[ENTERED_COL], format="mixed", errors="coerce")
        except TypeError:
            ts = pd.to_datetime(df[ENTERED_COL], errors="coerce")
        df[ENTERED_MMDD_COL] = ts.dt.strftime("%m/%d").fillna("")
    else:
        df[ENTERED_MMDD_COL] = ""

    # Live progress counters for names across both columns
    name_counter = Counter()

    def _process_series_with_progress(series: pd.Series, desc: str) -> pd.Series:
        """Iterate with progress, update name_counter, and return extracted names Series."""
        values = series.fillna("").astype(str).tolist()
        out = []
        total = len(values)
        if total == 0:
            return pd.Series([], dtype=object)
        step = max(10, total // 20)  # refresh ~every 5%, but no more often than every 10 rows
        pbar = tqdm(values, desc=f"NER: {desc}", leave=True)
        for i, text in enumerate(pbar, start=1):
            names = extract_names(text)
            # Update running totals (ignore "Automatic Complete")
            for n in [x.strip() for x in names.split(",") if x.strip()]:
                if n.lower() not in AUTO_STRINGS:
                    name_counter[n] += 1
            out.append(names)
            # Periodic status refresh
            if i % step == 0 or i == total:
                top = ", ".join(f"{n}:{c}" for n, c in name_counter.most_common(3))
                pbar.set_postfix_str(f"unique={len(name_counter)} top=[{top}]")
        return pd.Series(out, index=series.index, dtype=object)

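    # The bar's postfix reports running NER stats in this form (values illustrative):
    #   unique=37 top=[Jane Doe:52, John Smith:40, Alex Chen:18]
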
    # =========================
    # Requested processing order
    # 1) Process "Added" FIRST -> fill BOTH "Added Applied to" and "Applied to"
    # 2) Then process "Instance that Changed" ONLY where "Applied to" is still empty -> fill BOTH
    # =========================

    # Ensure target columns exist and are positioned
    if SOURCE_COL_1 in df.columns:
        if TARGET_COL_1 not in df.columns:
            insert_after(df, SOURCE_COL_1, TARGET_COL_1, pd.Series([""] * len(df), index=df.index))
    else:
        if TARGET_COL_1 not in df.columns:
            df[TARGET_COL_1] = ""

    if SOURCE_COL_2 in df.columns:
        if TARGET_COL_2 not in df.columns:
            insert_after(df, SOURCE_COL_2, TARGET_COL_2, pd.Series([""] * len(df), index=df.index))
    else:
        if TARGET_COL_2 not in df.columns:
            df[TARGET_COL_2] = ""

    # ---- 1) Added -> fill BOTH "Added Applied to" and "Applied to"
    if SOURCE_COL_2 in df.columns:
        added_names = _process_series_with_progress(df[SOURCE_COL_2], f"{SOURCE_COL_2} (ALL)")
        df[TARGET_COL_2] = added_names
        df[TARGET_COL_1] = added_names
    else:
        df[TARGET_COL_2] = df.get(TARGET_COL_2, "")
        df[TARGET_COL_1] = df.get(TARGET_COL_1, "")

    # ---- 2) Instance that Changed -> only where "Applied to" still empty; fill BOTH
    if SOURCE_COL_1 in df.columns:
        mask_empty_applied = df[TARGET_COL_1].fillna("").str.strip() == ""
        if mask_empty_applied.any():
            inst_subset = df.loc[mask_empty_applied, SOURCE_COL_1]
            inst_names = _process_series_with_progress(inst_subset, f"{SOURCE_COL_1} (only where Applied to empty)")
            df.loc[mask_empty_applied, TARGET_COL_1] = inst_names
            df.loc[mask_empty_applied, TARGET_COL_2] = inst_names

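    # Net effect of the two passes on hypothetical rows:
    #   Added="Jane Doe - HR"             -> Applied to="Jane Doe",   Added Applied to="Jane Doe"
    #   Added=NaN, Instance="John Smith"  -> Applied to="John Smith", Added Applied to="John Smith"
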
    # --- Remove any rows that are purely "Automatic Complete" in key fields ---
    for col in [SOURCE_COL_1, SOURCE_COL_2, "In Transaction"]:
        if col in df.columns:
            df = df[~df[col].apply(is_auto)]

    # --- Keep only selected columns (incl. MM/DD) ---
    keep_cols = [
        SOURCE_COL_1,
        TARGET_COL_1,
        "In Transaction",
        SOURCE_COL_2,
        TARGET_COL_2,
        "By User",
        ENTERED_COL,
        ENTERED_MMDD_COL,
    ]
    df = df[[c for c in keep_cols if c in df.columns]]

    # --- Filter rows: keep where Applied to == Added Applied to (case-insensitive) ---
    if TARGET_COL_1 in df.columns and TARGET_COL_2 in df.columns:
        df = df[
            df[TARGET_COL_1].fillna("").str.strip().str.lower()
            == df[TARGET_COL_2].fillna("").str.strip().str.lower()
        ]

    # --- Drop duplicates & save overall result ---
    df = df.drop_duplicates().reset_index(drop=True)
    df.to_csv(OUTPUT_CSV, index=False)
    print(f"✅ Saved {len(df)} unique matching rows to {OUTPUT_CSV}")

# =========================
# NEW RULE APPLIED BEFORE SUMMARIZATION (covers both branches):
# Ignore rows where 'Added Applied to' value appears inside 'By User'
# Vectorized + strictly-boolean mask to avoid TypeError on "~"
# =========================
if TARGET_COL_2 in df.columns and "By User" in df.columns and not df.empty:
    # Normalize to lowercase strings
    names = df[TARGET_COL_2].fillna("").astype(str).str.strip().str.lower()
    byuser = df["By User"].fillna("").astype(str).str.strip().str.lower()

    # Build a boolean mask: both non-empty AND 'names' is a substring of 'byuser'
    contains_flags = pd.Series(
        [(n != "") and (u != "") and (n in u) for n, u in zip(names, byuser)],
        index=df.index,
        dtype="bool",
    )

    before = len(df)
    df = df[~contains_flags].reset_index(drop=True)
    removed = before - len(df)
    if removed:
        print(f"🚫 Ignored {removed} rows where 'Added Applied to' matched text in 'By User'.")

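# Why the explicit dtype="bool": a mask assembled from mixed or missing values
# can end up object-dtyped, and inverting an object Series with "~" can raise
# TypeError; forcing the dtype keeps the inversion safe.
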
# --- Snapshot rows right before LM Studio processing ---
df.to_csv(FINAL_SNAPSHOT, index=False)
print(f"📦 Wrote {len(df)} rows to {FINAL_SNAPSHOT} (pre-LM Studio snapshot)")

# =========================
# LM Studio event summary generation (group by By User, then date asc)
# =========================
if not df.empty:
    grouped = df.groupby([TARGET_COL_1, "By User", ENTERED_COL], dropna=False)
    summaries = []  # list of tuples (by_user, mmdd, sentence)

    for keys, gdf in grouped:
        applied_to, by_user, entered_on = keys
        if not applied_to or str(applied_to).strip() == "":
            continue

        mmdd_vals = gdf[ENTERED_MMDD_COL].dropna().astype(str)
        mmdd = next((v for v in mmdd_vals if v.strip()), "")

        payload = dataframe_to_compact_event(gdf)

        prompt = (
            "You are a compliance and information security analyst. "
            "Given the following grouped audit data, produce ONE clear and concise sentence summarizing the event. "
            "Include: (1) who performed the action (By User, include name and ID if available), "
            "(2) who the change applied to (Applied to), "
            "(3) the full list of role names that were assigned or added (from 'Instance that Changed' and 'Added'), "
            "and (4) the date of the event. "
            "Always mention the specific role titles exactly as shown in the data. "
            "If multiple roles were assigned, list them all in a natural phrase like "
            "'assigned the A, B, and C roles'. "
            "Do not include raw JSON, extra commentary, or line breaks. Return only one sentence.\n\n"
            f"Audit Data (JSON):\n{payload}"
        )

        try:
            resp = client.chat.completions.create(
                model=LLM_MODEL,
                messages=[
                    {"role": "system", "content": "You write terse, clear compliance summaries."},
                    {"role": "user", "content": prompt},
                ],
                temperature=0.2,
            )
            one_liner = (resp.choices[0].message.content or "").strip()
        except Exception as e:
            one_liner = f"[LLM ERROR] {e}"

        summaries.append((by_user or "Unknown User", mmdd, one_liner))

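    # A compliant one-liner looks like this (illustrative output, not a real event):
    #   "On 01/15, John Smith (jsmith) assigned the Security Admin and Report
    #    Writer roles to Jane Doe."
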
    # Group by By User, sort each user's entries by mm/dd asc, write file (OVERWRITE)
    grouped_summaries: Dict[str, List[Tuple[str, str]]] = defaultdict(list)
    for by_user, mmdd, line in summaries:
        grouped_summaries[by_user].append((mmdd, line))

    for user in grouped_summaries:
        grouped_summaries[user].sort(key=lambda x: x[0] or "")

    with open(EVENT_LOG, "w", encoding="utf-8") as f:
        for user in sorted(grouped_summaries.keys()):
            f.write(f"=== {user} ===\n")
            for mmdd, line in grouped_summaries[user]:
                prefix = f"{mmdd} - " if mmdd else ""
                f.write(f"{prefix}{line}\n")
            f.write("\n")

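    # Resulting event_log.txt layout (names and dates illustrative):
    #   === John Smith (jsmith) ===
    #   01/03 - <summary sentence>
    #   01/15 - <summary sentence>
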
    total_events = sum(len(v) for v in grouped_summaries.values())
    print(f"📝 Overwrote {EVENT_LOG} with {total_events} grouped event summaries")
else:
    print("ℹ️ No matching rows found; nothing to summarize.")
1
workday_monthly_admin_activities_audit/run_me.sh
Executable file
@@ -0,0 +1 @@
rm -f final.csv test_with_names.csv; python process.py
2031
workday_monthly_admin_activities_audit/test.csv
Normal file
File diff suppressed because it is too large

58057
workday_monthly_admin_activities_audit/test_long.csv
Normal file
File diff suppressed because it is too large

2289
workday_monthly_admin_activities_audit/test_not_dennis.csv
Normal file
File diff suppressed because it is too large

4412
workday_monthly_admin_activities_audit/test_short.csv
Normal file
File diff suppressed because it is too large

1528
workday_monthly_admin_activities_audit/test_with_names.csv
Normal file
File diff suppressed because it is too large

1057
workday_monthly_admin_activities_audit/test_with_names_bkup.csv
Normal file
File diff suppressed because it is too large

15053
workday_monthly_admin_activities_audit/test_with_names_long.csv
Normal file
File diff suppressed because it is too large