feat: Add scripts for CloudWatch and IAM actions
Commit c654937 (parent: 807aeb0). 11 changed files with 639 additions and 244 deletions.
New file (@@ -0,0 +1,42 @@):

"""
Description: This script counts the total number of CloudWatch log groups in an AWS account.
    It uses the AWS SDK for Python (Boto3) to interact with CloudWatch Logs and
    implements pagination to handle potentially large numbers of log groups.

Author: Danny Steenman
License: MIT
"""

import boto3


def count_log_groups():
    """
    Counts the total number of CloudWatch log groups in the AWS account.

    Returns:
        int: The total number of CloudWatch log groups.
    """
    # Create a CloudWatch Logs client
    client = boto3.client("logs")

    # Initialize log group count
    log_group_count = 0

    # Use paginator to handle potential large number of log groups
    paginator = client.get_paginator("describe_log_groups")
    for page in paginator.paginate():
        log_group_count += len(page["logGroups"])

    return log_group_count


def main():
    """
    Main function to execute the log group counting process.
    """
    log_group_count = count_log_groups()
    print(f"Total number of CloudWatch log groups: {log_group_count}")


if __name__ == "__main__":
    main()
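The script above uses the default boto3 session, so it counts log groups in whatever account and region your environment is configured for. If you want to point it at a specific profile or region, a minimal variation is sketched below; the profile and region names are placeholders, not values taken from this commit.

import boto3

# Hypothetical variation: count log groups for a named profile and region.
# "my-profile" and "eu-west-1" are placeholders, not part of the committed script.
session = boto3.Session(profile_name="my-profile", region_name="eu-west-1")
client = session.client("logs")

count = 0
paginator = client.get_paginator("describe_log_groups")
for page in paginator.paginate():
    count += len(page["logGroups"])

print(f"Log groups in eu-west-1: {count}")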
New file (@@ -0,0 +1,69 @@):

"""
Description: This script fetches all CloudWatch log groups in an AWS account,
    calculates their age, and displays the information in a tabulated format.
    It uses the AWS SDK for Python (Boto3) to interact with CloudWatch Logs
    and implements pagination to handle potentially large numbers of log groups.

Author: Danny Steenman
License: MIT
"""

from datetime import datetime

import boto3
from tabulate import tabulate


def fetch_log_groups_with_creation_dates():
    """
    Fetches all CloudWatch log groups and their creation dates.

    Returns:
        list: A list of tuples containing log group name, creation date, and age in days.
    """
    # Create a CloudWatch Logs client
    client = boto3.client("logs")

    # List to store log group names, creation dates, and age
    log_groups_info = []

    # Use paginator to handle potential large number of log groups
    paginator = client.get_paginator("describe_log_groups")
    for page in paginator.paginate():
        for log_group in page["logGroups"]:
            # Extract log group name and creation time
            log_group_name = log_group["logGroupName"]
            creation_time_millis = log_group.get("creationTime", 0)
            creation_date = datetime.fromtimestamp(creation_time_millis / 1000)

            # Calculate the age of the log group
            age_delta = datetime.now() - creation_date
            age_human_readable = f"{age_delta.days} days" if age_delta.days > 0 else "less than a day"

            # Append the extracted information to the list
            log_groups_info.append((log_group_name, creation_date, age_delta.days))

    # Sort by age in descending order (most days to least days)
    log_groups_info.sort(key=lambda x: x[2], reverse=True)

    return log_groups_info


def main():
    """
    Main function to execute the log group fetching process and display results.
    """
    log_groups_info = fetch_log_groups_with_creation_dates()

    # Prepare data for tabulate
    table_data = [
        (log_group_name, creation_date, f"{age_days} days" if age_days > 0 else "less than a day")
        for log_group_name, creation_date, age_days in log_groups_info
    ]

    # Print table
    headers = ["Log Group", "Created On", "Age"]
    print(tabulate(table_data, headers=headers, tablefmt="pretty"))


if __name__ == "__main__":
    main()
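Beyond boto3, this script needs the third-party tabulate package. Because fetch_log_groups_with_creation_dates() returns plain (name, creation_date, age_days) tuples, a follow-up filter is easy to bolt on; the sketch below assumes it lives in the same module as the function above, and the 90-day cutoff is an arbitrary example rather than anything in the commit.

# Hypothetical extension (same module as fetch_log_groups_with_creation_dates):
# report only log groups older than a chosen threshold.
THRESHOLD_DAYS = 90  # arbitrary example cutoff, not part of the committed script


def print_old_log_groups(threshold_days: int = THRESHOLD_DAYS) -> None:
    for name, created, age_days in fetch_log_groups_with_creation_dates():
        if age_days > threshold_days:
            print(f"{name} was created {age_days} days ago ({created:%Y-%m-%d})")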
File renamed without changes.
ec2_delete_unused_keypairs.py rewritten (@@ -1,20 +1,104 @@).

Removed (previous version of the script):

# https://github.com/dannysteenman/aws-toolbox
#
# License: MIT
#
# This script finds and deletes all unused EC2 keypairs in a single AWS Region

import boto3

ec2 = boto3.resource("ec2")
key_pairs = ec2.key_pairs.all()

used_keys = set([instance.key_name for instance in ec2.instances.all()])
unused_keys = [
    key_pair.name for key_pair in key_pairs if key_pair.name not in used_keys
]

for key_name in unused_keys:
    ec2.KeyPair(key_name).delete()

print(f"Deleted {len(unused_keys)} unused key pairs.")

Added (new version of the script):

"""
Description: This script identifies and optionally deletes unused EC2 key pairs in an AWS account.
    It fetches all key pairs in the specified region, determines which ones are currently associated
    with running EC2 instances, and identifies the unused key pairs. The script can perform a dry run
    to show which key pairs would be deleted without actually deleting them.

Key features:
- Automatically uses the region specified in the AWS CLI profile
- Supports dry run mode for safe execution
- Provides detailed logging of all operations
- Uses boto3 to interact with AWS EC2 service
- Implements error handling for robustness

Usage:
    python ec2_delete_unused_keypairs.py [--dry-run] [--profile PROFILE_NAME]

Author: Danny Steenman
License: MIT
"""

import argparse
import logging

import boto3
from botocore.exceptions import ClientError


def setup_logging():
    logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
    return logging.getLogger(__name__)


def get_ec2_client_and_resource():
    try:
        ec2_client = boto3.client("ec2")
        ec2_resource = boto3.resource("ec2")
        return ec2_client, ec2_resource
    except ClientError as e:
        logger.error(f"Failed to create EC2 client or resource: {e}")
        raise


def get_all_key_pairs(ec2_resource):
    try:
        key_pairs = list(ec2_resource.key_pairs.all())
        logger.info(f"All Keys: {len(key_pairs)} : {[kp.name for kp in key_pairs]}")
        return key_pairs
    except ClientError as e:
        logger.error(f"Failed to retrieve key pairs: {e}")
        return []


def get_used_key_pairs(ec2_resource):
    try:
        used_keys = set(instance.key_name for instance in ec2_resource.instances.all() if instance.key_name)
        logger.info(f"Used Keys: {len(used_keys)} : {used_keys}")
        return used_keys
    except ClientError as e:
        logger.error(f"Failed to retrieve used key pairs: {e}")
        return set()


def delete_unused_key_pairs(ec2_resource, unused_keys, dry_run=False):
    deleted_count = 0
    for key_name in unused_keys:
        try:
            if not dry_run:
                ec2_resource.KeyPair(key_name).delete()
                logger.info(f"Deleted unused key pair: {key_name}")
            else:
                logger.info(f"Would delete unused key pair: {key_name}")
            deleted_count += 1
        except ClientError as e:
            logger.error(f"Failed to delete key pair {key_name}: {e}")
    return deleted_count


def main(dry_run=False):
    ec2_client, ec2_resource = get_ec2_client_and_resource()

    all_key_pairs = get_all_key_pairs(ec2_resource)
    used_keys = get_used_key_pairs(ec2_resource)

    unused_keys = [key_pair.name for key_pair in all_key_pairs if key_pair.name not in used_keys]
    logger.info(f"Unused Keys: {len(unused_keys)} : {unused_keys}")

    if not unused_keys:
        logger.info("No unused key pairs found.")
        return

    deleted_count = delete_unused_key_pairs(ec2_resource, unused_keys, dry_run)

    action = "Would delete" if dry_run else "Deleted"
    logger.info(f"{action} {deleted_count} unused key pair(s).")


if __name__ == "__main__":
    logger = setup_logging()

    parser = argparse.ArgumentParser(description="Delete unused EC2 key pairs")
    parser.add_argument("--dry-run", action="store_true", help="Perform a dry run without actually deleting key pairs")
    args = parser.parse_args()

    main(dry_run=args.dry_run)
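A safe first run is python ec2_delete_unused_keypairs.py --dry-run, which only logs the key pairs that would be removed. One gap worth flagging: the usage string in the docstring advertises a --profile PROFILE_NAME option, but the parser in this commit only defines --dry-run. The snippet below is a sketch of one way that flag could be wired in; it is not part of the committed code, and the session-backed clients would still need to be passed into the existing helper functions.

# Sketch, not part of this commit: wiring up the --profile flag that the docstring mentions.
import argparse

import boto3

parser = argparse.ArgumentParser(description="Delete unused EC2 key pairs")
parser.add_argument("--dry-run", action="store_true", help="Perform a dry run without actually deleting key pairs")
parser.add_argument("--profile", help="AWS CLI profile to use (optional, hypothetical flag)")
args = parser.parse_args()

# Build clients from the chosen profile instead of the default session.
session = boto3.Session(profile_name=args.profile) if args.profile else boto3.Session()
ec2_client = session.client("ec2")
ec2_resource = session.resource("ec2")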
Two files were deleted in this commit; their contents are not shown.