"""Regenerate the profile README.md from live Gitea statistics.

Queries a self-hosted Gitea instance's REST API for version, repository,
language, commit and activity data, then rewrites README.md as a small
Markdown dashboard. Intended to run from a scheduled Gitea Action.
"""

import datetime
import os
import sys

import requests

# --- Configuration ---
GITEA_URL = "https://gitea.nathan-falvey.synology.me"
USERNAME = "nathan"
BOT_NAME = "Gitea Action"  # The name used in your git config in the .yaml
GITEA_TOKEN = os.getenv("GITEA_TOKEN")

# Seconds before an API call is abandoned; without an explicit timeout,
# requests can hang forever if the instance is unreachable.
REQUEST_TIMEOUT = 30


def format_bytes(size_bytes):
    """Converts bytes to a human-readable string (e.g. '1.50 MB')."""
    if size_bytes == 0:
        return "0 B"
    for unit in ['B', 'KB', 'MB', 'GB', 'TB']:
        if size_bytes < 1024.0:
            return f"{size_bytes:.2f} {unit}"
        size_bytes /= 1024.0
    return f"{size_bytes:.2f} PB"


def get_uptime():
    """Reads Linux system uptime from /proc/uptime.

    Returns a H:MM:SS-style string, or a placeholder when /proc is not
    available or unparsable (non-Linux host, minimal container).
    """
    try:
        with open("/proc/uptime", "r") as f:
            seconds = float(f.readline().split()[0])
        return str(datetime.timedelta(seconds=int(seconds)))
    except (OSError, ValueError, IndexError):
        # Narrow catch: a bare `except:` would also swallow KeyboardInterrupt.
        return "Running in Container"


def _api_get(path, headers):
    """GET GITEA_URL + path and return the response.

    Raises requests.HTTPError on non-2xx responses so the run fails loudly
    instead of rendering a broken README from an error body.
    """
    resp = requests.get(f"{GITEA_URL}{path}", headers=headers,
                        timeout=REQUEST_TIMEOUT)
    resp.raise_for_status()
    return resp


def fetch_gitea_data():
    """Collect every statistic build_readme() needs into a single dict."""
    headers = {"Authorization": f"token {GITEA_TOKEN}"}

    # 1. System & User Info
    ver = _api_get("/api/v1/version", headers).json().get("version", "N/A")
    repos = _api_get("/api/v1/user/repos?type=owner", headers).json()

    # 2. Activity Feed (Filtered)
    all_feeds = _api_get(f"/api/v1/users/{USERNAME}/feeds", headers).json()
    # Filter out pushes from the bot.
    # NOTE(review): this compares the actor *login* against BOT_NAME, which is
    # the bot's git-config display name -- confirm the two actually match.
    human_feeds = [f for f in all_feeds
                   if f['act_user']['login'] != BOT_NAME][:10]
    # Specific list for "Recent Pushes"
    recent_pushes = [f for f in all_feeds
                     if f['op_type'] == 'push_repo'
                     and f['act_user']['login'] != BOT_NAME][:5]

    repo_stats = []
    lang_totals = {}
    total_instance_size = 0

    for r in repos:
        name, owner = r['name'], r['owner']['login']

        # Languages (byte counts per language) aggregated across all repos.
        langs = _api_get(f"/api/v1/repos/{owner}/{name}/languages",
                         headers).json()
        for lang, byte_count in langs.items():
            lang_totals[lang] = lang_totals.get(lang, 0) + byte_count

        # Commits: Gitea returns the total count in the X-Total header of the
        # commits endpoint, so limit=1 keeps the response payload tiny.
        c_req = _api_get(f"/api/v1/repos/{owner}/{name}/commits?limit=1",
                         headers)
        try:
            commit_count = int(c_req.headers.get("X-Total", "0"))
        except ValueError:
            commit_count = 0  # header present but non-numeric

        repo_size = r['size'] * 1024  # API reports repo size in KiB
        total_instance_size += repo_size

        repo_stats.append({
            "name": name, "url": r['html_url'], "commits": commit_count,
            "issues": r['open_issues_count'], "size": repo_size
        })

    # Identify most active repo (by commit count)
    most_active = (max(repo_stats, key=lambda x: x['commits'])
                   if repo_stats else None)

    return {
        "ver": ver, "repos": repo_stats, "langs": lang_totals,
        "feeds": human_feeds, "pushes": recent_pushes,
        "total_size": total_instance_size, "uptime": get_uptime(),
        "most_active": most_active
    }


def build_readme(data):
    """Render the dict produced by fetch_gitea_data() as Markdown text."""
    now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M")

    # Header
    md = f"# šŸ› ļø {USERNAME}'s Dev Hub\n\n"
    md += f"**Gitea Instance:** `v{data['ver']}` | **Uptime:** `{data['uptime']}` | **Last Updated:** `{now}`\n\n"

    # Top Languages & Total Space
    sorted_langs = sorted(data['langs'].items(), key=lambda x: x[1], reverse=True)
    lang_line = ", ".join([f"**{l[0]}** ({format_bytes(l[1])})" for l in sorted_langs[:5]])

    md += "### šŸ“Š Stats Summary\n"
    md += f"* **Total Storage Used:** `{format_bytes(data['total_size'])}` across {len(data['repos'])} repositories.\n"
    md += f"* **Language Footprint:** {lang_line}\n"
    if data['most_active']:
        md += f"* **Most Active Project:** [{data['most_active']['name']}]({data['most_active']['url']}) with **{data['most_active']['commits']}** commits.\n"
    md += "\n---\n"

    # Repo Table
    md += "### šŸ“‚ Repository Breakdown\n"
    md += "| Repository | Commits | Issues | Space |\n| :--- | :--- | :--- | :--- |\n"
    for r in data['repos']:
        md += f"| [{r['name']}]({r['url']}) | {r['commits']} | {r['issues']} | {format_bytes(r['size'])} |\n"
    md += "\n"

    # Recent Pushes (Human Only)
    md += "### šŸš€ Recent Human Pushes\n"
    if not data['pushes']:
        md += "*No recent manual pushes.*\n"
    for p in data['pushes']:
        date = p['created'].split('T')[0]
        md += f"* **{date}** - Pushed to `{p['repo']['name']}`\n"
    md += "\n"

    # Global Activity
    md += "### šŸ•’ Latest Changes\n"
    for feed in data['feeds']:
        date = feed['created'].split('T')[0]
        action = feed['op_type'].replace('_', ' ')
        md += f"* **{date}**: {action} in `{feed['repo']['name']}`\n"

    return md


def main():
    """Fetch stats and rewrite README.md; exit non-zero on any failure."""
    try:
        data = fetch_gitea_data()
        markdown = build_readme(data)
        with open("README.md", "w", encoding="utf-8") as f:
            f.write(markdown)
        print("README.md rebuilt successfully.")
    except Exception as e:
        # Surface the failure to the CI runner via a non-zero exit code;
        # a bare print() here would let the Action report success on errors.
        print(f"Error: {e}", file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()