diff --git a/.github/linters/.isort.cfg b/.github/linters/.isort.cfg
index ed59415..9738cc8 100644
--- a/.github/linters/.isort.cfg
+++ b/.github/linters/.isort.cfg
@@ -1,4 +1,4 @@
 [settings]
 profile = black
 known_third_party = github3,dateutil,dotenv
-known_first_party = auth
\ No newline at end of file
+known_first_party = auth,markdown
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 15d75ea..7e52bbc 100644
--- a/.gitignore
+++ b/.gitignore
@@ -143,3 +143,8 @@ cython_debug/
 
 # IDEA
 .idea/**
+
+# Node.js
+node_modules/
+package-lock.json
+package.json
diff --git a/README.md b/README.md
index 5b4da19..9ba9369 100644
--- a/README.md
+++ b/README.md
@@ -60,16 +60,17 @@ This action can be configured to authenticate with GitHub App Installation or Pe
 
 #### Other Configuration Options
 
-| field | required | default | description |
-| -------------------- | -------- | ---------- | ------------------------------------------------------------ |
-| `ACTIVITY_METHOD` | false | `"pushed"` | How to get the last active date of the repository. Defaults to `pushed`, which is the last time any branch had a push. Can also be set to `default_branch_updated` to instead measure from the latest commit on the default branch (good for filtering out dependabot ) |
-| `GH_ENTERPRISE_URL` | false | `""` | URL of GitHub Enterprise instance to use for auth instead of github.com |
-| `INACTIVE_DAYS` | true | | The number of days used to determine if repository is stale, based on `push` events |
-| `EXEMPT_REPOS` | false | | Comma separated list of repositories to exempt from being flagged as stale. Supports Unix shell-style wildcards. ie. `EXEMPT_REPOS = "stale-repos,test-repo,conf-*"` |
-| `EXEMPT_TOPICS` | false | | Comma separated list of topics to exempt from being flagged as stale |
-| `ORGANIZATION` | false | | The organization to scan for stale repositories. If no organization is provided, this tool will search through repositories owned by the GH_TOKEN owner |
-| `ADDITIONAL_METRICS` | false | | Configure additional metrics like days since last release or days since last pull request. This allows for more detailed reporting on repository activity. To include both metrics, set `ADDITIONAL_METRICS: "release,pr"` |
-| `SKIP_EMPTY_REPORTS` | false | `true` | Skips report creation when no stale repositories are identified. Setting this input to `false` means reports are always created, even when they contain no results. |
+| field | required | default | description |
+| -------------------------- | -------- | ---------- | ------------------------------------------------------------ |
+| `ACTIVITY_METHOD` | false | `"pushed"` | How to get the last active date of the repository. Defaults to `pushed`, which is the last time any branch had a push. Can also be set to `default_branch_updated` to instead measure from the latest commit on the default branch (good for filtering out dependabot ) |
+| `GH_ENTERPRISE_URL` | false | `""` | URL of GitHub Enterprise instance to use for auth instead of github.com |
+| `INACTIVE_DAYS` | true | | The number of days used to determine if repository is stale, based on `push` events |
+| `EXEMPT_REPOS` | false | | Comma separated list of repositories to exempt from being flagged as stale. Supports Unix shell-style wildcards. ie. `EXEMPT_REPOS = "stale-repos,test-repo,conf-*"` |
+| `EXEMPT_TOPICS` | false | | Comma separated list of topics to exempt from being flagged as stale |
+| `ORGANIZATION` | false | | The organization to scan for stale repositories. If no organization is provided, this tool will search through repositories owned by the GH_TOKEN owner |
+| `ADDITIONAL_METRICS` | false | | Configure additional metrics like days since last release or days since last pull request. This allows for more detailed reporting on repository activity. To include both metrics, set `ADDITIONAL_METRICS: "release,pr"` |
+| `SKIP_EMPTY_REPORTS` | false | `true` | Skips report creation when no stale repositories are identified. Setting this input to `false` means reports are always created, even when they contain no results. |
+| `WORKFLOW_SUMMARY_ENABLED` | false | `false` | When set to `true`, automatically adds the stale repository report to the GitHub Actions workflow summary. This eliminates the need to manually add a step to display the Markdown content in the workflow summary. |
 
 ### Example workflow
 
@@ -124,6 +125,40 @@ jobs:
           token: ${{ secrets.GITHUB_TOKEN }}
 ```
 
+### Using Workflow Summary
+
+You can automatically include the stale repository report in your GitHub Actions workflow summary by setting `WORKFLOW_SUMMARY_ENABLED: true`. This eliminates the need for additional steps to display the results.
+
+```yaml
+name: stale repo identifier
+
+on:
+  workflow_dispatch:
+  schedule:
+    - cron: "3 2 1 * *"
+
+permissions:
+  contents: read
+
+jobs:
+  build:
+    name: stale repo identifier
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Run stale_repos tool
+        uses: github/stale-repos@v3
+        env:
+          GH_TOKEN: ${{ secrets.GH_TOKEN }}
+          ORGANIZATION: ${{ secrets.ORGANIZATION }}
+          EXEMPT_TOPICS: "keep,template"
+          INACTIVE_DAYS: 365
+          ADDITIONAL_METRICS: "release,pr"
+          WORKFLOW_SUMMARY_ENABLED: true
+```
+
+When `WORKFLOW_SUMMARY_ENABLED` is set to `true`, the stale repository report will be automatically added to the GitHub Actions workflow summary, making it easy to see the results directly in the workflow run page.
+
 ### Example stale_repos.md output
 
 ```markdown
diff --git a/env.py b/env.py
index fef0bfc..27d868e 100644
--- a/env.py
+++ b/env.py
@@ -28,6 +28,8 @@ class EnvVars:
         ghe (str): The GitHub Enterprise URL to use for authentication
         skip_empty_reports (bool): If true, Skips report creation when no stale
             repositories are identified
+        workflow_summary_enabled (bool): If true, adds the markdown report to GitHub
+            Actions workflow summary
     """
 
     def __init__(
@@ -39,6 +41,7 @@ def __init__(
         gh_token: str | None,
         ghe: str | None,
         skip_empty_reports: bool,
+        workflow_summary_enabled: bool,
     ):
         self.gh_app_id = gh_app_id
         self.gh_app_installation_id = gh_app_installation_id
@@ -47,6 +50,7 @@ def __init__(
         self.gh_token = gh_token
         self.ghe = ghe
         self.skip_empty_reports = skip_empty_reports
+        self.workflow_summary_enabled = workflow_summary_enabled
 
     def __repr__(self):
         return (
@@ -58,6 +62,7 @@ def __repr__(self):
             f"{self.gh_token},"
             f"{self.ghe},"
             f"{self.skip_empty_reports},"
+            f"{self.workflow_summary_enabled},"
         )
 
 
@@ -120,6 +125,7 @@ def get_env_vars(
     ghe = os.getenv("GH_ENTERPRISE_URL")
     gh_app_enterprise_only = get_bool_env_var("GITHUB_APP_ENTERPRISE_ONLY")
    skip_empty_reports = get_bool_env_var("SKIP_EMPTY_REPORTS", True)
+    workflow_summary_enabled = get_bool_env_var("WORKFLOW_SUMMARY_ENABLED")
 
     if gh_app_id and (not gh_app_private_key_bytes or not gh_app_installation_id):
         raise ValueError(
@@ -142,4 +148,5 @@ def get_env_vars(
         gh_token,
         ghe,
         skip_empty_reports,
+        workflow_summary_enabled,
     )
diff --git a/markdown.py b/markdown.py
new file mode 100644
index 0000000..f6c2efd
--- /dev/null
+++ b/markdown.py
@@ -0,0 +1,98 @@
+"""Markdown utilities for stale repository reporting."""
+
+import os
+
+
+def write_to_markdown(
+    inactive_repos,
+    inactive_days_threshold,
+    additional_metrics=None,
+    workflow_summary_enabled=False,
+    file=None,
+):
+    """Write the list of inactive repos to a markdown file.
+
+    Args:
+        inactive_repos: A list of dictionaries containing the repo, days inactive,
+            the date of the last push, repository visibility (public/private),
+            days since the last release, and days since the last pr
+        inactive_days_threshold: The threshold (in days) for considering a repo as inactive.
+        additional_metrics: A list of additional metrics to include in the report.
+        workflow_summary_enabled: If True, adds the report to GitHub Actions workflow summary.
+        file: A file object to write to. If None, a new file will be created.
+
+    """
+    inactive_repos = sorted(
+        inactive_repos, key=lambda x: x["days_inactive"], reverse=True
+    )
+
+    # Generate markdown content
+    content = generate_markdown_content(
+        inactive_repos, inactive_days_threshold, additional_metrics
+    )
+
+    # Write to file
+    with file or open("stale_repos.md", "w", encoding="utf-8") as markdown_file:
+        markdown_file.write(content)
+    print("Wrote stale repos to stale_repos.md")
+
+    # Write to GitHub step summary if enabled
+    if workflow_summary_enabled and os.environ.get("GITHUB_STEP_SUMMARY"):
+        with open(
+            os.environ["GITHUB_STEP_SUMMARY"], "a", encoding="utf-8"
+        ) as summary_file:
+            summary_file.write(content)
+        print("Added stale repos to workflow summary")
+
+
+def generate_markdown_content(
+    inactive_repos, inactive_days_threshold, additional_metrics=None
+):
+    """Generate markdown content for the inactive repos report.
+
+    Args:
+        inactive_repos: A list of dictionaries containing the repo, days inactive,
+            the date of the last push, repository visibility (public/private),
+            days since the last release, and days since the last pr
+        inactive_days_threshold: The threshold (in days) for considering a repo as inactive.
+        additional_metrics: A list of additional metrics to include in the report.
+
+    Returns:
+        str: The generated markdown content.
+    """
+    content = "# Inactive Repositories\n\n"
+    content += (
+        f"The following repos have not had a push event for more than "
+        f"{inactive_days_threshold} days:\n\n"
+    )
+    content += "| Repository URL | Days Inactive | Last Push Date | Visibility |"
+
+    # Include additional metrics columns if configured
+    if additional_metrics:
+        if "release" in additional_metrics:
+            content += " Days Since Last Release |"
+        if "pr" in additional_metrics:
+            content += " Days Since Last PR |"
+    content += "\n| --- | --- | --- | --- |"
+    if additional_metrics:
+        if "release" in additional_metrics:
+            content += " --- |"
+        if "pr" in additional_metrics:
+            content += " --- |"
+    content += "\n"
+
+    for repo_data in inactive_repos:
+        content += (
+            f"| {repo_data['url']} "
+            f"| {repo_data['days_inactive']} "
+            f"| {repo_data['last_push_date']} "
+            f"| {repo_data['visibility']} |"
+        )
+        if additional_metrics:
+            if "release" in additional_metrics:
+                content += f" {repo_data['days_since_last_release']} |"
+            if "pr" in additional_metrics:
+                content += f" {repo_data['days_since_last_pr']} |"
+        content += "\n"
+
+    return content
diff --git a/stale_repos.py b/stale_repos.py
index ccf1967..d8ead53 100755
--- a/stale_repos.py
+++ b/stale_repos.py
@@ -11,6 +11,7 @@
 from env import get_env_vars
 
 import auth
+from markdown import write_to_markdown
 
 
 def main():  # pragma: no cover
@@ -38,6 +39,7 @@ def main():  # pragma: no cover
     ghe = env_vars.ghe
     gh_app_enterprise_only = env_vars.gh_app_enterprise_only
     skip_empty_reports = env_vars.skip_empty_reports
+    workflow_summary_enabled = env_vars.workflow_summary_enabled
 
     # Auth to GitHub.com or GHE
     github_connection = auth.auth_to_github(
@@ -72,7 +74,12 @@ def main():  # pragma: no cover
 
     if inactive_repos or not skip_empty_reports:
         output_to_json(inactive_repos)
-        write_to_markdown(inactive_repos, inactive_days_threshold, additional_metrics)
+        write_to_markdown(
+            inactive_repos,
+            inactive_days_threshold,
+            additional_metrics,
+            workflow_summary_enabled,
+        )
     else:
         print("Reporting skipped; no stale repos found.")
 
@@ -235,61 +242,6 @@ def get_active_date(repo):
     return active_date
 
 
-def write_to_markdown(
-    inactive_repos, inactive_days_threshold, additional_metrics=None, file=None
-):
-    """Write the list of inactive repos to a markdown file.
-
-    Args:
-        inactive_repos: A list of dictionaries containing the repo, days inactive,
-            the date of the last push, repository visibility (public/private),
-            days since the last release, and days since the last pr
-        inactive_days_threshold: The threshold (in days) for considering a repo as inactive.
-        additional_metrics: A list of additional metrics to include in the report.
-        file: A file object to write to. If None, a new file will be created.
-
-    """
-    inactive_repos = sorted(
-        inactive_repos, key=lambda x: x["days_inactive"], reverse=True
-    )
-    with file or open("stale_repos.md", "w", encoding="utf-8") as markdown_file:
-        markdown_file.write("# Inactive Repositories\n\n")
-        markdown_file.write(
-            f"The following repos have not had a push event for more than "
-            f"{inactive_days_threshold} days:\n\n"
-        )
-        markdown_file.write(
-            "| Repository URL | Days Inactive | Last Push Date | Visibility |"
-        )
-        # Include additional metrics columns if configured
-        if additional_metrics:
-            if "release" in additional_metrics:
-                markdown_file.write(" Days Since Last Release |")
-            if "pr" in additional_metrics:
-                markdown_file.write(" Days Since Last PR |")
-        markdown_file.write("\n| --- | --- | --- | --- |")
-        if additional_metrics:
-            if "release" in additional_metrics:
-                markdown_file.write(" --- |")
-            if "pr" in additional_metrics:
-                markdown_file.write(" --- |")
-        markdown_file.write("\n")
-        for repo_data in inactive_repos:
-            markdown_file.write(
-                f"| {repo_data['url']} \
-| {repo_data['days_inactive']} \
-| {repo_data['last_push_date']} \
-| {repo_data['visibility']} |"
-            )
-            if additional_metrics:
-                if "release" in additional_metrics:
-                    markdown_file.write(f" {repo_data['days_since_last_release']} |")
-                if "pr" in additional_metrics:
-                    markdown_file.write(f" {repo_data['days_since_last_pr']} |")
-            markdown_file.write("\n")
-    print("Wrote stale repos to stale_repos.md")
-
-
 def output_to_json(inactive_repos, file=None):
     """Convert the list of inactive repos to a json string.
 
diff --git a/test_env.py b/test_env.py
index f1fbd4e..c0a6350 100644
--- a/test_env.py
+++ b/test_env.py
@@ -54,6 +54,7 @@ def test_get_env_vars_with_github_app(self):
             gh_token="",
             ghe="",
             skip_empty_reports=True,
+            workflow_summary_enabled=False,
         )
         result = get_env_vars(True)
         self.assertEqual(str(result), str(expected_result))
@@ -79,6 +80,7 @@ def test_get_env_vars_with_token(self):
             gh_token=TOKEN,
             ghe="",
             skip_empty_reports=True,
+            workflow_summary_enabled=False,
         )
         result = get_env_vars(True)
         self.assertEqual(str(result), str(expected_result))
@@ -119,6 +121,7 @@ def test_get_env_vars_optional_values(self):
             gh_token=TOKEN,
             ghe="",
             skip_empty_reports=False,
+            workflow_summary_enabled=False,
         )
         result = get_env_vars(True)
         self.assertEqual(str(result), str(expected_result))
@@ -143,6 +146,7 @@ def test_get_env_vars_optionals_are_defaulted(self):
             gh_token="TOKEN",
             ghe=None,
             skip_empty_reports=True,
+            workflow_summary_enabled=False,
         )
         result = get_env_vars(True)
         self.assertEqual(str(result), str(expected_result))
@@ -168,6 +172,29 @@ def test_get_env_vars_auth_with_github_app_installation_missing_inputs(self):
             "GH_APP_ID set and GH_APP_INSTALLATION_ID or GH_APP_PRIVATE_KEY variable not set",
         )
 
+    @patch.dict(
+        os.environ,
+        {
+            "GH_TOKEN": "TOKEN",
+            "WORKFLOW_SUMMARY_ENABLED": "true",
+        },
+        clear=True,
+    )
+    def test_get_env_vars_with_workflow_summary_enabled(self):
+        """Test that workflow_summary_enabled is set to True when environment variable is true"""
+        expected_result = EnvVars(
+            gh_app_id=None,
+            gh_app_installation_id=None,
+            gh_app_private_key_bytes=b"",
+            gh_app_enterprise_only=False,
+            gh_token="TOKEN",
+            ghe=None,
+            skip_empty_reports=True,
+            workflow_summary_enabled=True,
+        )
+        result = get_env_vars(True)
+        self.assertEqual(str(result), str(expected_result))
+
 
 if __name__ == "__main__":
     unittest.main()
diff --git a/test_markdown.py b/test_markdown.py
new file mode 100644
index 0000000..65f9fd7
--- /dev/null
+++ b/test_markdown.py
@@ -0,0 +1,220 @@
+"""Unit tests for the markdown module."""
+
+import os
+import unittest
+from datetime import datetime, timedelta, timezone
+from unittest.mock import MagicMock, call, patch
+
+from markdown import write_to_markdown
+
+
+class WriteToMarkdownTestCase(unittest.TestCase):
+    """
+    Unit test case for the write_to_markdown() function.
+    """
+
+    def test_write_to_markdown(self):
+        """Test that the write_to_markdown function writes the expected data to a file.
+
+        This test creates a list of inactive repos and a mock file object using
+        MagicMock. It then calls the write_to_markdown function with the list of
+        inactive repos and the mock file object. Finally, it uses the assert_has_calls
+        method to check that the mock file object was called with the expected data.
+
+        """
+        forty_days_ago = datetime.now(timezone.utc) - timedelta(days=40)
+        thirty_days_ago = datetime.now(timezone.utc) - timedelta(days=30)
+        # Create an unsorted list of inactive repos
+        inactive_repos = [
+            {
+                "url": "https://github.com/example/repo1",
+                "days_inactive": 30,
+                "last_push_date": thirty_days_ago.date().isoformat(),
+                "visibility": "private",
+                "days_since_last_release": None,
+                "days_since_last_pr": None,
+            },
+            {
+                "url": "https://github.com/example/repo2",
+                "days_inactive": 40,
+                "last_push_date": forty_days_ago.date().isoformat(),
+                "visibility": "public",
+                "days_since_last_release": None,
+                "days_since_last_pr": None,
+            },
+        ]
+
+        inactive_days_threshold = 365
+
+        # Create a mock file object
+        mock_file = MagicMock()
+
+        # Call the write_to_markdown function with the mock file object
+        write_to_markdown(
+            inactive_repos,
+            inactive_days_threshold,
+            additional_metrics=["release", "pr"],
+            file=mock_file,
+        )
+
+        # Check that the mock file object was called with the expected data
+        expected_content = (
+            "# Inactive Repositories\n\n"
+            "The following repos have not had a push event for more than 365 days:\n\n"
+            "| Repository URL | Days Inactive | Last Push Date | Visibility |"
+            " Days Since Last Release | Days Since Last PR |\n"
+            "| --- | --- | --- | --- | --- | --- |\n"
+            f"| https://github.com/example/repo2 | 40 | {forty_days_ago.date().isoformat()}"
+            " | public | None | None |\n"
+            f"| https://github.com/example/repo1 | 30 | {thirty_days_ago.date().isoformat()}"
+            " | private | None | None |\n"
+        )
+        expected_calls = [
+            call.write(expected_content),
+        ]
+        mock_file.__enter__.return_value.assert_has_calls(expected_calls)
+
+
+class WriteToMarkdownWithWorkflowSummaryTestCase(unittest.TestCase):
+    """
+    Unit test case for the write_to_markdown() function with workflow summary enabled.
+    """
+
+    @patch.dict(os.environ, {"GITHUB_STEP_SUMMARY": "/tmp/test_summary.md"})
+    def test_write_to_markdown_with_workflow_summary_enabled(self):
+        """Test that the write_to_markdown function writes to both file and workflow
+        summary when enabled.
+
+        This test creates a list of inactive repos and calls the write_to_markdown
+        function with workflow_summary_enabled=True. It verifies that the content is
+        written to both the regular file and the GitHub step summary file.
+
+        """
+        forty_days_ago = datetime.now(timezone.utc) - timedelta(days=40)
+        thirty_days_ago = datetime.now(timezone.utc) - timedelta(days=30)
+        # Create an unsorted list of inactive repos
+        inactive_repos = [
+            {
+                "url": "https://github.com/example/repo1",
+                "days_inactive": 30,
+                "last_push_date": thirty_days_ago.date().isoformat(),
+                "visibility": "private",
+                "days_since_last_release": None,
+                "days_since_last_pr": None,
+            },
+            {
+                "url": "https://github.com/example/repo2",
+                "days_inactive": 40,
+                "last_push_date": forty_days_ago.date().isoformat(),
+                "visibility": "public",
+                "days_since_last_release": None,
+                "days_since_last_pr": None,
+            },
+        ]
+
+        inactive_days_threshold = 365
+
+        # Create mock file objects
+        mock_file = MagicMock()
+        mock_summary_file = MagicMock()
+
+        with patch("builtins.open", create=True) as mock_open:
+            # Configure the mock to return different objects for different files
+            def open_side_effect(
+                filename, mode, **_kwargs
+            ):  # pylint: disable=unused-argument
+                if filename == "/tmp/test_summary.md":
+                    return mock_summary_file
+                return mock_file
+
+            mock_open.side_effect = open_side_effect
+
+            # Call the write_to_markdown function with workflow summary enabled
+            write_to_markdown(
+                inactive_repos,
+                inactive_days_threshold,
+                additional_metrics=["release", "pr"],
+                workflow_summary_enabled=True,
+            )
+
+        # Check that both files were written to
+        expected_content = (
+            "# Inactive Repositories\n\n"
+            "The following repos have not had a push event for more than 365 days:\n\n"
+            "| Repository URL | Days Inactive | Last Push Date | Visibility |"
+            " Days Since Last Release | Days Since Last PR |\n"
+            "| --- | --- | --- | --- | --- | --- |\n"
+            f"| https://github.com/example/repo2 | 40 | {forty_days_ago.date().isoformat()}"
+            " | public | None | None |\n"
+            f"| https://github.com/example/repo1 | 30 | {thirty_days_ago.date().isoformat()}"
+            " | private | None | None |\n"
+        )
+
+        # Verify regular file was written
+        mock_file.__enter__.return_value.write.assert_called_once_with(
+            expected_content
+        )
+        # Verify summary file was written
+        mock_summary_file.__enter__.return_value.write.assert_called_once_with(
+            expected_content
+        )
+
+    def test_write_to_markdown_with_workflow_summary_disabled(self):
+        """Test that when workflow_summary_enabled is False, only the regular file
+        is written."""
+        inactive_repos = [
+            {
+                "url": "https://github.com/example/repo1",
+                "days_inactive": 30,
+                "last_push_date": "2025-01-01",
+                "visibility": "private",
+            }
+        ]
+
+        # Create a mock file object
+        mock_file = MagicMock()
+
+        with patch("builtins.open", create=True) as mock_open:
+            mock_open.return_value = mock_file
+
+            # Call the write_to_markdown function with workflow summary disabled
+            write_to_markdown(
+                inactive_repos,
+                365,
+                workflow_summary_enabled=False,
+            )
+
+            # Verify only one file was opened (the regular stale_repos.md file)
+            mock_open.assert_called_once_with("stale_repos.md", "w", encoding="utf-8")
+
+    @patch.dict(os.environ, {}, clear=True)
+    def test_write_to_markdown_with_workflow_summary_enabled_but_no_env_var(self):
+        """Test that when GITHUB_STEP_SUMMARY is not set, only the regular file is written."""
+        inactive_repos = [
+            {
+                "url": "https://github.com/example/repo1",
+                "days_inactive": 30,
+                "last_push_date": "2025-01-01",
+                "visibility": "private",
+            }
+        ]
+
+        # Create a mock file object
+        mock_file = MagicMock()
+
+        with patch("builtins.open", create=True) as mock_open:
+            mock_open.return_value = mock_file
+
+            # Call the write_to_markdown function with workflow summary enabled but no env var
+            write_to_markdown(
+                inactive_repos,
+                365,
+                workflow_summary_enabled=True,
+            )
+
+            # Verify only one file was opened (the regular stale_repos.md file)
+            mock_open.assert_called_once_with("stale_repos.md", "w", encoding="utf-8")
+
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/test_stale_repos.py b/test_stale_repos.py
index 9e470be..8ede878 100644
--- a/test_stale_repos.py
+++ b/test_stale_repos.py
@@ -31,7 +31,6 @@
     get_inactive_repos,
     is_repo_exempt,
     output_to_json,
-    write_to_markdown,
 )
 
 
@@ -354,88 +353,6 @@ def test_get_inactive_repos_with_default_branch_updated(self):
         assert inactive_repos == expected_inactive_repos
 
 
-class WriteToMarkdownTestCase(unittest.TestCase):
-    """
-    Unit test case for the write_to_markdown() function.
-    """
-
-    def test_write_to_markdown(self):
-        """Test that the write_to_markdown function writes the expected data to a file.
-
-        This test creates a list of inactive repos and a mock file object using MagicMock.
-        It then calls the write_to_markdown function with the list of inactive repos and
-        the mock file object. Finally, it uses the assert_has_calls method to check that
-        the mock file object was called with the expected data.
-
-        """
-        forty_days_ago = datetime.now(timezone.utc) - timedelta(days=40)
-        thirty_days_ago = datetime.now(timezone.utc) - timedelta(days=30)
-        # Create an unsorted list of inactive repos
-        inactive_repos = [
-            {
-                "url": "https://github.com/example/repo1",
-                "days_inactive": 30,
-                "last_push_date": thirty_days_ago.date().isoformat(),
-                "visibility": "private",
-                "days_since_last_release": None,
-                "days_since_last_pr": None,
-            },
-            {
-                "url": "https://github.com/example/repo2",
-                "days_inactive": 40,
-                "last_push_date": forty_days_ago.date().isoformat(),
-                "visibility": "public",
-                "days_since_last_release": None,
-                "days_since_last_pr": None,
-            },
-        ]
-
-        inactive_days_threshold = 365
-
-        # Create a mock file object
-        mock_file = MagicMock()
-
-        # Call the write_to_markdown function with the mock file object
-        write_to_markdown(
-            inactive_repos,
-            inactive_days_threshold,
-            additional_metrics=["release", "pr"],
-            file=mock_file,
-        )
-
-        # Check that the mock file object was called with the expected data
-        expected_calls = [
-            call.write("# Inactive Repositories\n\n"),
-            call.write(
-                "The following repos have not had a push event for more than 365 days:\n\n"
-            ),
-            call.write(
-                "| Repository URL | Days Inactive | Last Push Date | Visibility |"
-            ),
-            call.write(" Days Since Last Release |"),
-            call.write(" Days Since Last PR |"),
-            call.write("\n| --- | --- | --- | --- |"),
-            call.write(" --- |"),
-            call.write(" --- |"),
-            call.write("\n"),
-            call.write(
-                f"| https://github.com/example/repo2 | 40 |\
- {forty_days_ago.date().isoformat()} | public |"
-            ),
-            call.write(" None |"),
-            call.write(" None |"),
-            call.write("\n"),
-            call.write(
-                f"| https://github.com/example/repo1 | 30 |\
- {thirty_days_ago.date().isoformat()} | private |"
-            ),
-            call.write(" None |"),
-            call.write(" None |"),
-            call.write("\n"),
-        ]
-        mock_file.__enter__.return_value.assert_has_calls(expected_calls)
-
-
 @patch.dict(os.environ, {"ACTIVITY_METHOD": "default_branch_updated"})
 class GetActiveDateTestCase(unittest.TestCase):
     """
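A quick way to exercise the new `write_to_markdown` path from this patch locally, outside of a workflow run — a minimal sketch, not part of the patch. It assumes the patch above is applied, the script is run from the repository root, and the repository data is made-up sample input; `GITHUB_STEP_SUMMARY` is pointed at a scratch file the same way the new unit test does:

```python
# Minimal local smoke test for the workflow-summary path (illustrative only).
import os
import tempfile

from markdown import write_to_markdown  # the new module added in this patch

# Point GITHUB_STEP_SUMMARY at a scratch file, mirroring the unit test setup.
summary_path = os.path.join(tempfile.mkdtemp(), "summary.md")
os.environ["GITHUB_STEP_SUMMARY"] = summary_path

# Made-up sample data using the keys the report generator reads.
inactive_repos = [
    {
        "url": "https://github.com/example/repo1",
        "days_inactive": 30,
        "last_push_date": "2025-01-01",
        "visibility": "private",
        "days_since_last_release": None,
        "days_since_last_pr": None,
    }
]

# Writes stale_repos.md in the current directory and, because the flag is set
# and GITHUB_STEP_SUMMARY exists, appends the same markdown to the summary file.
write_to_markdown(
    inactive_repos,
    inactive_days_threshold=365,
    additional_metrics=["release", "pr"],
    workflow_summary_enabled=True,
)

with open(summary_path, encoding="utf-8") as summary_file:
    print(summary_file.read())
```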
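For completeness, a short sketch of how the new flag travels from the environment into the report step. The environment values below are illustrative only (the token is a placeholder), and the call pattern mirrors the new `test_get_env_vars_with_workflow_summary_enabled` test and the wiring in `main()`:

```python
import os

from env import get_env_vars

# Hypothetical values purely for illustration; real runs take these from the
# workflow's `env:` block shown in the README example above.
os.environ["GH_TOKEN"] = "placeholder-token"
os.environ["WORKFLOW_SUMMARY_ENABLED"] = "true"

env_vars = get_env_vars(True)  # invoked the same way the unit tests invoke it
print(env_vars.workflow_summary_enabled)  # True, via get_bool_env_var (defaults to False)

# main() in stale_repos.py then forwards this flag to write_to_markdown(...),
# which appends the report to $GITHUB_STEP_SUMMARY when that path is present.
```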