Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
76 changes: 56 additions & 20 deletions app.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@
get_created_issues,
get_open_issues,
get_open_issues_for_person,
get_resolution_time_by_priority,
get_time_data,
)
from linear.projects import get_projects
Expand Down Expand Up @@ -522,7 +523,7 @@ def get_future_result_with_timeout(
def _build_leaderboard_entries(
days: int,
completed_bugs: list,
completed_new_features: list,
completed_feature_requests: list,
completed_technical_changes: list,
merged_reviews: dict,
merged_authored_prs: dict,
Expand Down Expand Up @@ -568,7 +569,7 @@ def resolve_slug(*identities: str | None) -> str | None:
count_breakdown_by_slug: dict[str, dict[str, int]] = {}
count_breakdown_by_external: dict[str, dict[str, int]] = {}

completed_work = completed_bugs + completed_new_features + completed_technical_changes
completed_work = completed_bugs + completed_feature_requests + completed_technical_changes

for issue in completed_work:
assignee = issue.get("assignee")
Expand Down Expand Up @@ -781,8 +782,8 @@ def _build_priority_stats_context(days: int, _cache_epoch: int) -> dict:
created_priority_future = executor.submit(get_created_issues, 2, "Bug", days)
completed_priority_future = executor.submit(get_completed_issues_summary, 2, "Bug", days)
completed_bugs_future = executor.submit(get_completed_issues_summary, 5, "Bug", days)
completed_new_features_future = executor.submit(
get_completed_issues_summary, 5, "New Feature", days
completed_feature_requests_future = executor.submit(
get_completed_issues_summary, 5, "Feature Request", days
)
completed_technical_changes_future = executor.submit(
get_completed_issues_summary, 5, "Technical Change", days
Expand All @@ -795,11 +796,11 @@ def _build_priority_stats_context(days: int, _cache_epoch: int) -> dict:
]
completed_bugs_result = get_future_result_with_timeout(completed_bugs_future, [])
completed_bugs = [issue for issue in completed_bugs_result if not issue.get("project")]
completed_new_features_result = get_future_result_with_timeout(
completed_new_features_future, []
completed_feature_requests_result = get_future_result_with_timeout(
completed_feature_requests_future, []
)
completed_new_features = [
issue for issue in completed_new_features_result if not issue.get("project")
completed_feature_requests = [
issue for issue in completed_feature_requests_result if not issue.get("project")
]
completed_technical_changes_result = get_future_result_with_timeout(
completed_technical_changes_future, []
Expand All @@ -810,13 +811,13 @@ def _build_priority_stats_context(days: int, _cache_epoch: int) -> dict:

time_data = get_time_data(completed_priority_bugs)
fixes_per_day = (
len(completed_bugs + completed_new_features + completed_technical_changes) / days
len(completed_bugs + completed_feature_requests + completed_technical_changes) / days
if days
else 0
)

total_completed_issues = len(
completed_bugs + completed_new_features + completed_technical_changes
completed_bugs + completed_feature_requests + completed_technical_changes
)
if total_completed_issues:
priority_percentage = int(
Expand Down Expand Up @@ -846,16 +847,16 @@ def _build_open_items_context(days: int, _cache_epoch: int) -> dict:
with ThreadPoolExecutor(max_workers=INDEX_THREADPOOL_MAX_WORKERS) as executor:
open_priority_future = executor.submit(get_open_issues, 2, "Bug")
open_bugs_future = executor.submit(get_open_issues, 5, "Bug")
open_new_features_future = executor.submit(get_open_issues, 5, "New Feature")
open_feature_requests_future = executor.submit(get_open_issues, 5, "Feature Request")
open_technical_changes_future = executor.submit(get_open_issues, 5, "Technical Change")

open_priority_bugs = get_future_result_with_timeout(open_priority_future, [])
open_bugs_result = get_future_result_with_timeout(open_bugs_future, [])
open_new_features_result = get_future_result_with_timeout(open_new_features_future, [])
open_feature_requests_result = get_future_result_with_timeout(open_feature_requests_future, [])
open_technical_changes_result = get_future_result_with_timeout(
open_technical_changes_future, []
)
open_work = open_bugs_result + open_new_features_result + open_technical_changes_result
open_work = open_bugs_result + open_feature_requests_result + open_technical_changes_result

return {
"days": days,
Expand All @@ -876,8 +877,8 @@ def _build_open_items_context(days: int, _cache_epoch: int) -> dict:
def _build_leaderboard_context(days: int, _cache_epoch: int) -> dict:
with ThreadPoolExecutor(max_workers=INDEX_THREADPOOL_MAX_WORKERS) as executor:
completed_bugs_future = executor.submit(get_completed_issues_summary, 5, "Bug", days)
completed_new_features_future = executor.submit(
get_completed_issues_summary, 5, "New Feature", days
completed_feature_requests_future = executor.submit(
get_completed_issues_summary, 5, "Feature Request", days
)
completed_technical_changes_future = executor.submit(
get_completed_issues_summary, 5, "Technical Change", days
Expand All @@ -887,11 +888,11 @@ def _build_leaderboard_context(days: int, _cache_epoch: int) -> dict:

completed_bugs_result = get_future_result_with_timeout(completed_bugs_future, [])
completed_bugs = [issue for issue in completed_bugs_result if not issue.get("project")]
completed_new_features_result = get_future_result_with_timeout(
completed_new_features_future, []
completed_feature_requests_result = get_future_result_with_timeout(
completed_feature_requests_future, []
)
completed_new_features = [
issue for issue in completed_new_features_result if not issue.get("project")
completed_feature_requests = [
issue for issue in completed_feature_requests_result if not issue.get("project")
]
completed_technical_changes_result = get_future_result_with_timeout(
completed_technical_changes_future, []
Expand All @@ -906,7 +907,7 @@ def _build_leaderboard_context(days: int, _cache_epoch: int) -> dict:
leaderboard_entries = _build_leaderboard_entries(
days=days,
completed_bugs=completed_bugs,
completed_new_features=completed_new_features,
completed_feature_requests=completed_feature_requests,
completed_technical_changes=completed_technical_changes,
merged_reviews=merged_reviews,
merged_authored_prs=merged_authored_prs,
Expand All @@ -918,6 +919,33 @@ def _build_leaderboard_context(days: int, _cache_epoch: int) -> dict:
}


@lru_cache(maxsize=INDEX_CONTEXT_CACHE_MAXSIZE)
def _build_resolution_by_priority_context(days: int, _cache_epoch: int) -> dict:
    """Build the template context for the resolution-time-by-priority partial.

    Fetches completed Bugs and Feature Requests for the last ``days`` days in
    parallel, drops project-linked issues, and aggregates resolution times per
    priority level.  ``_cache_epoch`` only participates in the lru_cache key so
    cached contexts expire with the TTL window.
    """
    with ThreadPoolExecutor(max_workers=INDEX_THREADPOOL_MAX_WORKERS) as executor:
        bugs_future = executor.submit(get_completed_issues_summary, 5, "Bug", days)
        features_future = executor.submit(
            get_completed_issues_summary, 5, "Feature Request", days
        )

        bugs = get_future_result_with_timeout(bugs_future, [])
        feature_requests = get_future_result_with_timeout(features_future, [])

        # Project-linked issues are reported elsewhere; keep only loose items.
        non_project_issues = [
            issue for issue in bugs + feature_requests if not issue.get("project")
        ]

        return {
            "days": days,
            "resolution_stats": get_resolution_time_by_priority(non_project_issues),
        }


# use a query string parameter for days on the index route
@app.route("/")
def index():
Expand All @@ -933,6 +961,14 @@ def index_priority_stats_partial():
return render_template("partials/index_priority_stats.html", **context)


@app.route("/partials/index/resolution-by-priority")
def index_resolution_by_priority_partial():
    """Serve the resolution-by-priority partial loaded by the index page."""
    days = request.args.get("days", default=30, type=int)
    # Integer-divide wall time by the TTL so the cache key rolls over per window.
    epoch = int(time.time() / INDEX_CACHE_TTL_SECONDS)
    return render_template(
        "partials/index_resolution_by_priority.html",
        **_build_resolution_by_priority_context(days, epoch),
    )


@app.route("/partials/index/open-items")
def index_open_items_partial():
days = request.args.get("days", default=30, type=int)
Expand Down
6 changes: 3 additions & 3 deletions jobs.py
Original file line number Diff line number Diff line change
Expand Up @@ -426,7 +426,7 @@ def normalize_identity(value: str | None) -> str:

items = (
get_completed_issues(5, "Bug", days)
+ get_completed_issues(5, "New Feature", days)
+ get_completed_issues(5, "Feature Request", days)
+ get_completed_issues(5, "Technical Change", days)
)
items = [item for item in items if not item.get("project")]
Expand Down Expand Up @@ -506,7 +506,7 @@ def post_stale():
cr_prs = get_prs_with_changes_requested_by_reviewer()
stale_issues = get_stale_issues_by_assignee(
get_open_issues(5, "Bug")
+ get_open_issues(5, "New Feature")
+ get_open_issues(5, "Feature Request")
+ get_open_issues(5, "Technical Change"),
7,
)
Expand Down Expand Up @@ -729,7 +729,7 @@ def post_weekly_changelog():

issues = (
get_completed_issues(5, "Bug", 7)
+ get_completed_issues(5, "New Feature", 7)
+ get_completed_issues(5, "Feature Request", 7)
+ get_completed_issues(5, "Technical Change", 7)
)
if not issues:
Expand Down
48 changes: 48 additions & 0 deletions linear/issues.py
Original file line number Diff line number Diff line change
Expand Up @@ -376,6 +376,54 @@ def by_platform(issues):
)


# Display labels for numeric issue priority values.  Priorities absent from
# this map fall back to a generic "P<n>" label (see
# get_resolution_time_by_priority).
# NOTE(review): the 5 -> "Very Low" entry looks like a local convention —
# confirm it matches the workspace's priority scale.
PRIORITY_LABELS = {
    1: "Urgent",
    2: "High",
    3: "Medium",
    4: "Low",
    5: "Very Low",
}


def by_priority(issues):
    """Group issues into buckets keyed by numeric priority, ascending.

    Issues whose ``priority`` is missing or ``None`` are excluded.
    """
    buckets = {}
    for item in issues:
        level = item.get("priority")
        if level is None:
            continue
        bucket = buckets.get(level)
        if bucket is None:
            bucket = buckets[level] = []
        bucket.append(item)
    # Rebuild in ascending key order so callers iterate priorities in order.
    return {level: buckets[level] for level in sorted(buckets)}


def get_resolution_time_by_priority(issues):
    """Return average/p95 resolution time (in days) per priority level.

    Only issues with both createdAt and completedAt populated count as
    resolved.  Priorities with no resolved issues in the window are omitted.

    Returns a list of dicts in ascending priority order, each with keys
    ``priority``, ``label``, ``count``, ``avg_days``, and ``p95_days``.
    """
    stats = []
    for priority, priority_issues in by_priority(issues).items():
        # Count resolvable issues first so empty buckets are skipped before
        # get_time_data runs (the original computed it unconditionally and
        # then threw the result away for buckets with no resolved issues).
        resolved_count = sum(
            1
            for issue in priority_issues
            if _parse_linear_datetime(issue.get("completedAt"))
            and _parse_linear_datetime(issue.get("createdAt"))
        )
        if not resolved_count:
            continue
        # NOTE(review): assumes get_time_data is a pure summarizer with no
        # side effects, so skipping it for empty buckets is safe — confirm.
        time_data = get_time_data(priority_issues)
        stats.append(
            {
                "priority": priority,
                "label": PRIORITY_LABELS.get(priority, f"P{priority}"),
                "count": resolved_count,
                "avg_days": time_data["lead"]["avg"],
                "p95_days": time_data["lead"]["p95"],
            }
        )
    return stats


def _parse_linear_datetime(value):
if not value:
return None
Expand Down
5 changes: 5 additions & 0 deletions templates/index.html
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ <h2>Priority Bug Stats</h2>
</form>
<div id="priority-stats" aria-busy="true"></div>
#}
<section id="resolution-by-priority" aria-busy="true"></section>
<section id="open-items" aria-busy="true"></section>
<section id="leaderboard" aria-busy="true"></section>
{% endblock %}
Expand Down Expand Up @@ -116,6 +117,10 @@ <h2>Priority Bug Stats</h2>
// `/partials/index/priority-stats?days=${days}`,
// renderPlatformChart
// );
loadSection(
'resolution-by-priority',
`/partials/index/resolution-by-priority?days=${days}`
);
loadSection('open-items', `/partials/index/open-items?days=${days}`);
loadSection('leaderboard', `/partials/index/leaderboard?days=${days}`);
</script>
Expand Down
22 changes: 22 additions & 0 deletions templates/partials/index_resolution_by_priority.html
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
{# Partial rendered into the #resolution-by-priority section of the index
   page.  Expects `days` (int window size) and `resolution_stats` (list of
   dicts with keys: label, avg_days, p95_days, count); an empty list renders
   the fallback message instead of the card grid. #}
<h2>Average Resolution Time by Priority</h2>
<p>
  <small
    >Non-project items completed in the last {{ days }} day{{ '' if days == 1 else 's' }}.</small
  >
</p>
{% if resolution_stats %}
<div class="grid">
  {% for stat in resolution_stats %}
  <div>
    <article>
      <header>{{ stat.label }}</header>
      <h1>{{ stat.avg_days }}d</h1>
      <small>{{ stat.count }} resolved &middot; p95 {{ stat.p95_days }}d</small>
    </article>
  </div>
  {% endfor %}
</div>
{% else %}
<article>No resolved non-project items in this window.</article>
{% endif %}
<hr />
Loading