
Commit 7fdeda2

fix: skip out of order PR updates (#3324)
* fix: skip stale updates
* feat: reject out of order updates in main util as well
* fix: do not need this here
1 parent aa70919 commit 7fdeda2

6 files changed (+52, -18 lines changed)
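The core of this commit is a guard in conda_forge_tick/git_utils.py that refuses to overwrite cached PR data with an API response whose updated_at timestamp is older than what is already stored. A minimal standalone sketch of that decision, assuming plain dicts and illustrative helper names (should_accept_update is not part of the bot's code):

from datetime import datetime, timezone


def _parse_updated_at(pr_data):
    """Return an aware datetime for pr_data["updated_at"], or None if it is missing."""
    raw = pr_data.get("updated_at")
    if raw is None:
        return None
    when = datetime.fromisoformat(raw)
    if when.tzinfo is None:
        # Assume UTC when the timestamp carries no offset.
        when = when.replace(tzinfo=timezone.utc)
    return when


def should_accept_update(cached_pr, fresh_pr):
    """Accept the fresh payload unless it is provably older than the cached one."""
    old = _parse_updated_at(cached_pr)
    new = _parse_updated_at(fresh_pr)
    if old is None or new is None:
        # Without both timestamps we cannot prove the response is stale.
        return True
    return new >= old


cached = {"updated_at": "2024-12-01T12:00:00+00:00"}
stale = {"updated_at": "2024-12-01T11:00:00+00:00"}
print(should_accept_update(cached, stale))  # False: keep the cached copy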

.github/workflows/bot-bot.yml

Lines changed: 1 addition & 1 deletion

@@ -52,7 +52,7 @@ jobs:
         if: success() && ! env.CI_SKIP
         timeout-minutes: 210
         run: |
-          export TIMEOUT=7200
+          export TIMEOUT=2100
           export RUN_URL="https://github.com/regro/cf-scripts/actions/runs/${RUN_ID}"

           pushd cf-graph

conda_forge_tick/git_utils.py

Lines changed: 35 additions & 7 deletions

@@ -10,7 +10,7 @@
 import threading
 import time
 from abc import ABC, abstractmethod
-from datetime import datetime
+from datetime import datetime, timezone
 from email import utils
 from functools import cached_property
 from pathlib import Path
@@ -1218,6 +1218,16 @@ def _munge_dict(dest, src, keys):
     return pr_json


+def parse_pr_json_last_updated(pr_data: Union[Dict, LazyJson]) -> Optional[datetime]:
+    """Parse the last updated time from a PR json blob. If it is not present, return None."""
+    last_updated = pr_data.get("updated_at", None)
+    if last_updated is not None:
+        last_updated = datetime.fromisoformat(last_updated)
+        if last_updated.tzinfo is None:
+            last_updated = last_updated.replace(tzinfo=timezone.utc)
+    return last_updated
+
+
 def lazy_update_pr_json(
     pr_json: Union[Dict, LazyJson], force: bool = False
 ) -> Union[Dict, LazyJson]:
@@ -1240,6 +1250,8 @@ def lazy_update_pr_json(
     pr_json : dict-like
         A dict-like object with the current PR information.
     """
+    last_updated = parse_pr_json_last_updated(pr_json)
+
     hdrs = {
         "Authorization": f"token {get_bot_token()}",
         "Accept": "application/vnd.github.v3+json",
@@ -1270,9 +1282,19 @@ def lazy_update_pr_json(
     )

     if r.status_code == 200:
-        pr_json = trim_pr_json_keys(pr_json, src_pr_json=r.json())
-        pr_json["ETag"] = r.headers["ETag"]
-        pr_json["Last-Modified"] = r.headers["Last-Modified"]
+        # I have seen things come in out of order for reasons I do not
+        # fully understand. We do not update in this case. - MRB
+        new_last_updated = parse_pr_json_last_updated(r.json())
+        if (
+            last_updated is None
+            or new_last_updated is None
+            or new_last_updated >= last_updated
+        ):
+            pr_json = trim_pr_json_keys(pr_json, src_pr_json=r.json())
+            pr_json["ETag"] = r.headers["ETag"]
+            pr_json["Last-Modified"] = r.headers["Last-Modified"]
+        else:
+            pr_json = trim_pr_json_keys(pr_json)
     else:
         pr_json = trim_pr_json_keys(pr_json)

@@ -1358,8 +1380,11 @@ def close_out_labels(
         pr_obj = get_pr_obj_from_pr_json(pr_json, gh)
         pr_obj.create_comment(
             "Due to the `bot-rerun` label I'm closing "
-            "this PR. I will make another one as"
-            f" appropriate.\n\n<sub>This message was generated by {get_bot_run_url()} "
+            "this PR. I will make another PR as"
+            " appropriate. You should expect to "
+            "wait at least a few hours, or possibly "
+            "much longer, for a new PR."
+            f"\n\n<sub>This message was generated by {get_bot_run_url()} "
             "- please use this URL for debugging.</sub>",
         )
         pr_obj.close()
@@ -1404,7 +1429,10 @@ def close_out_dirty_prs(
         pr_obj.create_comment(
             "I see that this PR has conflicts, and I'm the only committer. "
             "I'm going to close this PR and will make another one as"
-            f" appropriate.\n\n<sub>This was generated by {get_bot_run_url()} - "
+            " appropriate. You should expect to "
+            "wait at least a few hours, or possibly "
+            "much longer, for a new PR."
+            f"\n\n<sub>This was generated by {get_bot_run_url()} - "
             "please use this URL for debugging.</sub>",
         )
         pr_obj.close()
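One detail worth noting in parse_pr_json_last_updated above: the replace(tzinfo=timezone.utc) backfill is what keeps the later >= comparison from blowing up, because Python refuses to compare offset-naive and offset-aware datetimes. A small standard-library illustration, independent of the bot's code:

from datetime import datetime, timezone

aware = datetime.fromisoformat("2024-12-01T12:00:00+00:00")
naive = datetime.fromisoformat("2024-12-01T11:00:00")

try:
    naive < aware
except TypeError as err:
    # TypeError: can't compare offset-naive and offset-aware datetimes
    print(err)

# Backfilling UTC on the naive value makes the comparison well defined.
print(naive.replace(tzinfo=timezone.utc) < aware)  # True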

conda_forge_tick/lazy_json_backends.py

Lines changed: 6 additions & 1 deletion

@@ -1,3 +1,4 @@
+import base64
 import contextlib
 import functools
 import glob
@@ -942,7 +943,11 @@ def main_cache(ctx: CliContext):
 def _get_pth_blob_sha_and_content(pth, gh):
     try:
         cnt = gh.get_repo("regro/cf-graph-countyfair").get_contents(pth)
-        return cnt.sha, cnt.decoded_content.decode("utf-8")
+        # I was using the decoded_content attribute here, but it seems that
+        # every once in a while github does not send the encoding correctly
+        # so I switched to doing the decoding by hand.
+        data = base64.b64decode(cnt.content.encode("utf-8")).decode("utf-8")
+        return cnt.sha, data
     except github.UnknownObjectException:
         return None, None

conda_forge_tick/make_migrators.py

Lines changed: 7 additions & 6 deletions

@@ -95,8 +95,8 @@

 logger = logging.getLogger(__name__)

-PR_LIMIT = 5
-MAX_PR_LIMIT = 50
+PR_LIMIT = 2
+MAX_PR_LIMIT = 20
 MAX_SOLVER_ATTEMPTS = 50
 CHECK_SOLVABLE_TIMEOUT = 90  # 90 days
 DEFAULT_MINI_MIGRATORS = [
@@ -370,10 +370,10 @@ def add_rebuild_migration_yaml(
     pr_limits = [
         min(2, nominal_pr_limit),
         nominal_pr_limit,
-        min(int(nominal_pr_limit * 4), MAX_PR_LIMIT),
-        min(int(nominal_pr_limit * 3), MAX_PR_LIMIT),
         min(int(nominal_pr_limit * 2), MAX_PR_LIMIT),
-        min(int(nominal_pr_limit * 1.5), MAX_PR_LIMIT),
+        min(int(nominal_pr_limit * 1.75), MAX_PR_LIMIT),
+        min(int(nominal_pr_limit * 1.50), MAX_PR_LIMIT),
+        min(int(nominal_pr_limit * 1.25), MAX_PR_LIMIT),
         min(nominal_pr_limit, MAX_PR_LIMIT),
     ]

@@ -709,6 +709,7 @@ def create_migration_yaml_creator(
                 cfp_gx,
                 pinnings=pinnings_together,
                 full_graph=gx,
+                pr_limit=1,
             ),
         )
     except Exception as e:
@@ -809,7 +810,7 @@ def initialize_migrators(
     version_migrator = Version(
         python_nodes=python_nodes,
         graph=gx,
-        pr_limit=PR_LIMIT * 4,
+        pr_limit=PR_LIMIT * 2,
         piggy_back_migrations=_make_mini_migrators_with_defaults(
             extra_mini_migrators=[
                 PipWheelMigrator(),
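With PR_LIMIT dropped to 2 and MAX_PR_LIMIT to 20, the pr_limits ramp in add_rebuild_migration_yaml now grows much more gently than the old *4/*3/*2 schedule. A quick worked evaluation of the new list, assuming a hypothetical nominal_pr_limit of 8:

MAX_PR_LIMIT = 20
nominal_pr_limit = 8  # hypothetical value, purely for illustration

pr_limits = [
    min(2, nominal_pr_limit),
    nominal_pr_limit,
    min(int(nominal_pr_limit * 2), MAX_PR_LIMIT),
    min(int(nominal_pr_limit * 1.75), MAX_PR_LIMIT),
    min(int(nominal_pr_limit * 1.50), MAX_PR_LIMIT),
    min(int(nominal_pr_limit * 1.25), MAX_PR_LIMIT),
    min(nominal_pr_limit, MAX_PR_LIMIT),
]
print(pr_limits)  # [2, 8, 16, 14, 12, 10, 8]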

conda_forge_tick/migrators/migration_yaml.py

Lines changed: 2 additions & 2 deletions

@@ -127,7 +127,7 @@ def __init__(
         yaml_contents: str,
         name: str,
         graph: nx.DiGraph = None,
-        pr_limit: int = 50,
+        pr_limit: int = 0,
         top_level: Set["PackageName"] = None,
         cycles: Optional[Sequence["PackageName"]] = None,
         migration_number: Optional[int] = None,
@@ -407,7 +407,7 @@ def __init__(
         graph: nx.DiGraph,
         pin_impact: Optional[int] = None,
         full_graph: Optional[nx.DiGraph] = None,
-        pr_limit: int = 1,
+        pr_limit: int = 0,
         bump_number: int = 1,
         effective_graph: nx.DiGraph = None,
         pinnings: Optional[List[int]] = None,

conda_forge_tick/migrators/noarch_python_min.py

Lines changed: 1 addition & 1 deletion

@@ -420,7 +420,7 @@ class NoarchPythonMinMigrator(Migrator):
     def __init__(
         self,
         *,
-        pr_limit: int = 10,
+        pr_limit: int = 0,
         graph: nx.DiGraph = None,
         effective_graph: nx.DiGraph = None,
         piggy_back_migrations: Sequence[MiniMigrator] | None = None,
