Skip to content

Commit

Permalink
Fixes Scrapinghub jobs.list Method Called with Incorrect Filter Parameter
Browse files Browse the repository at this point in the history
  • Loading branch information
VMRuiz committed May 6, 2024
1 parent f0d8ea5 commit 9a7c456
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 4 deletions.
6 changes: 3 additions & 3 deletions spidermon/contrib/scrapy/monitors/monitors.py
Original file line number Diff line number Diff line change
Expand Up @@ -569,7 +569,7 @@ def _get_jobs(self, states, number_of_jobs):
start=start,
state=states,
count=count,
filters=dict(has_tag=tags) if tags else None,
has_tag=tags or None,
)
total_jobs.extend(current_jobs)

Expand All @@ -584,7 +584,7 @@ def _get_jobs(self, states, number_of_jobs):

def _get_tags_to_filter(self):
"""
Return the intersect of the desired tags to filter and
Return a list of tags with the intersection of the desired tags to filter and
the ones from the current job.
"""
desired_tags = self.crawler.settings.getlist(SPIDERMON_JOBS_COMPARISON_TAGS)
Expand All @@ -596,7 +596,7 @@ def _get_tags_to_filter(self):
return {}

tags_to_filter = set(desired_tags) & set(current_tags)
return sorted(tags_to_filter)
return list(sorted(tags_to_filter))

def get_threshold(self):
number_of_jobs = self.crawler.settings.getint(SPIDERMON_JOBS_COMPARISON)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -197,7 +197,7 @@ def test_arguments_passed_to_zyte_client(
state=list(states),
# Count goes from pending number of jobs up to 1000
count=min(number_of_jobs - n * 1000, 1000),
filters={"has_tag": list(tags)},
has_tag=list(tags),
)
# One call to api every 1000 expected jobs
for n in range(0, math.ceil(number_of_jobs / 1000))
Expand Down

0 comments on commit 9a7c456

Please sign in to comment.