6 Commits
1.0.0 ... 1.0.1

Author SHA1 Message Date
etienne-hd
5e0ec1cc34 Bump: lbc -> 1.1.2 2026-02-14 22:56:00 +01:00
etienne-hd
84a4345835 Changed: _HANDLER_MAX_ATTEMPTS and _HANDLER_INITIAL_BACKOFF are now set in the constructor of the Searcher class 2026-02-14 22:42:56 +01:00
Oussema Bouafif
9ac92ceed5 Remove ad-hoc quick retry script from contribution 2026-02-14 15:17:38 +01:00
Oussema Bouafif
12c66ed108 Add quick retry behavior test script for handler failure scenarios 2026-02-14 15:07:39 +01:00
Oussema Bouafif
432b962d23 Improve notification reliability with handler retries and post-success ID marking 2026-02-14 14:54:23 +01:00
etienne-hd
19bd8976ce Bump: lbc -> 1.1.0
Changed: 'models' folder has been renamed to 'model'
2025-12-24 16:23:05 +01:00
8 changed files with 51 additions and 17 deletions

View File

@@ -4,7 +4,7 @@
**Stay notified when new ads appear on Leboncoin**
```python
from models import Search, Parameters
from model import Search, Parameters
import lbc
def handle(ad: lbc.Ad, search_name: str):
@@ -73,7 +73,7 @@ Each `Search` object should be configured with the rules for the ads you want to
For example, if you want to track ads for a **Porsche 944** priced between 0€ and 25,000€ anywhere in France:
```python
from models import Search, Parameters
from model import Search, Parameters
Search(
name="Porsche 944",
@@ -120,7 +120,7 @@ You can configure a proxy, here is an example:
```python
from lbc import Proxy
from models import Search
from model import Search
proxy = Proxy(
host="127.0.0.1",

View File

@@ -1,4 +1,4 @@
from models import Search, Parameters
from model import Search, Parameters
import lbc
def handle(ad: lbc.Ad, search_name: str):

View File

@@ -1,7 +1,7 @@
from lbc import Proxy, Ad
from .parameters import Parameters
from dataclasses import dataclass
from typing import Callable
from typing import Callable, Optional
@dataclass
class Search:
@@ -9,4 +9,4 @@ class Search:
parameters: Parameters
delay: float
handler: Callable[[Ad, str], None]
proxy: Proxy = None
proxy: Optional[Proxy] = None

View File

@@ -1 +1 @@
lbc==1.0.9
lbc==1.1.2

View File

@@ -26,11 +26,14 @@ class ID:
logger.exception("An error occurred while attempting to open the id.json file.")
return ids
def add(self, id: str) -> bool:
if not id in self._ids:
self._ids.append(id)
def contains(self, id_: str) -> bool:
return id_ in self._ids
def add(self, id_: str) -> bool:
if not id_ in self._ids:
self._ids.append(id_)
with open("id.json", "w") as f:
json.dump(self._ids[-MAX_ID:], f, indent=3)
self._ids = self._ids[-MAX_ID:]
return True
return False
return False

View File

@@ -1,4 +1,4 @@
from models import Search
from model import Search
from lbc import Client, Sort
from .id import ID
from .logger import logger
@@ -8,11 +8,34 @@ import threading
from typing import List, Union
class Searcher:
def __init__(self, searches: Union[List[Search], Search], request_verify: bool = True):
def __init__(self, searches: Union[List[Search], Search], request_verify: bool = True,
handler_max_attempts: int = 3, handler_initial_backoff: float = 2.0):
self._searches: List[Search] = searches if isinstance(searches, list) else [searches]
self._request_verify = request_verify
self._handler_max_attempts = handler_max_attempts
self._handler_initial_backoff = handler_initial_backoff
self._id = ID()
def _handle_with_retry(self, search: Search, ad) -> bool:
for attempt in range(1, self._handler_max_attempts + 1):
try:
search.handler(ad, search.name)
return True
except Exception:
if attempt == self._handler_max_attempts:
logger.exception(
f"[{search.name}] Handler failed for ad {ad.id} after {attempt} attempts."
)
return False
delay = self._handler_initial_backoff * (2 ** (attempt - 1))
logger.warning(
f"[{search.name}] Handler failed for ad {ad.id}. "
f"Retrying in {delay:.0f}s ({attempt}/{self._handler_max_attempts})."
)
time.sleep(delay)
return False
def _search(self, search: Search) -> None:
client = Client(proxy=search.proxy, request_verify=self._request_verify)
while True:
@@ -20,12 +43,20 @@ class Searcher:
try:
response = client.search(**search.parameters._kwargs, sort=Sort.NEWEST)
logger.debug(f"Successfully found {response.total} ad{'s' if response.total > 1 else ''}.")
ads = [ad for ad in response.ads if self._id.add(ad.id)]
ads = [ad for ad in response.ads if not self._id.contains(ad.id)]
if len(ads):
logger.info(f"Successfully found {len(ads)} new ad{'s' if len(ads) > 1 else ''}!")
notified = 0
for ad in ads:
search.handler(ad, search.name)
except:
if self._handle_with_retry(search, ad) and self._id.add(ad.id):
notified += 1
if len(ads) and notified != len(ads):
logger.warning(
f"[{search.name}] {len(ads) - notified} ad{'s were' if len(ads) - notified > 1 else ' was'} not marked as seen and will be retried."
)
except Exception:
logger.exception(f"An error occured.")
time.sleep(search.delay - (time.time() - before) if search.delay - (time.time() - before) > 0 else 0)
@@ -37,4 +68,4 @@ class Searcher:
for search in self._searches:
threading.Thread(target=self._search, args=(search,), name=search.name).start()
time.sleep(5) # Add latency between each thread to prevent spam
return True
return True