Commit

fix: make logging call uniform
raphael0202 committed Dec 14, 2022
1 parent c9b6d9e commit 390e144
Showing 15 changed files with 40 additions and 36 deletions.
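Every change applies the same pattern: logging calls that pre-formatted their message with an f-string now pass a %-style template plus arguments, so the logging framework interpolates only if the record is actually emitted and the message template stays constant across calls. A minimal sketch of the difference (logger name and barcode value are illustrative):

    import logging

    logger = logging.getLogger(__name__)
    barcode = "3017620422003"  # illustrative value

    # Before: the f-string is rendered eagerly, even when INFO is disabled.
    logger.info(f"Prediction of deleted product {barcode}")

    # After: the arguments are stored on the LogRecord and interpolated
    # only when a handler actually emits the record; the constant "%s"
    # template also lets log aggregators group identical messages.
    logger.info("Prediction of deleted product %s", barcode)

Both calls render the same message; only the cost and timing of the formatting differ.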
2 changes: 1 addition & 1 deletion robotoff/app/api.py
@@ -472,7 +472,7 @@ def on_post(self, req: falcon.Request, resp: falcon.Response):
         server_domain = req.get_param("server_domain", required=True)

         if server_domain != settings.OFF_SERVER_DOMAIN:
-            logger.info(f"Rejecting image import from {server_domain}")
+            logger.info("Rejecting image import from %s", server_domain)
             resp.media = {
                 "status": "rejected",
             }
4 changes: 2 additions & 2 deletions robotoff/app/events.py
@@ -57,7 +57,7 @@ def send_event(
         "device_id": device_id,
         "barcode": barcode,
     }
-    logger.debug(f"Event: {event}")
+    logger.debug("Event: %s", event)
     response = requests.post(api_url, json=event)
-    logger.debug(f"Event API response: {response}")
+    logger.debug("Event API response: %s", response)
     return response
2 changes: 1 addition & 1 deletion robotoff/cli/main.py
@@ -72,7 +72,7 @@ def regenerate_ocr_insights(
     with db:
         imported = importer.import_insights(predictions, settings.OFF_SERVER_DOMAIN)

-    logger.info(f"Import finished, {imported} insights imported")
+    logger.info("Import finished, %s insights imported", imported)


 @app.command()
10 changes: 5 additions & 5 deletions robotoff/elasticsearch/export.py
@@ -26,7 +26,7 @@ def _delete_existing_data(self, index: str) -> None:
             doc_type=settings.ELASTICSEARCH_TYPE,
         )

-        logger.info(f"Deleted %d documents from {index}", resp["deleted"])
+        logger.info("Deleted %d documents from %s", resp["deleted"], index)

     def _get_data(self, index: str) -> Iterable[tuple[str, dict]]:
         if index == settings.ElasticsearchIndex.PRODUCT:
@@ -37,7 +37,7 @@ def _get_data(self, index: str) -> Iterable[tuple[str, dict]]:
     def load_index(self, index: str, index_filepath: Path) -> None:
         """Creates the given index if it doesn't already exist."""
         if not self.es_client.indices.exists(index=index):
-            logger.info(f"Creating index: {index}")
+            logger.info("Creating index: %s", index)
             with open(index_filepath, "rb") as f:
                 conf = orjson.loads(f.read())
             self.es_client.indices.create(index=index, body=conf)
@@ -52,14 +52,14 @@ def export_index_data(self, index: str) -> int:
         but only async ones for categories.
         Returns the number of rows inserted into the index."""
-        logger.info(f"Deleting existing {index} data...")
+        logger.info("Deleting existing %s data...", index)
         self._delete_existing_data(index)

         index_data = self._get_data(index)

-        logger.info(f"Starting {index} export to Elasticsearch...")
+        logger.info("Starting %s export to Elasticsearch...", index)

         rows_inserted = perform_export(self.es_client, index_data, index)

-        logger.info(f"Inserted %d rows for index {index}", rows_inserted)
+        logger.info("Inserted %d rows for index %s", rows_inserted, index)
         return rows_inserted
2 changes: 1 addition & 1 deletion robotoff/insights/extraction.py
@@ -117,7 +117,7 @@ def get_predictions_from_product_name(
 def extract_ocr_predictions(
     barcode: str, ocr_url: str, prediction_types: Iterable[PredictionType]
 ) -> list[Prediction]:
-    logger.info(f"Generating OCR predictions from OCR {ocr_url}")
+    logger.info("Generating OCR predictions from OCR %s", ocr_url)

     predictions_all: list[Prediction] = []
     source_image = get_source_from_url(ocr_url)
10 changes: 5 additions & 5 deletions robotoff/insights/importer.py
@@ -85,7 +85,7 @@ def is_recent_image(

     for upload_datetime in remaining_datetimes:
         if upload_datetime - image_datetime > max_timedelta:
-            logger.debug(f"More recent image: {upload_datetime} > {image_datetime}")
+            logger.debug("More recent image: %s > %s", upload_datetime, image_datetime)
             return False

     return True
@@ -306,7 +306,7 @@ def import_insights(
         )
         if to_delete:
             to_delete_ids = [insight.id for insight in to_delete]
-            logger.info(f"Deleting {len(to_delete_ids)} insights")
+            logger.info("Deleting %s insights", len(to_delete_ids))
             ProductInsight.delete().where(
                 ProductInsight.id.in_(to_delete_ids)
             ).execute()
@@ -862,7 +862,7 @@ def is_in_barcode_range(barcode: str, tag: str) -> bool:
     brand_prefix = get_brand_prefix()

     if not in_barcode_range(brand_prefix, tag, barcode):
-        logger.info(f"Barcode {barcode} of brand {tag} not in barcode range")
+        logger.info("Barcode %s of brand %s not in barcode range", barcode, tag)
         return False

     return True
@@ -938,7 +938,7 @@ def is_valid_product_prediction(
     """
     if not product:
         # the product does not exist (deleted)
-        logger.info(f"Prediction of deleted product {prediction.barcode}")
+        logger.info("Prediction of deleted product %s", prediction.barcode)
         return False

     if prediction.source_image and not is_valid_image(
@@ -1138,7 +1138,7 @@ def import_predictions(
         updated_prediction_types_by_barcode[barcode] = set(
             prediction.type for prediction in product_predictions_group
         )
-    logger.info(f"{predictions_imported} predictions imported")
+    logger.info("%s predictions imported", predictions_imported)
     return updated_prediction_types_by_barcode
2 changes: 1 addition & 1 deletion robotoff/logos.py
@@ -344,7 +344,7 @@ def generate_insights_from_annotated_logos(
     imported = import_insights(predictions, server_domain)

     if imported:
-        logger.info(f"{imported} logo insights imported after annotation")
+        logger.info("%s logo insights imported after annotation", imported)
     return imported
2 changes: 1 addition & 1 deletion robotoff/off.py
@@ -426,7 +426,7 @@ def update_product(
     status = json.get("status_verbose")

     if status != "fields saved":
-        logger.warning(f"Unexpected status during product update: {status}")
+        logger.warning("Unexpected status during product update: %s", status)


 def move_to(barcode: str, to: ServerType, timeout: Optional[int] = 10) -> bool:
2 changes: 1 addition & 1 deletion robotoff/prediction/category/matcher.py
@@ -103,7 +103,7 @@ def load_resources():
     get_intersect_categories_ingredients()

     for lang in SUPPORTED_LANG:
-        logger.info(f"Loading NLP for {lang}...")
+        logger.info("Loading NLP for %s...", lang)
         get_lemmatizing_nlp(lang)
4 changes: 2 additions & 2 deletions robotoff/prediction/object_detection/core.py
@@ -213,10 +213,10 @@ def load_all(cls):
             model_name = model.value
             file_path = settings.MODELS_DIR / model_name
             if file_path.is_dir():
-                logger.info(f"Model '{model_name}' found")
+                logger.info("Model %s found", model_name)
                 cls.models[model_name] = cls.load(model_name, file_path)
             else:
-                logger.info(f"Missing model: '{model_name}'")
+                logger.info("Missing model: %s", model_name)
         cls._loaded = True

     @classmethod
2 changes: 1 addition & 1 deletion robotoff/scheduler/__init__.py
@@ -216,7 +216,7 @@ def generate_insights():
     imported = import_insights(
         product_predictions_iter, server_domain=settings.OFF_SERVER_DOMAIN
     )
-    logger.info(f"{imported} category insights imported")
+    logger.info("%s category insights imported", imported)


 def transform_insight_iter(insights_iter: Iterable[dict]):
2 changes: 1 addition & 1 deletion robotoff/taxonomy.py
@@ -286,7 +286,7 @@ def fetch_taxonomy(
         r = http_session.get(url, timeout=120)  # might take some time
         if r.status_code >= 300:
             raise requests.HTTPError(
-                "Taxonomy download at %s returned status code {r.status_code}", url
+                "Taxonomy download at %s returned status code %s", url, r.status_code
             )
         data = r.json()
     except Exception as e:
2 changes: 1 addition & 1 deletion robotoff/workers/tasks/__init__.py
@@ -24,7 +24,7 @@ def delete_product_insights_job(barcode: str, server_domain: str):
     we must delete all of the associated predictions and insights that have
     not been annotated.
     """
-    logger.info(f"Product {barcode} deleted, deleting associated insights...")
+    logger.info("Product %s deleted, deleting associated insights...", barcode)
     deleted_predictions = (
         Prediction.delete()
         .where(
22 changes: 13 additions & 9 deletions robotoff/workers/tasks/import_image.py
@@ -54,7 +54,7 @@ def run_import_image_job(
     image = get_image_from_url(image_url, error_raise=False, session=http_session)

     if image is None:
-        logger.info(f"Error while downloading image {image_url}")
+        logger.info("Error while downloading image %s", image_url)
         return

     source_image = get_source_from_url(image_url)
@@ -106,7 +106,7 @@ def import_insights_from_image(
     image = get_image_from_url(image_url, error_raise=False, session=http_session)

     if image is None:
-        logger.info(f"Error while downloading image {image_url}")
+        logger.info("Error while downloading image %s", image_url)
         return

     source_image = get_source_from_url(image_url)
@@ -133,7 +133,7 @@ def import_insights_from_image(

     with db:
         imported = import_insights(predictions, server_domain)
-        logger.info(f"Import finished, {imported} insights imported")
+        logger.info("Import finished, %s insights imported", imported)


 def save_image_job(batch: list[tuple[str, str]], server_domain: str):
@@ -221,7 +221,7 @@ def run_nutrition_table_object_detection(
     image = get_image_from_url(image_url, error_raise=False, session=http_session)

     if image is None:
-        logger.info(f"Error while downloading image {image_url}")
+        logger.info("Error while downloading image %s", image_url)
         return

     source_image = get_source_from_url(image_url)
@@ -250,7 +250,7 @@ def run_nutriscore_object_detection(barcode: str, image_url: str, server_domain:
     image = get_image_from_url(image_url, error_raise=False, session=http_session)

     if image is None:
-        logger.info(f"Error while downloading image {image_url}")
+        logger.info("Error while downloading image %s", image_url)
         return

     source_image = get_source_from_url(image_url)
@@ -312,7 +312,7 @@ def run_logo_object_detection(
     image = get_image_from_url(image_url, error_raise=False, session=http_session)

     if image is None:
-        logger.info(f"Error while downloading image {image_url}")
+        logger.info("Error while downloading image %s", image_url)
         return

     source_image = get_source_from_url(image_url)
@@ -342,7 +342,7 @@ def run_logo_object_detection(
             ).id
         )

-    logger.info(f"{len(logo_ids)} logos found for image {source_image}")
+    logger.info("%s logos found for image %s", len(logo_ids), source_image)
     if logo_ids and process_logos:
         enqueue_job(
             process_created_logos,
@@ -369,15 +369,19 @@ def process_created_logos(image_prediction_id: int, server_domain: str):
         add_logos_to_ann(image_instance, logos)
     except (HTTPError, Timeout) as e:
         logger.info(
-            f"Request error during logo addition to ANN: {type(e).__name__}, {e}"
+            "Request error during logo addition to ANN: %s, %s",
+            type(e).__name__,
+            e,
         )
         return

     try:
         save_nearest_neighbors(logos)
     except (HTTPError, Timeout) as e:
         logger.info(
-            f"Request error during ANN batch query: {type(e).__name__}: {e}",
+            "Request error during ANN batch query: %s, %s",
+            type(e).__name__,
+            e,
         )
         return
8 changes: 4 additions & 4 deletions robotoff/workers/tasks/product_updated.py
@@ -25,7 +25,7 @@ def update_insights_job(barcode: str, server_domain: str):
     1. Generate new predictions related to the product's category and name.
     2. Regenerate all insights from the product associated predictions.
     """
-    logger.info(f"Running `update_insights` for product {barcode} ({server_domain})")
+    logger.info("Running `update_insights` for product %s (%s)", barcode, server_domain)

     try:
         with Lock(
@@ -45,7 +45,7 @@ def update_insights_job(barcode: str, server_domain: str):
             updated_product_predict_insights(barcode, product_dict, server_domain)
             logger.info("Refreshing insights...")
             imported = refresh_insights(barcode, server_domain)
-            logger.info(f"{imported} insights created after refresh")
+            logger.info("%s insights created after refresh", imported)
     except LockedResourceException:
         logger.info(
             f"Couldn't acquire product_update lock, skipping product_update for product {barcode}"
@@ -85,7 +85,7 @@ def add_category_insight(barcode: str, product: JSONType, server_domain: str) -> bool:
         prediction.barcode = barcode

     imported = import_insights(product_predictions, server_domain)
-    logger.info(f"{imported} category insight imported for product {barcode}")
+    logger.info("%s category insight imported for product %s", imported, barcode)

     return bool(imported)
@@ -102,7 +102,7 @@ def updated_product_predict_insights(
     logger.info("Generating predictions from product name...")
     predictions_all = get_predictions_from_product_name(barcode, product_name)
     imported = import_insights(predictions_all, server_domain)
-    logger.info(f"{imported} insights imported for product {barcode}")
+    logger.info("%s insights imported for product %s", imported, barcode)

     if imported:
         updated = True
