diff --git a/robotoff/insights/annotate.py b/robotoff/insights/annotate.py
index b3387fb156..f3fc1e5b67 100644
--- a/robotoff/insights/annotate.py
+++ b/robotoff/insights/annotate.py
@@ -1,3 +1,15 @@
+"""
+This file allows to annotate products.
+
+To check whether the annotation already exists or not (and save it and send it to the Open Food Facts database), use the following commands:
+
+    from robotoff.insights.annotate import annotate
+    annotate(insight: ProductInsight, annotation: int, update: bool = True, data: Optional[dict] = None, auth: Optional[OFFAuthentication] = None)
+
+
+If you don't want to update the Open Food Facts database but only save the insight annotation (if the update is performed on the client side for example), you can call `annotate()` with `update=False`.
+"""
+
 import abc
 import datetime
 from dataclasses import dataclass
@@ -26,17 +38,6 @@
 from robotoff.types import InsightType
 from robotoff.utils import get_logger
 
-"""
-This file allows to annotate products.
-
-To check whether the annotation already exists or not (and save it and send it to the Open Food Facts database), use the following commands:
-    from robotoff.insights.annotate import annotate
-    annotate(insight: ProductInsight, annotation: int, update: bool = True, data: Optional[dict] = None, auth: Optional[OFFAuthentication] = None)
-
-If you don't want to update the Open Food Facts database but only save the insight annotation (if the update is performed on the client side for example), you can call `annotate()` with `update=False`.
-"""
-
-
 logger = get_logger(__name__)
diff --git a/robotoff/insights/importer.py b/robotoff/insights/importer.py
index 8f56825c73..5c9649dd94 100644
--- a/robotoff/insights/importer.py
+++ b/robotoff/insights/importer.py
@@ -190,7 +190,7 @@ def is_reserved_barcode(barcode: str) -> bool:
 def sort_candidates(candidates: Iterable[ProductInsight]) -> list[ProductInsight]:
     """Sort candidates by priority, using as keys:
 
-    - priority, specified by data["priority"], candidate with lowest priority
+    - priority, specified by `data["priority"]`, candidate with lowest priority
       values (high priority) come first
     - source image upload datetime (most recent first): images IDs are
      auto-incremented integers, so the most recent images have the highest IDs.
@@ -455,10 +455,10 @@ def generate_candidates(
         product: Optional[Product],
         predictions: list[Prediction],
     ) -> Iterator[ProductInsight]:
-        """From a list of Predictions associated with a product, yield
-        candidate ProductInsights for import.
+        """From a list of `Prediction`s associated with a product, yield
+        candidate `ProductInsight`s for import.
 
-        The types of all Predictions must be a subset of the required types
+        The types of all `Prediction`s must be a subset of the required types
         available by calling `InsightImporter.get_required_prediction_types`.
 
         This method must be implemented in subclasses.
@@ -479,11 +479,12 @@ def get_insight_update(
         list[ProductInsight],
     ]:
         """Return a tuple containing:
-        - a list of ProductInsight to create
-        - a list of ProductInsight to update, as (insight, reference_insight)
+
+        - a list of `ProductInsight` to create
+        - a list of `ProductInsight` to update, as (`insight`, `reference_insight`)
           tuples, where `insight` is the candidate and `reference_insight` is
           the insight already in DB
-        - a list of ProductInsight to delete
+        - a list of `ProductInsight` to delete
 
         :param candidates: candidate predictions
         :param reference_insights: existing insights of this type and product
@@ -560,7 +561,7 @@ def sort_candidates(
     ) -> list[ProductInsight]:
         """Sort candidates by priority, using as keys:
 
-        - priority, specified by data["priority"], candidate with lowest priority
+        - priority, specified by `data["priority"]`, candidate with lowest priority
           values (high priority) come first
         - source image upload datetime (most recent first): images IDs are
          auto-incremented integers, so the most recent images have the highest IDs.
@@ -597,12 +598,12 @@ def sort_candidates(
     def is_conflicting_insight(
         cls, candidate: ProductInsight, reference: ProductInsight
     ) -> bool:
-        """Return True if a candidate ProductInsight conflicts with an
+        """Return `True` if a candidate `ProductInsight` conflicts with an
         existing or another candidate insight, in which case the candidate
         insight won't be imported.
 
-        :param candidate: The candidate ProductInsight to import
-        :param reference: A ProductInsight, either another candidate or an
+        :param candidate: The candidate `ProductInsight` to import
+        :param reference: A `ProductInsight`, either another candidate or an
             insight that exists in DB
         """
         pass
@@ -614,7 +615,13 @@ def add_fields(
         product: Optional[Product],
         timestamp: datetime.datetime,
     ):
-        """Add mandatory insight fields."""
+        """Add mandatory insight fields (`id`, `timestamp`,
+        `automatic_processing`,...).
+
+        :param insight: the insight to update
+        :param product: the `Product` associated with the insight
+        :param timestamp: insight creation datetime
+        """
         barcode = insight.barcode
         insight.reserved_barcode = is_reserved_barcode(barcode)
         insight.id = str(uuid.uuid4())
diff --git a/robotoff/off.py b/robotoff/off.py
index e95acbe777..64d1826684 100644
--- a/robotoff/off.py
+++ b/robotoff/off.py
@@ -506,8 +506,6 @@ def delete_image_pipeline(
     :param product_id: identifier of the product
     :param image_id: ID of the image to delete (number)
     :param auth: user authentication data
-    :param server_domain: the server domain to use, default to
-        BaseURLProvider.server_domain()
     """
     product = get_product(product_id, ["images"])
@@ -552,8 +550,6 @@ def unselect_image(
     :param product_id: identifier of the product
     :param image_field: field name of the image to unselect, ex: front_fr
     :param auth: user authentication data
-    :param server_domain: the server domain to use, default to
-        BaseURLProvider.server_domain()
     :param timeout: request timeout value in seconds, defaults to 15s
     :return: the request Response
     """
diff --git a/robotoff/workers/tasks/import_image.py b/robotoff/workers/tasks/import_image.py
index 345de99cb8..591950e443 100644
--- a/robotoff/workers/tasks/import_image.py
+++ b/robotoff/workers/tasks/import_image.py
@@ -280,7 +280,6 @@ def run_logo_object_detection(product_id: ProductIdentifier, image_url: str):
 
     :param product_id: identifier of the product
     :param image_url: URL of the image to use
-    :param server_domain: The server domain associated with the image
     """
     logger.info("Running logo object detection for %s, image %s", product_id, image_url)
diff --git a/robotoff/workers/tasks/product_updated.py b/robotoff/workers/tasks/product_updated.py
index 30f2dafc98..0c95d6f28a 100644
--- a/robotoff/workers/tasks/product_updated.py
+++ b/robotoff/workers/tasks/product_updated.py
@@ -64,8 +64,7 @@ def add_category_insight(product_id: ProductIdentifier, product: JSONType):
     """Predict categories for product and import predicted category insight.
 
     :param product_id: identifier of the product
-    :param product: product as retrieved from application
-    :return: True if at least one category insight was imported
+    :param product: product as retrieved from MongoDB
     """
     logger.info("Predicting product categories...")
     # predict category using matching algorithm on product name
@@ -97,6 +96,12 @@ def add_category_insight(product_id: ProductIdentifier, product: JSONType):
 def updated_product_predict_insights(
     product_id: ProductIdentifier, product: JSONType
 ) -> None:
+    """Predict and import category insights and insights derived from the
+    product name.
+
+    :param product_id: identifier of the product
+    :param product: product as retrieved from MongoDB
+    """
     add_category_insight(product_id, product)
 
     product_name = product.get("product_name")
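A minimal usage sketch of the `annotate()` entry point documented in the relocated `robotoff/insights/annotate.py` docstring. It is not part of this diff: the insight lookup via the `ProductInsight` peewee model, the `OFFAuthentication` keyword names, and the meaning of the annotation value are assumptions.

    from robotoff.insights.annotate import annotate
    from robotoff.models import ProductInsight
    from robotoff.off import OFFAuthentication

    # Load an existing insight from the Robotoff database
    # (assumes the peewee model API and a known insight UUID).
    insight = ProductInsight.get_by_id("00000000-0000-0000-0000-000000000000")

    # Save the annotation and push the resulting edit to Open Food Facts
    # (annotation=1 is assumed here to mean "accept the insight").
    annotate(
        insight,
        annotation=1,
        auth=OFFAuthentication(username="my_user", password="my_password"),
    )

    # Alternatively, save the annotation only, without writing to Open Food Facts
    # (e.g. when the client performs the product update itself).
    annotate(insight, annotation=1, update=False)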