adult = $adult;
  }
  /**
   * @return self::ADULT_*
   */
  public function getAdult()
  {
    return $this->adult;
  }
  /**
   * Likelihood that this is a medical image.
   *
   * Accepted values: UNKNOWN, VERY_UNLIKELY, UNLIKELY, POSSIBLE, LIKELY,
   * VERY_LIKELY
   *
   * @param self::MEDICAL_* $medical
   */
  public function setMedical($medical)
  {
    $this->medical = $medical;
  }
  /**
   * @return self::MEDICAL_*
   */
  public function getMedical()
  {
    return $this->medical;
  }
  /**
   * Likelihood that the request image contains racy content. Racy content may
   * include (but is not limited to) skimpy or sheer clothing, strategically
   * covered nudity, lewd or provocative poses, or close-ups of sensitive body
   * areas.
   *
   * Accepted values: UNKNOWN, VERY_UNLIKELY, UNLIKELY, POSSIBLE, LIKELY,
   * VERY_LIKELY
   *
   * @param self::RACY_* $racy
   */
  public function setRacy($racy)
  {
    $this->racy = $racy;
  }
  /**
   * @return self::RACY_*
   */
  public function getRacy()
  {
    return $this->racy;
  }
  /**
   * Spoof likelihood. The likelihood that a modification was made to the
   * image's canonical version to make it appear funny or offensive.
   *
   * Accepted values: UNKNOWN, VERY_UNLIKELY, UNLIKELY, POSSIBLE, LIKELY,
   * VERY_LIKELY
   *
   * @param self::SPOOF_* $spoof
   */
  public function setSpoof($spoof)
  {
    $this->spoof = $spoof;
  }
  /**
   * @return self::SPOOF_*
   */
  public function getSpoof()
  {
    return $this->spoof;
  }
  /**
   * Likelihood that this image contains violent content. Violent content may
   * include death, serious harm, or injury to individuals or groups of
   * individuals.
   *
   * Accepted values: UNKNOWN, VERY_UNLIKELY, UNLIKELY, POSSIBLE, LIKELY,
   * VERY_LIKELY
   *
   * @param self::VIOLENCE_* $violence
   */
  public function setViolence($violence)
  {
    $this->violence = $violence;
  }
  /**
   * @return self::VIOLENCE_*
   */
  public function getViolence()
  {
    return $this->violence;
  }
}

// Adding a class alias for backwards compatibility with the previous class name
// (the pre-v2 "Google_Service_*" naming scheme of the google-api-php-client).
class_alias(GoogleCloudVisionV1p1beta1SafeSearchAnnotation::class, 'Google_Service_Vision_GoogleCloudVisionV1p1beta1SafeSearchAnnotation');