From a153e18463e261a628b13fd10d55bcf3531e0192 Mon Sep 17 00:00:00 2001 From: Nicolas Modrzyk Date: Thu, 6 Dec 2018 09:35:50 +0900 Subject: [PATCH] #13441 [Clojure] update specs for random and optimizer --- .../org/apache/clojure_mxnet/optimizer.clj | 25 +++++++++---------- .../src/org/apache/clojure_mxnet/random.clj | 11 ++++---- 2 files changed, 17 insertions(+), 19 deletions(-) diff --git a/contrib/clojure-package/src/org/apache/clojure_mxnet/optimizer.clj b/contrib/clojure-package/src/org/apache/clojure_mxnet/optimizer.clj index 18a2444f2459..672090a899b3 100644 --- a/contrib/clojure-package/src/org/apache/clojure_mxnet/optimizer.clj +++ b/contrib/clojure-package/src/org/apache/clojure_mxnet/optimizer.clj @@ -24,11 +24,10 @@ (org.apache.mxnet.optimizer SGD DCASGD NAG AdaDelta RMSProp AdaGrad Adam SGLD) (org.apache.mxnet FactorScheduler))) -(s/def ::int-or-float (s/or :f float? :i int?)) -(s/def ::learning-rate ::int-or-float) -(s/def ::momentum ::int-or-float) -(s/def ::wd ::int-or-float) -(s/def ::clip-gradient ::int-or-float) +(s/def ::learning-rate number?) +(s/def ::momentum number?) +(s/def ::wd number?) +(s/def ::clip-gradient number?) (s/def ::lr-scheduler #(instance? FactorScheduler %)) (s/def ::sgd-opts (s/keys :opt-un [::learning-rate ::momentum ::wd ::clip-gradient ::lr-scheduler])) @@ -44,7 +43,7 @@ ([] (sgd {}))) -(s/def ::lambda ::int-or-float) +(s/def ::lambda number?) (s/def ::dcasgd-opts (s/keys :opt-un [::learning-rate ::momentum ::lambda ::wd ::clip-gradient ::lr-scheduler])) (defn dcasgd @@ -78,9 +77,9 @@ ([] (nag {}))) -(s/def ::rho ::int-or-float) -(s/def ::rescale-gradient ::int-or-float) -(s/def ::epsilon ::int-or-float) +(s/def ::rho number?) +(s/def ::rescale-gradient number?) +(s/def ::epsilon number?) 
(s/def ::ada-delta-opts (s/keys :opt-un [::rho ::rescale-gradient ::epsilon ::wd ::clip-gradient])) (defn ada-delta @@ -97,8 +96,8 @@ ([] (ada-delta {}))) -(s/def gamma1 ::int-or-float) -(s/def gamma2 ::int-or-float) +(s/def ::gamma1 number?) +(s/def ::gamma2 number?) (s/def ::rms-prop-opts (s/keys :opt-un [::learning-rate ::rescale-gradient ::gamma1 ::gamma2 ::wd ::clip-gradient])) (defn rms-prop @@ -145,8 +144,8 @@ ([] (ada-grad {}))) -(s/def ::beta1 ::int-or-float) -(s/def ::beta2 ::int-or-float) +(s/def ::beta1 number?) +(s/def ::beta2 number?) (s/def ::adam-opts (s/keys :opt-un [::learning-rate ::beta1 ::beta2 ::epsilon ::decay-factor ::wd ::clip-gradient ::lr-scheduler])) (defn adam diff --git a/contrib/clojure-package/src/org/apache/clojure_mxnet/random.clj b/contrib/clojure-package/src/org/apache/clojure_mxnet/random.clj index 9239c078c434..0ec2039ba79b 100644 --- a/contrib/clojure-package/src/org/apache/clojure_mxnet/random.clj +++ b/contrib/clojure-package/src/org/apache/clojure_mxnet/random.clj @@ -23,9 +23,8 @@ [org.apache.clojure-mxnet.util :as util]) (:import (org.apache.mxnet Context Random))) -(s/def ::int-or-float (s/or :f float? :i int?)) -(s/def ::low ::int-or-float) -(s/def ::high ::int-or-float) +(s/def ::low number?) +(s/def ::high number?) (s/def ::shape-vec (s/coll-of pos-int? :kind vector?)) (s/def ::ctx #(instance? Context %)) (s/def ::uniform-opts (s/keys :opt-un [::ctx])) @@ -48,8 +47,8 @@ ([low high shape-vec] (uniform low high shape-vec {}))) -(s/def ::loc ::int-or-float) -(s/def ::scale ::int-or-float) +(s/def ::loc number?) +(s/def ::scale number?) (s/def ::normal-opts (s/keys :opt-un [::ctx])) (defn normal @@ -70,7 +69,7 @@ ([loc scale shape-vec] (normal loc scale shape-vec {}))) -(s/def ::seed-state ::int-or-float) +(s/def ::seed-state number?) (defn seed " Seed the random number generators in mxnet. This seed will affect behavior of functions in this module,