
#13441 [Clojure] update specs for random and optimizer
hellonico committed Dec 6, 2018
1 parent 99a0be8 commit a153e18
Showing 2 changed files with 17 additions and 19 deletions.
25 changes: 12 additions & 13 deletions contrib/clojure-package/src/org/apache/clojure_mxnet/optimizer.clj
@@ -24,11 +24,10 @@
    (org.apache.mxnet.optimizer SGD DCASGD NAG AdaDelta RMSProp AdaGrad Adam SGLD)
    (org.apache.mxnet FactorScheduler)))
 
-(s/def ::int-or-float (s/or :f float? :i int?))
-(s/def ::learning-rate ::int-or-float)
-(s/def ::momentum ::int-or-float)
-(s/def ::wd ::int-or-float)
-(s/def ::clip-gradient ::int-or-float)
+(s/def ::learning-rate number?)
+(s/def ::momentum number?)
+(s/def ::wd number?)
+(s/def ::clip-gradient number?)
 (s/def ::lr-scheduler #(instance? FactorScheduler %))
 (s/def ::sgd-opts (s/keys :opt-un [::learning-rate ::momentum ::wd ::clip-gradient ::lr-scheduler]))
 
@@ -44,7 +43,7 @@
   ([]
    (sgd {})))
 
-(s/def ::lambda ::int-or-float)
+(s/def ::lambda number?)
 (s/def ::dcasgd-opts (s/keys :opt-un [::learning-rate ::momentum ::lambda ::wd ::clip-gradient ::lr-scheduler]))
 
 (defn dcasgd
@@ -78,9 +77,9 @@
   ([]
    (nag {})))
 
-(s/def ::rho ::int-or-float)
-(s/def ::rescale-gradient ::int-or-float)
-(s/def ::epsilon ::int-or-float)
+(s/def ::rho number?)
+(s/def ::rescale-gradient number?)
+(s/def ::epsilon number?)
 (s/def ::ada-delta-opts (s/keys :opt-un [::rho ::rescale-gradient ::epsilon ::wd ::clip-gradient]))
 
 (defn ada-delta
@@ -97,8 +96,8 @@
   ([]
    (ada-delta {})))
 
-(s/def gamma1 ::int-or-float)
-(s/def gamma2 ::int-or-float)
+(s/def ::gamma1 number?)
+(s/def ::gamma2 number?)
 (s/def ::rms-prop-opts (s/keys :opt-un [::learning-rate ::rescale-gradient ::gamma1 ::gamma2 ::wd ::clip-gradient]))
 
 (defn rms-prop
@@ -145,8 +144,8 @@
   ([]
    (ada-grad {})))
 
-(s/def ::beta1 ::int-or-float)
-(s/def ::beta2 ::int-or-float)
+(s/def ::beta1 number?)
+(s/def ::beta2 number?)
 (s/def ::adam-opts (s/keys :opt-un [::learning-rate ::beta1 ::beta2 ::epsilon ::decay-factor ::wd ::clip-gradient ::lr-scheduler]))
 
 (defn adam
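The commit swaps the hand-rolled ::int-or-float spec (an s/or over float? and int?) for clojure.core's number? predicate. A minimal, standalone REPL sketch of the behavioral difference, assuming only that clojure.spec.alpha is on the classpath (the spec keys below are registered in the sketch's own namespace, not the mxnet one):

(require '[clojure.spec.alpha :as s])

;; the spec removed by this commit
(s/def ::int-or-float (s/or :f float? :i int?))
;; the predicate style added by this commit
(s/def ::learning-rate number?)

(s/valid? ::int-or-float 0.01)   ;=> true  (doubles satisfy float?)
(s/valid? ::int-or-float 1/2)    ;=> false (ratios are neither float? nor int?)
(s/valid? ::int-or-float 0.01M)  ;=> false (BigDecimals are not float?)
(s/valid? ::learning-rate 1/2)   ;=> true  (number? accepts any java.lang.Number)
(s/valid? ::learning-rate 0.01M) ;=> true

Besides widening the accepted types, number? also avoids the tagged result that s/or produces under s/conform ([:f 0.01] instead of 0.01), keeping conformance on these option values a no-op.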
11 changes: 5 additions & 6 deletions contrib/clojure-package/src/org/apache/clojure_mxnet/random.clj
@@ -23,9 +23,8 @@
             [org.apache.clojure-mxnet.util :as util])
   (:import (org.apache.mxnet Context Random)))
 
-(s/def ::int-or-float (s/or :f float? :i int?))
-(s/def ::low ::int-or-float)
-(s/def ::high ::int-or-float)
+(s/def ::low number?)
+(s/def ::high number?)
 (s/def ::shape-vec (s/coll-of pos-int? :kind vector?))
 (s/def ::ctx #(instance? Context %))
 (s/def ::uniform-opts (s/keys :opt-un [::ctx]))
@@ -48,8 +47,8 @@
   ([low high shape-vec]
    (uniform low high shape-vec {})))
 
-(s/def ::loc ::int-or-float)
-(s/def ::scale ::int-or-float)
+(s/def ::loc number?)
+(s/def ::scale number?)
 (s/def ::normal-opts (s/keys :opt-un [::ctx]))
 
 (defn normal
@@ -70,7 +69,7 @@
   ([loc scale shape-vec]
    (normal loc scale shape-vec {})))
 
-(s/def ::seed-state ::int-or-float)
+(s/def ::seed-state number?)
 (defn seed
   " Seed the random number generators in mxnet.
   This seed will affect behavior of functions in this module,
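With the same relaxation applied to ::low, ::high, ::loc, ::scale, and ::seed-state, callers can pass any numeric type. A hedged usage sketch, assuming the clojure-package is installed and these functions keep the signatures shown in the diff above:

(require '[org.apache.clojure-mxnet.random :as random])

;; integer and floating-point bounds both satisfy the number?-based ::low/::high specs
(random/uniform 0 1 [2 3])      ; 2x3 NDArray sampled from U(0, 1)
(random/uniform 0.0 0.5 [2 3])

;; ::loc and ::scale likewise accept ints or doubles
(random/normal 0 1 [2 3])       ; mean 0, standard deviation 1

;; ::seed-state is now spec'd as number?, though seeds are typically integers
(random/seed 42)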
