
Commit cfcdc6a

Merge pull request #165 from JuliaML/auto-juliaformatter-pr
[AUTO] JuliaFormatter.jl run
2 parents 6741aeb + d52f916

14 files changed: +1390 −1340 lines

docs/make.jl

Lines changed: 20 additions & 37 deletions
@@ -3,42 +3,25 @@ using Documenter, LossFunctions
 DocMeta.setdocmeta!(LossFunctions, :DocTestSetup, :(using LossFunctions); recursive=true)
 
 makedocs(
-    modules=[LossFunctions],
-    authors="Christof Stocker, Tom Breloff, Alex Williams",
-    repo="https://github.com/JuliaML/LossFunctions.jl/blob/{commit}{path}#{line}",
-    sitename="LossFunctions.jl",
-    format=Documenter.HTML(
-        prettyurls=get(ENV, "CI", "false") == "true",
-        canonical="https://JuliaML.github.io/LossFunctions.jl",
-        assets=["assets/style.css", "assets/favicon.ico"]
-    ),
-    pages=[
-        hide("Home" => "index.md"),
-        "Introduction" => [
-            "introduction/gettingstarted.md",
-            "introduction/motivation.md",
-        ],
-        "User's Guide" => [
-            "user/interface.md",
-            "user/aggregate.md",
-        ],
-        "Available Losses" => [
-            "losses/distance.md",
-            "losses/margin.md",
-            "losses/other.md",
-        ],
-        "Advances Topics" => [
-            "advanced/extend.md",
-            "advanced/developer.md",
-        ],
-        hide("Indices" => "indices.md"),
-        "acknowledgements.md",
-        "LICENSE.md",
-    ]
+  modules=[LossFunctions],
+  authors="Christof Stocker, Tom Breloff, Alex Williams",
+  repo="https://github.com/JuliaML/LossFunctions.jl/blob/{commit}{path}#{line}",
+  sitename="LossFunctions.jl",
+  format=Documenter.HTML(
+    prettyurls=get(ENV, "CI", "false") == "true",
+    canonical="https://JuliaML.github.io/LossFunctions.jl",
+    assets=["assets/style.css", "assets/favicon.ico"]
+  ),
+  pages=[
+    hide("Home" => "index.md"),
+    "Introduction" => ["introduction/gettingstarted.md", "introduction/motivation.md"],
+    "User's Guide" => ["user/interface.md", "user/aggregate.md"],
+    "Available Losses" => ["losses/distance.md", "losses/margin.md", "losses/other.md"],
+    "Advances Topics" => ["advanced/extend.md", "advanced/developer.md"],
+    hide("Indices" => "indices.md"),
+    "acknowledgements.md",
+    "LICENSE.md"
+  ]
 )
 
-deploydocs(
-    repo="github.com/JuliaML/LossFunctions.jl.git",
-    devbranch="master",
-    push_preview=true
-)
+deploydocs(repo="github.com/JuliaML/LossFunctions.jl.git", devbranch="master", push_preview=true)
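
The formatter only rewraps this Documenter setup; its behavior is unchanged. For reference, a common way to build these docs locally is sketched below, assuming the usual Documenter layout with a docs/Project.toml (that layout is not shown in this commit):

# from the repository root (hypothetical invocation, standard Documenter workflow)
julia --project=docs -e 'using Pkg; Pkg.develop(path="."); Pkg.instantiate()'
julia --project=docs docs/make.jl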

src/LossFunctions.jl

Lines changed: 57 additions & 48 deletions
@@ -16,58 +16,67 @@ include("losses.jl")
 include("io.jl")
 
 export
-    # trait functions
-    Loss,
-    SupervisedLoss,
-    MarginLoss,
-    DistanceLoss,
-    deriv, deriv2,
-    isdistancebased, ismarginbased,
-    isminimizable, isdifferentiable,
-    istwicedifferentiable,
-    isconvex, isstrictlyconvex,
-    isstronglyconvex, isnemitski,
-    isunivfishercons, isfishercons,
-    islipschitzcont, islocallylipschitzcont,
-    isclipable, isclasscalibrated, issymmetric,
+  # trait functions
+  Loss,
+  SupervisedLoss,
+  MarginLoss,
+  DistanceLoss,
+  deriv,
+  deriv2,
+  isdistancebased,
+  ismarginbased,
+  isminimizable,
+  isdifferentiable,
+  istwicedifferentiable,
+  isconvex,
+  isstrictlyconvex,
+  isstronglyconvex,
+  isnemitski,
+  isunivfishercons,
+  isfishercons,
+  islipschitzcont,
+  islocallylipschitzcont,
+  isclipable,
+  isclasscalibrated,
+  issymmetric,
 
-    # margin-based losses
-    ZeroOneLoss,
-    LogitMarginLoss,
-    PerceptronLoss,
-    HingeLoss,
-    L1HingeLoss,
-    L2HingeLoss,
-    SmoothedL1HingeLoss,
-    ModifiedHuberLoss,
-    L2MarginLoss,
-    ExpLoss,
-    SigmoidLoss,
-    DWDMarginLoss,
+  # margin-based losses
+  ZeroOneLoss,
+  LogitMarginLoss,
+  PerceptronLoss,
+  HingeLoss,
+  L1HingeLoss,
+  L2HingeLoss,
+  SmoothedL1HingeLoss,
+  ModifiedHuberLoss,
+  L2MarginLoss,
+  ExpLoss,
+  SigmoidLoss,
+  DWDMarginLoss,
 
-    # distance-based losses
-    LPDistLoss,
-    L1DistLoss,
-    L2DistLoss,
-    PeriodicLoss,
-    HuberLoss,
-    EpsilonInsLoss,
-    L1EpsilonInsLoss,
-    L2EpsilonInsLoss,
-    LogitDistLoss,
-    QuantileLoss,
-    LogCoshLoss,
+  # distance-based losses
+  LPDistLoss,
+  L1DistLoss,
+  L2DistLoss,
+  PeriodicLoss,
+  HuberLoss,
+  EpsilonInsLoss,
+  L1EpsilonInsLoss,
+  L2EpsilonInsLoss,
+  LogitDistLoss,
+  QuantileLoss,
+  LogCoshLoss,
 
-    # other losses
-    MisclassLoss,
-    PoissonLoss,
-    CrossEntropyLoss,
+  # other losses
+  MisclassLoss,
+  PoissonLoss,
+  CrossEntropyLoss,
 
-    # meta losses
-    ScaledLoss,
-    WeightedMarginLoss,
+  # meta losses
+  ScaledLoss,
+  WeightedMarginLoss,
 
-    # reexport mean
-    mean
+  # reexport mean
+  mean
 
 end # module
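
The export list groups trait queries, concrete loss types, and meta losses; only whitespace and one-name-per-line layout change here. A minimal sketch of how a few of these exports are used, with illustrative values (not taken from this commit):

using LossFunctions

loss = HingeLoss()             # margin-based loss on the agreement y * ŷ
ismarginbased(loss)            # true
isconvex(loss)                 # true
deriv(L2DistLoss(), 2.0, 1.0)  # 2 * (2.0 - 1.0) = 2.0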

src/io.jl

Lines changed: 25 additions & 17 deletions
@@ -1,26 +1,34 @@
 Base.print(io::IO, loss::SupervisedLoss, args...) = print(io, typeof(loss).name.name, args...)
 Base.print(io::IO, loss::L1DistLoss, args...) = print(io, "L1DistLoss", args...)
 Base.print(io::IO, loss::L2DistLoss, args...) = print(io, "L2DistLoss", args...)
-Base.print(io::IO, loss::LPDistLoss{P}, args...) where {P} = print(io, typeof(loss).name.name, " with P = $(P)", args...)
-Base.print(io::IO, loss::L1EpsilonInsLoss, args...) = print(io, typeof(loss).name.name, " with \$\\epsilon\$ = $(loss.ε)", args...)
-Base.print(io::IO, loss::L2EpsilonInsLoss, args...) = print(io, typeof(loss).name.name, " with \$\\epsilon\$ = $(loss.ε)", args...)
-Base.print(io::IO, loss::QuantileLoss, args...) = print(io, typeof(loss).name.name, " with \$\\tau\$ = $(loss.τ)", args...)
-Base.print(io::IO, loss::SmoothedL1HingeLoss, args...) = print(io, typeof(loss).name.name, " with \$\\gamma\$ = $(loss.gamma)", args...)
-Base.print(io::IO, loss::HuberLoss, args...) = print(io, typeof(loss).name.name, " with \$\\alpha\$ = $(loss.d)", args...)
+Base.print(io::IO, loss::LPDistLoss{P}, args...) where {P} =
+  print(io, typeof(loss).name.name, " with P = $(P)", args...)
+Base.print(io::IO, loss::L1EpsilonInsLoss, args...) =
+  print(io, typeof(loss).name.name, " with \$\\epsilon\$ = $(loss.ε)", args...)
+Base.print(io::IO, loss::L2EpsilonInsLoss, args...) =
+  print(io, typeof(loss).name.name, " with \$\\epsilon\$ = $(loss.ε)", args...)
+Base.print(io::IO, loss::QuantileLoss, args...) =
+  print(io, typeof(loss).name.name, " with \$\\tau\$ = $(loss.τ)", args...)
+Base.print(io::IO, loss::SmoothedL1HingeLoss, args...) =
+  print(io, typeof(loss).name.name, " with \$\\gamma\$ = $(loss.gamma)", args...)
+Base.print(io::IO, loss::HuberLoss, args...) =
+  print(io, typeof(loss).name.name, " with \$\\alpha\$ = $(loss.d)", args...)
 Base.print(io::IO, loss::DWDMarginLoss, args...) = print(io, typeof(loss).name.name, " with q = $(loss.q)", args...)
-Base.print(io::IO, loss::PeriodicLoss, args...) = print(io, typeof(loss).name.name, " with c = $(round(2π / loss.k, digits=1))", args...)
+Base.print(io::IO, loss::PeriodicLoss, args...) =
+  print(io, typeof(loss).name.name, " with c = $(round(2π / loss.k, digits=1))", args...)
 Base.print(io::IO, loss::ScaledLoss{T,K}, args...) where {T,K} = print(io, "$(K) * ($(loss.loss))", args...)
 
 _round(num) = round(num) == round(num, digits=1) ? round(Int, num) : round(num, digits=1)
 function _relation(num)
-    if num <= 0
-        "negative only"
-    elseif num >= 1
-        "positive only"
-    elseif num < 0.5
-        "1:$(_round((1-num)/num)) weighted"
-    else
-        "$(_round(num/(1-num))):1 weighted"
-    end
+  if num <= 0
+    "negative only"
+  elseif num >= 1
+    "positive only"
+  elseif num < 0.5
+    "1:$(_round((1-num)/num)) weighted"
+  else
+    "$(_round(num/(1-num))):1 weighted"
+  end
 end
-Base.print(io::IO, loss::WeightedMarginLoss{T,W}, args...) where {T,W} = print(io, "$(_relation(W)) $(loss.loss)", args...)
+Base.print(io::IO, loss::WeightedMarginLoss{T,W}, args...) where {T,W} =
+  print(io, "$(_relation(W)) $(loss.loss)", args...)

src/losses.jl

Lines changed: 13 additions & 13 deletions
@@ -2,12 +2,12 @@
 Scalar = Union{Number,CategoricalValue}
 
 # fallback to unary evaluation
-(loss::DistanceLoss)(output::Number, target::Number)       = loss(output - target)
-deriv(loss::DistanceLoss, output::Number, target::Number)  = deriv(loss, output - target)
+(loss::DistanceLoss)(output::Number, target::Number) = loss(output - target)
+deriv(loss::DistanceLoss, output::Number, target::Number) = deriv(loss, output - target)
 deriv2(loss::DistanceLoss, output::Number, target::Number) = deriv2(loss, output - target)
-(loss::MarginLoss)(output::Number, target::Number)         = loss(target * output)
-deriv(loss::MarginLoss, output::Number, target::Number)    = target * deriv(loss, target * output)
-deriv2(loss::MarginLoss, output::Number, target::Number)   = deriv2(loss, target * output)
+(loss::MarginLoss)(output::Number, target::Number) = loss(target * output)
+deriv(loss::MarginLoss, output::Number, target::Number) = target * deriv(loss, target * output)
+deriv2(loss::MarginLoss, output::Number, target::Number) = deriv2(loss, target * output)
 
 # broadcasting behavior
 Broadcast.broadcastable(loss::SupervisedLoss) = Ref(loss)
@@ -34,7 +34,7 @@ include("losses/weighted.jl")
 Return sum of `loss` values over the iterables `outputs` and `targets`.
 """
 function sum(loss::SupervisedLoss, outputs, targets)
-    sum(loss(ŷ, y) for (ŷ, y) in zip(outputs, targets))
+  sum(loss(ŷ, y) for (ŷ, y) in zip(outputs, targets))
 end
 
 """
@@ -45,9 +45,9 @@ The `weights` determine the importance of each observation. The option
 `normalize` divides the result by the sum of the weights.
 """
 function sum(loss::SupervisedLoss, outputs, targets, weights; normalize=true)
-    s = sum(w * loss(ŷ, y) for (ŷ, y, w) in zip(outputs, targets, weights))
-    n = normalize ? sum(weights) : one(first(weights))
-    s / n
+  s = sum(w * loss(ŷ, y) for (ŷ, y, w) in zip(outputs, targets, weights))
+  n = normalize ? sum(weights) : one(first(weights))
+  s / n
 end
 
 """
@@ -56,7 +56,7 @@ end
Return mean of `loss` values over the iterables `outputs` and `targets`.
 """
 function mean(loss::SupervisedLoss, outputs, targets)
-    mean(loss(ŷ, y) for (ŷ, y) in zip(outputs, targets))
+  mean(loss(ŷ, y) for (ŷ, y) in zip(outputs, targets))
 end
 
 """
@@ -67,7 +67,7 @@ The `weights` determine the importance of each observation. The option
 `normalize` divides the result by the sum of the weights.
 """
 function mean(loss::SupervisedLoss, outputs, targets, weights; normalize=true)
-    m = mean(w * loss(ŷ, y) for (ŷ, y, w) in zip(outputs, targets, weights))
-    n = normalize ? sum(weights) : one(first(weights))
-    m / n
+  m = mean(w * loss(ŷ, y) for (ŷ, y, w) in zip(outputs, targets, weights))
+  n = normalize ? sum(weights) : one(first(weights))
+  m / n
 end
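
These methods define the package's sum/mean aggregation API over iterables of outputs and targets. A minimal usage sketch follows; the arrays ŷ, y, and w are made-up values for illustration, not part of this commit:

using LossFunctions

ŷ = [0.2, 1.5, -0.8]  # hypothetical predictions
y = [0.0, 1.0, -1.0]  # hypothetical targets
w = [1.0, 2.0, 1.0]   # hypothetical observation weights

loss = L2DistLoss()
sum(loss, ŷ, y)                      # 0.04 + 0.25 + 0.04 ≈ 0.33
mean(loss, ŷ, y)                     # 0.33 / 3 ≈ 0.11
mean(loss, ŷ, y, w; normalize=true)  # weighted mean divided by sum(w) = 4.0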
