MOO_OON · 4 · python optimization julia jax
我一直在使用 JAX(Python 中的自动微分库)和 Zygote(Julia 中的自动微分库)来实现高斯-牛顿最小化方法。我在 JAX 中发现了 @jit 宏,使用它后我的 Python 代码大约 0.6 秒就能运行完;而不使用 @jit 的 Julia 代码大约需要 40 秒。Julia 或 Zygote 中是否有等效于 @jit 的功能,可以带来更好的性能?
这是我使用的代码:
Python
from jax import grad, jit, jacfwd
import jax.numpy as jnp
import numpy as np
import time
def gaussian(x, params):
    """Evaluate a normalized Gaussian at `x`.

    Args:
        x: scalar or array of evaluation points.
        params: sequence (amplitude, mean, sigma); sigma's sign is ignored.

    Returns:
        amplitude-scaled Gaussian density, same shape as `x`.
    """
    amp, mu, sigma = params[0], params[1], params[2]
    # Normalization so the curve integrates to `amp` over the real line.
    scale = amp / (jnp.abs(sigma) * jnp.sqrt(2 * np.pi))
    z = (x - mu) / sigma
    return scale * jnp.exp(-0.5 * z ** 2)
def myjacobian(x, params):
    """Forward-mode Jacobian of `gaussian` w.r.t. `params`, shape (len(x), len(params))."""
    jac_fn = jacfwd(gaussian, argnums=1)
    return jac_fn(x, params)
def op(jac):
    """Moore-Penrose pseudoinverse of the Jacobian.

    The original formed inv(Jᵀ J) @ Jᵀ explicitly; inverting the
    normal-equations matrix squares the condition number and is numerically
    fragile. `jnp.linalg.pinv` computes the same matrix for a full-column-rank
    J via SVD, and degrades gracefully when J is rank-deficient.

    Args:
        jac: (m, n) Jacobian matrix.

    Returns:
        (n, m) pseudoinverse of `jac`.
    """
    return jnp.linalg.pinv(jac)
def res(x, data, params):
    """Residual vector: observed `data` minus the model prediction at `x`."""
    prediction = gaussian(x, params)
    return data - prediction
@jit
def step(x, data, params):
    """One Gauss-Newton update: params + pinv-like(J) @ residuals."""
    r = res(x, data, params)
    pseudo = op(myjacobian(x, params))
    return params + jnp.matmul(pseudo, r)
# Build the synthetic data set, then run 5000 Gauss-Newton updates and time them.
N = 2000
x = np.linspace(start=-100, stop=100, num=N)
data = gaussian(x, [5.65, 25.5, 37.23])
ini = jnp.array([0.9, 5., 5.0])
start = time.time()
for _ in range(5000):
    ini = step(x, data, ini)
stop = time.time()
print('t2-t1: ', stop - start)
ini
Run Code Online (Sandbox Code Playgroud)
朱莉娅
using Zygote
function gaussian(x::Union{Vector{Float64}, Float64}, params::Vector{Float64})
    # Normalized Gaussian; params = [amplitude, mean, sigma].
    amp, mu, sigma = params
    # Normalization so the curve integrates to `amp`; |sigma| guards a
    # negative sigma produced mid-optimization.
    scale = amp / (abs(sigma) * sqrt(2 * pi))
    z = (x .- mu) ./ sigma
    return scale .* exp.(-0.5 .* z .^ 2)
end
function myjacobian(x::Vector{Float64}, params::Vector{Float64})
    # Row i holds d gaussian(x[i], params) / d params, computed with Zygote
    # reverse-mode AD one scalar point at a time.
    output = zeros(length(x), length(params))
    for (row, xi) in enumerate(x)
        # `gradient` returns a 1-tuple; `first` extracts the gradient vector
        # directly -- the original `collect(...)[1]` allocated a needless copy.
        output[row, :] .= first(gradient(p -> gaussian(xi, p), params))
    end
    return output
end
function op(jac::Matrix{Float64})
    # Pseudoinverse-style operator inv(JᵀJ)·Jᵀ for the Gauss-Newton step.
    # Left-division solves (JᵀJ) X = Jᵀ via a factorization instead of
    # forming an explicit inverse -- mathematically identical for
    # full-column-rank J, numerically far more stable.
    return (jac' * jac) \ jac'
end
function res(x::Vector{Float64}, data::Vector{Float64}, params::Vector{Float64})
    # Residuals between the observations and the current model prediction.
    model = gaussian(x, params)
    return data .- model
end
function step(x::Vector{Float64}, data::Vector{Float64}, params::Vector{Float64})
    # One Gauss-Newton update: params += inv(JᵀJ)·Jᵀ · residuals.
    r = res(x, data, params)
    pseudo = op(myjacobian(x, params))
    return params .+ pseudo * r
end
# Build synthetic data and run 5000 Gauss-Newton iterations, timing the loop.
N = 2000
x = collect(range(start = -100, stop = 100, length = N))
params = [5.65, 25.5, 37.23]          # `vec([...])` was a no-op: [...] is already a Vector
data = gaussian(x, params)
ini = [0.9, 5.0, 5.0]
@time for i in 1:5000                 # 1:5000 is the idiomatic range
    # `global` is required to rebind a top-level variable from inside a loop
    # body; without it this warns in the REPL and errors when run as a script.
    global ini = step(x, data, ini)
end
ini
Run Code Online (Sandbox Code Playgroud)
您的 Julia 代码执行了许多不惯用的操作,并且会降低您的性能。这不是完整的概述,但它应该可以为您提供一个好的开始思路。
第一件事是将 params 作为 Vector 传递是一个坏主意。这意味着它必须在堆上分配,并且编译器不知道它有多长。相反,使用 Tuple 可以进行更多优化。其次,不要让 gaussian 作用于由 x 组成的 Vector;相反,编写标量版本然后广播它。具体来说,通过这些更改,您将拥有
function gaussian(x::Number, params::NTuple{3, Float64})
    # Scalar Gaussian; broadcast this over a vector of x values.
    amp, mu, sigma = params
    # The next 2 lines should probably be hoisted out of this function,
    # but are kept here for now.
    scale = amp / (abs(sigma) * sqrt(2 * pi))
    z = (x - mu) / sigma
    return scale * exp(-0.5 * z^2)
end
Run Code Online (Sandbox Code Playgroud)
| 归档时间: | |
| 查看次数: | 397 次 |
| 最近记录: | |