Skip to content

Commit

Permalink
Merge branch 'ptiede:main' into main
Browse files Browse the repository at this point in the history
  • Loading branch information
erandichavez authored Jan 15, 2025
2 parents 771ad06 + fa08071 commit 360a0b0
Show file tree
Hide file tree
Showing 32 changed files with 615 additions and 292 deletions.
4 changes: 2 additions & 2 deletions Project.toml
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
name = "Comrade"
uuid = "99d987ce-9a1e-4df8-bc0b-1ea019aa547b"
authors = ["Paul Tiede <ptiede91@gmail.com>"]
version = "0.11.3"
version = "0.11.5"

[deps]
AbstractMCMC = "80f14c24-f653-4e6a-9b94-39d6b0f70001"
Expand Down Expand Up @@ -89,7 +89,7 @@ ParameterHandling = "0.4, 0.5"
Pigeons = "0.3, 0.4"
PolarizedTypes = "0.1"
PrettyTables = "1, 2"
Pyehtim = "0.1"
Pyehtim = "0.2"
RecipesBase = "1"
Reexport = "1"
SpecialFunctions = "0.10, 1, 2"
Expand Down
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
Composable Modeling of Radio Emission

## Documentation
[![Stable](https://img.shields.io/badge/docs-stable-blue.svg)](https://ptiede.github.io/Comrade.jl/dev/)
[![Stable](https://img.shields.io/badge/docs-stable-blue.svg)](https://ptiede.github.io/Comrade.jl/v0.11.4/)
[![Dev](https://img.shields.io/badge/docs-dev-blue.svg)](https://ptiede.github.io/Comrade.jl/dev/)
## Repo Status
[![Build Status](/~https://github.com/ptiede/Comrade.jl/actions/workflows/ci.yml/badge.svg)](/~https://github.com/ptiede/Comrade.jl/actions/)
Expand Down
16 changes: 0 additions & 16 deletions docs/CondaPkg.toml

This file was deleted.

8 changes: 6 additions & 2 deletions docs/tutorials.jl
Original file line number Diff line number Diff line change
Expand Up @@ -26,10 +26,14 @@ withenv("JULIA_DEBUG"=>"Literate") do
jl_expr = "using Literate;"*
"preprocess(path, str) = replace(str, \"__DIR = @__DIR__\" => \"__DIR = \\\"\$(dirname(path))\\\"\");"*
"Literate.markdown(\"$(p_)\", \"$(joinpath(OUTPUT, d))\";"*
"name=\"$name\", execute=false, flavor=Literate.DocumenterFlavor(),"*
"name=\"$name\", execute=true, flavor=Literate.DocumenterFlavor(),"*
"preprocess=Base.Fix1(preprocess, \"$(p_)\"))"
cm = `julia --project=$(@__DIR__) -e $(jl_expr)`
run(cm)
try
run(cm)
catch e
@warn "there was an issue with $cm\n $e"
end

end
end
2 changes: 1 addition & 1 deletion examples/advanced/HybridImaging/Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ CairoMakie = "0.12"
Distributions = "0.25"
Optimization = "4"
Plots = "1"
Pyehtim = "0.1"
Pyehtim = "0.2"
StableRNGs = "1"
StatsBase = "0.34"
VLBIImagePriors = "0.9"
34 changes: 16 additions & 18 deletions examples/advanced/HybridImaging/main.jl
Original file line number Diff line number Diff line change
@@ -1,3 +1,14 @@
import Pkg #hide
__DIR = @__DIR__ #hide
pkg_io = open(joinpath(__DIR, "pkg.log"), "w") #hide
Pkg.activate(__DIR; io=pkg_io) #hide
Pkg.develop(; path=joinpath(__DIR, "..", "..", ".."), io=pkg_io) #hide
Pkg.instantiate(; io=pkg_io) #hide
Pkg.precompile(; io=pkg_io) #hide
close(pkg_io) #hide



# # Hybrid Imaging of a Black Hole

# In this tutorial, we will use **hybrid imaging** to analyze the 2017 EHT data.
Expand All @@ -16,18 +27,6 @@
# This is the approach we will take in this tutorial to analyze the April 6 2017 EHT data
# of M87.

import Pkg #hide
__DIR = @__DIR__ #hide
pkg_io = open(joinpath(__DIR, "pkg.log"), "w") #hide
Pkg.activate(__DIR; io=pkg_io) #hide
Pkg.develop(; path=joinpath(__DIR, "..", "..", ".."), io=pkg_io) #hide
Pkg.instantiate(; io=pkg_io) #hide
Pkg.precompile(; io=pkg_io) #hide
close(pkg_io) #hide

ENV["GKSwstype"] = "nul" #hide


# ## Loading the Data

# To get started we will load Comrade
Expand Down Expand Up @@ -154,12 +153,10 @@ skym = SkyModel(sky, skyprior, g; metadata=skymetadata)
using Enzyme
post = VLBIPosterior(skym, intmodel, dvis; admode=set_runtime_activity(Enzyme.Reverse))

# To sample from our prior we can do
xrand = prior_sample(rng, post)

# and then plot the results
# We can sample from the prior to see what the model looks like
using DisplayAs #hide
import CairoMakie as CM
xrand = prior_sample(rng, post)
CM.activate!(type = "png", px_per_unit=1) #hide
gpl = imagepixels(μas2rad(200.0), μas2rad(200.0), 128, 128)
fig = imageviz(intensitymap(skymodel(post, xrand), gpl));
Expand All @@ -176,7 +173,7 @@ fig |> DisplayAs.PNG |> DisplayAs.Text #hide
using Optimization
using OptimizationOptimJL
xopt, sol = comrade_opt(post, LBFGS();
initial_params=xrand, maxiters=2000, g_tol=1e0)
initial_params=xrand, maxiters=2000, g_tol=1e0);


# First we will evaluate our fit by plotting the residuals
Expand All @@ -187,7 +184,8 @@ fig |> DisplayAs.PNG |> DisplayAs.Text #hide
# These residuals suggest that we are substantially overfitting the data. This is a common
# side effect of MAP imaging. As a result if we plot the image we see that there
# is substantial high-frequency structure in the image that isn't supported by the data.
imageviz(intensitymap(skymodel(post, xopt), gpl), figure=(;resolution=(500, 400),))
fig = imageviz(intensitymap(skymodel(post, xopt), gpl), figure=(;resolution=(500, 400),));
fig |> DisplayAs.PNG |> DisplayAs.Text #hide


# To improve our results we will now move to Posterior sampling. This is the main method
Expand Down
2 changes: 1 addition & 1 deletion examples/beginner/GeometricModeling/Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,6 @@ Distributions = "0.25"
Optimization = "4"
Pigeons = "0.4"
Plots = "1"
Pyehtim = "0.1"
Pyehtim = "0.2"
StableRNGs = "1"
VLBIImagePriors = "0.9"
32 changes: 18 additions & 14 deletions examples/beginner/GeometricModeling/main.jl
Original file line number Diff line number Diff line change
@@ -1,3 +1,13 @@
import Pkg; #hide
__DIR = @__DIR__ #hide
pkg_io = open(joinpath(__DIR, "pkg.log"), "w") #hide
Pkg.activate(__DIR; io=pkg_io) #hide
Pkg.develop(; path=joinpath(__DIR, "..", "..", ".."), io=pkg_io) #hide
Pkg.instantiate(; io=pkg_io) #hide
Pkg.precompile(; io=pkg_io) #hide
close(pkg_io) #hide


# # Geometric Modeling of EHT Data

# `Comrade` has been designed to work with the EHT and ngEHT.
Expand All @@ -9,22 +19,15 @@
# In this tutorial, we will construct a similar model and fit it to the data in under
# 50 lines of code (sans comments). To start, we load Comrade and some other packages we need.

import Pkg #hide
__DIR = @__DIR__ #hide
pkg_io = open(joinpath(__DIR, "pkg.log"), "w") #hide
Pkg.activate(__DIR; io=pkg_io) #hide
Pkg.develop(; path=joinpath(__DIR, "..", "..", ".."), io=pkg_io) #hide
Pkg.instantiate(; io=pkg_io) #hide
Pkg.precompile(; io=pkg_io) #hide
close(pkg_io) #hide
ENV["GKSwstype"] = "nul" #hide

# To get started we load Comrade.
#-

using Comrade


# Currently we use eht-imaging for data management, however this will soon be replaced
# by a pure Julia solution.
using Pyehtim

# For reproducibility we use a stable random number generator
using StableRNGs
rng = StableRNG(42)
Expand Down Expand Up @@ -81,7 +84,7 @@ prior = (
ξτ= Uniform(0.0, π),
f = Uniform(0.0, 1.0),
σG = Uniform(μas2rad(1.0), μas2rad(100.0)),
τG = Uniform(0.0, 1.0),
τG = Exponential(1.0),
ξG = Uniform(0.0, 1π),
xG = Uniform(-μas2rad(80.0), μas2rad(80.0)),
yG = Uniform(-μas2rad(80.0), μas2rad(80.0))
Expand Down Expand Up @@ -146,8 +149,8 @@ fpost = asflat(post)
p = prior_sample(rng, post)

# and then transform it to transformed space using T
logdensityof(cpost, Comrade.TV.inverse(cpost, p))
logdensityof(fpost, Comrade.TV.inverse(fpost, p))
logdensityof(cpost, Comrade.inverse(cpost, p))
logdensityof(fpost, Comrade.inverse(fpost, p))

# note that the log density is not the same since the transformation causes a jacobian to ensure volume is preserved.

Expand Down Expand Up @@ -238,3 +241,4 @@ DisplayAs.Text(DisplayAs.PNG(p))
# and the model, divided by the data's error:
p = residual(post, chain[end]);
DisplayAs.Text(DisplayAs.PNG(p))

2 changes: 1 addition & 1 deletion examples/beginner/LoadingData/Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -6,4 +6,4 @@ Pyehtim = "3d61700d-6e5b-419a-8e22-9c066cf00468"

[compat]
Plots = "1"
Pyehtim = "0.1"
Pyehtim = "0.2"
22 changes: 9 additions & 13 deletions examples/beginner/LoadingData/main.jl
Original file line number Diff line number Diff line change
@@ -1,14 +1,3 @@
# # Loading Data into Comrade

# The VLBI field does not have a standardized data format, and the EHT uses a
# particular uvfits format similar to the optical interferometry oifits format.
# As a result, we reuse the excellent `eht-imaging` package to load data into `Comrade`.

# Once the data is loaded, we then convert the data into the tabular format `Comrade`
# expects. Note that this may change to a Julia package as the Julia radio
# astronomy group grows.

# To get started, we will load `Comrade` and `Plots` to enable visualizations of the data
import Pkg #hide
__DIR = @__DIR__ #hide
pkg_io = open(joinpath(__DIR, "pkg.log"), "w") #hide
Expand All @@ -18,11 +7,18 @@ Pkg.instantiate(; io=pkg_io) #hide
Pkg.precompile(; io=pkg_io) #hide
close(pkg_io) #hide

ENV["GKSwstype"] = "nul" #hide
# # Loading Data into Comrade

# The VLBI field does not have a standardized data format, and the EHT uses a
# particular uvfits format similar to the optical interferometry oifits format.
# As a result, we reuse the excellent `eht-imaging` package to load data into `Comrade`.

using Comrade
# Once the data is loaded, we then convert the data into the tabular format `Comrade`
# expects. Note that this may change to a Julia package as the Julia radio
# astronomy group grows.

# To get started, we will load `Comrade` and `Plots` to enable visualizations of the data
using Comrade
using Plots

# We also load Pyehtim since it loads eht-imaging into Julia using PythonCall and exports
Expand Down
2 changes: 1 addition & 1 deletion examples/intermediate/ClosureImaging/Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,6 @@ Distributions = "0.25"
Optimization = "4"
Pkg = "1"
Plots = "1"
Pyehtim = "0.1"
Pyehtim = "0.2"
StableRNGs = "1"
VLBIImagePriors = "0.9"
23 changes: 11 additions & 12 deletions examples/intermediate/ClosureImaging/main.jl
Original file line number Diff line number Diff line change
@@ -1,3 +1,13 @@
import Pkg #hide
__DIR = @__DIR__ #hide
pkg_io = open(joinpath(__DIR, "pkg.log"), "w") #hide
Pkg.activate(__DIR; io=pkg_io) #hide
Pkg.develop(; path=joinpath(__DIR, "..", "..", ".."), io=pkg_io) #hide
Pkg.instantiate(; io=pkg_io) #hide
Pkg.precompile(; io=pkg_io) #hide
close(pkg_io) #hide
ENV["GKSwstype"] = "nul"; #hide

# # Imaging a Black Hole using only Closure Quantities

# In this tutorial, we will create a preliminary reconstruction of the 2017 M87 data on April 6
Expand All @@ -21,17 +31,6 @@
#
# In this tutorial, we will do closure-only modeling of M87 to produce a posterior of images of M87.

import Pkg #hide
__DIR = @__DIR__ #hide
pkg_io = open(joinpath(__DIR, "pkg.log"), "w") #hide
Pkg.activate(__DIR; io=pkg_io) #hide
Pkg.develop(; path=joinpath(__DIR, "..", "..", ".."), io=pkg_io) #hide
Pkg.instantiate(; io=pkg_io) #hide
Pkg.precompile(; io=pkg_io) #hide
close(pkg_io) #hide
ENV["GKSwstype"] = "nul"; #hide



# To get started, we will load Comrade
using Comrade
Expand Down Expand Up @@ -146,7 +145,7 @@ post = VLBIPosterior(skym, dlcamp, dcphase; admode=set_runtime_activity(Enzyme.R
using Optimization
using OptimizationOptimJL
xopt, sol = comrade_opt(post, LBFGS();
maxiters=1000, initial_params=prior_sample(rng, post))
maxiters=1000, initial_params=prior_sample(rng, post));


# First we will evaluate our fit by plotting the residuals
Expand Down
30 changes: 16 additions & 14 deletions examples/intermediate/PolarizedImaging/main.jl
Original file line number Diff line number Diff line change
@@ -1,3 +1,12 @@
import Pkg #hide
__DIR = @__DIR__ #hide
pkg_io = open(joinpath(__DIR, "pkg.log"), "w") #hide
Pkg.activate(__DIR; io=pkg_io) #hide
Pkg.develop(; path=joinpath(__DIR, "..", "..", ".."), io=pkg_io) #hide
Pkg.instantiate(; io=pkg_io) #hide
Pkg.precompile(; io=pkg_io) #hide
close(pkg_io) #hide

# # Polarized Image and Instrumental Modeling

# In this tutorial, we will analyze a simulated simple polarized dataset to demonstrate
Expand Down Expand Up @@ -85,14 +94,6 @@
# In the rest of the tutorial, we are going to solve for all of these instrument model terms in
# while re-creating the polarized image from the first [`EHT results on M87`](https://iopscience.iop.org/article/10.3847/2041-8213/abe71d).

import Pkg #hide
__DIR = @__DIR__ #hide
pkg_io = open(joinpath(__DIR, "pkg.log"), "w") #hide
Pkg.activate(__DIR; io=pkg_io) #hide
Pkg.develop(; path=joinpath(__DIR, "..", "..", ".."), io=pkg_io) #hide
Pkg.instantiate(; io=pkg_io) #hide
Pkg.precompile(; io=pkg_io) #hide
close(pkg_io) #hide


# ## Load the Data
Expand Down Expand Up @@ -192,7 +193,7 @@ mimg = intensitymap(mpr, grid)
# For the image metadata we specify the grid and the total flux of the image, which is 1.0.
# Note that we specify the total flux out front since it is degenerate with an overall shift
# in the gain amplitudes.
skymeta = (; mimg=mimg./flux(mimg), ftot=0.6)
skymeta = (; mimg=mimg./flux(mimg), ftot=0.6);


# We use again use a GMRF prior similar to the [Imaging a Black Hole using only Closure Quantities](@ref) tutorial
Expand Down Expand Up @@ -331,12 +332,13 @@ xopt, sol = comrade_opt(post, Optimisers.Adam();

# Now let's evaluate our fits by plotting the residuals
using Plots
Plots.default(fmt = :png)
residual(post, xopt)

# These look reasonable, although there may be some minor overfitting.
# Let's compare our results to the ground truth values we know in this example.
# First, we will load the polarized truth
imgtrue = load_fits(joinpath(__DIR, "..", "..", "Data", "polarized_gaussian.fits"), IntensityMap{StokesParams})
imgtrue = load_fits(joinpath(__DIR, "..", "..", "Data", "polarized_gaussian.fits"), IntensityMap{StokesParams});
# Select a reasonable zoom in of the image.
imgtruesub = regrid(imgtrue, imagepixels(fovx, fovy, nx*4, ny*4))
img = intensitymap(Comrade.skymodel(post, xopt), axisdims(imgtruesub))
Expand Down Expand Up @@ -366,16 +368,16 @@ gamp_ratio = caltable(exp.(xopt.instrument.lgrat))
# expected since gain ratios are typically stable over the course of an observation and the constant
# offset was removed in the EHT calibration process.
gphaseR = caltable(xopt.instrument.gpR)
p = Plots.plot(gphaseR, layout=(3,3), size=(650,500));
Plots.plot!(p, gphase_ratio, layout=(3,3), size=(650,500));
p = Plots.plot(gphaseR, layout=(3,3), size=(650,500), label="R Gain Phase");
Plots.plot!(p, gphase_ratio, layout=(3,3), size=(650,500), label="Gain Phase Ratio");
p |> DisplayAs.PNG |> DisplayAs.Text
#-
# Moving to the amplitudes we see largely stable gain amplitudes on the right circular polarization except for LMT which is
# known and due to pointing issues during the 2017 observation. Again the gain ratios are stable and close to unity. Typically
# we expect that a priori calibration should make the gain ratios close to unity.
gampr = caltable(exp.(xopt.instrument.lgR))
p = Plots.plot(gampr, layout=(3,3), size=(650,500))
Plots.plot!(p, gamp_ratio, layout=(3,3), size=(650,500))
p = Plots.plot(gampr, layout=(3,3), size=(650,500), label="R Gain Amp.");
Plots.plot!(p, gamp_ratio, layout=(3,3), size=(650,500), label="Gain Amp. Ratio")
p |> DisplayAs.PNG |> DisplayAs.Text
#-

Expand Down
2 changes: 1 addition & 1 deletion examples/intermediate/StokesIImaging/Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,6 @@ Distributions = "0.25"
Optimization = "4"
Pkg = "1"
Plots = "1"
Pyehtim = "0.1"
Pyehtim = "0.2"
StableRNGs = "1"
VLBIImagePriors = "0.9"
Loading

0 comments on commit 360a0b0

Please sign in to comment.