1 change: 1 addition & 0 deletions .gitignore
@@ -9,6 +9,7 @@ slurm_logs/*.sh
*.err
*.out
*.txt
*.log

stash/
.gitignore
8 changes: 8 additions & 0 deletions dags/main.py
@@ -100,6 +100,14 @@ def poke(self, context) -> bool:
' git fetch origin main\n'
' git merge origin/main --no-edit\n'
"fi\n"
# --- Separate check for unpushed commits (ahead of origin) ---
f'read behind ahead < <(git rev-list --left-right --count origin/{REPO_BRANCH}...HEAD)\n'
'if [[ "$ahead" -gt 0 ]]; then\n'
f' echo "Branch is ahead of origin/{REPO_BRANCH} by $ahead commits. Pushing..."\n'
f' git push origin HEAD:{REPO_BRANCH}\n'
"else\n"
' echo "Branch is up to date with origin/{REPO_BRANCH}"\n'
"fi\n"
),
)
) >> (
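For reference, the ahead-of-origin check added above can be exercised outside the DAG. A minimal sketch in Julia (illustration only; the DAG itself emits this as bash, and the branch name below is a stand-in for REPO_BRANCH):

```julia
# Sketch: count commits ahead of origin and push them if any exist.
# Assumes the working directory is a git checkout with an `origin` remote.
branch = "main"  # stand-in for REPO_BRANCH
behind, ahead = parse.(Int, split(readchomp(`git rev-list --left-right --count origin/$branch...HEAD`)))
if ahead > 0
    println("Branch is ahead of origin/$branch by $ahead commits. Pushing...")
    run(`git push origin HEAD:$branch`)
else
    println("Branch is up to date with origin/$branch")
end
```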
14 changes: 6 additions & 8 deletions pipeline.jl
@@ -135,14 +135,12 @@ flush(stdout);
mkpath(dirName)
end

df = h5open(joinpath(outdir, "almanac/$(runname).h5")) do f
DataFrame(read(f["$(parg["tele"])/$(mjd)/exposures"]))
end
df = read_almanac_exp_df(joinpath(outdir, "almanac/$(runname).h5"), parg["tele"], mjd)

# check if chip is in the list of chips in df.something[expid] (waiting on Andy Casey to update almanac)
rawpath = build_raw_path(
df.observatory[expid], df.mjd[expid], chip, df.exposure[expid])
cartid = parseCartID(df.cartid[expid])
df.observatory[expid], chip, df.mjd[expid], lpad(df.exposure_int[expid], 8, "0"))
cartid = df.cartidInt[expid]
# decompress and convert apz data format to a standard 3D cube of reads
cubedat, hdr_dict = apz2cube(rawpath)

@@ -242,7 +240,7 @@ flush(stdout);
# need to clean up exptype to account for FPI versus ARCLAMP
outfname = join(
["ar2D", df.observatory[expid], df.mjd[expid],
chip, df.exposure[expid], df.exptype[expid]],
last(df.exposure_str[expid],4), chip, df.exptype[expid]],
"_")
# probably change to FITS to make astronomers happy (this JLD2, which is HDF5, is just for debugging)

@@ -262,8 +260,8 @@ flush(stdout);

# come back to tuning the chi2perdofcut once we more rigorously establish the noise model
function process_2Dcal(fname; chi2perdofcut = 100)
sname = split(fname, "_")
tele, mjd, chip, expid = sname[(end - 4):(end - 1)]
sname = split(split(split(fname, "/")[end],".h5")[1], "_")
fnameType, tele, mjd, expnum, chip, exptype = sname[(end - 5):end]

dimage = load(fname, "dimage")
ivarimage = load(fname, "ivarimage")
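The ar2D output name built above packs six underscore-separated fields (type, observatory, mjd, 4-digit exposure number, chip, exptype) before the .h5 suffix, which is exactly what process_2Dcal unpacks. A round-trip sketch with made-up values:

```julia
# Hypothetical example of the ar2D naming convention used by the pipeline.
fields = ["ar2D", "apo", "60370", "0024", "a", "OBJECT"]
fname = joinpath("outdir", "apred", "60370", join(fields, "_") * ".h5")

# The same parse used in process_2Dcal / process_1D / reinterp_spectra:
sname = split(split(split(fname, "/")[end], ".h5")[1], "_")
fnameType, tele, mjd, expnum, chip, exptype = sname[(end - 5):end]
# -> ("ar2D", "apo", "60370", "0024", "a", "OBJECT")
```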
39 changes: 17 additions & 22 deletions pipeline_2d_1d.jl
@@ -105,6 +105,7 @@ flush(stdout);
using DataFrames, EllipsisNotation, StatsBase
using AstroTime # can remove after Adam merges the PR to recast as Float
using ParallelDataTransfer, ProgressMeter
using ApogeeReduction

src_dir = "./"
include(src_dir * "src/ar1D.jl")
@@ -126,16 +127,15 @@ flush(stdout);
##### 1D stage
@everywhere begin
function process_1D(fname)
sname = split(split(fname, "/")[end], "_")
fnameType, tele, mjd, chip, expid = sname[(end - 5):(end - 1)]
sname = split(split(split(fname, "/")[end],".h5")[1], "_")
fnameType, tele, mjd, expnum, chip, exptype = sname[(end - 5):end]

# how worried should I be about loading this every time?
falm = h5open(parg["outdir"] * "almanac/$(parg["runname"]).h5")
dfalmanac = DataFrame(read(falm["$(parg["tele"])/$(mjd)/exposures"]))
dfalmanac.cartidInt = parseCartID.(dfalmanac.cartid)
falm = h5open(joinpath(parg["outdir"], "almanac/$(parg["runname"]).h5"))
dfalmanac = read_almanac_exp_df(falm, parg["tele"], mjd)

med_center_to_fiber_func, x_prof_min, x_prof_max_ind, n_sub, min_prof_fib, max_prof_fib,
all_y_prof, all_y_prof_deriv = gh_profiles(
tele, mjd, chip, expid; n_sub = 100)
all_y_prof, all_y_prof_deriv = gh_profiles(tele, mjd, expnum, chip; n_sub = 100)

fnamecal = if (fnameType == "ar2D")
replace(fname, "ar2D" => "ar2Dcal")
@@ -180,24 +180,23 @@ flush(stdout);
# this is the path to the underlying fluxing file.
# it is symlinked below to an exposure-specific file (linkPath).
calPath = get_fluxing_file(
dfalmanac, parg["outdir"], mjd, tele, expid, fluxing_chip = "c")
expid_num = parse(Int, last(expid, 4)) #this is silly because we translate right back
fibtargDict = get_fibTargDict(falm, tele, parse(Int, mjd), expid_num)
dfalmanac, parg["outdir"], tele, mjd, expnum, fluxing_chip = "c")
expid_num = parse(Int, last(expnum, 4)) #this is silly because we translate right back
fibtargDict = get_fibTargDict(falm, tele, mjd, expid_num)
fiberTypeList = map(x -> fibtargDict[x], 1:300)

if isnothing(calPath)
# TODO uncomment this
@warn "No fluxing file available for $(tele) $(mjd) $(chip) $(expid)"
@warn "No fluxing file available for $(tele) $(mjd) $(expnum) $(chip)"
relthrpt = ones(size(flux_1d, 2))
bitmsk_relthrpt = 2^2 * ones(Int, size(flux_1d, 2))
elseif !isfile(calPath)
error("Fluxing file for $(tele) $(mjd) $(chip) $(expid) does not exist")
error("Fluxing file $(calPath) for $(tele) $(mjd) $(expnum) $(chip) does not exist")
else
calPath = abspath(calPath)
linkPath = abspath(joinpath(
dirname(fname), "relFlux_$(tele)_$(mjd)_$(chip)_$(expid).h5"))
dirname(fname), "relFlux_$(tele)_$(mjd)_$(expnum)_$(chip).h5"))
if !islink(linkPath)
symlink(calPath, linkPath)
symlink(abspath(calPath), linkPath)
end
relthrpt = load(linkPath, "relthrpt")
relthrptr = reshape(relthrpt, (1, length(relthrpt)))
@@ -246,9 +245,7 @@ end

list2Dexp = []
for mjd in unique_mjds
f = h5open(parg["outdir"] * "almanac/$(parg["runname"]).h5")
df = DataFrame(read(f["$(parg["tele"])/$(mjd)/exposures"]))
close(f)
df = read_almanac_exp_df(joinpath(parg["outdir"], "almanac/$(parg["runname"]).h5"), parg["tele"], mjd)
function get_2d_name_partial(expid)
parg["outdir"] * "/apred/$(mjd)/" *
replace(get_1d_name(expid, df), "ar1D" => "ar2D") * ".h5"
@@ -302,9 +299,7 @@ all2Dcal = replace.(all2D, "ar2D" => "ar2Dcal")
## get all OBJECT files (happy to add any other types that see sky?)
list1DexpObject = []
for mjd in unique_mjds
f = h5open(parg["outdir"] * "almanac/$(parg["runname"]).h5")
df = DataFrame(read(f["$(parg["tele"])/$(mjd)/exposures"]))
close(f)
df = read_almanac_exp_df(joinpath(parg["outdir"], "almanac/$(parg["runname"]).h5"), parg["tele"], mjd)
function get_1d_name_partial(expid)
if df.imagetyp[expid] == "Object"
return parg["outdir"] * "/apred/$(mjd)/" * get_1d_name(expid, df, cal = true) * ".h5"
@@ -342,7 +337,7 @@ flush(stdout);
println("Solving skyline wavelength solution:");
flush(stdout);
all1DObjectSkyPeaks = replace.(
replace.(all1DObject, "ar1Dcal" => "skyLine_peaks"), "ar1D" => "skyLine_peaks")
replace.(all1DObject, "ar1Dcal" => "skyLinePeaks"), "ar1D" => "skyLinePeaks")
@showprogress pmap(get_and_save_sky_wavecal, all1DObjectSkyPeaks)

## TODO when are we going to split into individual fiber files? Then we should be writing fiber type to the file name
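The nested replace above maps both ar1Dcal and plain ar1D names onto skyLinePeaks files; the order matters, since replacing "ar1Dcal" first keeps it from becoming "skyLinePeakscal". A small sketch with invented file names:

```julia
names = ["ar1Dcal_apo_60370_0024_a_OBJECT.h5",  # hypothetical inputs
         "ar1D_apo_60370_0025_a_OBJECT.h5"]
peaks = replace.(replace.(names, "ar1Dcal" => "skyLinePeaks"), "ar1D" => "skyLinePeaks")
# -> ["skyLinePeaks_apo_60370_0024_a_OBJECT.h5",
#     "skyLinePeaks_apo_60370_0025_a_OBJECT.h5"]
```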
2 changes: 2 additions & 0 deletions src/ApogeeReduction.jl
@@ -1,7 +1,9 @@
module ApogeeReduction

export N_FIBERS, N_XPIX, N_CHIPS
const N_FIBERS = 300
const N_XPIX = 2048
const N_CHIPS = 3

include("utils.jl")
include("ar3D.jl")
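Exporting the detector dimensions lets the pipeline scripts drop their hard-coded 2048/300/3 literals (see the reinterp_spectra changes below). A usage sketch, assuming the package is on the load path:

```julia
using ApogeeReduction  # brings N_FIBERS, N_XPIX, N_CHIPS into scope

# One wavelength solution value per x pixel, fiber, and chip (2048 x 300 x 3).
chipWaveSoln = zeros(N_XPIX, N_FIBERS, N_CHIPS)
```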
94 changes: 53 additions & 41 deletions src/ar1D.jl
@@ -278,11 +278,11 @@
end

"""
Given an open HDF.file, `f`, and the telescope, mjd, and a "short" expid, return a dictionary
Given an open HDF5 file, `f`, and the telescope, mjd, and expnum, return a dictionary
mapping fiber id to fiber type.
"""
function get_fibTargDict(f, tele, mjd, exposure_id)
exposure_id = short_expid_to_long(mjd, exposure_id)
function get_fibTargDict(f, tele, mjd, expnum)
exposure_id = short_expid_to_long(mjd, expnum)

# translate confSummary/almanac terminology to AR.jl terminology
fiber_type_names = Dict(
@@ -304,18 +305,19 @@
# TODO Andrew thinks the fibers with category "" might be serendipitous targets

mjdfps2plate = get_fps_plate_divide(tele)
configName, configIdCol, target_type_col = if mjd > mjdfps2plate
configName, configIdCol, target_type_col = if parse(Int, mjd) > mjdfps2plate

"fps", "configid", "category"
else
"plates", "plateid", "target_type" # TODO should this be source_type?
end

df_exp = DataFrame(read(f["$(tele)/$(mjd)/exposures"]))
if !(exposure_id in df_exp.exposure)
df_exp = read_almanac_exp_df(f, tele, mjd)

if !(exposure_id in df_exp.exposure_str)

@warn "Exposure $(exposure_id) not found in $(tele)/$(mjd)/exposures"
return Dict(1:300 .=> "fiberTypeFail")
end
exposure_info = df_exp[findfirst(df_exp[!, "exposure"] .== exposure_id), :]
exposure_info = df_exp[findfirst(df_exp[!, "exposure_str"] .== exposure_id), :]

configid = exposure_info[configIdCol]

fibtargDict = if exposure_info.exptype == "OBJECT"
@@ -336,13 +337,22 @@
fiber_types = map(df_fib[!, target_type_col]) do t
if t in keys(fiber_type_names)
fiber_type_names[t]

else
# @warn "Unknown fiber type for $(tele)/$(mjd)/fibers/$(configName)/$(configid): $(repr(t))"
"fiberTypeFail"
end
end
Dict(df_fib[!, fiberid_col] .=> fiber_types)
fibernum_col = df_fib[!, fiberid_col]

# println(typeof(fibernum_col))
fibernumvec = if fibernum_col isa AbstractVector{<:Integer}
fibernum_col
elseif fibernum_col isa AbstractVector{<:String}
parse.(Int, fibernum_col)

else
@warn "Fiber numbers are neither integers or strings"
fibernum_col

end
Dict(fibernumvec .=> fiber_types)

catch e
rethrow(e)
@warn "Failed to get any fiber type information for $(tele)/$(mjd)/fibers/$(configName)/$(configid) (exposure $(exposure_id)). Returning fiberTypeFail for all fibers."
@@ -353,7 +363,7 @@
Dict(1:300 .=> "cal")
end

if mjd > mjdfps2plate
if parse(Int, mjd) > mjdfps2plate

fpifib1, fpifib2 = get_fpi_guide_fiberID(tele)
fibtargDict[fpifib1] = "fpiguide"
fibtargDict[fpifib2] = "fpiguide"
@@ -363,15 +373,17 @@

# hardcoded to use chip c only for now
# must use dome flats, not quartz flats (need fiber runs to telescope)
function get_fluxing_file(dfalmanac, parent_dir, mjd, tele, expidstr; fluxing_chip = "c")
expidfull = parse(Int, expidstr)
# use full exposure_id
function get_fluxing_file(dfalmanac, parent_dir, tele, mjd, expnum; fluxing_chip = "c")
exposure_id = parse(Int, short_expid_to_long(mjd, expnum))

df_mjd = sort(
dfalmanac[(dfalmanac.mjd .== parse(Int, mjd)) .& (dfalmanac.observatory .== tele), :],
:exposure)
expIndex = findfirst(df_mjd.exposure .== expidfull)
expIndex = findfirst(df_mjd.exposure_int .== exposure_id)

cartId = df_mjd.cartidInt[expIndex]
expIndex_before = findlast((df_mjd.imagetyp .== "DomeFlat") .& (df_mjd.exposure .< expidfull))
expIndex_after = findfirst((df_mjd.imagetyp .== "DomeFlat") .& (df_mjd.exposure .> expidfull))
# this needs to have cuts that match those in make_runlist_dome_flats.jl
expIndex_before = findlast((df_mjd.imagetyp .== "DomeFlat") .& (df_mjd.exposure_int .< exposure_id) .& (df_mjd.nreadInt .> 3))
expIndex_after = findfirst((df_mjd.imagetyp .== "DomeFlat") .& (df_mjd.exposure_int .> exposure_id) .& (df_mjd.nreadInt .> 3))

valid_before = if !isnothing(expIndex_before)
all(df_mjd.cartidInt[expIndex_before:expIndex] .== cartId) * 1
elseif !isnothing(expIndex_before)
@@ -389,43 +401,43 @@

if valid_before == 1
return get_fluxing_file_name(
parent_dir, mjd, tele, fluxing_chip, df_mjd.exposure[expIndex_before], cartId)
parent_dir, tele, mjd, last(df_mjd.exposure_str[expIndex_before], 4), fluxing_chip, cartId)
elseif valid_after == 1
return get_fluxing_file_name(
parent_dir, mjd, tele, fluxing_chip, df_mjd.exposure[expIndex_after], cartId)
parent_dir, tele, mjd, last(df_mjd.exposure_str[expIndex_after], 4), fluxing_chip, cartId)
# any of the cases below here we could consider using a global file
elseif valid_before == 2
return get_fluxing_file_name(
parent_dir, mjd, tele, fluxing_chip, df_mjd.exposure[expIndex_before], cartId)
parent_dir, tele, mjd, last(df_mjd.exposure_str[expIndex_before], 4), fluxing_chip, cartId)
elseif valid_after == 2
return get_fluxing_file_name(
parent_dir, mjd, tele, fluxing_chip, df_mjd.exposure[expIndex_after], cartId)
parent_dir, tele, mjd, last(df_mjd.exposure_str[expIndex_after], 4), fluxing_chip, cartId)
else
return nothing
end
end
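The selection logic above takes the nearest DomeFlat before the exposure (falling back to the nearest one after), requires more than 3 reads to match make_runlist_dome_flats.jl, and only trusts a flat if the cart id has not changed in between. A sketch of the before/after search with an invented night of exposures:

```julia
using DataFrames

# Hypothetical exposures, already sorted; column names follow the almanac fields used above.
df_mjd = DataFrame(exposure_int = [1, 2, 3, 4],
                   imagetyp     = ["DomeFlat", "Object", "Object", "DomeFlat"],
                   nreadInt     = [10, 47, 47, 2])

exposure_id = 3
before = findlast((df_mjd.imagetyp .== "DomeFlat") .& (df_mjd.exposure_int .< exposure_id) .& (df_mjd.nreadInt .> 3))
after  = findfirst((df_mjd.imagetyp .== "DomeFlat") .& (df_mjd.exposure_int .> exposure_id) .& (df_mjd.nreadInt .> 3))
# before == 1 (usable dome flat), after === nothing (only 2 reads), so the earlier flat wins.
```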

# TODO: switch to meta data dict and then save wavecal flags etc.
function reinterp_spectra(fname; wavecal_type = "wavecal_skyline")
function reinterp_spectra(fname; wavecal_type = "waveCalSkyLine")

# might need to add in telluric div functionality here?

sname = split(split(fname, "/")[end], "_")
fnameType, tele, mjd, chip, expid = sname[(end - 5):(end - 1)]
sname = split(split(split(fname, "/")[end],".h5")[1], "_")
fnameType, tele, mjd, expnum, chip, exptype = sname[(end - 5):end]

# could shift this to a preallocation step
outflux = zeros(length(logUniWaveAPOGEE), 300)
outvar = zeros(length(logUniWaveAPOGEE), 300)
outmsk = zeros(Int, length(logUniWaveAPOGEE), 300)
cntvec = zeros(Int, length(logUniWaveAPOGEE), 300)
outflux = zeros(length(logUniWaveAPOGEE), N_FIBERS)
outvar = zeros(length(logUniWaveAPOGEE), N_FIBERS)
outmsk = zeros(Int, length(logUniWaveAPOGEE), N_FIBERS)
cntvec = zeros(Int, length(logUniWaveAPOGEE), N_FIBERS)

pixvec = 1:(3 * 2048)
flux_stack = zeros(3 * 2048, 300)
ivar_stack = zeros(3 * 2048, 300)
mask_stack = zeros(Int, 3 * 2048, 300)
wave_stack = zeros(3 * 2048, 300)
chipBit_stack = zeros(Int, 3 * 2048, 300)
pixvec = 1:(N_CHIPS * N_XPIX)
flux_stack = zeros(N_CHIPS * N_XPIX, N_FIBERS)
ivar_stack = zeros(N_CHIPS * N_XPIX, N_FIBERS)
mask_stack = zeros(Int, N_CHIPS * N_XPIX, N_FIBERS)
wave_stack = zeros(N_CHIPS * N_XPIX, N_FIBERS)
chipBit_stack = zeros(Int, N_CHIPS * N_XPIX, N_FIBERS)

ingestBit = zeros(Int, 300)
ingestBit = zeros(Int, N_FIBERS)

# add a for loop over the exposures (stop thinking about "visits" for now)
# probably just generate ap1D file names from the almanac files
@@ -440,10 +452,10 @@
chipWaveSoln = f["chipWaveSoln"]
close(f)
else #this is a terrible global fallback, just so we get something to look at
chipWaveSoln = zeros(2048, 300, 3)
chipWaveSoln = zeros(N_XPIX, N_FIBERS, N_CHIPS)

for (chipind, chip) in enumerate(["a", "b", "c"])
chipWaveSoln[:, :, chipind] .= rough_linear_wave.(
1:2048, a = roughwave_dict[tele][chip][1], b = roughwave_dict[tele][chip][2])
1:N_XPIX, a = roughwave_dict[tele][chip][1], b = roughwave_dict[tele][chip][2])
end
println("No wavecal found for $(fname), using fallback")
flush(stdout)
@@ -458,11 +470,11 @@
mask_1d = f["mask_1d"]
close(f)

flux_stack[(1:2048) .+ (3 - chipind) * 2048, :] .= flux_1d[end:-1:1, :]
ivar_stack[(1:2048) .+ (3 - chipind) * 2048, :] .= ivar_1d[end:-1:1, :]
mask_stack[(1:2048) .+ (3 - chipind) * 2048, :] .= mask_1d[end:-1:1, :]
wave_stack[(1:2048) .+ (3 - chipind) * 2048, :] .= chipWaveSoln[end:-1:1, :, chipind]
chipBit_stack[(1:2048) .+ (3 - chipind) * 2048, :] .+= 2^(chipind)
flux_stack[(1:N_XPIX) .+ (3 - chipind) * N_XPIX, :] .= flux_1d[end:-1:1, :]
ivar_stack[(1:N_XPIX) .+ (3 - chipind) * N_XPIX, :] .= ivar_1d[end:-1:1, :]
mask_stack[(1:N_XPIX) .+ (3 - chipind) * N_XPIX, :] .= mask_1d[end:-1:1, :]
wave_stack[(1:N_XPIX) .+ (3 - chipind) * N_XPIX, :] .= chipWaveSoln[end:-1:1, :, chipind]
chipBit_stack[(1:N_XPIX) .+ (3 - chipind) * N_XPIX, :] .+= 2^(chipind)

end

noBadBits = (mask_stack .& bad_pix_bits .== 0)
Expand All @@ -474,7 +486,7 @@
(ivar_stack .> (10^-20))

## need to propagate the bit mask
for fiberindx in 1:300
for fiberindx in 1:N_FIBERS

Check warning on line 489 in src/ar1D.jl

View check run for this annotation

Codecov / codecov/patch

src/ar1D.jl#L489

Added line #L489 was not covered by tests
good_pix_fiber = good_pix[:, fiberindx]
flux_fiber = flux_stack[good_pix_fiber, fiberindx]
ivar_fiber = ivar_stack[good_pix_fiber, fiberindx]
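For orientation, the stacking index arithmetic in reinterp_spectra places chip c at the start of the stacked pixel axis and chip a at the end, with each chip's x axis reversed. A quick sketch of the ranges it produces:

```julia
N_XPIX = 2048  # matches the constant exported by ApogeeReduction

for (chipind, chip) in enumerate(["a", "b", "c"])
    rng = (1:N_XPIX) .+ (3 - chipind) * N_XPIX
    println("chip $chip -> stacked pixels $(first(rng)):$(last(rng))")
end
# chip a -> stacked pixels 4097:6144
# chip b -> stacked pixels 2049:4096
# chip c -> stacked pixels 1:2048
```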
7 changes: 3 additions & 4 deletions src/ar2Dcal.jl
@@ -1,10 +1,9 @@
using DataFrames

function calStrip(fname)
calType, telescope, chip, mjdstart, mjdend = split(
split(split(fname, "/")[end], ".")[1], "_")
return vcat(
abspath(fname), calType, telescope, chip, parse(Int, mjdstart), parse(Int, mjdend))
sname = split(split(split(fname, "/")[end],".h5")[1], "_")
calType, tele, chip, mjdstart, mjdend = sname[(end - 4):end]
return vcat(abspath(fname), calType, tele, chip, parse(Int, mjdstart), parse(Int, mjdend))

end

function cal2df(flist)
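calStrip now uses the same strip-the-extension parse as the other readers and expects five underscore-separated fields. A round-trip sketch with an invented calibration file name (the calType value is hypothetical):

```julia
fname = "outdir/darks/darkRate_apo_a_60350_60380.h5"  # hypothetical cal file name
sname = split(split(split(fname, "/")[end], ".h5")[1], "_")
calType, tele, chip, mjdstart, mjdend = sname[(end - 4):end]
# -> ("darkRate", "apo", "a", "60350", "60380"); calStrip then parses the two MJDs to Ints.
```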