Commit 098697e
Merge pull request #82 from andrew-saydjari/main
merge back main
andrew-saydjari authored Jan 24, 2024
2 parents fbcfb30 + ee88086 commit 098697e
Showing 6 changed files with 274 additions and 84 deletions.
Project.toml (2 changes: 2 additions & 0 deletions)

@@ -34,6 +34,7 @@ ThreadPinning = "811555cd-349b-4f26-b7bc-1f208b848042"
 [compat]
 AstroTime = "0.7"
 BLISBLAS = "0.1"
+BasisFunctions = "0.6"
 DataFrames = "1"
 EllipsisNotation = "1"
 FITSIO = "0.17"
@@ -46,6 +47,7 @@ PoissonRandom = "0.4"
 ProgressMeter = "1"
 ShiftedArrays = "2"
 SlurmClusterManager = "0.1"
+SortFilters = "0.1"
 StatsBase = "0.33, 0.34"
 Suppressor = "0.2"
 ThreadPinning = "0.7"
pipeline.jl (11 changes: 6 additions & 5 deletions)

@@ -148,12 +148,12 @@ end
 starcache = cache_starname(tele,field,plate,mjd,fiberindx,cache_dir=cache_dir,inject_cache_dir=inject_cache_dir)
 if (isfile(starcache) & caching)
     fvec, fvarvec, cntvec, chipmidtimes, metaexport = deserialize(starcache)
-    starscale,framecnts,varoffset,varflux,a_relFlux,b_relFlux,c_relFlux = metaexport
+    starscale,framecnts,varoffset,varflux,a_relFlux,b_relFlux,c_relFlux,cartVisit = metaexport
 elseif tele[end]=='i'
     warn("Injections not found at injection cache dir!")
 else
     fvec, fvarvec, cntvec, chipmidtimes, metaexport = stack_out(release_dir,redux_ver,tele,field,plate,mjd,fiberindx,cache_dir=cache_dir)
-    starscale,framecnts,varoffset,varflux,a_relFlux,b_relFlux,c_relFlux = metaexport
+    starscale,framecnts,varoffset,varflux,a_relFlux,b_relFlux,c_relFlux,cartVisit = metaexport
     if caching
         dirName = splitdir(starcache)[1]
         if !ispath(dirName)
@@ -164,7 +164,7 @@ end
 end
 simplemsk = (cntvec.==framecnts) .& skymsk;
 
-push!(out,(count(simplemsk), starscale, framecnts, chipmidtimes, varoffset, varflux, a_relFlux, b_relFlux, c_relFlux, nanify(fvec[simplemsk],simplemsk), nanify(fvarvec[simplemsk],simplemsk))) # 1
+push!(out,(count(simplemsk), starscale, framecnts, chipmidtimes, varoffset, varflux, a_relFlux, b_relFlux, c_relFlux, cartVisit, nanify(fvec[simplemsk],simplemsk), nanify(fvarvec[simplemsk],simplemsk))) # 1
 
 if sky_off
     meanLocSky.=0
@@ -369,8 +369,9 @@ end
 (x->x[metai][7], "a_relFlux"),
 (x->x[metai][8], "b_relFlux"),
 (x->x[metai][9], "c_relFlux"),
-(x->x[metai][10], "flux"),
-(x->x[metai][11], "fluxerr2"),
+(x->x[metai][10], "cartVisit"),
+(x->x[metai][11], "flux"),
+(x->x[metai][12], "fluxerr2"),
 (x->adjfibindx, "adjfiberindx"),
 
 (x->Float64.(x[RVind][1][1]), "RV_pixoff_final"),
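Taken together, the pipeline.jl hunks make one mechanical change: metaexport gains an eighth element, cartVisit, which shifts the flux and fluxerr2 slots in the output-table lambdas from indices 10 and 11 to 11 and 12. A minimal standalone sketch of the new tuple layout (the values below are placeholders, not pipeline output):

# Toy values standing in for real pipeline quantities; only the layout and
# unpack order matter here. cartVisit is the new eighth element.
metaexport = (1.0e3,   # starscale
              8,       # framecnts
              1.0e-2,  # varoffset
              2.5,     # varflux, i.e. (c^2*starscale^p)
              0.98,    # a_relFlux
              1.01,    # b_relFlux
              0.97,    # c_relFlux
              32)      # cartVisit (added by this commit)

starscale, framecnts, varoffset, varflux,
    a_relFlux, b_relFlux, c_relFlux, cartVisit = metaexport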
src/fileNameHandling.jl (2 changes: 1 addition & 1 deletion)

@@ -239,5 +239,5 @@ function build_apFluxPaths(release_dir,redux_ver,tele,field,plate,mjd)
         fluxFile = build_apFluxPath(release_dir,redux_ver,tele,mjd,chip,expectDome)
         push!(fluxFiles,fluxFile)
     end
-    return fluxFiles, expectDome
+    return fluxFiles, expectDome, cartVisit
 end
src/ingest.jl (20 changes: 15 additions & 5 deletions)

@@ -3,11 +3,11 @@
 
 using AstroTime
 
-function getAndWrite_fluxing(release_dir,redux_ver,tele,field,plate,mjd; cache_dir="../local_cache")
-    flux_paths, domeflat_expid = build_apFluxPaths(release_dir,redux_ver,tele,field,plate,mjd)
+function getAndWrite_fluxing(release_dir,redux_ver,tele,field,plate,mjd; cache_dir="../local_cache",nattempts=5)
+    flux_paths, domeflat_expid, cartVisit = build_apFluxPaths(release_dir,redux_ver,tele,field,plate,mjd)
     fluxingcache = cache_fluxname(tele,field,plate,mjd; cache_dir=cache_dir)
 
-    hdr = FITSHeader(["pipeline","git_branch","git_commit","domeflat_expid"],["apMADGICS.jl",git_branch,git_commit,string(domeflat_expid)],["","","",""])
+    hdr = FITSHeader(["pipeline","git_branch","git_commit","domeflat_expid","CARTID"],["apMADGICS.jl",git_branch,git_commit,string(domeflat_expid),string(cartVisit)],["","","","",""])
 
     #should implement this everywhere to avoid race conditions
     tmpfname = tempname()*"fits"
@@ -21,7 +21,16 @@ function getAndWrite_fluxing(release_dir,redux_ver,tele,field,plate,mjd; cache_d
         write(h,thrpt,name=chip)
     end
     close(h)
-    if !isfile(fluxingcache)
+    try
+        for i=1:nattempts
+            if !isfile(fluxingcache)
+                mv(tmpfname,fluxingcache,force=true)
+                break
+            else
+                break
+            end
+        end
+    catch
         mv(tmpfname,fluxingcache,force=true)
     end
 end
@@ -122,6 +131,7 @@ function stack_out(release_dir,redux_ver,tele,field,plate,mjd,fiberindx; varoffs
         thrpt = read(f[chip],fiberindx)
         thrptDict[chip] = thrpt
     end
+    cartVisit = read_header(f[1])["CARTID"]
     close(f)
 
     fill!(outvec,0)
@@ -222,7 +232,7 @@
     chipmidtimes = zeros(3)
     chipmidtimes[goodframeIndx] .= mean.(time_lsts[goodframeIndx]) #consider making this flux weighted (need to worry about skyline variance driving it)
     chipmidtimes[.!goodframeIndx] .= NaN
-    metaexport = (starscale,framecnts,varoffset,(c^2*starscale^p),thrptDict["a"],thrptDict["b"],thrptDict["c"])
+    metaexport = (starscale,framecnts,varoffset,(c^2*starscale^p),thrptDict["a"],thrptDict["b"],thrptDict["c"],cartVisit)
     if telluric_div
         return outvec, outvar, cntvec, chipmidtimes, metaexport, telvec
     end
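The ingest.jl changes thread cartVisit through two mechanisms: it is stamped into the fluxing cache's FITS header as CARTID at write time, and stack_out reads it back from the first HDU. The cache file itself is still produced by the write-to-tempfile-then-mv pattern flagged in the race-condition comment above. A minimal sketch of both, assuming FITSIO.jl; the file names and the toy throughput array are hypothetical stand-ins, and the nattempts retry loop is omitted:

using FITSIO

cartVisit = 32                      # hypothetical cart ID for this visit
fluxingcache = joinpath(mktempdir(), "fluxing_cache.fits")  # stand-in cache path

hdr = FITSHeader(["pipeline","CARTID"], ["apMADGICS.jl",string(cartVisit)], ["",""])

tmpfname = tempname()*".fits"       # write fully off to the side first
h = FITS(tmpfname, "w")
write(h, ones(100), header=hdr, name="a")   # toy stand-in for the chip-"a" throughput
close(h)

if !isfile(fluxingcache)            # another worker may already have won the race
    mv(tmpfname, fluxingcache, force=true)
end

f = FITS(fluxingcache)
cartBack = read_header(f[1])["CARTID"]      # the same read stack_out now performs
close(f)

Because the rename happens only after the temporary file is fully written, concurrent workers see either no cache file or a complete one, never a partial write.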
src/prior_build/get_sky_samples.jl (23 changes: 17 additions & 6 deletions)

@@ -136,7 +136,7 @@ end
     skyLineCache = cache_skynameSpec(tele,field,plate,mjd,fiberindx,cache_dir=cache_dir)
     fvec, fvarvec, cntvec, chipmidtimes, metaexport = deserialize(skyLineCache)
     simplemsk = (cntvec.==maximum(cntvec));
-    return simplemsk
+    return simplemsk, fvarvec
 end
 
 function sky_smooth_fit(outvec,outvar,simplemsk,Vpoly_scaled)
@@ -169,7 +169,7 @@ end
 function get_sky_samples(adjfibindx;contscale=5e2,loc_parallel=false,seed=2023)
 
     # there is a race condition if loc_parallel is true... so added a shuffle... not a great solution, but fine for testing?
-    ntuplst = deserialize(prior_dir*"2024_01_20/dr17_dr17_sky_input_lst_plate_cleanManual_"*lpad(adjfibindx,3,"0")*".jdat")
+    ntuplst = deserialize(prior_dir*"2024_01_22/outlists/sky/dr17_dr17_sky_input_lst_plate_msked_"*lpad(adjfibindx,3,"0")*".jdat")
     # if loc_parallel
     #     rng = MersenneTwister(seed)
     #     shuffle!(rng,ntuplst)
@@ -234,19 +234,24 @@
     end
 
     savename = "sky_prior_disk/skymsk_"*lpad(adjfibindx,3,"0")*".jdat"
-    if !isfile(savename)
+    savename1 = "sky_prior_disk/skyvar_"*lpad(adjfibindx,3,"0")*".jdat"
+    if !(isfile(savename) & isfile(savename1))
         pout = if loc_parallel
             @showprogress pmap(sky_msk_wrapper,ntuplst);
         else
             map(sky_msk_wrapper,ntuplst);
         end
         global skymsk = zeros(8700,size(pout,1));
+        global skyvar = zeros(8700,size(pout,1));
         for i=1:size(pout,1)
-            skymsk[:,i].=pout[i]
+            skymsk[:,i].=pout[i][1]
+            skyvar[:,i].=pout[i][2]
         end
         serialize(savename,skymsk)
+        serialize(savename1,skyvar)
     else
         global skymsk = deserialize(savename)
+        global skyvar = deserialize(savename1)
     end
 
     ### Save samples of tell-free sky decomposition for building Tfun/starCont prior
@@ -292,20 +297,26 @@
 
     # this is identical... just saving under a new name, but it is cheap
     savename = "sky_prior_disk/skymsk_tellDiv_"*lpad(adjfibindx,3,"0")*".jdat"
-    if !isfile(savename)
+    savename1 = "sky_prior_disk/skyvar_tellDiv_"*lpad(adjfibindx,3,"0")*".jdat"
+    if !(isfile(savename) & isfile(savename1))
         pout = if loc_parallel
             @showprogress pmap(sky_msk_wrapper,ntuplst);
         else
             map(sky_msk_wrapper,ntuplst);
        end
         global skymsk = zeros(8700,size(pout,1));
+        global skyvar = zeros(8700,size(pout,1));
         for i=1:size(pout,1)
-            skymsk[:,i].=pout[i]
+            skymsk[:,i].=pout[i][1]
+            skyvar[:,i].=pout[i][2]
         end
         serialize(savename,skymsk)
+        serialize(savename1,skyvar)
     else
         global skymsk = deserialize(savename)
+        global skyvar = deserialize(savename1)
     end
+    serialize("sky_prior_disk/chebmsk_exp_"*lpad(adjfibindx,3,"0")*".jdat",chebmsk_exp)
 end
 end
 
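The get_sky_samples.jl change makes sky_msk_wrapper return a (mask, variance) pair, so the caller now fills two 8700 x N matrices, skymsk and skyvar, and serializes both, skipping the work only when both cache files already exist. A toy-sized sketch of that collect-and-column-fill pattern; fake_msk_wrapper, the array sizes, and the file names are hypothetical stand-ins:

using Serialization

npix, nvisit = 16, 4                 # stands in for 8700 pixels x length(ntuplst)
fake_msk_wrapper(i) = (rand(npix) .> 0.1, rand(npix))  # hypothetical (mask, fvarvec)

pout = map(fake_msk_wrapper, 1:nvisit)
skymsk = zeros(npix, nvisit)
skyvar = zeros(npix, nvisit)
for i in 1:nvisit
    skymsk[:,i] .= pout[i][1]        # first element: the simple sky mask
    skyvar[:,i] .= pout[i][2]        # second element: the per-pixel variance
end

tmpd = mktempdir()
serialize(joinpath(tmpd,"skymsk_toy.jdat"), skymsk)
serialize(joinpath(tmpd,"skyvar_toy.jdat"), skyvar)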
[diff for the sixth changed file did not load and is not shown]
