Merged
26 changes: 6 additions & 20 deletions in Configuration/PyReleaseValidation/scripts/das-up-to-nevents.py
@@ -117,8 +117,7 @@ def no_intersection():
     print("No X509 proxy set. Exiting.")
     sys.exit(1)
 
-## Check if we are in the cms-bot "environment"
-testing = "JENKINS_PREFIX" in os.environ
+## Check if we are in the cms-bot "environment"
 dataset = args.dataset
 events = args.events
 threshold = args.threshold
@@ -205,12 +204,10 @@ def no_intersection():
 if (len(golden_data_runs)==0):
     no_intersection()
 
-if testing:
-    golden_data_runs = golden_data_runs[:1] # take only the first run
 # building the dataframe, cleaning for bad lumis
 golden_data_runs_tocheck = golden_data_runs
 
-if args.precheck and not testing:
+if args.precheck:
     golden_data_runs_tocheck = []
     # Here we check run per run.
     # This implies more dasgoclient queries, but smaller outputs
@@ -223,20 +220,12 @@ def no_intersection():
         if events > 0 and sum_events > events:
             break
 das_opt = "run in %s"%(str([int(g) for g in golden_data_runs_tocheck]))
 
-if testing:
-    golden_data_runs_tocheck = golden_data_runs[:1] # take only the first run
-    # in testing mode we just take the first file
-    das_opt = "run=%s"%(golden_data_runs_tocheck[0])
-
-if not testing:
-    df = das_lumi_data(dataset,opt=das_opt).merge(das_file_data(dataset,opt=das_opt),on="file",how="inner") # merge file informations with run and lumis
-else:
-    df = das_lumi_data(dataset,opt=das_opt)
+df = das_lumi_data(dataset,opt=das_opt).merge(das_file_data(dataset,opt=das_opt),on="file",how="inner") # merge file informations with run and lumis
 
 df["lumis"] = [[int(ff) for ff in f.replace("[","").replace("]","").split(",")] for f in df.lumis.values]
 
-if not args.nogolden and not testing:
+if not args.nogolden:
 
     df_rs = []
     for r in golden_data_runs_tocheck:
@@ -262,16 +251,14 @@ def no_intersection():
 df.loc[:,"min_lumi"] = [min(f) for f in df.lumis]
 df.loc[:,"max_lumi"] = [max(f) for f in df.lumis]
 df = df.sort_values(["run","min_lumi","max_lumi"])
 
-if testing:
-    df = df.head(1) # take only the first file
 
 if site is not None:
     df = df.merge(das_file_site(dataset,site),on="file",how="inner")
 
 if args.pandas:
     df.to_csv(dataset.replace("/","")+".csv")
 
-if events > 0 and not testing:
+if events > 0:
     df = df[df["events"] <= events] #jump too big files
     df.loc[:,"sum_evs"] = df.loc[:,"events"].cumsum()
     df = df[df["sum_evs"] < events]
@@ -293,4 +280,3 @@ def no_intersection():
 
 sys.exit(0)
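
For context, a minimal standalone sketch of the single code path this PR leaves in place: one unconditional merge of the DAS lumi query with the per-file event counts, followed by the cumulative event cap. The das_lumi_data and das_file_data names come from the script itself, but their stub bodies, the sample values, and the dataset name below are hypothetical stand-ins for the real dasgoclient queries.

import pandas as pd

def das_lumi_data(dataset, opt=""):
    # Hypothetical stand-in for the script's dasgoclient lumi query:
    # one row per file with its run and stringified lumi list.
    return pd.DataFrame({"file": ["f1", "f2"],
                         "run": [379000, 379000],
                         "lumis": ["[1,2]", "[3,4]"]})

def das_file_data(dataset, opt=""):
    # Hypothetical stand-in for the per-file event-count query.
    return pd.DataFrame({"file": ["f1", "f2"], "events": [60, 80]})

dataset = "/Dummy/Run2024X-PromptReco/RAW"   # placeholder dataset name
das_opt = "run in [379000]"
events = 100

# Single code path after this PR: always merge lumi info with file info.
df = das_lumi_data(dataset, opt=das_opt).merge(
    das_file_data(dataset, opt=das_opt), on="file", how="inner")

# Parse the stringified lumi lists the same way the script does.
df["lumis"] = [[int(ff) for ff in f.replace("[", "").replace("]", "").split(",")]
               for f in df.lumis.values]

# Cap the file list at the requested number of events with a cumulative sum.
if events > 0:
    df = df[df["events"] <= events]              # drop files that alone exceed the cap
    df.loc[:, "sum_evs"] = df.loc[:, "events"].cumsum()
    df = df[df["sum_evs"] < events]

print(df)  # keeps only f1 (60 events); adding f2 (cumsum 140) would pass the cap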