Skip to content

Examples

Exotics

CalRatio Displaced Jets (Alice Morris)

Click for more details!
Building image for data processing — Dockerfile
# Data-processing build image: CVMFS-assisted RootCore base (date-pinned tag).
FROM lukasheinrich/recast_cvmfs_assisted:20161231
# Copy the full analysis source tree into the image.
ADD . /DiVertAnalysisCode/
WORKDIR /DiVertAnalysisCode/
# Set up the ATLAS RootCore release and compile the packages.
# NOTE(review): "x || echo ignore alrb" deliberately lets setup steps fail
# (e.g. when CVMFS/ALRB is unavailable at build time) without aborting the
# chain — but shell `&&`/`||` associate left-to-right, so the `||` also
# swallows failures of everything before it; confirm this is intended.
RUN echo "image building" && \
    source ~/.bashrc || echo ignore alrb && \
    rcSetup Base,2.4.28 || echo ignore alrb && \
    rc clean && rc find_packages && \
    rc compile
.gitlab-ci.yml (image-building part)
stages:
  - build

variables:
  # clone all submodules recursively at job start
  GIT_SUBMODULE_STRATEGY: recursive

# Global before_script: obtain a Kerberos ticket and configure SSH for CERN hosts.
before_script:
  - mkdir -p /root/.ssh
  # tolerate ALRB setup failure on runners without CVMFS
  - source /root/.bashrc || echo ignore alrb
  - echo "${KRB_PASSWORD}" | kinit ${KRB_USERNAME}@CERN.CH
  - klist
  # Single command (the original rendering wrapped it mid-token, splitting
  # "ForwardX11" across two lines and breaking the YAML list item).
  - echo -e "Host svn.cern.ch lxplus.cern.ch\n\tUser ${KRB_USERNAME}\n\tStrictHostKeyChecking no\n\tGSSAPIAuthentication yes\n\tGSSAPIDelegateCredentials yes\n\tProtocol 2\n\tForwardX11 no\n\tIdentityFile /root/.ssh/id_rsa" >> /root/.ssh/config

# Build the Dockerfile with kaniko on a CVMFS-capable runner and push it to
# the project registry, tagged with the branch slug.
build_with_kaniko:
    stage: build
    tags:
      - cvmfs
    image:
        name: gitlab-registry.cern.ch/ci-tools/docker-image-builder
        entrypoint: [""]
    before_script:
        # disable the global before_script (Kerberos/SSH setup not needed here)
        - echo "override global before_script"
    script:
        # write registry credentials for kaniko
        - echo "{\"auths\":{\"$CI_REGISTRY\":{\"username\":\"$CI_REGISTRY_USER\",\"password\":\"$CI_REGISTRY_PASSWORD\"}}}" > /kaniko/.docker/config.json
        # NOTE(review): this prints registry credentials into the CI job log — consider removing.
        - cat /kaniko/.docker/config.json
        - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --destination $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG
Building image for selection and statistical analysis — Dockerfile
# Selection/statistical-analysis build image based on the ATLAS AnalysisBase release.
FROM gitlab-registry.cern.ch/atlas/athena/analysisbase:21.2.247
# Copy the analysis source into the image.
ADD . /code
WORKDIR /code
# Set up the release, pre-compile the ROOT helper macro (helper.C+ builds the
# ACLiC dictionary), then build the limit finder from scratch.
RUN source ~/release_setup.sh &&  \
    cd GenerateMCFiles/       &&  \
    root -l -q -b helper.C+   &&  \
    cd ../ExtrapLimitFinder   &&  \
    make clean                &&  \
    make
# Exec-form CMD so bash runs as PID 1 and receives signals directly
# (the original shell form wrapped it in an extra /bin/sh -c).
CMD ["/bin/bash"]
.gitlab-ci.yml
stages:
  - build

# Build the statistical-analysis Dockerfile with kaniko and push it to the
# project registry, tagged with the branch slug.
build_with_kaniko:
    stage: build
    image:
        name: gitlab-registry.cern.ch/ci-tools/docker-image-builder
        entrypoint: [""]
    before_script:
        # disable any global before_script
        - echo "override global before_script"
    script:
        # write registry credentials for kaniko
        - echo "{\"auths\":{\"$CI_REGISTRY\":{\"username\":\"$CI_REGISTRY_USER\",\"password\":\"$CI_REGISTRY_PASSWORD\"}}}" > /kaniko/.docker/config.json
        # NOTE(review): prints registry credentials into the CI job log — consider removing.
        - cat /kaniko/.docker/config.json
        - /kaniko/executor --context $CI_PROJECT_DIR --dockerfile $CI_PROJECT_DIR/Dockerfile --destination $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG
RECAST workflow setup
steps.yml
# Step: run the DiVertAnalysis ntuple production over the input DxAOD,
# authenticated via Kerberos, and publish the output histogram file as 'ntuple'.
dataprocessing:
  process:
    process_type: 'interpolated-script-cmd'
    script: |
      /recast_auth/getkrb.sh
      source /root/.bashrc || echo ignore alrb
      source /DiVertAnalysisCode/RootCoreBin/local_setup.sh || echo ignore alrb
      DiVertAnalysisRunner {inputfile} --NEvents {nevents} --CERNBox true
      mv /DiVertAnalysisCode/myoutput/hist-*.root {outputfile}
  publisher:
    publisher_type: 'interpolated-pub'
    publish:
      ntuple: '{outputfile}'
  environment:
    environment_type: 'docker-encapsulated'
    image: gitlab-registry.cern.ch/atlas-phys-exotics-llp-mscrid/36fbanalysis/divertanalysiscode
    imagetag: testing-ci-branch
    resources:
      # NOTE(review): mixed entry styles — 'kerberos: true' is a mapping while
      # 'CVMFS' is a bare string; confirm both forms are accepted by the backend.
      - kerberos: true
      - CVMFS

# Step: low-ET trigger (L1Topo) event selection over the ntuple; publishes the
# slimmed file for the extrapolation step.
eventselection_low:
  process:
    process_type: 'interpolated-script-cmd'
    script: |
      /recast_auth/getkrb.sh
      source /home/atlas/release_setup.sh
      cd /code/GenerateMCFiles/
      python GenerateROOTFilesL1Topo.py -s {inputfile} -o {outputfile} -n -1
  publisher:
    publisher_type: 'interpolated-pub'
    publish:
      slimmedfile: '{outputfile}'
  environment:
    environment_type: docker-encapsulated
    image: gitlab-registry.cern.ch/atlas-phys-exotics-llp-mscrid/36fbanalysis/calratioextrapolation
    imagetag: testing-ci-branch
    resources:
      - kerberos: true

# Step: high-ET trigger (TAU60) event selection — same structure as
# eventselection_low but running the TAU60 selection script.
eventselection_high:
  process:
    process_type: 'interpolated-script-cmd'
    script: |
      /recast_auth/getkrb.sh
      source /home/atlas/release_setup.sh
      cd /code/GenerateMCFiles/
      python GenerateROOTFilesTAU60.py -s {inputfile} -o {outputfile} -n -1
  publisher:
    publisher_type: 'interpolated-pub'
    publish:
      slimmedfile: '{outputfile}'
  environment:
    environment_type: 'docker-encapsulated'
    image: gitlab-registry.cern.ch/atlas-phys-exotics-llp-mscrid/36fbanalysis/calratioextrapolation
    imagetag: testing-ci-branch
    resources:
      - kerberos: true

# Step: extrapolate the selection efficiency to the requested proper lifetime;
# shared by both the low- and high-ET branches of the workflow.
extrapolation:
  process:
    process_type: 'interpolated-script-cmd'
    script: |
      /recast_auth/getkrb.sh
      source /home/atlas/release_setup.sh
      cd /code/
      python newExtrapProcedure.py {inputfile} {outputfile} {lifetime_in_m}
  publisher:
    publisher_type: 'interpolated-pub'
    publish:
      extrapfile: '{outputfile}'
  environment:
    environment_type: 'docker-encapsulated'
    image: gitlab-registry.cern.ch/atlas-phys-exotics-llp-mscrid/36fbanalysis/calratioextrapolation
    imagetag: testing-ci-branch
    resources:
      - kerberos: true

# Step: run the ABCD limit setting with region counts (nA..nD), the ABCD-method
# uncertainty, and the luminosity supplied as workflow parameters.
limitsetting:
  process:
    process_type: 'interpolated-script-cmd'
    script: |
      /recast_auth/getkrb.sh
      source /home/atlas/release_setup.sh
      cd /code/ExtrapLimitFinder/   #also feed in systematics values here?
      ./ExtrapLimitFinder -e {inputfile} -A {nA} -B {nB} -C {nC} -D {nD} -f {outputfile} -a -E {ABCDerror} -L {lumi} -u
  publisher:
    publisher_type: 'interpolated-pub'
    publish:
      limitfile: '{outputfile}'
  environment:
    environment_type: 'docker-encapsulated'
    image: gitlab-registry.cern.ch/atlas-phys-exotics-llp-mscrid/36fbanalysis/calratioextrapolation
    imagetag: testing-ci-branch
    resources:
      - kerberos: true
workflow.yml
# Workflow DAG: dataprocessing -> {eventselection_low, eventselection_high}
# -> extrapolation -> limitsetting, with the low/high branches run in parallel.
stages:
  - name: dataprocessing
    dependencies: [init]
    scheduler:
      scheduler_type: singlestep-stage
      parameters:
        inputfile: {step: init, output: dxaod_file}
        nevents: {step: init, output: n_events}
        outputfile: '{workdir}/ntuple.root'
      step: {$ref: 'steps.yml#/dataprocessing'}
  - name: eventselection_low
    dependencies: [dataprocessing]
    scheduler:
      scheduler_type: singlestep-stage
      parameters:
        inputfile: {stages: dataprocessing, output: ntuple, unwrap: true}
        outputfile: '{workdir}/slim_mH400_mS100_lt5m_low1.root'
      step: {$ref: 'steps.yml#/eventselection_low'}
  - name: eventselection_high
    dependencies: [dataprocessing]
    scheduler:
      scheduler_type: singlestep-stage
      parameters:
        inputfile: {stages: dataprocessing, output: ntuple, unwrap: true}
        outputfile: '{workdir}/slim_mH400_mS100_lt5m_high.root'
      step: {$ref: 'steps.yml#/eventselection_high'}
  - name: extrapolation_low
    dependencies: [eventselection_low]
    scheduler:
      scheduler_type: singlestep-stage
      parameters:
        inputfile: {stages: eventselection_low, output: slimmedfile, unwrap: true}
        # NOTE(review): filename says mS50 while the selection stages use mS100 — confirm intended.
        outputfile: '{workdir}/plot_newExtrapMethod_mH400_mS50_lt5m_low1.root'
        # NOTE(review): 'stages: init' selector here, but 'step: init' is used in
        # dataprocessing above — verify both selector forms resolve identically.
        lifetime_in_m: {stages: init, output: lifetime_in_m, unwrap: true}
      step: {$ref: 'steps.yml#/extrapolation'}
  - name: extrapolation_high
    dependencies: [eventselection_high]
    scheduler:
      scheduler_type: singlestep-stage
      parameters:
        inputfile: {stages: eventselection_high, output: slimmedfile, unwrap: true}
        outputfile: '{workdir}/plot_newExtrapMethod_mH400_mS50_lt5m_high.root'
        lifetime_in_m: {stages: init, output: lifetime_in_m, unwrap: true}
      step: {$ref: 'steps.yml#/extrapolation'}
  - name: limitsetting_low
    dependencies: [extrapolation_low]
    scheduler:
      scheduler_type: singlestep-stage
      parameters:
        inputfile: {stages: extrapolation_low, output: extrapfile, unwrap: true}
        outputfile: '{workdir}/limit_mH400_mS100_lt5m_low1.root'
        # ABCD region counts and uncertainty for the low-ET selection
        nA: '7'
        nB: '2'
        nC: '70'
        nD: '57'
        ABCDerror: '0.245'
        lumi: '10.8'
      step: {$ref: 'steps.yml#/limitsetting'}
  - name: limitsetting_high
    dependencies: [extrapolation_high]
    scheduler:
      scheduler_type: singlestep-stage
      parameters:
        inputfile: {stages: extrapolation_high, output: extrapfile, unwrap: true}
        outputfile: '{workdir}/limit_mH400_mS100_lt5m_high.root'
        # ABCD region counts and uncertainty for the high-ET selection
        nA: '10'
        nB: '9'
        nC: '187'
        nD: '253'
        ABCDerror: '0.215'
        lumi: '33.0'
      step: {$ref: 'steps.yml#/limitsetting'}
recast.yml
# RECAST catalogue entry for the CalRatio analysis: workflow spec,
# default example inputs, and two standalone step tests.
name: calratio

metadata:
  author: apmorris
  short_description: 'A recast configuration for the CalRatio ATLAS analysis'

spec:
  workflow: workflow.yml

example_inputs:
  default:
    initdata:
      # EOS directory holding the input DxAODs
      dxaod_file: '/eos/user/a/apmorris/DisplacedJets/RECAST/xAODs/'
      lifetime_in_m: 0.7
      # -1 means process all events
      n_events: -1

tests:
# quick test of the data-processing step in isolation (1000 events)
- name: test_dataprocessing
  spec: steps.yml#/dataprocessing
  parameters:
    inputfile: '/eos/user/a/apmorris/DisplacedJets/RECAST/xAODs/'
    nevents: 1000
    outputfile: '{workdir}/ntuple.root'
# quick test of the high-ET selection step using a file baked into the image
- name: test_selection
  spec: steps.yml#/eventselection_high
  parameters:
    inputfile: '/code/test.root'
    outputfile: '{workdir}/slim.root'
.gitlab-ci.yml
# CI for the CalRatio RECAST workflow: a fast per-step test stage on every
# push, then a full workflow run stage.
stages:
  - test
  - build

testing:
  tags:
    - docker-privileged
  services:
    - docker:stable-dind
  stage: test
  image: 'recast/recastatlas:v0.3.0'
  script:
    # 'recast auth setup'/'write' emit shell commands on stdout; eval applies them
    - eval "$(recast auth setup -a $RECAST_USER -a $RECAST_PASS -a $RECAST_TOKEN -a gitlab-registry.cern.ch)"
    - eval "$(recast auth write --basedir authdir)"
    # $(...) executes the command printed by 'recast catalogue add'
    - $(recast catalogue add $PWD)
    - recast catalogue ls
    - $(recast ci cvmfs-helper)
    - recast tests run calratio test_dataprocessing --tag dataproc
    - recast tests run calratio test_selection --tag evsel
  except:
    variables:
      # allow skipping tests via commit message
      - $CI_COMMIT_MESSAGE =~ /skip tests/
  artifacts:
    paths:
      - recast-test-dataproc
      - recast-test-evsel
    expire_in: 1 day
    # keep artifacts even when the job fails, for debugging
    when: always

# Full workflow execution on a large privileged runner.
recast:
  tags:
    - docker-privileged
    - size-xl
  services:
    - docker:stable-dind
  stage: build
  image: 'recast/recastatlas:v0.3.0'
  script:
    - eval "$(recast auth setup -a $RECAST_USER -a $RECAST_PASS -a $RECAST_TOKEN -a default)"
    - eval "$(recast auth write --basedir authdir)"
    - $(recast catalogue add $PWD)
    - recast catalogue ls
    - $(recast ci cvmfs-helper)
    - recast run calratio --tag myrun
  artifacts:
    paths:
      - recast-myrun
    expire_in: 1 day
    when: always

MonoHbb (Philipp Gadow)

Click for more details!
Building image for data processing — Dockerfile
# Data-processing build image based on the ATLAS AthAnalysis release.
FROM gitlab-registry.cern.ch/atlas/athena/athanalysis:21.2.247
# Copy the XAMPPmonoH source; build happens out-of-source in /xampp/build.
ADD . /xampp/XAMPPmonoH
WORKDIR /xampp/build
# Set up the release, give the 'atlas' user ownership of the tree,
# then configure and build with CMake.
RUN source ~/release_setup.sh &&  \
    sudo chown -R atlas /xampp && \
    cmake ../XAMPPmonoH && \
    make -j4
.gitlab-ci.yml
variables:
  # all submodules will be cloned recursively upon start of CI job
  GIT_SUBMODULE_STRATEGY: recursive
  GIT_SSL_NO_VERIFY: "true"

stages:
  - docker

build_image:
  # description: triggers a build of the project as a Docker image
  stage: docker
  variables:
    # destination tag for the built image
    TO: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_NAME
  tags:
    - docker-image-build
  script:
    # the 'docker-image-build' runner performs the build itself; the script is unused
    - ignore
Building image for statistical analysis — Dockerfile
# Statistical-analysis build image based on the ATLAS AnalysisBase release.
FROM gitlab-registry.cern.ch/atlas/athena/analysisbase:21.2.247
ADD . /code
WORKDIR /code
# Take ownership for the 'atlas' user, set up the release and the MonoH
# environment, build, and record build provenance (timestamp + git revision).
RUN sudo chown -R atlas /code && \
    source /home/atlas/release_setup.sh && \
    cd /code && \
    source /code/setup.sh MonoH && \
    gmake -j5 && \
    echo "time stamp: $(date)" > /code/build.stamp && \
    git rev-parse --short HEAD > /code/build.revision
.gitlab-ci.yml (image-building part)
variables:
  # all submodules will be cloned recursively upon start of CI job
  GIT_SUBMODULE_STRATEGY: recursive
  GIT_SSL_NO_VERIFY: "true"

stages:
  - docker

build_image:
  stage: docker
  variables:
    # destination tag for the built image
    TO: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_NAME
  tags:
    - docker-image-build
  script:
    # the 'docker-image-build' runner performs the build itself; the script is unused
    - ignore
RECAST workflow setup
steps.yml
# Step: run the XAMPPmonoH selection over one MC16 campaign; reused for both
# mc16a and mc16d with different parameters from the workflow.
selection_stage:
  process:
    process_type: interpolated-script-cmd
    script: |
      source /recast_auth/getkrb.sh
      source /home/atlas/release_setup.sh
      cd /xampp/XAMPPmonoH
      echo '-------------------'
      echo 'Directory contents:'
      ls *
      echo '-------------------'
      if [ -z "{PRWFILE_mc16}" ]; then
        echo "Using internal/central PRW files"
      else
        echo 'Setting paths to pile-up reweighting files:'
        echo {PRWFILE_mc16}
        export PRWFILE={PRWFILE_mc16}
      fi
      echo '---------------------------'
      echo 'Start running analysis code'
      ls {input_mc16}
      source XAMPPmonoH/recast/recast_run.sh {input_mc16} {dsid} {datayears} {lumi} {xsecpb} {nevents}
      echo 'Done running analysis code'
      echo '---------------------------'
      echo 'Run directory contents:'
      ls *
      echo '---------------------------'
      echo 'Copying output'
      sudo cp /xampp/XAMPPmonoH/output_recast/recast_signal_WSMaker.root {outputfile}
      echo 'Script done'
  publisher:
    publisher_type: interpolated-pub
    publish:
      selected_signal_mc16: '{outputfile}'
    glob: true
  environment:
    environment_type: docker-encapsulated
    image: lukasheinrich/prodmonos
    imagetag: recast_monosbb-fix-fix
    resources:
      - kerberos: true

# Step: hadd the mc16a and mc16d selection outputs into a single WSMaker input.
merging_stage:
  process:
    process_type: interpolated-script-cmd
    script: |
      source /home/atlas/release_setup.sh
      echo '---------------------------'
      echo 'Directory contents:'
      ls *
      echo '---------------------------'
      echo '---------------------------'
      echo 'Merging samples'
      hadd recast_signal_WSMaker.root {selected_signal_mc16a} {selected_signal_mc16d}
      echo '---------------------------'
      echo 'Updated directory contents:'
      ls *
      echo '---------------------------'
      echo 'Copying output'
      sudo cp recast_signal_WSMaker.root {outputfile}
      echo 'Script done'
  publisher:
    publisher_type: interpolated-pub
    publish:
      selected_signal: '{outputfile}'
    glob: true
  environment:
    # no kerberos needed: all inputs come from previous stages
    environment_type: docker-encapsulated
    image: gitlab-registry.cern.ch/atlas-mpp-xampp/xamppmonoh
    imagetag: recast_monosbb-fix
# Step: build the workspace and compute expected/observed limits.
# In this script, '{{' / '}}' are escapes for literal braces in the
# interpolated template (they become '{' / '}' in the executed bash).
fitting_stage:
  process:
    process_type: interpolated-script-cmd
    interpreter: bash
    script: |
      source /recast_auth/getkrb.sh
      source /home/atlas/release_setup.sh
      echo '-------------------'
      echo 'Directory contents:'
      ls *
      echo '-------------------'
      echo 'Setting up environment'
      export FITINPUT={limit_backgrounds}
      export SIGNAL_0LEPTON={limit_inputs_signal}
      export INPUTVERSION=`cat configs/monoH_700_012lep_151617_recast.conf | grep InputVersion | sed 's/.* //g'`
      echo "FITINPUT=${{FITINPUT}}"
      echo "SIGNAL_0LEPTON=${{SIGNAL_0LEPTON}}"
      echo "INPUTVERSION=${{INPUTVERSION}}"
      echo '-------------------'
      echo 'Set up signal theory uncertainties'
      cp {signaltheoryuncertainty} /code/sigsys_signal.txt
      cat /code/sigsys_signal.txt
      echo 'Apply signal theory systematic uncertainties!'
      sed -i 's/SignalSystRECAST               false/SignalSystRECAST               true/g' /code/configs/monoH_700_012lep_151617_recast.conf
      echo '-------------------'
      echo 'Start statistical analysis'
      source setup.sh MonoH
      source /code/recast_setup.sh
      MakeWorkspace configs/monoH_700_012lep_151617_recast.conf recast
      echo 'Expected limit'
      python scripts/getLimit.py $INPUTVERSION.recast 1 1
      echo 'Observed limit'
      python scripts/getLimit.py $INPUTVERSION.recast 0 1
      sudo mv root-files/ {outputdir}
      echo '-------------------'
      echo 'Script done'
  publisher:
    publisher_type: interpolated-pub
    publish:
      outputs: '{outputdir}'
  environment:
    environment_type: docker-encapsulated
    image: gitlab-registry.cern.ch/monohbb/2017/fitting
    imagetag: recast_CONF2018
    resources:
      - kerberos: true
workflow.yml
# Workflow DAG: per-campaign selection (mc16a, mc16d in parallel) -> merge -> fit.
stages:
- name: selection_stage_mc16a
  dependencies: [init]
  scheduler:
    scheduler_type: singlestep-stage
    parameters:
      input_mc16: {step: init, output: input_mc16a}
      outputfile: '{workdir}/recast_signal_WSMaker_mc16a.root'
      persistentdir: '{workdir}'
      dsid: {step: init, output: dsid}
      # mc16a corresponds to 2015+2016 data-taking conditions
      datayears: data1516
      nevents: {step: init, output: nevents}
      xsecpb: {step: init, output: xsecpb}
      lumi: {step: init, output: lumi_mc16a}
      PRWFILE_mc16: {step: init, output: PRWFILE_mc16a}
    step: {$ref: 'steps.yml#/selection_stage'}
- name: selection_stage_mc16d
  dependencies: [init]
  scheduler:
    scheduler_type: singlestep-stage
    parameters:
      input_mc16: {step: init, output: input_mc16d}
      outputfile: '{workdir}/recast_signal_WSMaker_mc16d.root'
      persistentdir: '{workdir}'
      # mc16d corresponds to 2017 data-taking conditions
      datayears: data17
      dsid: {step: init, output: dsid}
      nevents: {step: init, output: nevents}
      xsecpb: {step: init, output: xsecpb}
      lumi: {step: init, output: lumi_mc16d}
      PRWFILE_mc16: {step: init, output: PRWFILE_mc16d}
    step: {$ref: 'steps.yml#/selection_stage'}
- name: merging_stage
  dependencies: [selection_stage_mc16a, selection_stage_mc16d]
  scheduler:
    scheduler_type: singlestep-stage
    parameters:
      selected_signal_mc16a: {step: selection_stage_mc16a, output: selected_signal_mc16}
      selected_signal_mc16d: {step: selection_stage_mc16d, output: selected_signal_mc16}
      outputfile: '{workdir}/recast_signal_WSMaker.root'
      persistentdir: '{workdir}'
    step: {$ref: 'steps.yml#/merging_stage'}
- name: fitting_stage
  dependencies: [merging_stage]
  scheduler:
    scheduler_type: singlestep-stage
    parameters:
      limit_backgrounds: {step: init, output: limit_backgrounds}
      signaltheoryuncertainty: {step: init, output: signaltheoryuncertainty}
      limit_inputs_signal: {step: merging_stage, output: selected_signal}
      persistentdir: '{workdir}'
      outputdir: '{workdir}'
    step: {$ref: 'steps.yml#/fitting_stage'}
recast.yml
# RECAST catalogue entry for the Mono-h(bb) analysis: workflow spec, default
# example inputs (data fetched from a GitLab archive), and per-campaign tests.
name: examples/monohbb

metadata:
  author: Alex Schuy (alexschuy), Paul Philipp Gadow (pgadow)
  short_description: 'RECAST of Mono-h(bb) analysis with 79.8/fb: MET + H(bb).'

spec:
  toplevel: gitlab-cern:recast-monosbb/workflow:specs
  workflow: workflow.yml

example_inputs:
  default:
    dataopts:
      # auxiliary data (PRW files, input lists) pulled from the config repo
      inputarchive: https://gitlab.cern.ch/api/v4/projects/recast-monosbb%2Fmono-sbb-config/repository/archive.zip?sha=master
      archivematch: '*/data/'
    initdata:
      input_mc16a: 'monosbb/mc16_13TeV.311378.MadGraphPy8EG_A14NNP23LO_monoSbb_zp500_dm200_dh50.deriv.DAOD_EXOT24.e7346_s3126_r9364_p3565.txt'
      input_mc16d: 'monosbb/mc16_13TeV.311378.MadGraphPy8EG_A14NNP23LO_monoSbb_zp500_dm200_dh50.deriv.DAOD_EXOT24.e7346_s3126_r10201_p3565.txt'
      dsid: '311378'
      nevents: '100'  # put -1 to run over all events
      lumi_mc16a: '36.21465331125'
      lumi_mc16d: '43.5934630485'
      xsecpb: '1.024'
      PRWFILE_mc16a: 'PRW/pileup_mc16a_dsid311378_FS.root'
      PRWFILE_mc16d: 'PRW/pileup_mc16d_dsid311378_FS.root'
      limit_backgrounds: 'root://eosuser.cern.ch//eos/project/r/recast/atlas/ATLAS-CONF-2018-039/background/'
      signaltheoryuncertainty: 'theory-uncertainties/sys_mc16_13TeV.311378.MadGraphPy8EG_A14NNP23LO_monoSbb_zp500_dm200_dh50.txt'


results:
- name: Expected and observed 95% CLs limits on nominal signal cross-section (the POI in the fit)
  relpath: fitting_stage/root-files/MonoH_0700_exocirculation_2018-06-24.recast_obs/limit_1.txt


tests:
# standalone selection test per campaign; {readdir0} points at the unpacked data archive
- name: test_selection_mc16a
  spec: steps.yml#/selection_stage
  parameters:
    input_mc16: '{readdir0}/monosbb/mc16_13TeV.311378.MadGraphPy8EG_A14NNP23LO_monoSbb_zp500_dm200_dh50.deriv.DAOD_EXOT24.e7346_s3126_r9364_p3565.txt'
    dsid: '311378'
    nevents: '100'
    datayears: 'data1516'
    lumi: '36.21465331125'
    xsecpb: '1.024'
    PRWFILE_mc16: '{readdir0}/PRW/pileup_mc16a_dsid311378_FS.root'
    outputfile: '{workdir}/output.root'
  data:
  - monosbb/data
- name: test_selection_mc16d
  spec: steps.yml#/selection_stage
  parameters:
    input_mc16: '{readdir0}/monosbb/mc16_13TeV.311378.MadGraphPy8EG_A14NNP23LO_monoSbb_zp500_dm200_dh50.deriv.DAOD_EXOT24.e7346_s3126_r10201_p3565.txt'
    dsid: '311378'
    nevents: '100'
    lumi: '43.5934630485'
    datayears: 'data17'
    xsecpb: '1.024'
    PRWFILE_mc16: '{readdir0}/PRW/pileup_mc16d_dsid311378_FS.root'
    outputfile: '{workdir}/output.root'
  data:
  - monosbb/data
.gitlab-ci.yml
# CI job: run the full Mono-h(bb) RECAST workflow inside docker-in-docker.
recast:
  tags:
  - docker-privileged
  services:
  - docker:stable-dind
  stage: build
  image: "recast/recastatlas:v0.3.0"
  variables:
    GIT_SUBMODULE_STRATEGY: recursive
  script:
  # authenticate
  - eval "$(recast auth setup -a $RECAST_USER -a $RECAST_PASS -a $RECAST_TOKEN -a default)"
  - eval "$(recast auth write --basedir authdir)"

  # add my workflow
  - $(recast catalogue add $PWD)
  - recast catalogue ls
  - recast catalogue describe examples/monohbb
  - recast catalogue check examples/monohbb

  # run the workflow
  - recast run examples/monohbb --tag myrun
  artifacts:
    paths:
    - recast-myrun
    expire_in: 1 day
    # keep artifacts even on failure, for debugging
    when: always

W'(lnu) (Magnar Bugge)

Click for more details!
Building image for data processing (muon) — Dockerfile
# Muon-channel analysis build image based on the ATLAS AnalysisBase release.
FROM gitlab-registry.cern.ch/atlas/athena/analysisbase:21.2.247
ADD . /code
WORKDIR /code
# Take ownership for the 'atlas' user, set up the release, do an out-of-source
# CMake build, and record build provenance (timestamp + git revision).
RUN sudo chown -R atlas /code && \
    source /home/atlas/release_setup.sh && \
    echo 'Starting build of muon channel analysis' && \
    mkdir build && \
    cd build && \
    cmake ../source && \
    make && \
    echo 'Done with build of muon channel analysis' && \
    echo "time stamp: $(date)" > /code/build.stamp && \
    git rev-parse --short HEAD > /code/build.revision
.gitlab-ci.yml
variables:
  # all submodules will be cloned recursively upon start of CI job
  GIT_SUBMODULE_STRATEGY: recursive
  GIT_SSL_NO_VERIFY: "true"

stages:
  - docker

build_image:
  stage: docker
  variables:
    # destination tag for the built image
    TO: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG
  tags:
    - docker-image-build
  script:
    # the 'docker-image-build' runner performs the build itself; the script is unused
    - ignore
Building image for statistical analysis — Dockerfile
# Statistical-analysis build image based on a plain ROOT installation.
# NOTE(review): ':latest' is not reproducible — consider pinning a version tag.
FROM rootproject/root-cc7:latest
USER root
ADD . /code
WORKDIR /code
# NOTE(review): likely redundant — files added under a root build are already root-owned.
RUN chown -R root /code
# Run the build under bash so 'source' is available (default /bin/sh may lack it).
RUN bash -c 'source /usr/local/bin/thisroot.sh && \
    echo "Starting build of statistical analysis" && \
    source build.sh && \
    echo "Done with build of statistical analysis" && \
    echo "time stamp: $(date)" > /code/build.stamp && \
    git rev-parse --short HEAD > /code/build.revision'
# NOTE(review): image ends with USER root active — verify downstream steps require root.
.gitlab-ci.yml (image-building part)
variables:
  # all submodules will be cloned recursively upon start of CI job
  GIT_SUBMODULE_STRATEGY: recursive
  GIT_SSL_NO_VERIFY: "true"

stages:
  - docker

build_image:
  stage: docker
  variables:
    # destination tag for the built image
    TO: $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_SLUG
  tags:
    - docker-image-build
  script:
    # the 'docker-image-build' runner performs the build itself; the script is unused
    - ignore
RECAST workflow setup
electronAnalysis.yml
# Single-step spec: electron-channel event selection; resolves xrootd inputs,
# copies pile-up configs, runs the selection, and publishes the limit inputs.
process:
  process_type: interpolated-script-cmd
  script: |
    /recast_auth/getkrb.sh
    source /home/atlas/release_setup.sh
    echo '-------------------'
    echo 'Directory contents:'
    ls *
    echo '-------------------'
    echo 'Setting up environment'
    source build/x86*/setup.sh
    cd scripts
    echo '---------------------------'
    echo 'Copying pile-up config files'
    mkdir pileUpConfigs/
    cp {PU_config_mc16a} pileUpConfigs/PURW_mc16a.root
    cp {PU_config_mc16d} pileUpConfigs/PURW_mc16d.root
    cp {PU_config_mc16e} pileUpConfigs/PURW_mc16e.root
    echo '---------------------------'
    echo 'Resolving input datasets'
    echo '---------------------------'
    echo {inputdata_xrootd} > xrdInputPath.txt
    python resolveInputs.py
    echo '---------------------------'
    echo 'Start running analysis code'
    echo '---------------------------'
    ./run.sh $(cat inputFiles.txt) {NevtsToProcess}
    echo '---------------------------'
    echo 'Run directory contents:'
    ls *
    echo '---------------------------'
    echo 'Copying output'
    sudo cp FinalOutputs/limit_inputs.root {outputfile}
    sudo cp -r Outputs {persistentdir}
    sudo cp -r FinalOutputs {persistentdir}
    echo 'Script done'
publisher:
  publisher_type: interpolated-pub
  publish:
    selected_signal: '{outputfile}'
  glob: true
environment:
  environment_type: docker-encapsulated
  image: gitlab-registry.cern.ch/atlas-phys/exot/lpx/exot-2018-30-projects/exot-2018-30-electronanalysis
  # NOTE(review): 'latest' is not reproducible — consider pinning the tag.
  imagetag: latest
  resources:
    - kerberos: true
muonAnalysis.yml
# Single-step spec: muon-channel event selection plus post-processing that
# merges MC outputs and builds the signal-template limit input file.
process:
  process_type: interpolated-script-cmd
  script: |
    /recast_auth/getkrb.sh
    source /home/atlas/release_setup.sh
    mkdir Run
    echo '-------------------'
    echo 'Directory contents:'
    ls *
    echo '-------------------'
    echo 'Setting up environment'
    source build/x86*/setup.sh
    cd Run
    echo '---------------------------'
    echo 'Copying pile-up config files'
    mkdir pileUpConfigs/
    cp {PU_config_mc16a} pileUpConfigs/PURW_mc16a.root
    cp {PU_config_mc16d} pileUpConfigs/PURW_mc16d.root
    cp {PU_config_mc16e} pileUpConfigs/PURW_mc16e.root
    echo '---------------------------'
    echo 'Start running analysis code'
    echo '---------------------------'
    runit submitDir {inputdata_xrootd} {NevtsToProcess}
    echo 'Done running analysis code'
    echo '---------------------------'
    echo 'Run directory contents:'
    ls *
    echo '---------------------------'
    echo 'Running post-processing to create limit input file'
    cp ../scripts/* ./
    python mergeMCfiles.py
    ln -s submitDir/*merge.root ./signalAnalysisOutput.root
    root -l -q -b makeLimitInputs_signalOnly.cpp
    echo '---------------------------'
    echo 'Run directory contents:'
    ls *
    echo '---------------------------'
    echo 'Copying output'
    sudo cp signal_templates_mu.root {outputfile}
    sudo cp -r submitDir {persistentdir}
    echo 'Script done'
publisher:
  publisher_type: interpolated-pub
  publish:
    selected_signal: '{outputfile}'
  glob: true
environment:
  environment_type: docker-encapsulated
  image: gitlab-registry.cern.ch/atlas-phys/exot/lpx/exot-2018-30-projects/exot-2018-30-muonanalysis
  # NOTE(review): 'latest' is not reproducible — consider pinning the tag.
  imagetag: latest
  resources:
    - kerberos: true
statisticalAnalysis.yml
# Single-step spec: BAT-based statistical analysis; runs the limit code for the
# electron channel, the muon channel, and their combination, then collects outputs.
process:
  process_type: interpolated-script-cmd
  interpreter: bash
  script: |
    echo '-------------------'
    echo 'Directory contents:'
    ls *
    echo '-------------------'
    echo 'Copying inputs'
    cp {limit_inputs_el} {limit_inputs_mu} {signal_templates_el} {signal_templates_mu} trunk/input/
    echo '-------------------'
    echo 'Setting up environment'
    export BATINSTALLDIR=$PWD/BAT-1.0.0
    export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$BATINSTALLDIR/lib
    export PATH=$PATH:$BATINSTALLDIR/bin
    cd trunk
    echo '-------------------'
    echo 'trunk directory contents:'
    ls *
    echo '-------------------'
    cd limits
    echo 'Starting BAT run for the electron channel'
    ./templateLimitBATcombSys 100 0 0   0   0 1 1 0   1 0   -1 1 0 {maxSigmaSig}
    echo 'BAT run for the electron channel done'
    echo '-------------------'
    echo 'Starting BAT run for the muon channel'
    ./templateLimitBATcombSys 200 0 0   0   0 1 1 0   0 1   -1 1 0 {maxSigmaSig}
    echo 'BAT run for the muon channel done'
    echo '-------------------'
    echo 'Starting BAT run for the combined result'
    ./templateLimitBATcombSys 300 0 0   0   0 1 1 0   1 1   -1 1 0 {maxSigmaSig}
    echo 'BAT run for the combined result done'
    echo '-------------------'
    mkdir outputs
    mv run??? outputs/
    echo 'Output dir contents:'
    ls outputs
    echo '-------------------'
    echo 'Copying outputs'
    cp -r outputs {outputdir}
    cp -r ../input {persistentdir}
    echo 'Script done'
publisher:
  publisher_type: interpolated-pub
  publish:
    outputs: '{outputdir}'
environment:
  # no kerberos needed: all inputs come from previous stages
  environment_type: docker-encapsulated
  image: gitlab-registry.cern.ch/atlas-phys/exot/lpx/exot-2018-30-projects/exot-2018-30-statisticalanalysis
  # NOTE(review): 'latest' is not reproducible — consider pinning the tag.
  imagetag: latest
workflow.yml
# Workflow DAG: electron and muon selections run in parallel, then feed the
# combined statistical analysis stage.
stages:
- name: eventSelectionEl
  dependencies: [init]
  scheduler:
    scheduler_type: singlestep-stage
    parameters:
      inputdata_xrootd: {step: init, output: inputdata_xrootd_el}
      outputfile: '{workdir}/signal_templates_el.root'
      persistentdir: '{workdir}'
      NevtsToProcess: {step: init, output: NevtsToProcess}
      PU_config_mc16a: {step: init, output: PU_config_el_mc16a}
      PU_config_mc16d: {step: init, output: PU_config_el_mc16d}
      PU_config_mc16e: {step: init, output: PU_config_el_mc16e}
    step: {$ref: 'specs/electronAnalysis.yml'}
- name: eventSelectionMu
  dependencies: [init]
  scheduler:
    scheduler_type: singlestep-stage
    parameters:
      inputdata_xrootd: {step: init, output: inputdata_xrootd_mu}
      outputfile: '{workdir}/signal_templates_mu.root'
      persistentdir: '{workdir}'
      NevtsToProcess: {step: init, output: NevtsToProcess}
      PU_config_mc16a: {step: init, output: PU_config_mu_mc16a}
      PU_config_mc16d: {step: init, output: PU_config_mu_mc16d}
      PU_config_mc16e: {step: init, output: PU_config_mu_mc16e}
    step: {$ref: 'specs/muonAnalysis.yml'}
- name: statAnalysis
  dependencies: [eventSelectionEl,eventSelectionMu]
  scheduler:
    scheduler_type: singlestep-stage
    parameters:
      # background limit inputs come from init; signal templates from the selections
      limit_inputs_el: {step: init, output: limit_inputs_el}
      limit_inputs_mu: {step: init, output: limit_inputs_mu}
      signal_templates_el: {step: eventSelectionEl, output: selected_signal}
      signal_templates_mu: {step: eventSelectionMu, output: selected_signal}
      maxSigmaSig: {step: init, output: maxSigmaSig}
      outputdir: '{workdir}/outputs'
      persistentdir: '{workdir}'
    step: {$ref: 'specs/statisticalAnalysis.yml'}
.gitlab-ci.yml
# CI job: run the full workflow directly with yadage inside docker-in-docker.
run_workflow:
  tags:
  - docker-privileged
  services:
  - docker:stable-dind
  stage: build
  # NOTE(review): image has no tag (implicitly 'latest') — pin a version for reproducibility.
  image: "lukasheinrich/recastatlas"
  script:
  - eval $(recast auth write --basedir here)
  - echo $RECAST_AUTH_PASSWORD | docker login -u $RECAST_AUTH_USERNAME --password-stdin  gitlab-registry.cern.ch
  # NOTE(review): the '|| echo' makes this job pass even when the workflow fails —
  # failures are only visible in the log/artifacts, not in the pipeline status.
  - yadage-run workdir specs/workflow.yml test/input.yml -d initdir=$PWD/test || echo "Workflow FAILED! Consult log files"
  artifacts:
    paths:
    - workdir
    expire_in: 1 day

Monojet (Danika MacDonell) In Progress

SUSY

HDBS


Last update: July 5, 2023