update from parask changes
paraskevas committed Jan 29, 2025
1 parent 157c0ba commit 91fd982
Showing 8 changed files with 7,093 additions and 25 deletions.
6,415 changes: 6,415 additions & 0 deletions bin/PCV_EPN.I14_ASH


237 changes: 237 additions & 0 deletions bin/config.template
@@ -0,0 +1,237 @@
##
## This is a configuration template file for ddprocess.sh
## All lines starting with the character '#' are considered
##+ comments and are *NOT* read.
##
## To set a variable, use one of the forms:
## VAR_NAME=VAR_VALUE, or
## VAR_NAME = VAR_VALUE
## without the leading '##'
##
## Do not use the characters '#' or ' ' in either the variable names or
##+ in the variable values
##
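## Example of a valid assignment (shown commented-out here so that it is not
##+ actually read; 'SAMPLE_VAR' is just a hypothetical name):
## SAMPLE_VAR = sample_value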

## Set debugging mode
DEBUG_MODE = 1

## The year
YEAR = 2016

## The day of year (doy) to process
DOY = 25

## The LOADGPS.setvar file
B_LOADGPS = ${HOME}/bern52/BERN52/GPS/EXE/LOADGPS.setvar

## The name of the campaign
CAMPAIGN = GREECE

## The solution id
SOLUTION_ID = DSO

## The directory where the products will be saved
## SAVE_DIR = ${HOME}/data/SAVEDISK

## Append suffix to saved products
# APND_SUFFIX = _test1

## json output file (OBSOLETE)
## JSON_OUT = greece.json

## The satellite system. You can choose between:
##+ * "GPS"
##+ * "GLONASS"
##+ * "GALILEO"
##+ * "GPS/GAL"
##+ * "GPS/GLO"
##+ * "GAL/GLO"
SAT_SYS = GPS/GLO

## The path to the 'tables' directory
TABLES_DIR = ${HOME}/tables

## Name of the analysis center for GPS/GNSS products
AC = COD

## Stations per cluster
STATIONS_PER_CLUSTER = 3

## Files (i.e. baselines) per cluster
FILES_PER_CLUSTER = 5

## Elevation angle in degrees (0, 90)
ELEVATION_ANGLE = 3

## The filename of the .PCF file to use
PCF_FILE = NTUA_DDP.PCF

## Use REPRO2 campaign products ('YES' or 'NO')
USE_REPRO2 = NO

## Use CODE's REPRO13 products ('YES' or 'NO')
COD_REPRO13 = YES

## Set this variable to 'YES' to skip RINEX downloading. Only the files
##+ found in the campaign's /RAW folder will be processed (either in lower or
##+ in upper case). The database will **not** be queried for the station/rinex
##+ information; station information (i.e. 'MARKER NAME' and 'MARKER NUMBER')
##+ is extracted from the RINEX files.
## The RINEX files should comply with the naming convention ssssddd0.yy[o|O];
##+ **not** (UNIX) compressed, **not** Hatanaka.
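## Example (hypothetical 4-char station id 'xxxx'): the observation file for
##+ doy 025 of year 2016 should be named xxxx0250.16o (or xxxx0250.16O).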
SKIP_RNX_DOWNLOAD = NO

## Specify the a-priori coordinate file (.CRD) to be used. Only provide the
##+ filename; the extension is automatically set to '.CRD'. The search path
##+ for this file is the ${TABLES_DIR}/crd/ directory.
## A-priori coordinates are only needed for the sites that:
## 1. are not recorded in the IGB08_R.[CRD|VEL] files, or
##+ is set to 'YES'
APRINF = REG213600
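## Example: with the value above, the a-priori coordinate file that will be
##+ read is ${TABLES_DIR}/crd/REG213600.CRD.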
## USE_EPN_A_SSC = YES (OBSOLETE)

## Specify the station information file (.STA) to be used. Only provide the
##+ filename; the extension is automatically set to '.STA'. The search path
##+ for this file is either ${TABLES_DIR}/sta/ or the campaign's /STA
##+ directory
STAINF = GREECE

## The filename of the atl (i.e. atmospheric tidal loading) corrections file.
##+ If the value is left blank, no atl file will be used.
## If you do specify a file, do **not** use an extension; also the file
##+ should be placed either in the ${TABLES_DIR}/atl directory or in the
##+ campaign's /STA directory.
ATLINF =

## The filename of the blq (i.e. ocean tidal loading) corrections file.
##+ If the value is left blank, no blq file will be used.
## If you do specify a file, do **not** use an extension; also the file
##+ should be placed either in the ${TABLES_DIR}/blq directory or in the
##+ campaign's /STA directory.
BLQINF = NTUA

## Specify the name of the .FIX file, i.e. the name of the file where the
##+ reference stations are recorded. This file should be placed either in
##+ the tables/fix directory or in the campaign's /STA directory.
FIXINF = IGS14

## Specify the master (i.e. reference) coordinate and velocity file. For most
##+ cases this should be the current IGS realization (e.g. IGB08). In this last
##+ case, the files IGB08_R.[CRD|VEL] will be used to extrapolate the coordinates
##+ of the reference sites to the current epoch. These files are searched for in
##+ the ~tables/crd directory or in the campaign-specific STA directory.
REFINF = IGS14

## PSD (post-seismic deformation) corrections for ITRF2014 or any other
##+ reference frame
REFPSD = IGS14

## There are various options here:
## 1. User has a PCV file ready to be used within the BPE; in this case, just
##+ specify the 'PCVINF'/'PCVEXT' variables. 'PCVINF' should be filled with
##+ the filename of the PCV file to be used. The extension of the file is
##+ assumed to be 'PCVEXT'. The rundd program will search for the file in the
##+ locations:
##+ - $X/GEN, and
##+ - $TABLES_DIR/pcv
PCVINF = PCV_EPN
ATXINF =

## Extension to be used for:
##+ 1. the SATELLIT file (V_SATINF in the PCF file)
##+ 2. the PCV file (V_PCVINF in the PCF file)
PCVEXT = I14
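## Example: with PCVINF = PCV_EPN and PCVEXT = I14, the PCV file is expected
##+ to be named PCV_EPN.I14 and is searched for in $X/GEN and in
##+ ${TABLES_DIR}/pcv.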

## Observation selection (.SEL) file to be used for importing (any) RINEX v3.x
##+ files. Do not provide an extension, '.SEL' is appended automatically.
## Search folders are (in this order):
## 1. ${X}/GEN
## 2. ${TABLES_DIR}/sel
OBSSEL = OBS

## Set this option to 'YES' or 'NO' to use EUREF's weekly exclusion list.
## This exclusion list will be downloaded from the web and used to mark
##+ out (exclude from processing) the listed stations.
USE_EPN_EXCLUDE_LIST = YES

## Optionally, you can specify a file with a list of stations (one 'MARKER DOME'
##+ per line) to be excluded from the processing.
EXCLUSION_LIST = /home/bpe/tables/sta/GREECE.EXC
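## Example entry (hypothetical; assuming 'MARKER DOME' means the 4-char marker
##+ name followed by its DOMES number): XXXX 12345M001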

## If you need to download forecast VMF1 grid files, you will need credentials
##+ to access the TUVienna server
TUWIEN_VMF1_USER =
TUWIEN_VMF1_PASS =

## Specify where the saved (output) files are going to be placed. If you want
##+ them to be saved on this host, then only set the 'SAVE_DIR_DIR' variable.
##+ Otherwise, you also need to specify a host ('SAVE_DIR_HOST'), a username
##+ ('SAVE_DIR_URN') and a password ('SAVE_DIR_PSS'), if needed.
## Note that the program 'ftp' will be used to do the transfer (if needed).
##
## Warning: The saved products will actually be placed **not** in the folder
##+ 'SAVE_DIR_DIR' but in ${SAVE_DIR_DIR}/YYYY/DDD
##
SAVE_DIR_HOST = 147.102.106.135
SAVE_DIR_DIR = pub/gnss/products
SAVE_DIR_URN = mitsos
SAVE_DIR_PSS = mitsos1985

## This option specifies the name of the directory where the products will
##+ be saved, i.e. how the date is resolved into directories. You can either
##+ set the save path to ${SAVE_DIR_DIR}/YYYY/DDD by setting this option to
##+ 'YDOY' or to ${SAVE_DIR_DIR}/WWWW by setting the option to 'GPSW'
## If not set, the default option is 'YDOY'
SAVE_DIR_FORMAT = GPSW
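## Example: for YEAR = 2016 and DOY = 25 (GPS week 1881), products are saved
##+ under ${SAVE_DIR_DIR}/2016/025 with 'YDOY' or ${SAVE_DIR_DIR}/1881 with
##+ 'GPSW'.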

## This option specifies the filename of the product files, i.e. how the
##+ date is resolved into the filename's string. The default format for
##+ product filenames is 'cccyyddd0.*' where 'ccc' is the solution id, 'yy'
##+ is the year and 'ddd' is the day of year. This corresponds to the option
##+ 'YDOY'.
## In case you want to save the product using the gps week/day of week, then
##+ set this option to 'GPSW', in which case the file is saved as 'cccwwwwd.*'
## If not set, the default option is 'YDOY'
SAVE_PRD_FORMAT = GPSW
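## Example: with SOLUTION_ID = DSO, YEAR = 2016 and DOY = 25 (GPS week 1881,
##+ day of week 1), a product file is named DSO160250.* under 'YDOY' or
##+ DSO18811.* under 'GPSW'.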

## The following settings affect the database with which the processing
##+ will interoperate. This database will be used for:
## * RINEX file downloading and validation (rnxdwnl.py, validate_ntwrnx.py)
## * Add new products (optional)
## If you do not want to update the database with the (saved) solution files,
##+ set the variable 'UPD_DB_PROD' to 'NO'.
GNSS_DB_HOST = "147.102.110.73"
GNSS_DB_USER = "paraskevas"
GNSS_DB_PASS = "paraskey;aw"
GNSS_DB_NAME = "procsta"
UPD_DB_PROD = YES

## Reference stations rejection criteria. Set the max allowed offset for
##+ reference stations, per component. If, for any station this limit is
##+ reached, it will not be included in the reference stations (i.e. the list
##+ of stations used to realize the frame).
## If unset, the limits default to 10mm for North and East and 30mm for Up
##+ components.
## Units are millimeters.
REF_NLIMIT = 10
REF_ELIMIT = 10
REF_ULIMIT = 30

## Station time-series update options; the per-station time-series files are
##+ kept under 'PATH_TO_TS_FILES'.
UPDATE_STA_TS = YES
TS_DESCRIPTION = "test"
PATH_TO_TS_FILES = /home/bpe/data/time-series

## Set this variable to 'YES' if you do not want to delete the files
##+ created in the campaign's directories during the ddprocess run.
SKIP_REMOVE = NO

## Send mail with a short report to the following recipients; more than one
##+ recipient can be set using a comma-separated string.
## If no mail is to be sent, just comment the 'SEND_MAIL_TO' line or leave
##+ the variable empty
SEND_MAIL_TO = [email protected],[email protected]
MAIL_ACCOUNT_USERNAME = "[email protected]"
MAIL_ACCOUNT_PASSWORD = "gevdais;ia"

## END OF CONFIG FILE
59 changes: 44 additions & 15 deletions bin/getdcb.py
@@ -10,7 +10,7 @@
import pybern.products.fileutils.decompress as dc
import pybern.products.fileutils.compress as cc
from pybern.products.fileutils.cmpvar import is_compressed, find_os_compression_type

import time
## If only the formatter_class could be:
##+ argparse.RawTextHelpFormatter|ArgumentDefaultsHelpFormatter ....
## Seems to work with multiple inheritance!
@@ -129,19 +129,48 @@ class myFormatter(argparse.ArgumentDefaultsHelpFormatter,
if args.save_dir:
input_dct['save_dir'] = args.save_dir

## try downloading the dcb file; if we fail do not throw, print the error
## message and return an integer > 0 to the shell.
status = 10
try:
status, remote, local = get_dcb(**input_dct)
except Exception as e:
verboseprint("{:}".format(str(e)), file=sys.stderr)
status = 50
if not status:
print('Downloaded DCB Information File: {:} as {:}'.format(
remote, local))
sys.exit(0)
else:
print('[ERROR] Failed to download DCB product', file=sys.stderr)
# ## try downloading the dcb file; if we fail do not throw, print the error
# ## message and return an integer > 0 to the shell.
# status = 10
# try:
# status, remote, local = get_dcb(**input_dct)
# except Exception as e:
# verboseprint("{:}".format(str(e)), file=sys.stderr)
# status = 50
# if not status:
# print('Downloaded DCB Information File: {:} as {:}'.format(
# remote, local))
# sys.exit(0)
# else:
# print('[ERROR] Failed to download DCB product', file=sys.stderr)
#
# sys.exit(status)

## ---- Retry logic (replaces the single-attempt block above): try up to
##+ MAX_RETRIES times to download the DCB file
## Maximum retries and sleep duration in seconds
MAX_RETRIES = 10
SLEEP_DURATION = 300
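# NOTE: with MAX_RETRIES = 10 and SLEEP_DURATION = 300, a complete failure
# costs up to 9 * 300 s (45 minutes) of sleeping, in addition to the time
# spent in the download attempts themselves.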

# NOTE: this re-definition makes verboseprint unconditional, so the messages
# below are always printed.
def verboseprint(*args, **kwargs):
    print(*args, **kwargs)

status = 10 # Default status indicating failure
for attempt in range(1, MAX_RETRIES + 1):
try:
verboseprint(f"Attempt {attempt}/{MAX_RETRIES} to download DCB product.")
        status, remote, local = get_dcb(**input_dct)  # attempt the download
except Exception as e:
verboseprint(f"Error: {str(e)}", file=sys.stderr)
status = 50 # Error status code
else:
if not status:
print(f"Downloaded DCB Information File: {remote} as {local}")
sys.exit(0)

if attempt < MAX_RETRIES:
verboseprint(f"Retrying in {SLEEP_DURATION} seconds...", file=sys.stderr)
time.sleep(SLEEP_DURATION)

# If all retries fail
print("[ERROR] Failed to download DCB product after multiple attempts", file=sys.stderr)
sys.exit(status)
2 changes: 1 addition & 1 deletion bin/rundd.py
@@ -235,7 +235,7 @@ def prepare_products(dt, credentials_file, product_dict={}, product_dir=None, ve
ptypes = ['final', 'final-rapid', 'early-rapid', 'ultra-rapid', 'current']
for count,erptype in enumerate(ptypes):
try:
status, remote, local = get_erp(type=erptype, pydt=dt, span='daily', save_dir=product_dir, code_dir='bswuser52')
status, remote, local = get_erp(type=erptype, pydt=dt, span='weekly', save_dir=product_dir, code_dir='bswuser52')
verboseprint('[DEBUG] Downloaded erp file {:} of type {:} ({:})'.format(local, erptype, status))
product_dict['erp'] = {'remote': remote, 'local': local, 'type': erptype}
break