Commit bca04b82 authored by Jake Taylor

Merge branch 'jonah/wallet-country-list' into 'master'

Points should maintain an external list of wallet to country mappings

See merge request !10
parents e8c0c60f 6ffaf193
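The change replaces each wallet's bare point counter with a two-element pair that also records the node's country, so the script can emit wallet-to-country mappings alongside raw point totals. A minimal sketch of the new shape (the wallet address and country code below are hypothetical):

    # Before this merge each entry was a bare integer:
    #   raw_points_dict[wallet_address] = 0
    # After it, element 0 holds the points and element 1 the node's country:
    raw_points_dict = {}
    raw_points_dict["6Wxyz...wallet"] = [0, "FR"]  # hypothetical wallet/country

    # Later hunks accumulate into element 0, leaving the country untouched:
    raw_points_dict["6Wxyz...wallet"][0] += 250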
@@ -35,7 +35,7 @@ raw_points_log = ''
 #################
 def main():
-    global xxdot_url
+    global xxdot_url, raw_points_log

     # Process input variables and program arguments
     args = get_args()
@@ -47,6 +47,7 @@ def main():
     db_name = args['db']
     db_user = args['user']
     db_pass = args['pass']
+    raw_points_log = args['raw_points_log']

     # Read Mnemonic from file and create keypair
     with open(wallet_path) as file:
@@ -178,7 +179,7 @@ def round_point_computation(point_info, round_info, active_nodes):
         node_multipliers[node_id] = bin_multipliers[node_bin]  # Assign multiplier to node
         node_wallets[node_id] = wallet_address  # Add wallet association for node id
         wallet_points[wallet_address] = 0
-        raw_points_dict[wallet_address] = 0
+        raw_points_dict[wallet_address] = [0, node_country]

     # Calculate point information for each round
     for row in round_info:
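The hunks that follow index element 0 when accumulating fail and success points. How the [points, country] pairs are finally written out to the raw points log is not part of this diff; a plausible writer, assuming a simple CSV layout, could look like:

    import csv

    # Hypothetical writer for the raw points log; the real output format
    # is not shown in this merge request.
    def write_raw_points(raw_points_dict, raw_points_log):
        with open(raw_points_log, "w", newline="") as f:
            writer = csv.writer(f)
            writer.writerow(["wallet", "points", "country"])
            for wallet, (points, country) in raw_points_dict.items():
                writer.writerow([wallet, points, country])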
@@ -202,7 +203,7 @@ def round_point_computation(point_info, round_info, active_nodes):
             wallet = node_wallets.get(bytes(node_id))
             if wallet:
                 wallet_points[wallet] += fail_points
-                raw_points_dict[wallet] += fail_points
+                raw_points_dict[wallet][0] += fail_points
         else:
             # Handle point multipliers
             # NOTE: Weirdness can result here from nodes going offline between eras. Should be reviewed.
@@ -222,7 +223,7 @@ def round_point_computation(point_info, round_info, active_nodes):
             wallet = node_wallets.get(bytes(node_id))
             if wallet:
                 wallet_points[wallet] += points
-                raw_points_dict[wallet] += success_points
+                raw_points_dict[wallet][0] += success_points
             else:
                 log.warning(f"no wallet found for nid {bytes(node_id)}")
         else:
@@ -412,7 +413,6 @@ def get_args():
     get_args controls the argparse usage for the script. It sets up and parses
     arguments and returns them in dict format
     """
-    global raw_points_log
     parser = argparse.ArgumentParser(description="Options for point assignment script")
     parser.add_argument("--verbose", action="store_true",
                         help="Print debug logs", default=False)
@@ -447,7 +447,6 @@ def get_args():
                     level=log.DEBUG if args['verbose'] else log.INFO,
                     datefmt='%d-%b-%y %H:%M:%S',
                     filename=args["log"])
-    raw_points_log = args['raw_points_log']
     return args
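The two hunks above move the raw_points_log assignment out of get_args() and into main(), so argument parsing no longer rebinds module state as a side effect. A reduced sketch of the resulting pattern, with a stand-in path in place of real argparse output:

    raw_points_log = ''  # module-level default, as at the top of the script

    def get_args():
        # After this change, get_args() only parses and returns arguments.
        return {'raw_points_log': '/tmp/raw_points.log'}  # stand-in value

    def main():
        global raw_points_log  # declare intent to rebind the module global
        args = get_args()
        raw_points_log = args['raw_points_log']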
The remaining hunks belong to the second file in this merge request, an S3 file-upload script.
@@ -37,6 +37,12 @@ def get_args():
                         help="Path to file to upload")
     parser.add_argument("--remote-path", type=str, required=True,
                         help="Remote location to place the file")
+    parser.add_argument("--upload-frequency", type=int, required=False,
+                        help="Frequency of file uploads (in seconds)",
+                        default=60)
+    parser.add_argument("--truncate-size", type=int, required=False,
+                        help="Maximum size of file before it is truncated (in MB)",
+                        default=0)
     args = vars(parser.parse_args())

     log.basicConfig(format='[%(levelname)s] %(asctime)s: %(message)s',
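A reduced sketch of just the two new options, showing their defaults and argparse's dash-to-underscore conversion (the reason main() below reads args["upload_frequency"]):

    import argparse

    # Minimal reproduction of the two options added in this hunk.
    parser = argparse.ArgumentParser()
    parser.add_argument("--upload-frequency", type=int, required=False,
                        help="Frequency of file uploads (in seconds)", default=60)
    parser.add_argument("--truncate-size", type=int, required=False,
                        help="Maximum size of file before it is truncated (in MB)", default=0)

    args = vars(parser.parse_args(["--truncate-size", "50"]))
    assert args["upload_frequency"] == 60  # default applies when the flag is omitted
    assert args["truncate_size"] == 50     # dashes become underscores in the dict keys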
@@ -99,7 +105,11 @@ def main():
     args = get_args()
     log.info("Running with configuration: {}".format(args))

-    upload_frequency = 60
+    # Size of one megabyte in bytes
+    MEGABYTE = 1048576
+
+    upload_frequency = args["upload_frequency"]
+    truncate_size = args["truncate_size"] * MEGABYTE

     s3_bucket_name = args["s3_bucket"]
     s3_access_key_id = args["s3_access_key"]
     s3_access_key_secret = args["s3_secret"]
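MEGABYTE is 2**20 bytes, so a --truncate-size of 50 becomes a threshold of 52,428,800 bytes, while the default of 0 leaves truncation disabled downstream:

    MEGABYTE = 1048576
    assert MEGABYTE == 2 ** 20
    assert 50 * MEGABYTE == 52428800  # --truncate-size 50, expressed in bytes
    assert 0 * MEGABYTE == 0          # default 0 disables truncation in the loop below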
@@ -112,14 +122,34 @@ def main():
     while not os.path.exists(local_path):
         time.sleep(upload_frequency)

+    # Determine whether file truncation should be performed
+    is_truncate_enabled = truncate_size != 0
+    log.info(f"Beginning file upload every {upload_frequency} seconds. Truncation is set to {is_truncate_enabled}")
+
     # Keep track of the current file hash
     current_hash = ""

+    # Keep track of a unique file timestamp if truncation is enabled
+    file_timestamp = f"-{int(time.time())}" if is_truncate_enabled else ""
+
     while True:
         try:
             new_hash = get_hash(local_path)
             log.debug(f"Current Hash: {current_hash}, New Hash: {new_hash}")

             # If file has changed, upload the new file
             if current_hash != new_hash:
-                upload(local_path, remote_path, s3_bucket_name,
+                upload(local_path, f"{remote_path}{file_timestamp}", s3_bucket_name,
                        s3_bucket_region, s3_access_key_id, s3_access_key_secret)
+
+                if is_truncate_enabled:
+                    # Check if the log file is too large
+                    file_size = os.path.getsize(local_path)
+                    log.debug(f"Current File Size: {file_size}")
+                    if file_size > truncate_size:
+                        # Truncate the file
+                        log.info("File has reached maximum size. Clearing...")
+                        with open(local_path, "w+"):
+                            log.info(f"File has been truncated. New Size: {os.path.getsize(local_path)}")
+                        file_timestamp = f"-{int(time.time())}"
         except Exception as e:
             log.error(f"Unhandled exception occurred: {e}", exc_info=True)

         time.sleep(upload_frequency)
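get_hash is defined elsewhere in this script and not shown in the diff; a typical content-hash helper, assuming MD5 over the file bytes, might look like:

    import hashlib

    # Assumed implementation of get_hash; the real one is outside this diff.
    def get_hash(path):
        md5 = hashlib.md5()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(8192), b""):
                md5.update(chunk)
        return md5.hexdigest()

Note the design choice in the loop above: after each truncation, file_timestamp is re-stamped, so the now-empty file is uploaded under a fresh remote key instead of overwriting the larger object already in S3.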
......