kryox64 committed

Commit: dbae0ca
Parent(s): 60bcd75

Remove EMARSI param


Signed-off-by: Aadhitya A <[email protected]>

Files changed (1)
  1. app.py (+6, -16)
app.py CHANGED

@@ -106,8 +106,8 @@ def objective(trial, X_train, y_train, X_test, y_test):
 def modelCNNLSTM(csv_file, prax):
     # Read the data
     df = csv_file
-    temp_data = df.iloc[0:3000, 1:24]
-    trek = df.iloc[3000:,1:24]
+    temp_data = df.iloc[0:3000, 1:23]
+    trek = df.iloc[3000:,1:23]
     #print(temp_data)
     data = temp_data
     sc = MinMaxScaler()
@@ -217,7 +217,7 @@ def modelCNNLSTM(csv_file, prax):
     #print(f'> Loss: {np.mean(loss_per_fold)}')
     #print('------------------------------------------------------------------------')

-    trek = df.iloc[0:len(df), 1:24]
+    trek = df.iloc[0:len(df), 1:23]
     Y = trek[0:len(trek)]
     YP = trek[1:len(trek)]
     Y1 = Y['Close']
@@ -474,7 +474,7 @@ def modelTFT(csv_file, prax):
         time_varying_known_reals=["time_idx"],
         time_varying_unknown_categoricals=[],
         time_varying_unknown_reals=[
-            'Open','High','Low','Close','OI','RSI14','RSI44','HHRSI','EMARSI','Rsi Weekly','LLCHHV','white','Vap44','Vap14','BV11','SV11','Ema5','Ema20','Ema50','Ema200'
+            'Open','High','Low','Close','OI','RSI14','RSI44','HHRSI','Rsi Weekly','LLCHHV','white','Vap44','Vap14','BV11','SV11','Ema5','Ema20','Ema50','Ema200'
         ],
         target_normalizer=GroupNormalizer(
             groups=['Ticker'], transformation="softplus"
@@ -507,11 +507,6 @@ def modelTFT(csv_file, prax):

     print(f"Median loss for naive prediction on validation: {sm.loss(actuals, baseline_predictions).mean(axis = 1).median().item()}")

-    """<a id ="4"></a><h3 style="background:#0554f2; border:0; border-radius: 4px; color:#f5f6f7">Training and Evaluation</h3>
-
-    I'm not particularly interested in performances, rather in showing how easy it is to use the library.
-    """
-
     early_stop_callback = EarlyStopping(monitor="train_loss", min_delta=1e-2, patience=PATIENCE, verbose=False, mode="min")
     lr_logger = LearningRateMonitor() # log the learning rate
     logger = TensorBoardLogger("lightning_logs") # logging results to a tensorboard
@@ -711,7 +706,7 @@ def modelTFT_OpenGap(csv_file, prax):
         time_varying_known_reals=["time_idx"],
         time_varying_unknown_categoricals=[],
         time_varying_unknown_reals=[
-            'Open','High','Low','Close','OI','RSI14','RSI44','HHRSI','EMARSI','Rsi Weekly','LLCHHV','white','Vap44','Vap14','BV11','SV11','Ema5','Ema20','Ema50','Ema200', 'O-C'
+            'Open','High','Low','Close','OI','RSI14','RSI44','HHRSI','Rsi Weekly','LLCHHV','white','Vap44','Vap14','BV11','SV11','Ema5','Ema20','Ema50','Ema200', 'O-C'
         ],
         target_normalizer=GroupNormalizer(
             groups=['Ticker'], transformation="softplus"
@@ -744,11 +739,6 @@ def modelTFT_OpenGap(csv_file, prax):

     print(f"Median loss for naive prediction on validation: {sm.loss(actuals, baseline_predictions).mean(axis = 1).median().item()}")

-    """<a id ="4"></a><h3 style="background:#0554f2; border:0; border-radius: 4px; color:#f5f6f7">Training and Evaluation</h3>
-
-    I'm not particularly interested in performances, rather in showing how easy it is to use the library.
-    """
-
     early_stop_callback = EarlyStopping(monitor="train_loss", min_delta=1e-2, patience=PATIENCE, verbose=False, mode="min")
     lr_logger = LearningRateMonitor() # log the learning rate
     logger = TensorBoardLogger("lightning_logs") # logging results to a tensorboard
@@ -911,7 +901,7 @@ def main(files):
     prax[0] = df['Ticker'][0]
     prax[1] = df['Close'][len(df)-1]
     print('------------------')
-    # df = df.drop(['Volume'], axis=1)
+    df = df.drop(['EMARSI'], axis=1)
     #df['Date/Time'] = pd.to_datetime(df['Date/Time'])
     for i in range(len(df)):
         x = guess_date(df['Date/Time'][i])
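A note on why the iloc bounds in modelCNNLSTM shrink from 1:24 to 1:23: once main() drops the EMARSI column, every column to its right shifts one position left, so the same block of feature columns ends one index earlier. A minimal sketch of the effect, using a small hypothetical subset of the real CSV layout (which this diff does not show in full):

    import pandas as pd

    # Hypothetical frame: a date column followed by a few of the feature
    # columns named in this diff; the real CSV has many more columns.
    df = pd.DataFrame([[0] * 6], columns=[
        "Date/Time", "Open", "High", "Close", "EMARSI", "Ema200",
    ])

    wide = df.iloc[:, 1:6]                             # analogous to df.iloc[0:3000, 1:24]
    narrow = df.drop(["EMARSI"], axis=1).iloc[:, 1:5]  # analogous to df.iloc[0:3000, 1:23]

    # Both slices mean "every column after Date/Time"; the upper bound is one
    # smaller after the drop because one column is gone.
    print(list(wide.columns))    # ['Open', 'High', 'Close', 'EMARSI', 'Ema200']
    print(list(narrow.columns))  # ['Open', 'High', 'Close', 'Ema200']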
 
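On the new line in main(): DataFrame.drop returns a new frame rather than modifying in place, so the reassignment is what actually removes the column before df is handed to the model functions; the time_varying_unknown_reals lists then also stop naming 'EMARSI', since they can only reference columns still present in the frame. A quick standalone check of the drop behaviour (values here are made up):

    import pandas as pd

    df = pd.DataFrame({"Close": [101.5, 102.0], "EMARSI": [48.2, 51.7]})

    df.drop(["EMARSI"], axis=1)       # returns a copy; df still has EMARSI
    df = df.drop(["EMARSI"], axis=1)  # reassignment, as in the new main() line
    print(list(df.columns))           # ['Close']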