Annonce
Réduire
Aucune annonce.
Ads
Réduire
Matlab2017a vers Ninjatrader8 : Erreur
Réduire
X
 
  • Filtre
  • Heure
  • Afficher
Tout nettoyer
nouveaux messages

  • Matlab2017a vers Ninjatrader8 : Erreur

    Salut, je m'appel greg,
    J'essaye de connecter Matlab avec Ninjatrader dans une strategy.
    J'ai ajouter la référence "Interop.MLApp.dll" dans ninjatrader, mais j'ai toujours une erreur qui dit :"Error on calling 'OnBarUpdate' method on bar 19: The type innitializer for 'Globals' Threw an exception."
    J'ai essayé plein de chose différente, mais j'ai toujours cette erreur.

    Je suis sur Matlab2017a et Ninjatrader 8.

    Voici le code de ma stratégie :

    Code:
    #region Using declarations
    using System;
    using System.Collections.Generic;
    using System.ComponentModel;
    using System.ComponentModel.DataAnnotations;
    using System.Linq;
    using System.Text;
    using System.Threading.Tasks;
    using System.Windows;
    using System.Windows.Input;
    using System.Windows.Media;
    using System.Xml.Serialization;
    using NinjaTrader.Cbi;
    using NinjaTrader.Gui;
    using NinjaTrader.Gui.Chart;
    using NinjaTrader.Gui.SuperDom;
    using NinjaTrader.Gui.Tools;
    using NinjaTrader.Data;
    using NinjaTrader.NinjaScript;
    using NinjaTrader.Core.FloatingPoint;
    using NinjaTrader.NinjaScript.Indicators;
    using NinjaTrader.NinjaScript.DrawingTools;
    using System.IO;
    using MLApp;
    #endregion
    
    //This namespace holds Strategies in this folder and is required. Do not change it. 
    namespace NinjaTrader.NinjaScript.Strategies
    {
    	public class MyCustomStrategy2 : Strategy
    	{
    		protected override void OnStateChange()
    		{
    			if (State == State.SetDefaults)
    			{
    				Description									= @"Enter the description for your new custom Strategy here.";
    				Name										= "MyCustomStrategy2";
    				Calculate									= Calculate.OnBarClose;
    				EntriesPerDirection							= 1;
    				EntryHandling								= EntryHandling.AllEntries;
    				IsExitOnSessionCloseStrategy				= true;
    				ExitOnSessionCloseSeconds					= 30;
    				IsFillLimitOnTouch							= false;
    				MaximumBarsLookBack							= MaximumBarsLookBack.TwoHundredFiftySix;
    				OrderFillResolution							= OrderFillResolution.Standard;
    				Slippage									= 0;
    				StartBehavior								= StartBehavior.ImmediatelySubmitSynchronizeAccount;
    				TimeInForce									= TimeInForce.Gtc;
    				TraceOrders									= false;
    				RealtimeErrorHandling						= RealtimeErrorHandling.StopCancelClose;
    				StopTargetHandling							= StopTargetHandling.PerEntryExecution;
    				BarsRequiredToTrade							= 20;
    				// Disable this property for performance gains in Strategy Analyzer optimizations
    				// See the Help Guide for additional information
    				IsInstantiatedOnEachOptimizationIteration	= true;
    			}
    			else if (State == State.Configure)
    			{
    			}
    		}
    		
    		public static class Globals
    		{
    			public static MLApp.MLApp matlab = new MLApp.MLApp();
    		}
    
    		protected override void OnBarUpdate()
    		{
    			//Add your custom strategy logic here.
    			//PrintTo = PrintTo.OutputTab1; 
    			//Print("Not real time");
    			if (State == State.Realtime)
    			{
    				PrintTo = PrintTo.OutputTab1; 
    				Print("updated2.8");
    				string openfile = @"C:\Users\Manthan1412\Documents\MATLAB\open.csv";
    				string closefile = @"C:\Users\Manthan1412\Documents\MATLAB\close.csv";
    				//string logprefix = Time[0].Month.ToString("00") + "/" + Time[0].Day.ToString("00") + "/" + Time[0].Year + ", " + Time[0].Hour.ToString("00") + ", " + Time[0].Minute.ToString("00") + ", ";
    			
    				if(File.Exists(openfile))
    				{
    					File.Delete(openfile);
    				}
    				if (File.Exists(closefile))
    				{
    					File.Delete(closefile);
    				}
    				StreamWriter openLog;
    				StreamWriter closeLog;
    				openLog = File.AppendText(openfile);
    				closeLog = File.AppendText(closefile);
    				//log.WriteLine(logprefix + Open[0].ToString("0.00") + ", "  + Close[0].ToString("0.00"));
                	//log.Close();
    				for(int barIndex = 0; barIndex <= ChartBars.Count; barIndex++)
    				{
    					// get the volume value at the selected bar index value
    					long volumeValue = Bars.GetVolume(barIndex);
    					double openValue = Bars.GetOpen(barIndex);
    					double closeValue = Bars.GetClose(barIndex);
    					PrintTo = PrintTo.OutputTab1; 
    					Print("Bar #" + barIndex + " Open: " + openValue + " Close: " + closeValue + " Volume: " + volumeValue);
    					openLog.WriteLine(openValue.ToString("0.00") + "," + volumeValue);
    					closeLog.WriteLine(closeValue.ToString("0.00"));
    				}
    				openLog.Close();
    				closeLog.Close();
    				Globals.matlab.Execute(@"cd C:\Users\Manthan1412\Documents\MATLAB");
    
    	            object result = null;
    				
    				PrintTo = PrintTo.OutputTab2;
    				Print("Matlab function is called");
    
    	            Globals.matlab.Feval("GoldzFunctionScript", 0, out result);
    
    	            //object[] res = result as object[];
    				PrintTo = PrintTo.OutputTab2; 
    				Print("call completed");
    			}
    			
    		}
    	}
    }
    Et le script matlab que j'appel dans ma stratégie :
    Code:
    % Solve an Autoregression Problem with External Input with a NARX Neural Network
    % Script generated by Neural Time Series app
    % Created 11-Oct-2017 07:30:12
    %
    % This script assumes these variables are defined:
    %
    %   Input - input time series.
    %   Output - feedback time series.
    
    X = tonndata(Input,false,false);
    T = tonndata(Output,false,false);
    
    % Choose a Training Function
    % For a list of all training functions type: help nntrain
    % 'trainlm' is usually fastest.
    % 'trainbr' takes longer but may be better for challenging problems.
    % 'trainscg' uses less memory. Suitable in low memory situations.
    trainFcn = 'trainbr';  % Bayesian Regularization backpropagation.
    
    % Create a Nonlinear Autoregressive Network with External Input
    inputDelays = 1:2;
    feedbackDelays = 1:2;
    hiddenLayerSize = 5;
    net = narxnet(inputDelays,feedbackDelays,hiddenLayerSize,'open',trainFcn);
    
    % Choose Input and Feedback Pre/Post-Processing Functions
    % Settings for feedback input are automatically applied to feedback output
    % For a list of all processing functions type: help nnprocess
    % Customize input parameters at: net.inputs{i}.processParam
    % Customize output parameters at: net.outputs{i}.processParam
    net.inputs{1}.processFcns = {'removeconstantrows','mapminmax'};
    net.inputs{2}.processFcns = {'removeconstantrows','mapminmax'};
    
    % Prepare the Data for Training and Simulation
    % The function PREPARETS prepares timeseries data for a particular network,
    % shifting time by the minimum amount to fill input states and layer
    % states. Using PREPARETS allows you to keep your original time series data
    % unchanged, while easily customizing it for networks with differing
    % numbers of delays, with open loop or closed loop feedback modes.
    [x,xi,ai,t] = preparets(net,X,{},T);
    
    % Setup Division of Data for Training, Validation, Testing
    % For a list of all data division functions type: help nndivide
    net.divideFcn = 'dividerand';  % Divide data randomly
    net.divideMode = 'time';  % Divide up every sample
    net.divideParam.trainRatio = 70/100;
    net.divideParam.valRatio = 15/100;
    net.divideParam.testRatio = 15/100;
    
    % Choose a Performance Function
    % For a list of all performance functions type: help nnperformance
    net.performFcn = 'mse';  % Mean Squared Error
    
    % Choose Plot Functions
    % For a list of all plot functions type: help nnplot
    net.plotFcns = {'plotperform','plottrainstate', 'ploterrhist', ...
        'plotregression', 'plotresponse', 'ploterrcorr', 'plotinerrcorr'};
    
    % Train the Network
    [net,tr] = train(net,x,t,xi,ai);
    
    % Test the Network
    y = net(x,xi,ai);
    e = gsubtract(t,y);
    performance = perform(net,t,y)
    
    % Recalculate Training, Validation and Test Performance
    trainTargets = gmultiply(t,tr.trainMask);
    valTargets = gmultiply(t,tr.valMask);
    testTargets = gmultiply(t,tr.testMask);
    trainPerformance = perform(net,trainTargets,y)
    valPerformance = perform(net,valTargets,y)
    testPerformance = perform(net,testTargets,y)
    
    % View the Network
    view(net)
    
    % Plots
    % Uncomment these lines to enable various plots.
    %figure, plotperform(tr)
    %figure, plottrainstate(tr)
    %figure, ploterrhist(e)
    %figure, plotregression(t,y)
    %figure, plotresponse(t,y)
    %figure, ploterrcorr(e)
    %figure, plotinerrcorr(x,e)
    
    % Closed Loop Network
    % Use this network to do multi-step prediction.
    % The function CLOSELOOP replaces the feedback input with a direct
    % connection from the outout layer.
    netc = closeloop(net);
    netc.name = [net.name ' - Closed Loop'];
    view(netc)
    [xc,xic,aic,tc] = preparets(netc,X,{},T);
    yc = netc(xc,xic,aic);
    closedLoopPerformance = perform(net,tc,yc)
    
    % Multi-step Prediction
    % Sometimes it is useful to simulate a network in open-loop form for as
    % long as there is known output data, and then switch to closed-loop form
    % to perform multistep prediction while providing only the external input.
    % Here all but 5 timesteps of the input series and target series are used
    % to simulate the network in open-loop form, taking advantage of the higher
    % accuracy that providing the target series produces:
    numTimesteps = size(x,2);
    knownOutputTimesteps = 1:(numTimesteps-5);
    predictOutputTimesteps = (numTimesteps-4):numTimesteps;
    X1 = X(:,knownOutputTimesteps);
    T1 = T(:,knownOutputTimesteps);
    [x1,xio,aio] = preparets(net,X1,{},T1);
    [y1,xfo,afo] = net(x1,xio,aio);
    % Next the the network and its final states will be converted to
    % closed-loop form to make five predictions with only the five inputs
    % provided.
    x2 = X(1,predictOutputTimesteps);
    [netc,xic,aic] = closeloop(net,xfo,afo);
    [y2,xfc,afc] = netc(x2,xic,aic);
    multiStepPerformance = perform(net,T(1,predictOutputTimesteps),y2)
    % Alternate predictions can be made for different values of x2, or further
    % predictions can be made by continuing simulation with additional external
    % inputs and the last closed-loop states xfc and afc.
    
    % Step-Ahead Prediction Network
    % For some applications it helps to get the prediction a timestep early.
    % The original network returns predicted y(t+1) at the same time it is
    % given y(t+1). For some applications such as decision making, it would
    % help to have predicted y(t+1) once y(t) is available, but before the
    % actual y(t+1) occurs. The network can be made to return its output a
    % timestep early by removing one delay so that its minimal tap delay is now
    % 0 instead of 1. The new network returns the same outputs as the original
    % network, but outputs are shifted left one timestep.
    nets = removedelay(net);
    nets.name = [net.name ' - Predict One Step Ahead'];
    view(nets)
    [xs,xis,ais,ts] = preparets(nets,X,{},T);
    ys = nets(xs,xis,ais);
    stepAheadPerformance = perform(nets,ts,ys)
    
    % Deployment
    % Change the (false) values to (true) to enable the following code blocks.
    % See the help for each generation function for more information.
    if (false)
        % Generate MATLAB function for neural network for application
        % deployment in MATLAB scripts or with MATLAB Compiler and Builder
        % tools, or simply to examine the calculations your trained neural
        % network performs.
        genFunction(net,'myNeuralNetworkFunction');
        y = myNeuralNetworkFunction(x,xi,ai);
    end
    if (false)
        % Generate a matrix-only MATLAB function for neural network code
        % generation with MATLAB Coder tools.
        genFunction(net,'myNeuralNetworkFunction','MatrixOnly','yes');
        x1 = cell2mat(x(1,:));
        x2 = cell2mat(x(2,:));
        xi1 = cell2mat(xi(1,:));
        xi2 = cell2mat(xi(2,:));
        y = myNeuralNetworkFunction(x1,x2,xi1,xi2);
    end
    if (false)
        % Generate a Simulink diagram for simulation or deployment with.
        % Simulink Coder tools.
        gensim(net);
    end
    Qu'est ce que j'ai mal fait?

  • #2
    Bonjour

    as tu essayé une recherche sous Google , j'ai trouvé ça si ça peut t'aider !

    Perso j'utilise Ninjatrader8 sur le FOREX , et sur le calcul des Moyennes Mobiles , il n'y a dans le Set Up que l'option Close , il manque l'Open , High et Low , je les ai contacté , mais il faut joindre le support . Pour moi c'est fondamental .... mais bon !



    si tu as une idée , je suis preneur ,un script ...... ou ???? ...... j'ai besoin d'afficher plein de choses .....en dessous de la minute ....!

    Pour toi j'ai ça

    Commentaire


    • #3
      Merci de ta participation Passetan,
      En fait, j'utilise Ninjatrader 8 en version s’essaie pour le moment, je sais pas si le support va prendre en compte mon problème vu que je ne suis pas client.
      Je vais essayer quand même on sait jamais.
      Par contre j'ai pas trop compris ce que tu veux faire, tu veux afficher chaque tick?
      J'avais fait un petit script il y a quelque temps, ça enregistrait tout les ticks dans un fichier csv, je sais pas trop si c'est ce que tu cherche...

      Commentaire

      Chargement...
      X