Search Results

Search found 5279 results on 212 pages for 'execution counter'.


  • Recalculate Counter Cache of 120k Records [Rails / ActiveRecord]

    - by Sebastian
    The following situation: I have a Poi model, which has many Pictures (1:n). I want to recalculate the counter_cache column because the values are inconsistent. I've tried iterating over each record in Ruby, but this takes much too long and sometimes dies with "segmentation fault" errors. So I wonder: is it possible to do this with a raw SQL query?
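    One way to do it in a single statement (a sketch; it assumes the conventional tables pois/pictures and a pictures_count column, which may differ from the real schema):

        # One correlated-subquery UPDATE instead of 120k Ruby iterations.
        ActiveRecord::Base.connection.execute(<<-SQL)
          UPDATE pois
          SET pictures_count = (
            SELECT COUNT(*) FROM pictures WHERE pictures.poi_id = pois.id
          )
        SQL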

    Read the article

  • C# counter requires 2 button clicks to update

    - by marko.ivanovski.nz
    Hi, I have a problem that has been bugging me all day. In my code I have the following:

        private int rowCount
        {
            get { return (int)ViewState["rowCount"]; }
            set { ViewState["rowCount"] = value; }
        }

    and a button event:

        protected void addRow_Click(object sender, EventArgs e)
        {
            rowCount = rowCount + 1;
        }

    Then on Page_Load I read that value and create controls accordingly. I understand the button event fires AFTER Page_Load fires, so the value isn't updated until the next postback. Real nightmare. Here's the entire code:

        protected void Page_Load(object sender, EventArgs e)
        {
            string xmlValue = ""; // To read a value from a database
            if (xmlValue.Length > 0)
            {
                if (!Page.IsPostBack)
                {
                    DataSet ds = XMLToDataSet(xmlValue);
                    Table dimensionsTable = DataSetToTable(ds);
                    tablePanel.Controls.Add(dimensionsTable);
                    DataTable dt = ds.Tables["Dimensions"];
                    rowCount = dt.Rows.Count;
                    colCount = dt.Columns.Count;
                }
                else
                {
                    tablePanel.Controls.Add(DataSetToTable(DefaultDataSet(rowCount, colCount)));
                }
            }
            else
            {
                if (!Page.IsPostBack)
                {
                    rowCount = 2;
                    colCount = 4;
                }
                tablePanel.Controls.Add(DataSetToTable(DefaultDataSet(rowCount, colCount)));
            }
        }

        protected void submit_Click(object sender, EventArgs e)
        {
            resultsLabel.Text = Server.HtmlEncode(DataSetToStringXML(TableToDataSet((Table)tablePanel.Controls[0])));
        }

        protected void addColumn_Click(object sender, EventArgs e)
        {
            colCount = colCount + 1;
        }

        protected void addRow_Click(object sender, EventArgs e)
        {
            rowCount = rowCount + 1;
        }

        public DataSet TableToDataSet(Table table)
        {
            DataSet ds = new DataSet();
            DataTable dt = new DataTable("Dimensions");
            ds.Tables.Add(dt);
            // Add headers
            for (int i = 0; i < table.Rows[0].Cells.Count; i++)
            {
                DataColumn col = new DataColumn();
                TextBox headerTxtBox = (TextBox)table.Rows[0].Cells[i].Controls[0];
                col.ColumnName = headerTxtBox.Text;
                col.Caption = headerTxtBox.Text;
                dt.Columns.Add(col);
            }
            for (int i = 0; i < table.Rows.Count; i++)
            {
                DataRow valueRow = dt.NewRow();
                for (int x = 0; x < table.Rows[i].Cells.Count; x++)
                {
                    TextBox valueTextBox = (TextBox)table.Rows[i].Cells[x].Controls[0];
                    valueRow[x] = valueTextBox.Text;
                }
                dt.Rows.Add(valueRow);
            }
            return ds;
        }

        public Table DataSetToTable(DataSet ds)
        {
            DataTable dt = ds.Tables["Dimensions"];
            Table newTable = new Table();
            // Add headers
            TableRow headerRow = new TableRow();
            for (int i = 0; i < dt.Columns.Count; i++)
            {
                TableCell headerCell = new TableCell();
                TextBox headerTxtBox = new TextBox();
                headerTxtBox.ID = "HeadersTxtBox" + i.ToString();
                headerTxtBox.Font.Bold = true;
                headerTxtBox.Text = dt.Columns[i].ColumnName;
                headerCell.Controls.Add(headerTxtBox);
                headerRow.Cells.Add(headerCell);
            }
            newTable.Rows.Add(headerRow);
            // Add value rows
            for (int i = 0; i < dt.Rows.Count; i++)
            {
                TableRow valueRow = new TableRow();
                for (int x = 0; x < dt.Columns.Count; x++)
                {
                    TableCell valueCell = new TableCell();
                    TextBox valueTxtBox = new TextBox();
                    valueTxtBox.ID = "ValueTxtBox" + i.ToString() + i + x + x.ToString();
                    valueTxtBox.Text = dt.Rows[i][x].ToString();
                    valueCell.Controls.Add(valueTxtBox);
                    valueRow.Cells.Add(valueCell);
                }
                newTable.Rows.Add(valueRow);
            }
            return newTable;
        }

        public DataSet DefaultDataSet(int rows, int cols)
        {
            DataSet ds = new DataSet();
            DataTable dt = new DataTable("Dimensions");
            ds.Tables.Add(dt);
            DataColumn nameCol = new DataColumn();
            nameCol.Caption = "Name";
            nameCol.ColumnName = "Name";
            nameCol.DataType = System.Type.GetType("System.String");
            dt.Columns.Add(nameCol);
            DataColumn widthCol = new DataColumn();
            widthCol.Caption = "Width";
            widthCol.ColumnName = "Width";
            widthCol.DataType = System.Type.GetType("System.String");
            dt.Columns.Add(widthCol);
            if (cols > 2)
            {
                DataColumn heightCol = new DataColumn();
                heightCol.Caption = "Height";
                heightCol.ColumnName = "Height";
                heightCol.DataType = System.Type.GetType("System.String");
                dt.Columns.Add(heightCol);
            }
            if (cols > 3)
            {
                DataColumn depthCol = new DataColumn();
                depthCol.Caption = "Depth";
                depthCol.ColumnName = "Depth";
                depthCol.DataType = System.Type.GetType("System.String");
                dt.Columns.Add(depthCol);
            }
            if (cols > 4)
            {
                int newColCount = cols - 4;
                for (int i = 0; i < newColCount; i++)
                {
                    DataColumn newCol = new DataColumn();
                    newCol.Caption = "New " + i.ToString();
                    newCol.ColumnName = "New " + i.ToString();
                    newCol.DataType = System.Type.GetType("System.String");
                    dt.Columns.Add(newCol);
                }
            }
            for (int i = 0; i < rows; i++)
            {
                DataRow newRow = dt.NewRow();
                newRow["Name"] = "Name " + i.ToString();
                newRow["Width"] = "Width " + i.ToString();
                if (cols > 2) { newRow["Height"] = "Height " + i.ToString(); }
                if (cols > 3) { newRow["Depth"] = "Depth " + i.ToString(); }
                dt.Rows.Add(newRow);
            }
            return ds;
        }

        public DataSet XMLToDataSet(string xml)
        {
            StringReader sr = new StringReader(xml);
            DataSet ds = new DataSet();
            ds.ReadXml(sr);
            return ds;
        }

        public string DataSetToStringXML(DataSet ds)
        {
            XmlDocument _XMLDoc = new XmlDocument();
            _XMLDoc.LoadXml(ds.GetXml());
            StringWriter sw = new StringWriter();
            XmlTextWriter xw = new XmlTextWriter(sw);
            XmlDocument xml = _XMLDoc;
            xml.WriteTo(xw);
            return sw.ToString();
        }

        private int rowCount
        {
            get { return (int)ViewState["rowCount"]; }
            set { ViewState["rowCount"] = value; }
        }

        private int colCount
        {
            get { return (int)ViewState["colCount"]; }
            set { ViewState["colCount"] = value; }
        }

    Thanks in advance, Marko
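    One common way out of this chicken-and-egg (a sketch, not from the post): defer building the dynamic table until PreRender, which runs after the click handlers have updated ViewState, so the table is rebuilt with the new counts on the same postback.

        protected void Page_Load(object sender, EventArgs e)
        {
            if (!Page.IsPostBack)
            {
                rowCount = 2;
                colCount = 4;
            }
        }

        protected override void OnPreRender(EventArgs e)
        {
            base.OnPreRender(e);
            // addRow_Click/addColumn_Click have already run by this point,
            // so the incremented counts are visible when the table is built.
            tablePanel.Controls.Clear();
            tablePanel.Controls.Add(DataSetToTable(DefaultDataSet(rowCount, colCount)));
        }

    One caveat with this approach: controls created this late miss the earlier lifecycle stages, so posted TextBox values have to be re-read from Request.Form rather than relying on the controls populating themselves.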

    Read the article

  • Writing a simple incrementing counter in rails

    - by Trip
    For every Card, I would like to attach a special number to them that increments by one. I assume I can do this all in the controller:

        def create
          @card = Card.new(params[:card])
          @card.SpecNum = @card.SpecNum ++
          ...
        end

    Or I could be missing something obvious, and maybe the best bet is to add an auto-increment column in MySQL. The problem is the number has to start at a specific number, 1020. Any ideas?
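    A minimal sketch of the model-callback route (it assumes an integer spec_num column, which is my naming; it is not atomic under concurrent creates, so wrap it in a transaction or lock if that matters):

        class Card < ActiveRecord::Base
          FIRST_SPEC_NUM = 1020

          before_create :assign_spec_num

          private

          def assign_spec_num
            # Start at 1020 for the very first card, then count up from
            # whatever the current maximum is.
            self.spec_num = (Card.maximum(:spec_num) || FIRST_SPEC_NUM - 1) + 1
          end
        end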

    Read the article

  • RewriteRule and php download counter

    - by rcourtna
    (1) I have a site that serves up MP3 files: http://domain/files/1234567890.mp3
    (2) I have a PHP script that tracks file download counts: http://domain/modules/download_counter.php?file=/files/1234567890.mp3 — after download_counter.php records the download, it redirects to the original file: Header("Location: $FQDN_url");
    (3) I'd like all my public links to be presented as the direct file URLs from (1). I'm trying to use Apache to redirect the requests to download_counter.php: RewriteRule ^files/(.+\.mp3)$ /modules/download_counter.php?file=/files/$1 [L]
    I'm currently stuck on (3), as it results in a redirect loop: download_counter.php simply redirects the request back to the original file (rather than streaming the file contents). I'm also motivated to use download_counter.php as is (without modifying its redirect behaviour), because the script is part of a larger CMS module and I'd like to avoid complicating my upgrade path. Perhaps there is no solution to my problem (other than modifying the download_counter script). WDYT?
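    One workaround that leaves the PHP untouched (a sketch; it assumes the physical files can live in a directory the rule does not match, e.g. /media/, while the public /files/ URLs stay as they are):

        RewriteEngine On
        # Pretty URL -> counter, pointing the counter at the real file location.
        RewriteRule ^files/(.+\.mp3)$ /modules/download_counter.php?file=/media/$1 [L]

    The counter then redirects the browser to /media/...mp3, which no longer matches the rule, so the loop ends. The trade-offs: the counter logs the /media/ path instead of /files/, and the /media/ URLs are also reachable directly, bypassing the counter.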

    Read the article

  • Time to start a counter on the client side

    - by Felipe
    Hi everybody, I'm developing a web application using ASP.NET MVC, and I need to build a stopwatch (chronometer), preprogrammed with 30 seconds, that starts at a certain moment on the client side using the server's time; the client's clock can't be assumed to match the server's. So I'm using jQuery to call the server via JSON and get the time, but it's very wasteful because I call the server every second, something like this:

        $(function() {
            GetTimeByServer();
        });

        function GetTimeByServer() {
            $.getJSON('/Home/Time', null, function(result) {
                if (result.SecondsPending < 30) {
                    // call another function to start the chronometer
                } else {
                    window.setTimeout(GetTimeByServer, 1000); // call again each 1 second!
                }
            });
        }

    It works fine, but when I have more than 3 or 4 calls like this, the browser slows down, though it still works. I'd like to know how to get more performance on the client side, or whether there is a better way to do this. Is there any way for the client to listen to the server like a "socket" to know when the chronometer should start? PS: Sorry for my English! Thanks. Cheers
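    One round trip is usually enough (a sketch in plain JavaScript; the /Home/Time endpoint and SecondsPending field are taken from the post, startChronometer is hypothetical): fetch the remaining seconds once, convert them into a local deadline, and tick against the local clock from then on.

        function startWhenDue() {
            $.getJSON('/Home/Time', null, function (result) {
                // Local deadline derived from the server's answer; the local
                // clock only has to be stable, not synchronized.
                var startAt = Date.now() + (result.SecondsPending - 30) * 1000;
                var timer = setInterval(function () {
                    if (Date.now() >= startAt) {
                        clearInterval(timer);
                        startChronometer(); // kick off the 30-second countdown
                    }
                }, 250);
            });
        }

    This replaces one HTTP request per second with a single request plus a cheap local timer, which is why it scales to several counters per page.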

    Read the article

  • I need some ideas on my algorithm for a Hit Counter

    - by stckvrflw
    My algorithm is for a hit counter. I am trying not to count the same person twice if that person came to the site twice within a time interval (for example, if he comes twice in 5 minutes, I want to count it as 1 for this person). Here is how my table looks:

        UserIp     UserId         Date user came
        127.0.0.1  new.user.akb   26.03.2010 10:15:44
        127.0.0.1  new.user.akb   26.03.2010 10:16:44
        127.0.0.1  new.user.akb   26.03.2010 10:17:44
        127.0.0.1  new.user.akb   26.03.2010 10:18:44
        127.0.0.1  new.user.akb   26.03.2010 10:19:44
        127.0.0.1  new.user.akb   26.03.2010 10:20:44
        127.0.0.1  new.user.akb   26.03.2010 10:21:44
        127.0.0.1  new.user.akb   26.03.2010 10:22:44
        127.0.0.1  new.user.akb   26.03.2010 10:23:44

    What I need is the number of distinct UserIps from the table above that occurred within a time interval. For example, if I set the interval to 5 minutes starting at 26.03.2010 10:15:44, then I will get 2 as the result: 1 distinct value between 10:15 and 10:20, and 1 distinct value from 10:20 to 10:23. If my interval is 3 minutes, then the result will be 3. Thanks.
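    A set-based sketch (T-SQL flavor; the HitLog/VisitDate names are my guesses, and DATEDIFF would need translating for other databases): bucket each visit by interval, then count each IP at most once per bucket.

        DECLARE @start datetime = '2010-03-26T10:15:44';
        DECLARE @intervalMinutes int = 5;

        SELECT COUNT(*) AS Hits
        FROM (
            -- one row per (ip, bucket): an IP is counted once per interval
            SELECT DISTINCT UserIp,
                   DATEDIFF(minute, @start, VisitDate) / @intervalMinutes AS Bucket
            FROM HitLog
            WHERE VisitDate >= @start
        ) AS PerBucket;

    With the sample data this returns 2 for a 5-minute interval and 3 for a 3-minute one, matching the expected results above.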

    Read the article

  • Row/column counter in 'apply' functions

    - by mortalitysequence
    What if one wants to apply a function to each row of a matrix, but also wants to use the number of that row as an argument to the function? As an example, suppose you wanted to get the n-th root of the numbers in each row of a matrix, where n is the row number. Is there another way (using apply only) than column-binding the row numbers to the initial matrix, like this?

        test <- data.frame(x = c(26, 21, 20), y = c(34, 29, 28))
        t(apply(cbind(as.numeric(rownames(test)), test), 1, function(x) x[2:3]^(1/x[1])))

    P.S. Actually, if test were really a matrix, test <- matrix(c(26,21,20,34,29,28), nrow=3), then rownames(test) doesn't help :( Thank you.
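    A sketch that sidesteps rownames entirely: iterate over the row indices instead of the rows, so the row number is available directly, and it works for plain matrices too.

        test <- data.frame(x = c(26, 21, 20), y = c(34, 29, 28))
        # seq_len(nrow(test)) supplies the row number i; unlist() turns a
        # one-row data.frame into a numeric vector (unnecessary for a matrix).
        t(sapply(seq_len(nrow(test)), function(i) unlist(test[i, ])^(1 / i)))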

    Read the article

  • C# character counter when writing to new line

    - by Mike
    Basically I'm trying to read a really big text file, and when the characters of the line reach X amount, write to a new line, but I can't seem to get the character count to work. Any help is appreciated!

        using (FileStream fs = new FileStream(betaFilePath, FileMode.Open))
        using (StreamReader rdr = new StreamReader(fs))
        {
            while (!rdr.EndOfStream)
            {
                string betaFileLine = rdr.ReadLine();
                int stringline = 0;
                if (betaFileLine.Contains("þTEMP"))
                {
                    //sb.AppendLine(@"C:\chawkster\workfiles\New Folder\GEL_ALL_PRODUCTS_CONCORD2.DAT");
                    string checkline = betaFileLine.Length.ToString();
                    foreach (string cl in checkline)
                    {
                        stringline++;
                        File.AppendAllText(@"C:\chawkster\workfiles\New Folder\GEL_ALL_PRODUCTS_CONCORD3.DAT", cl);
                        if (stringline == 1200)
                        {
                            File.AppendAllText(@"C:\chawkster\workfiles\New Folder\GEL_ALL_PRODUCTS_CONCORD3.DAT", "\n");
                            stringline = 0;
                        }
                    }
                }
            }
        }

    The compiler stops at foreach (string cl in checkline) with: Error 1 Cannot convert type 'char' to 'string'
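    Two things are going wrong: iterating a string yields char, not string, and checkline holds the line's length as text rather than the line itself. A sketch of a fix for the inner block (paths from the post; the writes are buffered, since per-character File.AppendAllText reopens the file every time):

        const int wrapAt = 1200;
        string outPath = @"C:\chawkster\workfiles\New Folder\GEL_ALL_PRODUCTS_CONCORD3.DAT";
        using (StreamWriter writer = new StreamWriter(outPath, true)) // append mode
        {
            int col = 0;
            foreach (char cl in betaFileLine) // char, not string
            {
                writer.Write(cl);
                if (++col == wrapAt)
                {
                    writer.WriteLine(); // break the line every 1200 characters
                    col = 0;
                }
            }
        }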

    Read the article

  • Word Counter Implementation

    - by kenny
    Is there a better way than the following brute-force implementation of a C# word counting class? UPDATED CODE: Sorry!

        /// <summary>
        /// A word counting class.
        /// </summary>
        public class WordCounter
        {
            Dictionary<string, int> dictTest = new Dictionary<string, int>();

            /// <summary>
            /// Enters a word and returns the current number of times that word was found.
            /// </summary>
            /// <param name="word">The word or string found.</param>
            /// <returns>Count of times Found() was called with provided word.</returns>
            public int Found(string word)
            {
                int count = 1;
                return dictTest.TryGetValue(word, out count) ? ++dictTest[word] : dictTest[word] = 1;
            }
        }
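    A slightly plainer variant that avoids the extra lookup on the hit path (a sketch; same class shape as the post):

        public int Found(string word)
        {
            int count;
            dictTest.TryGetValue(word, out count); // leaves count at 0 when absent
            dictTest[word] = count + 1;
            return count + 1;
        }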

    Read the article

  • mysql row counter

    - by David19801
    Hello, I have a MySQL table. It has auto-increment on the id, but I regularly delete rows, so the numbers are all over the place. I need to get the last n rows out, but because of deletions the common way of using the max of the auto-incremented id column doesn't work well...
    1 - Is there another way to get the bottom 50?
    2 - Is there a way to get rows by actual row number? So if I have 4 rows labelled 1,2,3,4 and delete row 2, it will become 1,2,3 rather than 1,3,4?
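    Two sketches (my_table and id are placeholder names):

        -- (1) "Bottom 50" by insertion order; gaps in id don't matter,
        -- only the ordering does.
        SELECT * FROM my_table ORDER BY id DESC LIMIT 50;

        -- (2) A gap-free row number computed at query time
        -- (the classic MySQL session-variable idiom).
        SET @rownum := 0;
        SELECT (@rownum := @rownum + 1) AS row_num, t.*
        FROM my_table t
        ORDER BY id;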

    Read the article

  • How can a Perfmon "% Processor Time" counter be over 100%?

    - by Bill Paetzke
    The counter, Process: % Processor Time (sqlservr), is hovering around 300% on one of my database servers. This counter reflects the percent of total time SQL Server spent running on CPU (user mode + privilege mode). The book, SQL Server 2008 Internals and Troubleshooting, says that anything greater than 80% is a problem. How is it possible for that counter to be over 100%?
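    For scale (a sketch, not from the post): the Process counter sums busy time across all logical processors, so its ceiling is 100 times the processor count, and readings are often normalized like this:

        // Illustrative only; 300.0 stands in for the Perfmon reading.
        double raw = 300.0;
        double normalized = raw / Environment.ProcessorCount;
        Console.WriteLine("{0:F1}% of total CPU capacity", normalized);

    On a 4-core server, for example, 300% works out to 75% of total capacity.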

    Read the article

  • Why is the LZMA SDK (7-zip) so slow?

    - by Tono Nam
    I find 7-Zip great and I would like to use it in .NET applications. I have a 10MB file (a.001) and it takes 2 seconds to encode. Now it would be nice if I could do the same thing in C#. I have downloaded the LZMA SDK C# source code from http://www.7-zip.org/sdk.html. I basically copied the CS directory into a console application in Visual Studio, compiled, and everything compiled smoothly. In the output directory I placed the file a.001, which is 10MB in size. In the Main method that came with the source code I placed:

        [STAThread]
        static int Main(string[] args)
        {
            // e stands for encode
            args = "e a.001 output.7z".Split(' '); // added this line for debug
            try
            {
                return Main2(args);
            }
            catch (Exception e)
            {
                Console.WriteLine("{0} Caught exception #1.", e);
                // throw e;
                return 1;
            }
        }

    When I execute the console application it works and I get output.7z in the working directory. The problem is that it takes so long: about 15 seconds to execute! I have also tried the approach from http://stackoverflow.com/a/8775927/637142 and it also takes very long. Why is it 10 times slower than the actual program? Also, even if I set 7-Zip to use only one thread, it still takes much less time (3 seconds vs. 15).

    (Edit) Another possibility: could it be because C# is slower than assembly or C? I notice that the algorithm does a lot of heavy operations. For example, compare these two blocks of code; they both do the same thing.

    C:

        void main()
        {
            time_t now;
            int i, j, k, x;
            long counter;
            counter = 0;
            now = time(NULL);
            /* LOOP */
            for (x = 0; x < 10; x++)
            {
                counter = -1234567890 + x + 2;
                for (j = 0; j < 10000; j++)
                    for (i = 0; i < 1000; i++)
                        for (k = 0; k < 1000; k++)
                        {
                            if (counter > 10000)
                                counter = counter - 9999;
                            else
                                counter = counter + 1;
                        }
                printf(" %d \n", time(NULL) - now);    /* display elapsed time */
            }
            printf("counter = %d\n\n", counter);       /* display result of counter */
            printf("Elapsed time = %d seconds ", time(NULL) - now);
            gets("Wait");
        }

    C#:

        static void Main(string[] args)
        {
            DateTime now;
            int i, j, k, x;
            long counter;
            counter = 0;
            now = DateTime.Now;
            /* LOOP */
            for (x = 0; x < 10; x++)
            {
                counter = -1234567890 + x + 2;
                for (j = 0; j < 10000; j++)
                    for (i = 0; i < 1000; i++)
                        for (k = 0; k < 1000; k++)
                        {
                            if (counter > 10000)
                                counter = counter - 9999;
                            else
                                counter = counter + 1;
                        }
                Console.WriteLine((DateTime.Now - now).Seconds.ToString());
            }
            Console.Write("counter = {0} \n", counter.ToString());
            Console.Write("Elapsed time = {0} seconds", DateTime.Now - now);
            Console.Read();
        }

    Note how much slower C# was. Both programs were run outside Visual Studio in release mode. Maybe that is the reason why it takes so much longer in .NET than in C++.

    Conclusion: I cannot tell what is causing the problem. I guess I will use 7z.dll and invoke the necessary methods from C#. A library that does that is at http://sevenzipsharp.codeplex.com/, and that way I am using the same library that 7-Zip uses:

        // don't forget to add a reference to SevenZipSharp, located at the link above
        static void Main(string[] args)
        {
            // load the dll
            SevenZip.SevenZipCompressor.SetLibraryPath(@"C:\Program Files (x86)\7-Zip\7z.dll");
            SevenZip.SevenZipCompressor compress = new SevenZip.SevenZipCompressor();
            compress.CompressDirectory("MyFolderToArchive", "output.7z");
        }

    Read the article

  • Change Data Capture Webinar

    I am going to be doing a webinar with our friends at Attunity on Change Data Capture. Attunity have a good story around this technology and you can use it in your SSIS loads to great effect.

    Join Attunity and Konesans/SQLIS for a Webinar on 17 September. Space is limited. Reserve your Webinar seat now at: https://www1.gotomeeting.com/register/693735512

    Want increased efficiency and real-time speed when conducting ETL loads? Need lower implementation costs while minimizing system impact? Learn how change data capture (CDC) technologies can reduce ETL load times. Allan Mitchell, Principal Consultant at Konesans and SQL Server MVP specialising in ETL, will explain CDC concepts and benefits and how CDC can dramatically reduce ETL load times. Ian Archibald, Pre-Sales Director EMEA for Attunity, will present and demonstrate Attunity's award-winning Oracle-CDC for SSIS, a fully-integrated SSIS solution for designing, deploying and managing Oracle CDC processes.

    Title: Change Data Capture - Reducing ETL Load Times
    Date: Thursday, September 17, 2009
    Time: 10:00 AM - 11:00 AM BST

    ABOUT THE SPEAKERS: Allan Mitchell is the joint owner of Konesans Ltd, a UK-based consultancy specializing in SQL Server, and most importantly SQL Server Integration Services. Having worked with SQL Server from 6.5 onwards, he has extensive experience in many aspects of SQL Server, but now focuses on the BI suite of tools. He is a SQL Server MVP, a frequent poster on the MS SSIS/DTS newsgroups, and runs the sqldts.com and sqlis.com resource sites. Ian Archibald, Attunity Pre-Sales Director EMEA, has worked in Attunity's UK office for 17 years. An expert in Attunity solutions, Ian has extensive knowledge of Attunity's products and data integration & CDC technologies.

    After registering you will receive a confirmation email containing information about joining the Webinar.

    System Requirements:
    PC-based attendees: Windows® 2000, XP Home, XP Pro, 2003 Server, Vista
    Macintosh®-based attendees: Mac OS® X 10.4 (Tiger®) or newer

    Read the article

  • Issuing Current Time Increments in StreamInsight (A Practical Example)

    The issuing of a Current Time Increment, Cti, in StreamInsight is very definitely one of the most important concepts to learn if you want your streams to be responsive. A full discussion of how to issue Ctis is beyond the scope of this article, but a very good explanation, in addition to Books Online, can be found in these three articles by a member of the StreamInsight team at Microsoft, Ciprian Gerea.

    Time in StreamInsight Series:
    http://blogs.msdn.com/b/streaminsight/archive/2010/07/23/time-in-streaminsight-i.aspx
    http://blogs.msdn.com/b/streaminsight/archive/2010/07/30/time-in-streaminsight-ii.aspx
    http://blogs.msdn.com/b/streaminsight/archive/2010/08/03/time-in-streaminsight-iii.aspx

    A lot of the problems I see with unresponsive or stuck streams on the MSDN forums are to do with how Ctis are enqueued, or in a lot of cases not enqueued. If you enqueue events and never enqueue a Cti, StreamInsight will be perfectly happy. You, on the other hand, will never see data on the output, as you have not told StreamInsight to flush the stream.

    This article deals with a specific implementation problem I had recently whilst working on a StreamInsight project. I look at some possible options and discuss why they would not work before showing the way I solved the problem.

    The stream of data I was dealing with on this project was very bursty; that is to say, when events were flowing they came through very quickly and in large numbers (1000 events/sec), but when the stream calmed down it could be a few seconds between each event.

    When enqueuing events into the StreamInsight engine it is best practice to do so with a StartTime that is given to you by the system producing the event. StreamInsight processes events, and it doesn't matter whether those events are being pushed into the engine by a source system or are being read from something like a flat file in a directory somewhere. You can apply the same logic and temporal algebra to both situations. Reading from a file is an excellent example of where the time of the event on the source itself is very important: we could be reading that file a long time after it was written. Being able to read the StartTime from the events allows us to define windows that will hold the correct sets of events. I was able to do this with my stream, but this is where my problems started. Below is a very simple script to create a SQL Server table and populate it with sample data that shows exactly the problem I had:

        CREATE TABLE [dbo].[t]
        (
            [c1] [int] PRIMARY KEY,
            [c2] [datetime] NULL
        )

        INSERT t VALUES (1,'20100810'),(2,'20100810'),(3,'20100810')

    Column c2 defines the StartTime of the event on the source, and as you can see the value in all 3 rows of data is the same. If we read Ciprian's articles we know that we can define how Ctis get injected into the stream in 3 different places:

    - The stream definition
    - The input factory
    - The input adapter

    I personally have always been a fan of enqueuing Ctis through the factory. Below is code typical of what I would use to do this. On the class itself I do some inheriting:

        public class SimpleInputFactory :
            ITypedInputAdapterFactory<SimpleInputConfig>,
            ITypedDeclareAdvanceTimeProperties<SimpleInputConfig>

    And then I implement the following function:

        public AdapterAdvanceTimeSettings DeclareAdvanceTimeProperties<TPayload>(SimpleInputConfig configInfo, EventShape eventShape)
        {
            return new AdapterAdvanceTimeSettings(
                new AdvanceTimeGenerationSettings(configInfo.CtiFrequency, TimeSpan.FromTicks(-1)),
                AdvanceTimePolicy.Adjust);
        }

    The configInfo.CtiFrequency property is a value I pass through to define after how many events I want a Cti to be injected, which in turn will flush the stream of data. I usually pass a value of 1 for this setting. The second parameter determines the Cti timestamp in terms of a delay relative to the events: -1 ticks in the past results in 1 tick in the future, i.e., ahead of the event.

    The problem with this method, though, is that if consecutive events have the same StartTime then only one of those events will be enqueued. In this example I use the following to assign the StartTime of my events:

        currEvent.StartTime = (DateTimeOffset)dt.c2;

    If I go ahead and run my StreamInsight process with this configuration I can see on the output adapter that two events have been removed. To see this in a little more depth I can use the StreamInsight Debugger and see what happens internally. What is happening here is that the first event arrives and a Cti is injected with a time of 1 tick after the StartTime of that event (also the EndTime of the event). The second event arrives with a StartTime before the Cti, and even though we specified AdvanceTimePolicy.Adjust on the factory, we know that a point event can never be adjusted like this, so the event is dropped. The same happens for the third event as well (the second and third events get trumped by the Cti). For a more detailed discussion of why this happens, look here: http://www.sqlis.com/sqlis/post/AdvanceTimePolicy-and-Point-Event-Streams-In-StreamInsight.aspx

    We end up with a single event being pushed into the output adapter, and our result now makes sense. The next way I tried to solve this problem was by changing the value of the second parameter to TimeSpan.Zero. Here is how my factory code now looks:

        public AdapterAdvanceTimeSettings DeclareAdvanceTimeProperties<TPayload>(SimpleInputConfig configInfo, EventShape eventShape)
        {
            return new AdapterAdvanceTimeSettings(
                new AdvanceTimeGenerationSettings(configInfo.CtiFrequency, TimeSpan.Zero),
                AdvanceTimePolicy.Adjust);
        }

    What I am doing here is declaring a policy that says: inject a Cti together with every event and stamp it with a StartTime that is equal to the StartTime of the event itself (TimeSpan.Zero). This method has plus points as well as a downside. The upside is that no events will be lost by having the same StartTime as previous events. The downside is that because the Cti is declared with the StartTime of the event itself, it does not actually flush that particular event, because in the StreamInsight algebra a Cti commits only those events that occurred strictly before it. To flush the events we need a Cti to be enqueued with a greater StartTime than the events themselves. Here is what happened when I ran this configuration: all we got through was the Cti and none of the events. The debugger output shows the stamps on the Cti and the events themselves. Because the Cti issued has the same timestamp (StartTime) as the events, none of the events get flushed.

    I was nearly there but not quite. Because my stream was bursty it was possible that the next event would not come along for a few seconds, and this was far too long for an event to be enqueued and not be flushed to the output adapter. I needed another solution. Two possible solutions crossed my mind, although only one of them made sense when I explored it some more:

    - Where multiple events have the same StartTime, add 1 tick to the first event, two to the second, three to the third, etc., thereby giving them unique StartTime values.
    - Add a timer to manually inject Ctis.

    The problem with the first implementation is that I would be giving the events a new StartTime. This would cause me the following problems:

    - If I want to define windows over the stream, some events may not be captured in the right windows, and therefore any calculations I did on those windows would be wrong.
    - What would happen if we had 10,000 events with the same StartTime? I would enqueue them with StartTime + n ticks. Along comes a genuine event with a StartTime of the very first event + 1 tick. It is now too far in the past as far as my stream is concerned, and it would be dropped. Not what I would want to do at all.

    I decided then to look at the timer-based solution. I created a timer on my input adapter that elapses every 200ms:

        private Timer tmr;

        public SimpleInputAdapter(SimpleInputConfig configInfo)
        {
            ctx = new SimpleTimeExtractDataContext(configInfo.ConnectionString);
            this.configInfo = configInfo;
            tmr = new Timer(200);
            tmr.Elapsed += new ElapsedEventHandler(t_Elapsed);
            tmr.Enabled = true;
        }

        void t_Elapsed(object sender, ElapsedEventArgs e)
        {
            ts = DateTime.Now - dtCtiIssued;
            if (ts.TotalMilliseconds >= 200 && TimerIssuedCti == false)
            {
                EnqueueCtiEvent(System.DateTime.Now.AddTicks(-100));
                TimerIssuedCti = true;
            }
        }

    In the t_Elapsed event handler I find out the difference in time between now and when the last event was processed (dtCtiIssued). I then check to see whether that is greater than or equal to 200ms and whether the last issuing of a Cti was done by the timer or by a genuine event (TimerIssuedCti). If I didn't do this check then I would enqueue a Cti every time the timer elapsed, which is not something I wanted. If the difference between the two times is greater than or equal to 200ms and the last event enqueued was a real event, then I issue a Cti through the timer to flush the event queue; otherwise I do nothing. When I enqueue the Ctis into my stream in my ProduceEvents method I also set the values of dtCtiIssued and TimerIssuedCti:

        currEvent = CreateInsertEvent();
        currEvent.StartTime = (DateTimeOffset)dt.c2;
        TimerIssuedCti = false;
        dtCtiIssued = currEvent.StartTime;

    If I go ahead and run this configuration, the first Cti gets enqueued as before, but then another is enqueued by the timer, and because this has a later timestamp it flushes the enqueued events through the engine.

    Conclusion: Hopefully this has shown how the enqueuing of Ctis can have a dramatic effect on the responsiveness of your output in StreamInsight. Understanding the temporal nature of the product is for me one of the most important things you can learn. I have attached my solution for the demos. It is all in one project, and testing each variation is a simple matter of commenting and un-commenting the parts in the code we have been dealing with here.

    Read the article

  • StreamInsight and Reactive Framework Challenge

    In his blog post, Roman from the StreamInsight team asked if we could create a Reactive Framework version of what he had done in the post using StreamInsight. For those who don't know, the Reactive Framework, or Rx to its friends, is a library for composing asynchronous and event-based programs using observable collections in the .NET Framework. Yes, there is some overlap between StreamInsight and the Reactive Extensions, but StreamInsight has more flexibility and power in its temporal algebra (windowing, alteration of event headers). Well, here are two alternate ways of doing what Roman did. The first example is a mix of StreamInsight and Rx:

        var rnd = new Random();
        var RandomValue = 0;
        var interval = Observable.Interval(TimeSpan.FromMilliseconds((Int32)rnd.Next(500, 3000)))
            .Select(i =>
            {
                RandomValue = rnd.Next(300);
                return RandomValue;
            });

        Server s = Server.Create("Default");
        Microsoft.ComplexEventProcessing.Application a = s.CreateApplication("Rx SI Mischung");

        var inputStream = interval.ToPointStream(
            a,
            evt => PointEvent.CreateInsert(
                System.DateTime.Now.ToLocalTime(),
                new { RandomValue = evt }),
            AdvanceTimeSettings.IncreasingStartTime,
            "Rx Sample");

        var r = from evt in inputStream
                select new { runningVal = evt.RandomValue };

        foreach (var x in r.ToPointEnumerable().Where(e => e.EventKind != EventKind.Cti))
        {
            Console.WriteLine(x.Payload.ToString());
        }

    This next version, though, uses the Reactive Extensions only:

        var rnd = new Random();
        var RandomValue = 0;
        Observable.Interval(TimeSpan.FromMilliseconds((Int32)rnd.Next(500, 3000)))
            .Select(i =>
            {
                RandomValue = rnd.Next(300);
                return RandomValue;
            })
            .Subscribe(Console.WriteLine, () => Console.WriteLine("Completed"));
        Console.ReadKey();

    These are very simple examples, but both technologies allow us to do a lot more. The ICEPObservable() design pattern was reintroduced in StreamInsight 1.1, and the more I use it the more I like it. It is a very useful pattern when wanting to show StreamInsight samples, as is the IEnumerable() pattern.

    Read the article

  • ... i just avoid GUID

    - by Tomaz.tsql
    Our partner was explaining to me that they are using GUIDs as primary keys on all their tables. My immediate reaction was: why? A couple of basic doubts were:
    - even when I can read a uniqueidentifier, it tells me absolutely nothing
    - if I use my relational table, I will surely use other columns to get the information out
    - SQL is terrible when setting up a clustered index on GUID columns (and hence performance problems)
    - why not use INT? It will save you space on disk, and the optimizer will be able...(read more)

    Read the article

  • Webcast: 12.2.4 Advanced Planning Command Center Enhancements

    - by ChristineS-Oracle
    Webcast: 12.2.4 Advanced Planning Command Center Enhancements
    Date: June 12, 2014 at 11:00 am ET, 10:00 am CT, 9:00 am MT, 8:00 am PT, 8:30 pm India Time (Mumbai, GMT+05:30)
    This advisor webcast helps functional users and IT analysts understand the new features introduced in Advanced Planning Command Center (APCC) as part of the 12.2.4 release. These include custom hierarchies, custom measures, and additional measures like projected on hand. Other new features include new reports like Build Plan and Order Details, new integration capabilities between APCC and DRP, and support for Trade Planning in APCC. Topics will include:
    - New Feature Introduction
    - Feature Overview and Setup Steps
    - Implementation Tips & Best Practices
    Details & Registration: Doc ID 1670447.1

    Read the article

  • ARC write-up on the OTM SIG

    - by John Murphy
    ARC write-up on the recent OTM SIG event. The Oracle Transportation Management Special Interest Group (OTM SIG) hosted its 6th annual user conference in Philadelphia, Pennsylvania, August 13-15, 2012. This independently run conference drew almost 400 attendees, predominantly Oracle Transportation Management (OTM) users. It featured four concurrent tracks that included both functionally and technically focused presentations. The tracks included a number of informative presentations by OTM users from various industries. These discussed the users' implementations, current usage, and future plans for OTM within their organizations. ARC Advisory Group found ConAgra's and Mutual Materials' presentations on OTM adoption and Kraft's presentation on the company's use of Fusion Transportation Intelligence particularly informative. Complete ARC write-up

    Read the article

  • SQL Sentry Plan Explorer : Version 1.1!

    - by AaronBertrand
    Last week, Microsoft offered up an early Christmas present: SQL Server 2005 SP4. This week, it's SQL Sentry's turn to play Santa Claus: several new features and fixes have been packaged up into SQL Sentry Plan Explorer 1.1 (build 6.0.67.0). So, what's new? Several wish list items have been fulfilled (hey, it is Christmas, after all). You can see the full change list here; but I'll talk briefly about a few of my favorites. Parallel distribution: The Plan Tree tab for a parallel operator now shows...(read more)

    Read the article

  • DCOGS Balance Breakup Diagnostic in OPM Financials

    - by ChristineS-Oracle
    The purpose of this diagnostic (OPMDCOGSDiag.sql) is to identify the sales orders which constitute the Deferred COGS account balance. This will help to get the detailed transaction information for sales order(s) across Order Management, Accounts Receivable, Inventory, and the OPM Financials subledger at the organization level. The script is applicable to various scenarios of standard sales orders and return orders (RMA), coupled with all the applicable OPM costing methods: Standard, Actual, and Lot costing.
    OBJECTIVE: To collect the information for sales order(s) at different stages of their life cycle in one spreadsheet at one go. This will help in:
    - Less time for data collection.
    - Faster diagnosis of the issue.
    - Easy collaboration across different modules like Order Management, Accounts Receivable, Inventory and Cost Management.
    You can download the script from Doc ID 1617599.1, DCOGS Balance Breakup (SO/RMA) and Diagnostic Analyzer in OPM Financials.

    Read the article

  • Webcast: Introduction To Causal Factors

    - by ChristineS-Oracle
    Webcast: Introduction To Causal Factors
    Date: June 11, 2014 at 11:00 am ET, 10:00 am CT, 9:00 am MT, 8:00 am PT, 8:30 pm India Time (Mumbai, GMT+05:30)
    This one-hour advisor webcast will provide an introduction to causal factors for Demand Management and AFDM. Pre-seeded causal factors will be discussed, as well as when they are not appropriate. Scenarios for when to add causal factors will be covered, along with best-practice methods of adding and using them. Topics will include:
    - Causal factors in DM and AFDM
    - Pre-seeded causal factors
    - When to modify causal factor settings
    - Best practice when working with causal factors
    Details & Registration: Doc ID 1664606.1

    Read the article

  • New Procurement Report for Transportation Sourcing

    - by John Murphy
    Welcome to our fourth annual transportation procurement benchmark report. American Shipper, in partnership with the Council of Supply Chain Management Professionals (CSCMP) and the Retail Industry Leaders Association (RILA), surveyed roughly 275 transportation buyers and sellers on procurement practices, processes, technologies and results. Some key findings:
    • Manual, spreadsheet-based procurement processes remain the most prevalent among transportation buyers, with 42 percent of the total
    • Another 25 percent of respondents use a hybrid platform, which presumably means these buyers are using spreadsheets for at least one mode and/or geography
    • Only 23 percent of buyers are using a completely systems-based approach of some kind
    • Shippers were in a holding pattern with regards to investment in procurement systems the past year
    • Roughly three-quarters of survey respondents report that transportation spend has increased in 2012, although the pace has declined slightly from last year's increases
    • Nearly every survey respondent purchases multiple modes of transportation
    • The number of respondents with plans to address technology to support the procurement process has increased in 2012. About one quarter of respondents who do not have a system report they have a budget for this investment in the next two years.

    Read the article
